package wmo
import (
"context"
"go.mongodb.org/mongo-driver/bson/bsontype"
"go.mongodb.org/mongo-driver/mongo"
ct "gogs.mikescher.com/BlackForestBytes/goext/cursortoken"
"gogs.mikescher.com/BlackForestBytes/goext/exerr"
"gogs.mikescher.com/BlackForestBytes/goext/langext"
"reflect"
)
type EntityID interface {
	MarshalBSONValue() (bsontype.Type, []byte, error)
	String() string
}
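
// A minimal sketch of an EntityID implementation backed by a plain string.
// UserID is a hypothetical example type; bson.MarshalValue is provided by the
// "go.mongodb.org/mongo-driver/bson" package, which is not imported in this file.
//
//	type UserID string
//
//	func (i UserID) MarshalBSONValue() (bsontype.Type, []byte, error) {
//		return bson.MarshalValue(string(i))
//	}
//
//	func (i UserID) String() string {
//		return string(i)
//	}
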
type Decodable interface {
	Decode(v any) error
}

type Cursorable interface {
	Decode(v any) error
	Err() error
	Close(ctx context.Context) error
	All(ctx context.Context, results any) error
	RemainingBatchLength() int
	Next(ctx context.Context) bool
}
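
// Note: *mongo.Cursor from the mongo-driver satisfies Cursorable (and therefore
// also Decodable), and *mongo.SingleResult satisfies Decodable, so either can be
// handed to a custom decode function.
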
type fullTypeRef struct {
	IsPointer      bool
	Kind           reflect.Kind
	RealType       reflect.Type
	Type           reflect.Type
	UnderlyingType reflect.Type
	Name           string
	Index          []int
}

type IColl interface {
	Collection() *mongo.Collection
	Name() string
	Indexes() mongo.IndexView
	Drop(ctx context.Context) error
}

type Coll[TData any] struct {
	coll                *mongo.Collection                                         // internal mongo collection, access via Collection()
	dataTypeMap         map[string]fullTypeRef                                    // list of TData fields (only if TData is not an interface)
	implDataTypeMap     map[reflect.Type]map[string]fullTypeRef                   // dynamic list of fields of TData implementations (only if TData is an interface)
	customDecoder       *func(ctx context.Context, dec Decodable) (TData, error)  // custom decoding function (useful if TData is an interface)
	isInterfaceDataType bool                                                      // true if TData is an interface (not a struct)

	unmarshalHooks []func(d TData) TData // called for every object after unmarshalling

	marshalHooks []func(d TData) TData // called for every object before marshalling

	extraModPipeline []func(ctx context.Context) mongo.Pipeline // appended to pipelines after filter/limit/skip/sort, used for $lookup, $set, $unset, $project, etc.
}

func (c *Coll[TData]) Collection() *mongo.Collection {
	return c.coll
}

func (c *Coll[TData]) Name() string {
	return c.coll.Name()
}

func (c *Coll[TData]) Indexes() mongo.IndexView {
	return c.coll.Indexes()
}

func (c *Coll[TData]) Drop(ctx context.Context) error {
	err := c.coll.Drop(ctx)
	if err != nil {
		return exerr.Wrap(err, "failed to drop collection").Str("collection", c.Name()).Build()
	}
	return nil
}

// WithDecodeFunc sets a custom decoding function (useful when TData is an interface).
func (c *Coll[TData]) WithDecodeFunc(cdf func(ctx context.Context, dec Decodable) (TData, error), example TData) *Coll[TData] {
	c.EnsureInitializedReflection(example)
	c.customDecoder = langext.Ptr(cdf)
	return c
}
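
// A hedged usage sketch for an interface-typed TData. Animal, Dog, decodeAnimal
// and the "type" discriminator field are hypothetical; bson.M comes from the
// "go.mongodb.org/mongo-driver/bson" package.
//
//	coll = coll.WithDecodeFunc(func(ctx context.Context, dec Decodable) (Animal, error) {
//		var raw bson.M
//		if err := dec.Decode(&raw); err != nil {
//			return nil, err
//		}
//		// inspect raw["type"] and unmarshal into the matching concrete struct
//		return decodeAnimal(raw)
//	}, Dog{})
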
// WithUnmarshalHook registers a function that is called for every object after reading it from the DB.
func (c *Coll[TData]) WithUnmarshalHook(fn func(d TData) TData) *Coll[TData] {
	c.unmarshalHooks = append(c.unmarshalHooks, fn)
	return c
}

// WithMarshalHook registers a function that is called for every object before writing it to the DB.
func (c *Coll[TData]) WithMarshalHook(fn func(d TData) TData) *Coll[TData] {
	c.marshalHooks = append(c.marshalHooks, fn)
	return c
}
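
// A hedged usage sketch for chaining hooks. MyEntity is a hypothetical struct,
// and this assumes a collection constructor such as W[TData] defined elsewhere
// in this package.
//
//	users := W[MyEntity](db.Collection("users")).
//		WithUnmarshalHook(func(d MyEntity) MyEntity { d.LoadedAt = time.Now(); return d }).
//		WithMarshalHook(func(d MyEntity) MyEntity { d.UpdatedAt = time.Now(); return d })
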
// WithModifyingPipeline appends a pipeline to all read operations (after filtering).
func (c *Coll[TData]) WithModifyingPipeline(p mongo.Pipeline) *Coll[TData] {
	c.extraModPipeline = append(c.extraModPipeline, func(ctx context.Context) mongo.Pipeline { return p })
	return c
}

// WithModifyingPipelineFunc appends a dynamically built pipeline to all read operations (after filtering).
func (c *Coll[TData]) WithModifyingPipelineFunc(fn func(ctx context.Context) mongo.Pipeline) *Coll[TData] {
	c.extraModPipeline = append(c.extraModPipeline, fn)
	return c
}
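
// A hedged usage sketch adding an extra $lookup stage. The "groups" collection
// and field names are hypothetical; bson.D / bson.M come from the
// "go.mongodb.org/mongo-driver/bson" package.
//
//	coll = coll.WithModifyingPipeline(mongo.Pipeline{
//		bson.D{{Key: "$lookup", Value: bson.M{
//			"from":         "groups",
//			"localField":   "groupId",
//			"foreignField": "_id",
//			"as":           "groups",
//		}}},
//	})
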
// createToken builds a pagination cursor-token from the last entity of the current page.
func (c *Coll[TData]) createToken(fieldPrimary string, dirPrimary ct.SortDirection, fieldSecondary *string, dirSecondary *ct.SortDirection, lastEntity TData, pageSize *int) (ct.CursorToken, error) {
	valuePrimary, err := c.getFieldValueAsTokenString(lastEntity, fieldPrimary)
	if err != nil {
		return ct.CursorToken{}, exerr.Wrap(err, "failed to get (primary) field-value as token-string").Type("lastEntity", lastEntity).Str("fieldPrimary", fieldPrimary).Build()
	}

	valueSecondary := ""
	if fieldSecondary != nil && dirSecondary != nil {
		valueSecondary, err = c.getFieldValueAsTokenString(lastEntity, *fieldSecondary)
		if err != nil {
			return ct.CursorToken{}, exerr.Wrap(err, "failed to get (secondary) field-value as token-string").Type("lastEntity", lastEntity).StrPtr("fieldSecondary", fieldSecondary).Build()
		}
	}

	return ct.CursorToken{
		Mode:           ct.CTMNormal,
		ValuePrimary:   valuePrimary,
		ValueSecondary: valueSecondary,
		Direction:      dirPrimary,
		PageSize:       langext.Coalesce(pageSize, 0),
		Extra:          ct.Extra{},
	}, nil
}

func (c *Coll[TData]) needsDoubleSort(ctx context.Context) bool {
	for _, ppl := range c.extraModPipeline {
		for _, stage := range ppl(ctx) {
			for _, bsone := range stage {
				if bsone.Key == "$group" {
					// a group stage in extraModPipeline results in unsorted data, which means the caller must sort again after these pipeline stages...
					return true
				}
			}
		}
	}
	return false
}