From c338d230700668591d430358c55809e5c61e69a8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Mike=20Schw=C3=B6rer?= Date: Tue, 6 Jun 2023 21:18:40 +0200 Subject: [PATCH] v0.0.133 --- README.md | 34 ++++- TODO.md | 13 ++ cursortoken/direction.go | 8 ++ cursortoken/filter.go | 10 ++ cursortoken/token.go | 184 ++++++++++++++++++++++++ go.mod | 16 ++- go.sum | 27 +++- langext/reflection.go | 27 +++- rfctime/interface_test.go | 1 + wmo/mongo.go | 11 ++ wmo/pagination.go | 69 +++++++++ wmo/reflection.go | 294 ++++++++++++++++++++++++++++++++++++++ wmo/reflection_test.go | 160 +++++++++++++++++++++ wmo/wrapper.go | 231 ++++++++++++++++++++++++++++++ 14 files changed, 1076 insertions(+), 9 deletions(-) create mode 100644 TODO.md create mode 100644 cursortoken/direction.go create mode 100644 cursortoken/filter.go create mode 100644 cursortoken/token.go create mode 100644 wmo/mongo.go create mode 100644 wmo/pagination.go create mode 100644 wmo/reflection.go create mode 100644 wmo/reflection_test.go create mode 100644 wmo/wrapper.go diff --git a/README.md b/README.md index 586ab92..9ba0cbb 100644 --- a/README.md +++ b/README.md @@ -5,4 +5,36 @@ A collection of general & useful library methods This should not have any heavy dependencies (gin, mongo, etc) and add missing basic language features... -Potentially needs `export GOPRIVATE="gogs.mikescher.com"` \ No newline at end of file +Potentially needs `export GOPRIVATE="gogs.mikescher.com"` + + +### Packages: + +| Name | Maintainer | Description | +|-------------|------------|---------------------------------------------------------------------------------------------------------------| +| langext | Mike | General utility/helper functions (everything that's missing from the go standard library) | +| mathext | Mike | Utility/Helper functions for math | +| cryptext | Mike | Utility/Helper functions for encryption | +| syncext | Mike | Utility/Helper functions for multi-threading / mutex / channels | +| dataext | Mike | Various useful data structures | +| zipext | Mike | Utility for zip/gzip/tar etc | +| | | | +| mongoext | Mike | Utility/Helper functions for mongodb | +| cursortoken | Mike | MongoDB cursortoken implementation | +| | | | +| totpext | Mike | Implementation of TOTP (2-Factor-Auth) | +| termext | Mike | Utilities for terminals (mostly color output) | +| confext | Mike | Parses environment configuration into structs | +| cmdext | Mike | Runner for external commands/processes | +| | | | +| sq | Mike | Utility functions for sql based databases | +| tst | Mike | Utility functions for unit tests | +| | | | +| rfctime | Mike | Classes for time serialization, with different marshalling methods for mongo and json | +| gojson | Mike | Same interface for marshalling/unmarshalling as go/json, except with proper serialization of null arrays/maps | +| | | | +| bfcodegen | Mike | Various codegen tools (run via go generate) | +| | | | +| rext | Mike | Regex Wrapper, wraps regexp with a better interface | +| wmo | Mike | Mongo Wrapper, wraps mongodb with a better interface | +| | | | \ No newline at end of file diff --git a/TODO.md b/TODO.md new file mode 100644 index 0000000..399b5f7 --- /dev/null +++ b/TODO.md @@ -0,0 +1,13 @@ + + + - cronext + + - cursortoken + + - typed/generic mongo wrapper + + - error package + +- rfctime.DateOnly +- rfctime.HMSTimeOnly +- rfctime.NanoTimeOnly \ No newline at end of file diff --git a/cursortoken/direction.go b/cursortoken/direction.go new file mode 100644 index 0000000..9ed91bc --- /dev/null +++ b/cursortoken/direction.go @@
-0,0 +1,8 @@ +package cursortoken + +type SortDirection string //@enum:type + +const ( + SortASC SortDirection = "ASC" + SortDESC SortDirection = "DESC" +) diff --git a/cursortoken/filter.go b/cursortoken/filter.go new file mode 100644 index 0000000..d750fe1 --- /dev/null +++ b/cursortoken/filter.go @@ -0,0 +1,10 @@ +package cursortoken + +import ( + "go.mongodb.org/mongo-driver/mongo" +) + +type Filter interface { + FilterQuery() mongo.Pipeline + Pagination() (string, SortDirection, *string, *SortDirection) +} diff --git a/cursortoken/token.go b/cursortoken/token.go new file mode 100644 index 0000000..f3f95bb --- /dev/null +++ b/cursortoken/token.go @@ -0,0 +1,184 @@ +package cursortoken + +import ( + "encoding/base32" + "encoding/json" + "errors" + "go.mongodb.org/mongo-driver/bson/primitive" + "strings" + "time" +) + +type Mode string + +const ( + CTMStart Mode = "START" + CTMNormal Mode = "NORMAL" + CTMEnd Mode = "END" +) + +type Extra struct { + Timestamp *time.Time + Id *string + Page *int + PageSize *int +} + +type CursorToken struct { + Mode Mode + ValuePrimary string + ValueSecondary string + Direction SortDirection + DirectionSecondary SortDirection + PageSize int + Extra Extra +} + +type cursorTokenSerialize struct { + ValuePrimary *string `json:"v1,omitempty"` + ValueSecondary *string `json:"v2,omitempty"` + Direction *SortDirection `json:"dir,omitempty"` + DirectionSecondary *SortDirection `json:"dir2,omitempty"` + PageSize *int `json:"size,omitempty"` + + ExtraTimestamp *time.Time `json:"ts,omitempty"` + ExtraId *string `json:"id,omitempty"` + ExtraPage *int `json:"pg,omitempty"` + ExtraPageSize *int `json:"sz,omitempty"` +} + +func Start() CursorToken { + return CursorToken{ + Mode: CTMStart, + ValuePrimary: "", + ValueSecondary: "", + Direction: "", + DirectionSecondary: "", + PageSize: 0, + Extra: Extra{}, + } +} + +func End() CursorToken { + return CursorToken{ + Mode: CTMEnd, + ValuePrimary: "", + ValueSecondary: "", + Direction: "", + DirectionSecondary: "", + PageSize: 0, + Extra: Extra{}, + } +} + +func (c *CursorToken) Token() string { + if c.Mode == CTMStart { + return "@start" + } + if c.Mode == CTMEnd { + return "@end" + } + + // We kinda manually implement omitempty for the CursorToken here + // because omitempty does not work for time.Time and otherwise we would always + // get weird time values when decoding a token that initially didn't have a Timestamp set. + // For this use case we treat Unix=0 as an empty timestamp + + sertok := cursorTokenSerialize{} + + if c.ValuePrimary != "" { + sertok.ValuePrimary = &c.ValuePrimary + } + if c.ValueSecondary != "" { + sertok.ValueSecondary = &c.ValueSecondary + } + if c.Direction != "" { + sertok.Direction = &c.Direction + } + if c.DirectionSecondary != "" { + sertok.DirectionSecondary = &c.DirectionSecondary + } + if c.PageSize != 0 { + sertok.PageSize = &c.PageSize + } + + sertok.ExtraTimestamp = c.Extra.Timestamp + sertok.ExtraId = c.Extra.Id + sertok.ExtraPage = c.Extra.Page + sertok.ExtraPageSize = c.Extra.PageSize + + body, err := json.Marshal(sertok) + if err != nil { + panic(err) + } + + return "tok_" + base32.StdEncoding.EncodeToString(body) +} + +func Decode(tok string) (CursorToken, error) { + if tok == "" { + return Start(), nil + } + if strings.ToLower(tok) == "@start" { + return Start(), nil + } + if strings.ToLower(tok) == "@end" { + return End(), nil + } + + if !strings.HasPrefix(tok, "tok_") { + return CursorToken{}, errors.New("could not decode token, missing prefix") + } + + body, err :=
base32.StdEncoding.DecodeString(tok[len("tok_"):]) + if err != nil { + return CursorToken{}, err + } + + var tokenDeserialize cursorTokenSerialize + err = json.Unmarshal(body, &tokenDeserialize) + if err != nil { + return CursorToken{}, err + } + + token := CursorToken{Mode: CTMNormal} + + if tokenDeserialize.ValuePrimary != nil { + token.ValuePrimary = *tokenDeserialize.ValuePrimary + } + if tokenDeserialize.ValueSecondary != nil { + token.ValueSecondary = *tokenDeserialize.ValueSecondary + } + if tokenDeserialize.Direction != nil { + token.Direction = *tokenDeserialize.Direction + } + if tokenDeserialize.DirectionSecondary != nil { + token.DirectionSecondary = *tokenDeserialize.DirectionSecondary + } + if tokenDeserialize.PageSize != nil { + token.PageSize = *tokenDeserialize.PageSize + } + + token.Extra.Timestamp = tokenDeserialize.ExtraTimestamp + token.Extra.Id = tokenDeserialize.ExtraId + token.Extra.Page = tokenDeserialize.ExtraPage + token.Extra.PageSize = tokenDeserialize.ExtraPageSize + + return token, nil +} + +func (c *CursorToken) ValuePrimaryObjectId() (primitive.ObjectID, bool) { + if oid, err := primitive.ObjectIDFromHex(c.ValuePrimary); err == nil { + return oid, true + } else { + return primitive.ObjectID{}, false + } +} + +func (c *CursorToken) ValueSecondaryObjectId() (primitive.ObjectID, bool) { + if oid, err := primitive.ObjectIDFromHex(c.ValueSecondary); err == nil { + return oid, true + } else { + return primitive.ObjectID{}, false + } +} diff --git a/go.mod b/go.mod index f1de01a..0eaf9fe 100644 --- a/go.mod +++ b/go.mod @@ -3,12 +3,22 @@ module gogs.mikescher.com/BlackForestBytes/goext go 1.19 require ( + github.com/jmoiron/sqlx v1.3.5 + go.mongodb.org/mongo-driver v1.11.1 + golang.org/x/crypto v0.4.0 golang.org/x/sys v0.3.0 golang.org/x/term v0.3.0 ) require ( - github.com/jmoiron/sqlx v1.3.5 // indirect - go.mongodb.org/mongo-driver v1.11.1 // indirect - golang.org/x/crypto v0.4.0 // indirect + github.com/golang/snappy v0.0.1 // indirect + github.com/klauspost/compress v1.13.6 // indirect + github.com/montanaflynn/stats v0.0.0-20171201202039-1bf9dbcd8cbe // indirect + github.com/pkg/errors v0.9.1 // indirect + github.com/xdg-go/pbkdf2 v1.0.0 // indirect + github.com/xdg-go/scram v1.1.1 // indirect + github.com/xdg-go/stringprep v1.0.3 // indirect + github.com/youmark/pkcs8 v0.0.0-20181117223130-1be2e3e5546d // indirect + golang.org/x/sync v0.0.0-20210220032951-036812b2e83c // indirect + golang.org/x/text v0.5.0 // indirect ) diff --git a/go.sum b/go.sum index 4b096c3..9672f74 100644 --- a/go.sum +++ b/go.sum @@ -1,25 +1,43 @@ github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/go-sql-driver/mysql v1.6.0 h1:BCTh4TKNUYmOmMUcQ3IipzF5prigylS7XXjEkfCHuOE= github.com/go-sql-driver/mysql v1.6.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg= +github.com/golang/snappy v0.0.1 h1:Qgr9rKW7uDUkrbSmQeiDsGa8SjGyCOGtuasMWwvp2P4= github.com/golang/snappy v0.0.1/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= +github.com/google/go-cmp v0.5.2 h1:X2ev0eStA3AbceY54o37/0PQ/UWqKEiiO2dKL5OPaFM= github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/jmoiron/sqlx v1.3.5 h1:vFFPA71p1o5gAeqtEAwLU4dnX2napprKtHr7PYIcN3g= github.com/jmoiron/sqlx v1.3.5/go.mod h1:nRVWtLre0KfCLJvgxzCsLVMogSvQ1zNJtpYr2Ccp0mQ= 
+github.com/klauspost/compress v1.13.6 h1:P76CopJELS0TiO2mebmnzgWaajssP/EszplttgQxcgc= github.com/klauspost/compress v1.13.6/go.mod h1:/3/Vjq9QcHkK5uEr5lBEmyoZ1iFhe47etQ6QUkpK6sk= +github.com/kr/pretty v0.1.0 h1:L/CwN0zerZDmRFUapSPitk6f+Q3+0za1rQkzVuMiMFI= github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= +github.com/kr/text v0.1.0 h1:45sCR5RtlFHMR4UwH9sdQ5TC8v0qDQCHnXt+kaKSTVE= github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= +github.com/lib/pq v1.2.0 h1:LXpIM/LZ5xGFhOpXAQUIMM1HdyqzVYM13zNdjCEEcA0= github.com/lib/pq v1.2.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo= +github.com/mattn/go-sqlite3 v1.14.6 h1:dNPt6NO46WmLVt2DLNpwczCmdV5boIZ6g/tlDrlRUbg= github.com/mattn/go-sqlite3 v1.14.6/go.mod h1:NyWgC/yNuGj7Q9rpYnZvas74GogHl5/Z4A/KQRfk6bU= +github.com/montanaflynn/stats v0.0.0-20171201202039-1bf9dbcd8cbe h1:iruDEfMl2E6fbMZ9s0scYfZQ84/6SPL6zC8ACM2oIL0= github.com/montanaflynn/stats v0.0.0-20171201202039-1bf9dbcd8cbe/go.mod h1:wL8QJuTMNUDYhXwkmfOly8iTdp5TEcJFWZD2D7SIkUc= +github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/testify v1.6.1 h1:hDPOHmpOpP40lSULcqw7IrRb/u7w6RpDC9399XyoNd0= github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/tidwall/pretty v1.0.0 h1:HsD+QiTn7sK6flMKIvNmpqz1qrpP3Ps6jOKIKMooyg4= github.com/tidwall/pretty v1.0.0/go.mod h1:XNkn88O1ChpSDQmQeStsy+sBenx6DDtFZJxhVysOjyk= +github.com/xdg-go/pbkdf2 v1.0.0 h1:Su7DPu48wXMwC3bs7MCNG+z4FhcyEuz5dlvchbq0B0c= github.com/xdg-go/pbkdf2 v1.0.0/go.mod h1:jrpuAogTd400dnrH08LKmI/xc1MbPOebTwRqcT5RDeI= +github.com/xdg-go/scram v1.1.1 h1:VOMT+81stJgXW3CpHyqHN3AXDYIMsx56mEFrB37Mb/E= github.com/xdg-go/scram v1.1.1/go.mod h1:RaEWvsqvNKKvBPvcKeFjrG2cJqOkHTiyTpzz23ni57g= +github.com/xdg-go/stringprep v1.0.3 h1:kdwGpVNwPFtjs98xCGkHjQtGKh86rDcRZN17QEMCOIs= github.com/xdg-go/stringprep v1.0.3/go.mod h1:W3f5j4i+9rC0kuIEJL0ky1VpHXQU3ocBgklLGvcBnW8= +github.com/youmark/pkcs8 v0.0.0-20181117223130-1be2e3e5546d h1:splanxYIlg+5LfHAM6xpdFEAYOk8iySO56hMFq6uLyA= github.com/youmark/pkcs8 v0.0.0-20181117223130-1be2e3e5546d/go.mod h1:rHwXgn7JulP+udvsHwJoVG1YGAP6VLg4y9I5dyZdqmA= go.mongodb.org/mongo-driver v1.11.1 h1:QP0znIRTuL0jf1oBQoAoM0C6ZJfBK4kx0Uumtv1A7w8= go.mongodb.org/mongo-driver v1.11.1/go.mod h1:s7p5vEtfbeR1gYi6pnj3c3/urpbLv2T5Sfd6Rp2HBB8= @@ -27,24 +45,25 @@ golang.org/x/crypto v0.0.0-20220622213112-05595931fe9d/go.mod h1:IxCIyHEi3zRg3s0 golang.org/x/crypto v0.4.0 h1:UVQgzMY87xqpKNgb+kDsll2Igd33HszWHFLmpaRMq/8= golang.org/x/crypto v0.4.0/go.mod h1:3quD/ATkf6oY+rnes5c3ExXTbLc8mueNue5/DoinL80= golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= +golang.org/x/sync v0.0.0-20210220032951-036812b2e83c h1:5KslGYwFpkhGh+Q16bwMP3cOontH8FOep7tGV86Y7SQ= golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys 
v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.1.0 h1:kunALQeHf1/185U1i0GOB/fy1IPRDDpuoOOqRReG57U= -golang.org/x/sys v0.1.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.3.0 h1:w8ZOecv6NaNa/zC8944JTU3vz4u6Lagfk4RPQxv92NQ= golang.org/x/sys v0.3.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= -golang.org/x/term v0.1.0 h1:g6Z6vPFA9dYBAF7DWcH6sCcOntplXsDKcliusYijMlw= -golang.org/x/term v0.1.0/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= golang.org/x/term v0.3.0 h1:qoo4akIqOcDME5bhc/NgxUdovd6BSS2uMsVjB56q1xI= golang.org/x/term v0.3.0/go.mod h1:q750SLmJuPmVoN1blW3UFBPREJfb1KmY3vwxfr+nFDA= golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= +golang.org/x/text v0.5.0 h1:OLmvp0KP+FVG99Ct/qFiL/Fhk4zp4QQnZ7b2U+5piUM= +golang.org/x/text v0.5.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543 h1:E7g+9GITq07hpfrRu66IVDexMakfv52eLZ2CXBWiKr4= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= diff --git a/langext/reflection.go b/langext/reflection.go index 488631c..30e6157 100644 --- a/langext/reflection.go +++ b/langext/reflection.go @@ -4,7 +4,7 @@ import ( "reflect" ) -var reflectBasicTypes = []reflect.Type{ +var reflectBasicTypes = map[reflect.Kind]reflect.Type{ reflect.Bool: reflect.TypeOf(false), reflect.Int: reflect.TypeOf(int(0)), reflect.Int8: reflect.TypeOf(int8(0)), @@ -109,3 +109,28 @@ func TryCast[T any](v any) (T, bool) { return r4, true } + +func TryCastType(v any, dest reflect.Type) (any, bool) { + + underlying := Underlying(reflect.TypeOf(v)) + + if underlying != Underlying(dest) { + return nil, false + } + + r1 := reflect.ValueOf(v) + + if !r1.CanConvert(underlying) { + return nil, false + } + + r2 := r1.Convert(underlying) + + if !r2.CanConvert(dest) { + return nil, false + } + + r4 := r2.Convert(dest) + + return r4.Interface(), true +} diff --git a/rfctime/interface_test.go b/rfctime/interface_test.go index b89d588..d8d0c5c 100644 --- a/rfctime/interface_test.go +++ b/rfctime/interface_test.go @@ -1,6 +1,7 @@ package rfctime import ( + "gogs.mikescher.com/BlackForestBytes/goext/tst" "testing" "time" ) diff --git a/wmo/mongo.go b/wmo/mongo.go new file mode 100644 index 0000000..67c3cd8 --- /dev/null +++ b/wmo/mongo.go @@ -0,0 +1,11 @@ +package wmo + +import "go.mongodb.org/mongo-driver/mongo" + +func W[TData any](collection *mongo.Collection) *Coll[TData] { + c := Coll[TData]{coll: collection} + + c.init() + + return &c +} diff --git a/wmo/pagination.go 
b/wmo/pagination.go new file mode 100644 index 0000000..ad60859 --- /dev/null +++ b/wmo/pagination.go @@ -0,0 +1,69 @@ +package wmo + +import ( + "go.mongodb.org/mongo-driver/bson" + ct "gogs.mikescher.com/BlackForestBytes/goext/cursortoken" +) + +func CreatePagination[TData any](coll *Coll[TData], token ct.CursorToken, fieldPrimary string, sortPrimary ct.SortDirection, fieldSecondary *string, sortSecondary *ct.SortDirection, pageSize *int) ([]bson.D, error) { + + cond := bson.A{} + sort := bson.D{} + + valuePrimary, err := coll.getTokenValueAsMongoType(token.ValuePrimary, fieldPrimary) + if err != nil { + return nil, err + } + + if sortPrimary == ct.SortASC { + // We sort ASC on the primary field - so we want all entries newer ($gt) than the $primary + cond = append(cond, bson.M{fieldPrimary: bson.M{"$gt": valuePrimary}}) + sort = append(sort, bson.E{Key: fieldPrimary, Value: +1}) + } else if sortPrimary == ct.SortDESC { + // We sort DESC on the primary field - so we want all entries older ($lt) than the $primary + cond = append(cond, bson.M{fieldPrimary: bson.M{"$lt": valuePrimary}}) + sort = append(sort, bson.E{Key: fieldPrimary, Value: -1}) + } + + if fieldSecondary != nil && sortSecondary != nil && *fieldSecondary != fieldPrimary { + + valueSecondary, err := coll.getTokenValueAsMongoType(token.ValueSecondary, *fieldSecondary) + if err != nil { + return nil, err + } + + if *sortSecondary == ct.SortASC { + + // the conflict-resolution condition, for entries with the _same_ value as the $primary we take the ones with a greater $secondary (= newer) + cond = append(cond, bson.M{"$and": bson.A{ + bson.M{fieldPrimary: valuePrimary}, + bson.M{*fieldSecondary: bson.M{"$gt": valueSecondary}}, + }}) + + sort = append(sort, bson.E{Key: *fieldSecondary, Value: +1}) + + } else if *sortSecondary == ct.SortDESC { + + // the conflict-resolution condition, for entries with the _same_ value as the $primary we take the ones with a smaller $secondary (= older) + cond = append(cond, bson.M{"$and": bson.A{ + bson.M{fieldPrimary: valuePrimary}, + bson.M{*fieldSecondary: bson.M{"$lt": valueSecondary}}, + }}) + + sort = append(sort, bson.E{Key: *fieldSecondary, Value: -1}) + + } + } + + pipeline := make([]bson.D, 0, 3) + + pipeline = append(pipeline, bson.D{{Key: "$match", Value: bson.M{"$or": cond}}}) + + pipeline = append(pipeline, bson.D{{Key: "$sort", Value: sort}}) + + if pageSize != nil { + pipeline = append(pipeline, bson.D{{Key: "$limit", Value: int64(*pageSize + 1)}}) + } + + return pipeline, nil +} diff --git a/wmo/reflection.go b/wmo/reflection.go new file mode 100644 index 0000000..73ab171 --- /dev/null +++ b/wmo/reflection.go @@ -0,0 +1,294 @@ +package wmo + +import ( + "errors" + "fmt" + "gogs.mikescher.com/BlackForestBytes/goext/langext" + "reflect" + "strconv" + "time" +) + +func (c *Coll[TData]) init() { + + c.dataTypeMap = make(map[string]fullTypeRef[TData]) + + example := *new(TData) + + v := reflect.ValueOf(example) + + c.initFields("", v, make([]int, 0)) + +} + +func (c *Coll[TData]) initFields(prefix string, rval reflect.Value, idxarr []int) { + + rtyp := rval.Type() + + for i := 0; i < rtyp.NumField(); i++ { + + rsfield := rtyp.Field(i) + rvfield := rval.Field(i) + + if !rsfield.IsExported() { + continue + } + + bsonkey, found := rsfield.Tag.Lookup("bson") + if !found || bsonkey == "-" { + continue + } + + fullKey := prefix + bsonkey + + newIdxArr := langext.ArrCopy(idxarr) + newIdxArr = append(newIdxArr, i) + + if rvfield.Type().Kind() == reflect.Pointer { + + c.dataTypeMap[fullKey] = fullTypeRef[TData]{ + IsPointer: true, + Kind:
rvfield.Type().Elem().Kind(), + Type: rvfield.Type().Elem(), + UnderlyingType: langext.Underlying(rvfield.Type().Elem()), + Name: rsfield.Name, + Index: newIdxArr, + } + + } else { + + c.dataTypeMap[fullKey] = fullTypeRef[TData]{ + IsPointer: false, + Kind: rvfield.Type().Kind(), + Type: rvfield.Type(), + UnderlyingType: langext.Underlying(rvfield.Type()), + Name: rsfield.Name, + Index: newIdxArr, + } + + } + + if rvfield.Kind() == reflect.Struct { + c.initFields(fullKey+".", rvfield, newIdxArr) + } + + } + +} + +func (c *Coll[TData]) getTokenValueAsMongoType(value string, fieldName string) (any, error) { + fref := c.dataTypeMap[fieldName] + + if fref.IsPointer && value == "" { + pointertype := reflect.New(fref.Type).Type() + nilvalue := reflect.Zero(pointertype) + outvalue := nilvalue.Interface() + return outvalue, nil + } + + pointerize := func(v any) any { + if !fref.IsPointer { + return v + } + + rval1 := reflect.ValueOf(v) + rval2 := rval1.Convert(fref.Type) + rval3 := reflect.New(fref.Type) + rval3.Elem().Set(rval2) + return rval3.Interface() + } + + if fref.UnderlyingType == reflect.TypeOf("") { + + rt, ok := langext.TryCastType(value, fref.Type) + if !ok { + return nil, errors.New(fmt.Sprintf("failed to parse field '%s' from string to %s", fieldName, fref.Type.String())) + } + + return pointerize(rt), nil + } + + if fref.UnderlyingType == reflect.TypeOf(time.Time{}) { + + t, err := time.Parse(time.RFC3339Nano, value) + if err != nil { + return nil, errors.New(fmt.Sprintf("failed to parse field '%s' as time.Time ('%s')", fieldName, value)) + } + + rt, ok := langext.TryCastType(t, fref.Type) + if !ok { + return nil, errors.New(fmt.Sprintf("failed to parse field '%s' from time.Time to %s", fieldName, fref.Type.String())) + } + + return pointerize(rt), nil + } + + if fref.UnderlyingType == reflect.TypeOf(int(0)) { + + t, err := strconv.ParseInt(value, 10, 64) + if err != nil { + return nil, errors.New(fmt.Sprintf("failed to parse field '%s' as int64 ('%s')", fieldName, value)) + } + + rt, ok := langext.TryCastType(int(t), fref.Type) + if !ok { + return nil, errors.New(fmt.Sprintf("failed to parse field '%s' from int to %s", fieldName, fref.Type.String())) + } + + return pointerize(rt), nil + } + + if fref.UnderlyingType == reflect.TypeOf(int32(0)) { + + t, err := strconv.ParseInt(value, 10, 64) + if err != nil { + return nil, errors.New(fmt.Sprintf("failed to parse field '%s' as int32 ('%s')", fieldName, value)) + } + + rt, ok := langext.TryCastType(int32(t), fref.Type) + if !ok { + return nil, errors.New(fmt.Sprintf("failed to parse field '%s' from int32 to %s", fieldName, fref.Type.String())) + } + + return pointerize(rt), nil + } + + if fref.UnderlyingType == reflect.TypeOf(int64(0)) { + + t, err := strconv.ParseInt(value, 10, 64) + if err != nil { + return nil, errors.New(fmt.Sprintf("failed to parse field '%s' as int64 ('%s')", fieldName, value)) + } + + rt, ok := langext.TryCastType(int64(t), fref.Type) + if !ok { + return nil, errors.New(fmt.Sprintf("failed to parse field '%s' from int64 to %s", fieldName, fref.Type.String())) + } + + return pointerize(rt), nil + } + + if fref.UnderlyingType == reflect.TypeOf(float32(0)) { + + t, err := strconv.ParseFloat(value, 64) + if err != nil { + return nil, errors.New(fmt.Sprintf("failed to parse field '%s' as float32 ('%s')", fieldName, value)) + } + + rt, ok := langext.TryCastType(float32(t), fref.Type) + if !ok { + return nil, errors.New(fmt.Sprintf("failed to parse field '%s' from float32 to %s", fieldName, fref.Type.String())) + } + 
+ return pointerize(rt), nil + } + + if fref.UnderlyingType == reflect.TypeOf(float64(0)) { + + t, err := strconv.ParseFloat(value, 64) + if err != nil { + return nil, errors.New(fmt.Sprintf("failed to parse field '%s' as float64 ('%s')", fieldName, value)) + } + + rt, ok := langext.TryCastType(float64(t), fref.Type) + if !ok { + return nil, errors.New(fmt.Sprintf("failed to parse field '%s' from float64 to %s", fieldName, fref.Type.String())) + } + + return pointerize(rt), nil + } + + return nil, errors.New(fmt.Sprintf("failed to parse field '%s' of type %s (%s)", fieldName, fref.Type.String(), fref.UnderlyingType.String())) +} + +func (c *Coll[TData]) getFieldValueAsTokenString(entity TData, fieldName string) (string, error) { + fref := c.dataTypeMap[fieldName] + + realValue := c.getFieldValue(entity, fieldName) + + if langext.IsNil(realValue) { + return "", nil + } + + reflValue := reflect.ValueOf(realValue) + if reflValue.Kind() == reflect.Pointer { + reflValue = reflValue.Elem() + realValue = reflValue.Interface() + } + + if fref.UnderlyingType == reflect.TypeOf("") { + + rt, ok := langext.TryCastType(realValue, reflect.TypeOf("")) + if !ok { + return "", errors.New(fmt.Sprintf("failed to cast field '%s' from %s to string", fieldName, fref.Type.String())) + } + + return rt.(string), nil + } + + if fref.UnderlyingType == reflect.TypeOf(time.Time{}) { + rt, ok := langext.TryCastType(realValue, reflect.TypeOf(time.Time{})) + if !ok { + return "", errors.New(fmt.Sprintf("failed to cast field '%s' from %s to time.Time", fieldName, fref.Type.String())) + } + + return rt.(time.Time).Format(time.RFC3339Nano), nil + } + + if fref.UnderlyingType == reflect.TypeOf(int(0)) { + rt, ok := langext.TryCastType(realValue, reflect.TypeOf(int(0))) + if !ok { + return "", errors.New(fmt.Sprintf("failed to cast field '%s' from %s to int", fieldName, fref.Type.String())) + } + + return strconv.Itoa(rt.(int)), nil + } + + if fref.UnderlyingType == reflect.TypeOf(int32(0)) { + rt, ok := langext.TryCastType(realValue, reflect.TypeOf(int32(0))) + if !ok { + return "", errors.New(fmt.Sprintf("failed to cast field '%s' from %s to int32", fieldName, fref.Type.String())) + } + + return strconv.FormatInt(int64(rt.(int32)), 10), nil + } + + if fref.UnderlyingType == reflect.TypeOf(int64(0)) { + rt, ok := langext.TryCastType(realValue, reflect.TypeOf(int64(0))) + if !ok { + return "", errors.New(fmt.Sprintf("failed to cast field '%s' from %s to int64", fieldName, fref.Type.String())) + } + + return strconv.FormatInt(rt.(int64), 10), nil + } + + if fref.UnderlyingType == reflect.TypeOf(float32(0)) { + rt, ok := langext.TryCastType(realValue, reflect.TypeOf(float32(0))) + if !ok { + return "", errors.New(fmt.Sprintf("failed to cast field '%s' from %s to float32", fieldName, fref.Type.String())) + } + + return strconv.FormatFloat(float64(rt.(float32)), 'f', -1, 32), nil + } + + if fref.UnderlyingType == reflect.TypeOf(float64(0)) { + rt, ok := langext.TryCastType(realValue, reflect.TypeOf(float64(0))) + if !ok { + return "", errors.New(fmt.Sprintf("failed to cast field '%s' from %s to float64", fieldName, fref.Type.String())) + } + + return strconv.FormatFloat(rt.(float64), 'f', -1, 64), nil + } + + return "", errors.New(fmt.Sprintf("failed to parse field '%s' of type %s (%s)", fieldName, fref.Type.String(), fref.UnderlyingType.String())) +} + +func (c *Coll[TData]) getFieldType(fieldName string) fullTypeRef[TData] { + return c.dataTypeMap[fieldName] +} + +func (c *Coll[TData]) getFieldValue(data TData, fieldName 
string) any { + fref := c.dataTypeMap[fieldName] + rval := reflect.ValueOf(data) + return rval.FieldByIndex(fref.Index).Interface() +} diff --git a/wmo/reflection_test.go b/wmo/reflection_test.go new file mode 100644 index 0000000..4267f48 --- /dev/null +++ b/wmo/reflection_test.go @@ -0,0 +1,160 @@ +package wmo + +import ( + "go.mongodb.org/mongo-driver/mongo" + "gogs.mikescher.com/BlackForestBytes/goext/langext" + "gogs.mikescher.com/BlackForestBytes/goext/tst" + "testing" + "time" +) + +func TestReflectionGetFieldType(t *testing.T) { + + type IDType string + + type TestData struct { + ID IDType `bson:"_id"` + CDate time.Time `bson:"cdate"` + Sub struct { + A string `bson:"a"` + } `bson:"sub"` + Str string `bson:"str"` + Ptr *int `bson:"ptr"` + } + + coll := W[TestData](&mongo.Collection{}) + + coll.init() + + t0 := time.Now() + + d := TestData{ + ID: "1", + CDate: t0, + Sub: struct { + A string `bson:"a"` + }{ + A: "2", + }, + Str: "3", + Ptr: langext.Ptr(4), + } + + tst.AssertEqual(t, coll.getFieldType("_id").Kind.String(), "string") + tst.AssertEqual(t, coll.getFieldType("_id").Type.String(), "wmo.IDType") + tst.AssertEqual(t, coll.getFieldType("_id").Name, "ID") + tst.AssertEqual(t, coll.getFieldType("_id").IsPointer, false) + tst.AssertEqual(t, coll.getFieldValue(d, "_id").(IDType), "1") + + tst.AssertEqual(t, coll.getFieldType("cdate").Kind.String(), "struct") + tst.AssertEqual(t, coll.getFieldType("cdate").Type.String(), "time.Time") + tst.AssertEqual(t, coll.getFieldType("cdate").Name, "CDate") + tst.AssertEqual(t, coll.getFieldType("cdate").IsPointer, false) + tst.AssertEqual(t, coll.getFieldValue(d, "cdate").(time.Time), t0) + + tst.AssertEqual(t, coll.getFieldType("sub.a").Kind.String(), "string") + tst.AssertEqual(t, coll.getFieldType("sub.a").Type.String(), "string") + tst.AssertEqual(t, coll.getFieldType("sub.a").Name, "A") + tst.AssertEqual(t, coll.getFieldType("sub.a").IsPointer, false) + tst.AssertEqual(t, coll.getFieldValue(d, "sub.a").(string), "2") + + tst.AssertEqual(t, coll.getFieldType("str").Kind.String(), "string") + tst.AssertEqual(t, coll.getFieldType("str").Type.String(), "string") + tst.AssertEqual(t, coll.getFieldType("str").Name, "Str") + tst.AssertEqual(t, coll.getFieldType("str").IsPointer, false) + tst.AssertEqual(t, coll.getFieldValue(d, "str").(string), "3") + + tst.AssertEqual(t, coll.getFieldType("ptr").Kind.String(), "int") + tst.AssertEqual(t, coll.getFieldType("ptr").Type.String(), "int") + tst.AssertEqual(t, coll.getFieldType("ptr").Name, "Ptr") + tst.AssertEqual(t, coll.getFieldType("ptr").IsPointer, true) + tst.AssertEqual(t, *coll.getFieldValue(d, "ptr").(*int), 4) +} + +func TestReflectionGetTokenValueAsMongoType(t *testing.T) { + + type IDType string + + type TestData struct { + ID IDType `bson:"_id"` + CDate time.Time `bson:"cdate"` + Sub struct { + A string `bson:"a"` + } `bson:"sub"` + Str string `bson:"str"` + Ptr *int `bson:"ptr"` + Num int `bson:"num"` + } + + coll := W[TestData](&mongo.Collection{}) + + coll.init() + + gtvasmt := func(value string, fieldName string) any { + v, err := coll.getTokenValueAsMongoType(value, fieldName) + if err != nil { + t.Errorf("%s", "failed to getTokenValueAsMongoType") + } + return v + } + + tst.AssertEqual(t, gtvasmt("hello", "str").(string), "hello") + tst.AssertEqual(t, gtvasmt("4", "num").(int), 4) + tst.AssertEqual(t, gtvasmt("asdf", "_id").(IDType), "asdf") + tst.AssertEqual(t, gtvasmt("", "ptr").(*int), nil) + tst.AssertEqual(t, *(gtvasmt("123", "ptr").(*int)), 123) +} + +func 
TestReflectionGetFieldValueAsTokenString(t *testing.T) { + + type IDType string + + type TestData struct { + ID IDType `bson:"_id"` + CDate time.Time `bson:"cdate"` + Sub struct { + A string `bson:"a"` + } `bson:"sub"` + Str string `bson:"str"` + Ptr *int `bson:"ptr"` + Num int `bson:"num"` + Ptr2 *int `bson:"ptr2"` + FFF float64 `bson:"fff"` + } + + coll := W[TestData](&mongo.Collection{}) + + coll.init() + + t0 := time.Now() + + d := TestData{ + ID: "1", + CDate: t0, + Sub: struct { + A string `bson:"a"` + }{ + A: "2", + }, + Str: "3", + Ptr: langext.Ptr(4), + Num: 22, + FFF: 22.5, + Ptr2: nil, + } + + gfvats := func(value TestData, fieldName string) string { + v, err := coll.getFieldValueAsTokenString(value, fieldName) + if err != nil { + t.Errorf("%s", "failed to getTokenValueAsMongoType") + } + return v + } + + tst.AssertEqual(t, gfvats(d, "str"), "3") + tst.AssertEqual(t, gfvats(d, "num"), "22") + tst.AssertEqual(t, gfvats(d, "_id"), "1") + tst.AssertEqual(t, gfvats(d, "ptr"), "4") + tst.AssertEqual(t, gfvats(d, "ptr2"), "") + tst.AssertEqual(t, gfvats(d, "fff"), "22.5") +} diff --git a/wmo/wrapper.go b/wmo/wrapper.go new file mode 100644 index 0000000..f55eea3 --- /dev/null +++ b/wmo/wrapper.go @@ -0,0 +1,231 @@ +package wmo + +import ( + "context" + "go.mongodb.org/mongo-driver/bson" + "go.mongodb.org/mongo-driver/mongo" + "go.mongodb.org/mongo-driver/mongo/options" + ct "gogs.mikescher.com/BlackForestBytes/goext/cursortoken" + "gogs.mikescher.com/BlackForestBytes/goext/langext" + "reflect" +) + +type fullTypeRef[TData any] struct { + IsPointer bool + Kind reflect.Kind + Type reflect.Type + UnderlyingType reflect.Type + Name string + Index []int +} + +type Coll[TData any] struct { + coll *mongo.Collection + dataTypeMap map[string]fullTypeRef[TData] +} + +func (c *Coll[TData]) Collection() *mongo.Collection { + return c.coll +} + +func (c *Coll[TData]) Name() string { + return c.coll.Name() +} + +func (c *Coll[TData]) Indexes() mongo.IndexView { + return c.coll.Indexes() +} + +func (c *Coll[TData]) Drop(ctx context.Context) error { + return c.coll.Drop(ctx) +} + +func (c *Coll[TData]) FindOne(ctx context.Context, filter any) (TData, error) { + var res TData + + err := c.coll.FindOne(ctx, filter).Decode(&res) + if err != nil { + return *new(TData), err + } + + return res, nil +} + +func (c *Coll[TData]) FindOneOpt(ctx context.Context, filter any) (*TData, error) { + var res TData + + err := c.coll.FindOne(ctx, filter).Decode(&res) + if err == mongo.ErrNoDocuments { + return nil, nil + } + if err != nil { + return nil, err + } + + return &res, nil +} + +func (c *Coll[TData]) FindOneByID(ctx context.Context, id any) (TData, error) { + var res TData + + err := c.coll.FindOne(ctx, bson.M{"_id": id}).Decode(&res) + if err != nil { + return *new(TData), err + } + + return res, nil +} + +func (c *Coll[TData]) FindOneOptByID(ctx context.Context, id any) (*TData, error) { + var res TData + + err := c.coll.FindOne(ctx, bson.M{"_id": id}).Decode(&res) + if err == mongo.ErrNoDocuments { + return nil, nil + } + if err != nil { + return nil, err + } + + return &res, nil +} + +func (c *Coll[TData]) Find(ctx context.Context, filter any, opts ...*options.FindOptions) ([]TData, error) { + cursor, err := c.coll.Find(ctx, filter, opts...) 
+ if err != nil { + return nil, err + } + + res := make([]TData, 0, cursor.RemainingBatchLength()) + err = cursor.All(ctx, &res) + if err != nil { + return nil, err + } + + return res, nil +} + +func (c *Coll[TData]) Aggregate(ctx context.Context, pipeline mongo.Pipeline, opts ...*options.AggregateOptions) ([]TData, error) { + cursor, err := c.coll.Aggregate(ctx, pipeline, opts...) + if err != nil { + return nil, err + } + + res := make([]TData, 0, cursor.RemainingBatchLength()) + err = cursor.All(ctx, &res) + if err != nil { + return nil, err + } + + return res, nil +} + +func (c *Coll[TData]) ReplaceOne(ctx context.Context, id any, value TData) error { + _, err := c.coll.ReplaceOne(ctx, bson.M{"_id": id}, value) + if err != nil { + return err + } + + return nil +} + +func (c *Coll[TData]) UpdateOne(ctx context.Context, id any, updateQuery any) error { + _, err := c.coll.UpdateOne(ctx, bson.M{"_id": id}, updateQuery) + if err != nil { + return err + } + + return nil +} + +func (c *Coll[TData]) UpdateOneAndReturn(ctx context.Context, id any, updateQuery any) (TData, error) { + _, err := c.coll.UpdateOne(ctx, bson.M{"_id": id}, updateQuery) + if err != nil { + return *new(TData), err + } + + var res TData + + err = c.coll.FindOne(ctx, bson.M{"_id": id}).Decode(&res) + if err != nil { + return *new(TData), err + } + + return res, nil +} + +func (c *Coll[TData]) DeleteOne(ctx context.Context, id any) error { + _, err := c.coll.DeleteOne(ctx, bson.M{"_id": id}) + if err != nil { + return err + } + + return nil +} + +func (c *Coll[TData]) List(ctx context.Context, filter ct.Filter, pageSize *int, inTok ct.CursorToken) ([]TData, ct.CursorToken, error) { + if inTok.Mode == ct.CTMEnd { + return make([]TData, 0), ct.End(), nil + } + + pipeline := filter.FilterQuery() + + sortPrimary, sortDirPrimary, sortSecondary, sortDirSecondary := filter.Pagination() + + paginationPipeline, err := CreatePagination(c, inTok, sortPrimary, sortDirPrimary, sortSecondary, sortDirSecondary, pageSize) + if err != nil { + return nil, ct.CursorToken{}, err + } + + pipeline = append(pipeline, paginationPipeline...)
+ + cursor, err := c.coll.Aggregate(ctx, pipeline) + if err != nil { + return nil, ct.CursorToken{}, err + } + + entities := make([]TData, 0, cursor.RemainingBatchLength()+1) + for (pageSize == nil || len(entities) != *pageSize) && cursor.Next(ctx) { + var entry TData + err = cursor.Decode(&entry) + if err != nil { + return nil, ct.CursorToken{}, err + } + entities = append(entities, entry) + } + + if pageSize == nil || len(entities) <= *pageSize || !cursor.TryNext(ctx) { + return entities, ct.End(), nil + } + + last := entities[len(entities)-1] + + nextToken, _ := c.createToken(sortPrimary, sortDirPrimary, sortSecondary, sortDirSecondary, last, pageSize) + + return entities, nextToken, nil +} + +func (c *Coll[TData]) createToken(fieldPrimary string, dirPrimary ct.SortDirection, fieldSecondary *string, dirSecondary *ct.SortDirection, lastEntity TData, pageSize *int) (ct.CursorToken, error) { + + valuePrimary, err := c.getFieldValueAsTokenString(lastEntity, fieldPrimary) + if err != nil { + return ct.CursorToken{}, err + } + + valueSecondary := "" + if fieldSecondary != nil && dirSecondary != nil { + valueSecondary, err = c.getFieldValueAsTokenString(lastEntity, *fieldSecondary) + if err != nil { + return ct.CursorToken{}, err + } + } + + return ct.CursorToken{ + Mode: ct.CTMNormal, + ValuePrimary: valuePrimary, + ValueSecondary: valueSecondary, + Direction: dirPrimary, + PageSize: langext.Coalesce(pageSize, 0), + Extra: ct.Extra{}, + }, nil +}
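
Usage sketch for the new `cursortoken` package: `@start`, `@end` and the empty string decode to the special modes, and a normal token is the JSON of its non-empty fields wrapped as `tok_` + base32. The concrete values below are only illustrative.

```go
package main

import (
	"fmt"

	ct "gogs.mikescher.com/BlackForestBytes/goext/cursortoken"
)

func main() {
	// The special keywords (and the empty string) decode to the START/END modes.
	start, _ := ct.Decode("@start")
	fmt.Println(start.Mode, start.Token()) // START @start

	// A normal token serializes its non-empty fields as JSON and wraps them in "tok_" + base32.
	tok := ct.CursorToken{
		Mode:         ct.CTMNormal,
		ValuePrimary: "2023-06-06T21:18:40Z",
		Direction:    ct.SortDESC,
		PageSize:     50,
	}
	encoded := tok.Token() // "tok_..."

	decoded, err := ct.Decode(encoded)
	fmt.Println(err, decoded.ValuePrimary, decoded.Direction, decoded.PageSize)
	// <nil> 2023-06-06T21:18:40Z DESC 50
}
```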
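`langext.TryCastType` is what the `wmo` reflection code uses to convert parsed token values into named field types. A minimal sketch of that behaviour, assuming `langext.Underlying` resolves a named type to its basic kind; the `UserID` type here is hypothetical (similar to the `IDType` in wmo/reflection_test.go).

```go
package main

import (
	"fmt"
	"reflect"

	"gogs.mikescher.com/BlackForestBytes/goext/langext"
)

// UserID is a hypothetical named string type.
type UserID string

func main() {
	// A plain string converts to the named type because both share the string underlying type.
	v, ok := langext.TryCastType("u-1234", reflect.TypeOf(UserID("")))
	fmt.Println(v, ok) // u-1234 true

	// Values with a different underlying type are rejected instead of panicking.
	_, ok = langext.TryCastType(42, reflect.TypeOf(UserID("")))
	fmt.Println(ok) // false
}
```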
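And a sketch of how the `wmo` wrapper and the cursor tokens are meant to fit together for pagination. `Event`, `eventFilter` and `listPage` are illustrative assumptions, not part of the library; only `wmo.W`, `Coll.List`, `ct.Decode`/`Token` and the `ct.Filter` interface come from this patch.

```go
package example

import (
	"context"
	"time"

	"go.mongodb.org/mongo-driver/mongo"
	ct "gogs.mikescher.com/BlackForestBytes/goext/cursortoken"
	"gogs.mikescher.com/BlackForestBytes/goext/langext"
	"gogs.mikescher.com/BlackForestBytes/goext/wmo"
)

// Event is a hypothetical entity; only fields with a bson tag are picked up by the wrapper.
type Event struct {
	ID    string    `bson:"_id"`
	CDate time.Time `bson:"cdate"`
	Title string    `bson:"title"`
}

// eventFilter implements ct.Filter: no extra $match stages, newest-first by cdate,
// with _id as the tie-breaking secondary sort.
type eventFilter struct{}

func (eventFilter) FilterQuery() mongo.Pipeline {
	return mongo.Pipeline{}
}

func (eventFilter) Pagination() (string, ct.SortDirection, *string, *ct.SortDirection) {
	return "cdate", ct.SortDESC, langext.Ptr("_id"), langext.Ptr(ct.SortDESC)
}

// listPage fetches one page of events and returns the token for the next page.
func listPage(ctx context.Context, mcoll *mongo.Collection, rawToken string) ([]Event, string, error) {
	coll := wmo.W[Event](mcoll)

	tok, err := ct.Decode(rawToken)
	if err != nil {
		return nil, "", err
	}

	events, next, err := coll.List(ctx, eventFilter{}, langext.Ptr(32), tok)
	if err != nil {
		return nil, "", err
	}

	return events, next.Token(), nil
}
```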