
v0.0.304 add support for `WithModifyingPipeline` to wmo
Build Docker and Deploy / Run goext test-suite (push) Failing after 1m22s

Mike Schwörer 2023-11-09 09:26:46 +01:00
parent 45031b05cf
commit f3ecba3883
Signed by: Mikescher
GPG Key ID: D3C7172E0A70F8CF
13 changed files with 268 additions and 193 deletions
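
What the new hook enables, as a minimal usage sketch (not part of this commit): the User/Group types, the field names and the "groups" collection are invented for illustration; only WithModifyingPipeline, the Coll type and the import paths come from this repository. The registered stages are appended after each query's own filter/limit/skip/sort stages, so Find, FindOne*, List, Paginate and Aggregate all return the enriched documents, and the write helpers that return the entity (InsertOne, FindOneAndUpdate, FindOneAndReplace) re-read the document so the stages apply there as well.

package example

import (
    "go.mongodb.org/mongo-driver/bson"
    "go.mongodb.org/mongo-driver/mongo"

    "gogs.mikescher.com/BlackForestBytes/goext/wmo"
)

type Group struct {
    ID   string `bson:"_id"`
    Name string `bson:"name"`
}

type User struct {
    ID       string   `bson:"_id"`
    Name     string   `bson:"name"`
    GroupIDs []string `bson:"groupIds"`
    Groups   []Group  `bson:"groups"` // not stored; filled by the $lookup stage registered below
}

// newUserColl decorates an existing wmo collection handle so that every query
// also joins the referenced group documents into the Groups field.
func newUserColl(base *wmo.Coll[User]) *wmo.Coll[User] {
    return base.WithModifyingPipeline(mongo.Pipeline{
        bson.D{{Key: "$lookup", Value: bson.M{
            "from":         "groups",
            "localField":   "groupIds",
            "foreignField": "_id",
            "as":           "groups",
        }}},
    })
}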

go.mod

@ -8,7 +8,7 @@ require (
github.com/rs/xid v1.5.0
github.com/rs/zerolog v1.31.0
go.mongodb.org/mongo-driver v1.13.0
golang.org/x/crypto v0.14.0
golang.org/x/crypto v0.15.0
golang.org/x/sys v0.14.0
golang.org/x/term v0.14.0
)
@ -41,7 +41,7 @@ require (
github.com/xdg-go/stringprep v1.0.4 // indirect
github.com/youmark/pkcs8 v0.0.0-20201027041543-1326539a0a0a // indirect
golang.org/x/arch v0.6.0 // indirect
golang.org/x/net v0.17.0 // indirect
golang.org/x/net v0.18.0 // indirect
golang.org/x/sync v0.5.0 // indirect
golang.org/x/text v0.14.0 // indirect
google.golang.org/protobuf v1.31.0 // indirect

go.sum

@ -131,6 +131,8 @@ golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5y
golang.org/x/crypto v0.0.0-20220622213112-05595931fe9d/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4=
golang.org/x/crypto v0.14.0 h1:wBqGXzWJW6m1XrIKlAH0Hs1JJ7+9KBwnIO8v66Q9cHc=
golang.org/x/crypto v0.14.0/go.mod h1:MVFd36DqK4CsrnJYDkBA3VC4m2GkXAM0PvzMCn4JQf4=
golang.org/x/crypto v0.15.0 h1:frVn1TEaCEaZcn3Tmd7Y2b5KKPaZ+I32Q2OA3kYp5TA=
golang.org/x/crypto v0.15.0/go.mod h1:4ChreQoLWfG3xLDer1WdlH5NdlQ3+mwnQq1YTKY+72g=
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
@ -141,6 +143,8 @@ golang.org/x/net v0.16.0 h1:7eBu7KsSvFDtSXUIDbh3aqlK4DPsZ1rByC8PFfBThos=
golang.org/x/net v0.16.0/go.mod h1:NxSsAGuq816PNPmqtQdLE42eU2Fs7NoRIZrHJAlaCOE=
golang.org/x/net v0.17.0 h1:pVaXccu2ozPjCXewfr1S7xza/zcXTity9cCdXQYSjIM=
golang.org/x/net v0.17.0/go.mod h1:NxSsAGuq816PNPmqtQdLE42eU2Fs7NoRIZrHJAlaCOE=
golang.org/x/net v0.18.0 h1:mIYleuAkSbHh0tCv7RvjL3F6ZVbLjq4+R7zbOn3Kokg=
golang.org/x/net v0.18.0/go.mod h1:/czyP5RqHAH4odGYxBJ1qz0+CE5WZ+2j1YgoEo8F2jQ=
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.4.0 h1:zxkM55ReGkDlKSM+Fu41A+zmbZuaPVbGMzvvdUPznYQ=


@ -1,5 +1,5 @@
package goext
const GoextVersion = "0.0.303"
const GoextVersion = "0.0.304"
const GoextVersionTimestamp = "2023-11-08T19:01:15+0100"
const GoextVersionTimestamp = "2023-11-09T09:26:46+0100"


@ -45,6 +45,7 @@ type Coll[TData any] struct {
customDecoder *func(ctx context.Context, dec Decodable) (TData, error) // custom decoding function (useful if TData is an interface)
isInterfaceDataType bool // true if TData is an interface (not a struct)
unmarshalHooks []func(d TData) TData // called for every object after unmarshalling
extraModPipeline mongo.Pipeline // appended to pipelines after filter/limit/skip/sort, used for $lookup, $set, $unset, $project, etc
}
func (c *Coll[TData]) Collection() *mongo.Collection {
@ -81,6 +82,12 @@ func (c *Coll[TData]) WithUnmarshalHook(fn func(d TData) TData) *Coll[TData] {
return c
}
func (c *Coll[TData]) WithModifyingPipeline(p mongo.Pipeline) *Coll[TData] {
c.extraModPipeline = append(c.extraModPipeline, p...)
return c
}
func (c *Coll[TData]) createToken(fieldPrimary string, dirPrimary ct.SortDirection, fieldSecondary *string, dirSecondary *ct.SortDirection, lastEntity TData, pageSize *int) (ct.CursorToken, error) {
valuePrimary, err := c.getFieldValueAsTokenString(lastEntity, fieldPrimary)

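WithModifyingPipeline appends to extraModPipeline rather than replacing it, so the hook can be called more than once and the stages accumulate in call order. A small hedged sketch (the helper and the stage contents are invented for illustration):

package example

import (
    "go.mongodb.org/mongo-driver/bson"
    "go.mongodb.org/mongo-driver/mongo"

    "gogs.mikescher.com/BlackForestBytes/goext/wmo"
)

// withDisplayFields is illustrative, not goext code: after both calls the collection's
// extra pipeline is [$set displayName, $unset passwordHash], executed in that order
// after every query's own stages.
func withDisplayFields[T any](c *wmo.Coll[T]) *wmo.Coll[T] {
    return c.
        WithModifyingPipeline(mongo.Pipeline{
            bson.D{{Key: "$set", Value: bson.M{"displayName": bson.M{"$concat": bson.A{"$firstname", " ", "$lastname"}}}}},
        }).
        WithModifyingPipeline(mongo.Pipeline{
            bson.D{{Key: "$unset", Value: "passwordHash"}},
        })
}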

@ -2,76 +2,88 @@ package wmo
import (
"context"
"go.mongodb.org/mongo-driver/bson"
"gogs.mikescher.com/BlackForestBytes/goext/exerr"
)
func (c *Coll[TData]) decodeSingle(ctx context.Context, dec Decodable) (TData, error) {
if c.customDecoder != nil {
res, err := (*c.customDecoder)(ctx, dec)
var res TData
var err error
if c.customDecoder != nil {
res, err = (*c.customDecoder)(ctx, dec)
if err != nil {
return *new(TData), exerr.Wrap(err, "failed to decode single entity with custom-decoder").Type("decoder", *c.customDecoder).Build()
}
for _, hook := range c.unmarshalHooks {
res = hook(res)
}
return res, nil
} else {
var res TData
err := dec.Decode(&res)
err = dec.Decode(&res)
if err != nil {
return *new(TData), exerr.Wrap(err, "failed to decode single entity").Type("target-type", res).Build()
}
for _, hook := range c.unmarshalHooks {
res = hook(res)
}
return res, nil
}
for _, hook := range c.unmarshalHooks {
res = hook(res)
}
return res, nil
}
func (c *Coll[TData]) decodeAll(ctx context.Context, cursor Cursorable) ([]TData, error) {
res := make([]TData, 0, cursor.RemainingBatchLength())
if c.customDecoder != nil {
res := make([]TData, 0, cursor.RemainingBatchLength())
for cursor.Next(ctx) {
entry, err := (*c.customDecoder)(ctx, cursor)
if err != nil {
return nil, exerr.Wrap(err, "failed to decode entity with custom-decoder").Type("decoder", *c.customDecoder).Build()
}
for _, hook := range c.unmarshalHooks {
entry = hook(entry)
}
res = append(res, entry)
}
return res, nil
} else {
res := make([]TData, 0, cursor.RemainingBatchLength())
err := cursor.All(ctx, &res)
if err != nil {
return nil, exerr.Wrap(err, "failed to batch-decode entity").Type("target-type", res).Build()
}
for i := 0; i < len(res); i++ {
for _, hook := range c.unmarshalHooks {
res[i] = hook(res[i])
}
}
return res, nil
}
for i := 0; i < len(res); i++ {
for _, hook := range c.unmarshalHooks {
res[i] = hook(res[i])
}
}
return res, nil
}
func (c *Coll[TData]) decodeSingleOrRequery(ctx context.Context, dec Decodable) (TData, error) {
if c.extraModPipeline == nil {
// simple case, we can just decode the result and return it
return c.decodeSingle(ctx, dec)
} else {
// annoying case, we have an extraModPipeline and need to re-query the document such that the extraModPipeline is applied...
type genDoc struct {
ID any `bson:"_id"`
}
var res genDoc
err := dec.Decode(&res)
if err != nil {
return *new(TData), exerr.Wrap(err, "failed to ID-decode entity").Build()
}
v, err := c.findOneInternal(ctx, bson.M{"_id": res.ID}, false)
if err != nil {
return *new(TData), exerr.Wrap(err, "failed to re-query entity").Any("_id", res.ID).Build()
}
return *v, nil
}
}

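decodeSingleOrRequery exists because the results of FindOneAndUpdate/FindOneAndReplace come straight back from the driver and never pass through extraModPipeline; when extra stages are registered, only the _id is decoded and the document is fetched again via findOneInternal so the stages apply. The sketch below mirrors that pattern with the raw mongo driver (the helper name and generic signature are invented; it is not goext code):

package example

import (
    "context"

    "go.mongodb.org/mongo-driver/bson"
    "go.mongodb.org/mongo-driver/mongo"
)

// requeryWithPipeline decodes only the _id from a FindOneAndUpdate result and then
// re-reads the document through an aggregation, so that the extra stages
// (e.g. a $lookup) are applied to the returned value.
func requeryWithPipeline[T any](ctx context.Context, coll *mongo.Collection, res *mongo.SingleResult, extra mongo.Pipeline) (T, error) {
    var idOnly struct {
        ID any `bson:"_id"`
    }
    if err := res.Decode(&idOnly); err != nil {
        return *new(T), err
    }

    pipeline := mongo.Pipeline{
        bson.D{{Key: "$match", Value: bson.M{"_id": idOnly.ID}}},
        bson.D{{Key: "$limit", Value: 1}},
    }
    pipeline = append(pipeline, extra...)

    cursor, err := coll.Aggregate(ctx, pipeline)
    if err != nil {
        return *new(T), err
    }
    defer cursor.Close(ctx)

    if !cursor.Next(ctx) {
        return *new(T), mongo.ErrNoDocuments
    }
    var out T
    if err := cursor.Decode(&out); err != nil {
        return *new(T), err
    }
    return out, nil
}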

@ -1,87 +0,0 @@
package wmo
import (
"go.mongodb.org/mongo-driver/bson"
ct "gogs.mikescher.com/BlackForestBytes/goext/cursortoken"
"gogs.mikescher.com/BlackForestBytes/goext/exerr"
)
func CreatePagination[TData any](coll *Coll[TData], token ct.CursorToken, fieldPrimary string, sortPrimary ct.SortDirection, fieldSecondary *string, sortSecondary *ct.SortDirection, pageSize *int) ([]bson.D, error) {
cond := bson.A{}
sort := bson.D{}
valuePrimary, err := coll.getTokenValueAsMongoType(token.ValuePrimary, fieldPrimary)
if err != nil {
return nil, exerr.Wrap(err, "failed to get (primary) token-value as mongo-type").Build()
}
if sortPrimary == ct.SortASC {
// We sort ASC on <field> - so we want all entries newer ($gt) than the $primary
cond = append(cond, bson.M{fieldPrimary: bson.M{"$gt": valuePrimary}})
sort = append(sort, bson.E{Key: fieldPrimary, Value: +1})
} else if sortPrimary == ct.SortDESC {
// We sort DESC on <field> - so we want all entries older ($lt) than the $primary
cond = append(cond, bson.M{fieldPrimary: bson.M{"$lt": valuePrimary}})
sort = append(sort, bson.E{Key: fieldPrimary, Value: -1})
}
if fieldSecondary != nil && sortSecondary != nil && *fieldSecondary != fieldPrimary {
valueSecondary, err := coll.getTokenValueAsMongoType(token.ValueSecondary, *fieldSecondary)
if err != nil {
return nil, exerr.Wrap(err, "failed to get (secondary) token-value as mongo-type").Build()
}
if *sortSecondary == ct.SortASC {
// the conflict-resolution condition, for entries with the _same_ <field> as the $primary we take the ones with a greater $secondary (= newer)
cond = append(cond, bson.M{"$and": bson.A{
bson.M{fieldPrimary: valuePrimary},
bson.M{*fieldSecondary: bson.M{"$gt": valueSecondary}},
}})
sort = append(sort, bson.E{Key: fieldPrimary, Value: +1})
} else if *sortSecondary == ct.SortDESC {
// the conflict-resolution condition, for entries with the _same_ <field> as the $primary we take the ones with a smaller $secondary (= older)
cond = append(cond, bson.M{"$and": bson.A{
bson.M{fieldPrimary: valuePrimary},
bson.M{*fieldSecondary: bson.M{"$lt": valueSecondary}},
}})
sort = append(sort, bson.E{Key: fieldPrimary, Value: -1})
}
}
pipeline := make([]bson.D, 0, 3)
if token.Mode == ct.CTMStart {
// no gt/lt condition
} else if token.Mode == ct.CTMNormal {
pipeline = append(pipeline, bson.D{{Key: "$match", Value: bson.M{"$or": cond}}})
} else if token.Mode == ct.CTMEnd {
// false
pipeline = append(pipeline, bson.D{{Key: "$match", Value: bson.M{"$expr": bson.M{"$eq": bson.A{"1", "0"}}}}})
} else {
return nil, exerr.New(exerr.TypeInternal, "unknown ct mode: "+string(token.Mode)).Any("token.Mode", token.Mode).Build()
}
pipeline = append(pipeline, bson.D{{Key: "$sort", Value: sort}})
if pageSize != nil {
pipeline = append(pipeline, bson.D{{Key: "$limit", Value: int64(*pageSize + 1)}})
}
return pipeline, nil
}


@ -5,9 +5,13 @@ import (
"go.mongodb.org/mongo-driver/mongo"
"go.mongodb.org/mongo-driver/mongo/options"
"gogs.mikescher.com/BlackForestBytes/goext/exerr"
"gogs.mikescher.com/BlackForestBytes/goext/langext"
)
func (c *Coll[TData]) Aggregate(ctx context.Context, pipeline mongo.Pipeline, opts ...*options.AggregateOptions) ([]TData, error) {
pipeline = langext.ArrConcat(pipeline, c.extraModPipeline)
cursor, err := c.coll.Aggregate(ctx, pipeline, opts...)
if err != nil {
return nil, exerr.Wrap(err, "mongo-aggregation failed").Any("pipeline", pipeline).Any("options", opts).Str("collection", c.Name()).Build()
@ -22,6 +26,9 @@ func (c *Coll[TData]) Aggregate(ctx context.Context, pipeline mongo.Pipeline, op
}
func (c *Coll[TData]) AggregateOneOpt(ctx context.Context, pipeline mongo.Pipeline, opts ...*options.AggregateOptions) (*TData, error) {
pipeline = langext.ArrConcat(pipeline, c.extraModPipeline)
cursor, err := c.coll.Aggregate(ctx, pipeline, opts...)
if err != nil {
return nil, exerr.Wrap(err, "mongo-aggregation failed").Any("pipeline", pipeline).Any("options", opts).Str("collection", c.Name()).Build()
@ -39,6 +46,9 @@ func (c *Coll[TData]) AggregateOneOpt(ctx context.Context, pipeline mongo.Pipeli
}
func (c *Coll[TData]) AggregateOne(ctx context.Context, pipeline mongo.Pipeline, opts ...*options.AggregateOptions) (TData, error) {
pipeline = langext.ArrConcat(pipeline, c.extraModPipeline)
cursor, err := c.coll.Aggregate(ctx, pipeline, opts...)
if err != nil {
return *new(TData), exerr.Wrap(err, "mongo-aggregation failed").Any("pipeline", pipeline).Any("options", opts).Str("collection", c.Name()).Build()

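langext.ArrConcat is used above to build the final pipeline from the caller's stages plus the registered extraModPipeline without mutating either slice. A rough stand-in with assumed semantics (the real goext helper may differ):

package example

import "go.mongodb.org/mongo-driver/mongo"

// concatPipelines illustrates how the pipelines are combined in Aggregate,
// AggregateOne and AggregateOneOpt above: a fresh slice with all stages in order,
// so the caller's pipeline runs first and the stages registered via
// WithModifyingPipeline run last.
func concatPipelines(parts ...mongo.Pipeline) mongo.Pipeline {
    out := make(mongo.Pipeline, 0)
    for _, p := range parts {
        out = append(out, p...)
    }
    return out
}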

@ -2,69 +2,22 @@ package wmo
import (
"context"
"errors"
"go.mongodb.org/mongo-driver/bson"
"go.mongodb.org/mongo-driver/mongo"
"go.mongodb.org/mongo-driver/mongo/options"
"gogs.mikescher.com/BlackForestBytes/goext/exerr"
"gogs.mikescher.com/BlackForestBytes/goext/langext"
)
func (c *Coll[TData]) FindOne(ctx context.Context, filter bson.M) (TData, error) {
mongoRes := c.coll.FindOne(ctx, filter)
if err := mongoRes.Err(); err != nil {
return *new(TData), exerr.Wrap(err, "mongo-query[find-one] failed").
Str("collection", c.Name()).
Any("filter", filter).
Build()
}
func (c *Coll[TData]) Find(ctx context.Context, filter bson.M) ([]TData, error) {
return c.decodeSingle(ctx, mongoRes)
}
pipeline := mongo.Pipeline{}
pipeline = append(pipeline, bson.D{{Key: "$match", Value: filter}})
func (c *Coll[TData]) FindOneOpt(ctx context.Context, filter bson.M) (*TData, error) {
mongoRes := c.coll.FindOne(ctx, filter)
pipeline = langext.ArrConcat(pipeline, c.extraModPipeline)
res, err := c.decodeSingle(ctx, mongoRes)
if errors.Is(err, mongo.ErrNoDocuments) {
return nil, nil
}
cursor, err := c.coll.Aggregate(ctx, pipeline)
if err != nil {
return nil, exerr.Wrap(err, "mongo-query[find-one-opt] failed").Any("filter", filter).Str("collection", c.Name()).Build()
}
return &res, nil
}
func (c *Coll[TData]) FindOneByID(ctx context.Context, id EntityID) (TData, error) {
mongoRes := c.coll.FindOne(ctx, bson.M{"_id": id})
if err := mongoRes.Err(); err != nil {
return *new(TData), exerr.Wrap(err, "mongo-query[find-one-by-id] failed").
Str("collection", c.Name()).
Id("id", id).
Build()
}
return c.decodeSingle(ctx, mongoRes)
}
func (c *Coll[TData]) FindOneOptByID(ctx context.Context, id EntityID) (*TData, error) {
mongoRes := c.coll.FindOne(ctx, bson.M{"_id": id})
res, err := c.decodeSingle(ctx, mongoRes)
if errors.Is(err, mongo.ErrNoDocuments) {
return nil, nil
}
if err != nil {
return nil, exerr.Wrap(err, "mongo-query[find-one-opt-by-id] failed").Id("id", id).Str("collection", c.Name()).Build()
}
return &res, nil
}
func (c *Coll[TData]) Find(ctx context.Context, filter bson.M, opts ...*options.FindOptions) ([]TData, error) {
cursor, err := c.coll.Find(ctx, filter, opts...)
if err != nil {
return nil, exerr.Wrap(err, "mongo-query[find-one-opt] failed").Any("filter", filter).Any("opts", opts).Str("collection", c.Name()).Build()
return nil, exerr.Wrap(err, "mongo-aggregation failed").Any("pipeline", pipeline).Str("collection", c.Name()).Build()
}
res, err := c.decodeAll(ctx, cursor)

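With the rewrite above, Find no longer calls the driver's Find but runs an aggregation: the caller's filter becomes a $match stage and the registered stages follow it. Roughly, a call such as Find(ctx, bson.M{"active": true}) on a collection with a registered "groups" $lookup (filter and field names invented for illustration) executes:

package example

import (
    "go.mongodb.org/mongo-driver/bson"
    "go.mongodb.org/mongo-driver/mongo"
)

// effectiveFindPipeline sketches the aggregation built for the call described above:
// first the caller's filter as a $match stage, then the stages registered via
// WithModifyingPipeline.
var effectiveFindPipeline = mongo.Pipeline{
    bson.D{{Key: "$match", Value: bson.M{"active": true}}},
    bson.D{{Key: "$lookup", Value: bson.M{
        "from":         "groups",
        "localField":   "groupIds",
        "foreignField": "_id",
        "as":           "groups",
    }}},
}
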
wmo/queryFindOne.go (new file)

@ -0,0 +1,93 @@
package wmo
import (
"context"
"errors"
"go.mongodb.org/mongo-driver/bson"
"go.mongodb.org/mongo-driver/mongo"
"gogs.mikescher.com/BlackForestBytes/goext/exerr"
"gogs.mikescher.com/BlackForestBytes/goext/langext"
)
func (c *Coll[TData]) FindOne(ctx context.Context, filter bson.M) (TData, error) {
r, err := c.findOneInternal(ctx, filter, false)
if err != nil {
return *new(TData), exerr.Wrap(err, "mongo-query[find-one] failed").Str("collection", c.Name()).Build()
}
return *r, nil
}
func (c *Coll[TData]) FindOneOpt(ctx context.Context, filter bson.M) (*TData, error) {
r, err := c.findOneInternal(ctx, filter, true)
if err != nil {
return nil, exerr.Wrap(err, "mongo-query[find-one-opt] failed").Str("collection", c.Name()).Build()
}
return r, nil
}
func (c *Coll[TData]) FindOneByID(ctx context.Context, id EntityID) (TData, error) {
r, err := c.findOneInternal(ctx, bson.M{"_id": id}, false)
if err != nil {
return *new(TData), exerr.Wrap(err, "mongo-query[find-one-by-id] failed").Id("id", id).Str("collection", c.Name()).Build()
}
return *r, nil
}
func (c *Coll[TData]) FindOneOptByID(ctx context.Context, id EntityID) (*TData, error) {
r, err := c.findOneInternal(ctx, bson.M{"_id": id}, true)
if err != nil {
return nil, exerr.Wrap(err, "mongo-query[find-one-opt-by-id] failed").Id("id", id).Str("collection", c.Name()).Build()
}
return r, nil
}
func (c *Coll[TData]) findOneInternal(ctx context.Context, filter bson.M, allowNull bool) (*TData, error) {
if len(c.extraModPipeline) == 0 {
// simple case, use mongo FindOne
mongoRes := c.coll.FindOne(ctx, filter)
res, err := c.decodeSingle(ctx, mongoRes)
if allowNull && errors.Is(err, mongo.ErrNoDocuments) {
return nil, nil
}
if err != nil {
return nil, exerr.Wrap(err, "mongo-query[find-one] failed").Any("filter", filter).Str("collection", c.Name()).Build()
}
return &res, nil
} else {
// complex case, we have one or more additional pipeline stages, convert to aggregation
pipeline := mongo.Pipeline{}
pipeline = append(pipeline, bson.D{{Key: "$match", Value: filter}})
pipeline = append(pipeline, bson.D{{Key: "$limit", Value: 1}})
pipeline = langext.ArrConcat(pipeline, c.extraModPipeline)
cursor, err := c.coll.Aggregate(ctx, pipeline)
if err != nil {
return nil, exerr.Wrap(err, "mongo-aggregation [find-one] failed").Any("pipeline", pipeline).Str("collection", c.Name()).Build()
}
if cursor.Next(ctx) {
v, err := c.decodeSingle(ctx, cursor)
if err != nil {
return nil, exerr.Wrap(err, "mongo-aggregation [find-one] failed").Any("pipeline", pipeline).Str("collection", c.Name()).Build()
}
return &v, nil
} else if allowNull {
return nil, nil
} else {
return nil, exerr.Wrap(mongo.ErrNoDocuments, "mongo-aggregation [find-one] returned no documents").Any("pipeline", pipeline).Str("collection", c.Name()).Build()
}
}
}

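Usage note on the variants above: the *Opt methods call findOneInternal with allowNull=true, so a missing document yields (nil, nil) instead of an error, while FindOne and FindOneByID return a wrapped error. A small hedged sketch (the User type and collection handle are assumptions):

package example

import (
    "context"

    "go.mongodb.org/mongo-driver/bson"

    "gogs.mikescher.com/BlackForestBytes/goext/wmo"
)

type User struct {
    ID       string `bson:"_id"`
    Username string `bson:"username"`
}

// lookupUser shows the not-found handling of the Opt variant: FindOneOpt maps a
// missing document to (nil, nil), whereas FindOne would return an error instead.
func lookupUser(ctx context.Context, users *wmo.Coll[User], name string) (*User, error) {
    u, err := users.FindOneOpt(ctx, bson.M{"username": name})
    if err != nil {
        return nil, err
    }
    if u == nil {
        return nil, nil // not found - not an error for the Opt variants
    }
    return u, nil
}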

@ -14,9 +14,11 @@ func (c *Coll[TData]) InsertOne(ctx context.Context, valueIn TData) (TData, erro
return *new(TData), exerr.Wrap(err, "mongo-query[insert-one] failed").Str("collection", c.Name()).Build()
}
mongoRes := c.coll.FindOne(ctx, bson.M{"_id": insRes.InsertedID})
return c.decodeSingle(ctx, mongoRes)
r, err := c.findOneInternal(ctx, bson.M{"_id": insRes.InsertedID}, false)
if err != nil {
return *new(TData), exerr.Wrap(err, "mongo-query[insert-one] failed").Str("collection", c.Name()).Build()
}
return *r, nil
}
func (c *Coll[TData]) InsertMany(ctx context.Context, valueIn []TData) (*mongo.InsertManyResult, error) {

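Consequence of the change above, as a hedged sketch (the Note type and its pipeline-derived field are invented): the entity returned by InsertOne is re-read through findOneInternal, so fields produced by the registered pipeline are already populated on the returned value.

package example

import (
    "context"

    "gogs.mikescher.com/BlackForestBytes/goext/wmo"
)

type Note struct {
    ID    string   `bson:"_id"`
    Text  string   `bson:"text"`
    Extra []string `bson:"extra"` // assumed to be produced by a registered pipeline stage
}

// createNote inserts a document and returns the stored entity; because InsertOne
// re-reads it through findOneInternal, Extra is already set on the returned value.
func createNote(ctx context.Context, notes *wmo.Coll[Note], n Note) (Note, error) {
    created, err := notes.InsertOne(ctx, n)
    if err != nil {
        return Note{}, err
    }
    return created, nil
}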

@ -34,7 +34,7 @@ func (c *Coll[TData]) List(ctx context.Context, filter ct.Filter, pageSize *int,
sortDirSecondary = nil
}
paginationPipeline, err := CreatePagination(c, inTok, sortPrimary, sortDirPrimary, sortSecondary, sortDirSecondary, pageSize)
paginationPipeline, err := createPaginationPipeline(c, inTok, sortPrimary, sortDirPrimary, sortSecondary, sortDirSecondary, pageSize)
if err != nil {
return nil, ct.CursorToken{}, exerr.
Wrap(err, "failed to create pagination").
@ -50,6 +50,7 @@ func (c *Coll[TData]) List(ctx context.Context, filter ct.Filter, pageSize *int,
}
pipeline = append(pipeline, paginationPipeline...)
pipeline = append(pipeline, c.extraModPipeline...)
cursor, err := c.coll.Aggregate(ctx, pipeline)
if err != nil {
@ -130,3 +131,83 @@ func (c *Coll[TData]) ListWithCount(ctx context.Context, filter ct.Filter, pageS
}
return data, token, count, nil
}
func createPaginationPipeline[TData any](coll *Coll[TData], token ct.CursorToken, fieldPrimary string, sortPrimary ct.SortDirection, fieldSecondary *string, sortSecondary *ct.SortDirection, pageSize *int) ([]bson.D, error) {
cond := bson.A{}
sort := bson.D{}
valuePrimary, err := coll.getTokenValueAsMongoType(token.ValuePrimary, fieldPrimary)
if err != nil {
return nil, exerr.Wrap(err, "failed to get (primary) token-value as mongo-type").Build()
}
if sortPrimary == ct.SortASC {
// We sort ASC on <field> - so we want all entries newer ($gt) than the $primary
cond = append(cond, bson.M{fieldPrimary: bson.M{"$gt": valuePrimary}})
sort = append(sort, bson.E{Key: fieldPrimary, Value: +1})
} else if sortPrimary == ct.SortDESC {
// We sort DESC on <field> - so we want all entries older ($lt) than the $primary
cond = append(cond, bson.M{fieldPrimary: bson.M{"$lt": valuePrimary}})
sort = append(sort, bson.E{Key: fieldPrimary, Value: -1})
}
if fieldSecondary != nil && sortSecondary != nil && *fieldSecondary != fieldPrimary {
valueSecondary, err := coll.getTokenValueAsMongoType(token.ValueSecondary, *fieldSecondary)
if err != nil {
return nil, exerr.Wrap(err, "failed to get (secondary) token-value as mongo-type").Build()
}
if *sortSecondary == ct.SortASC {
// the conflict-resolution condition, for entries with the _same_ <field> as the $primary we take the ones with a greater $secondary (= newer)
cond = append(cond, bson.M{"$and": bson.A{
bson.M{fieldPrimary: valuePrimary},
bson.M{*fieldSecondary: bson.M{"$gt": valueSecondary}},
}})
sort = append(sort, bson.E{Key: fieldPrimary, Value: +1})
} else if *sortSecondary == ct.SortDESC {
// the conflict-resolution condition, for entries with the _same_ <field> as the $primary we take the ones with a smaller $secondary (= older)
cond = append(cond, bson.M{"$and": bson.A{
bson.M{fieldPrimary: valuePrimary},
bson.M{*fieldSecondary: bson.M{"$lt": valueSecondary}},
}})
sort = append(sort, bson.E{Key: fieldPrimary, Value: -1})
}
}
pipeline := make([]bson.D, 0, 3)
if token.Mode == ct.CTMStart {
// no gt/lt condition
} else if token.Mode == ct.CTMNormal {
pipeline = append(pipeline, bson.D{{Key: "$match", Value: bson.M{"$or": cond}}})
} else if token.Mode == ct.CTMEnd {
// false
pipeline = append(pipeline, bson.D{{Key: "$match", Value: bson.M{"$expr": bson.M{"$eq": bson.A{"1", "0"}}}}})
} else {
return nil, exerr.New(exerr.TypeInternal, "unknown ct mode: "+string(token.Mode)).Any("token.Mode", token.Mode).Build()
}
pipeline = append(pipeline, bson.D{{Key: "$sort", Value: sort}})
if pageSize != nil {
pipeline = append(pipeline, bson.D{{Key: "$limit", Value: int64(*pageSize + 1)}})
}
return pipeline, nil
}

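To give a concrete feel for what createPaginationPipeline emits: for a CTMNormal token, fieldPrimary "createdAt" sorted DESC, no secondary field and a page size of 50, the produced stages look roughly like the sketch below (the last-seen value stands in for the token value after getTokenValueAsMongoType; this is an illustration, not actual output of the function):

package example

import (
    "time"

    "go.mongodb.org/mongo-driver/bson"
    "go.mongodb.org/mongo-driver/mongo"
)

// lastCreatedAt stands in for token.ValuePrimary after conversion to a mongo type.
var lastCreatedAt = time.Date(2023, 11, 9, 9, 26, 46, 0, time.UTC)

// paginationStages sketches the pipeline for the token described above: match only
// entries strictly older than the last seen value, keep the DESC sort, and fetch
// pageSize+1 entries so the caller can tell whether another page exists.
var paginationStages = mongo.Pipeline{
    bson.D{{Key: "$match", Value: bson.M{"$or": bson.A{
        bson.M{"createdAt": bson.M{"$lt": lastCreatedAt}},
    }}}},
    bson.D{{Key: "$sort", Value: bson.D{{Key: "createdAt", Value: -1}}}},
    bson.D{{Key: "$limit", Value: int64(51)}},
}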

@ -46,7 +46,7 @@ func (c *Coll[TData]) Paginate(ctx context.Context, filter pag.Filter, page int,
pipelineCount := mongo.Pipeline{}
pipelinePaginate = append(pipelinePaginate, bson.D{{Key: "$count", Value: "count"}})
pipelineList := langext.ArrConcat(mongo.Pipeline{}, pipelineFilter, pipelinePaginate)
pipelineList := langext.ArrConcat(mongo.Pipeline{}, pipelineFilter, pipelinePaginate, c.extraModPipeline)
pipelineTotalCount := langext.ArrConcat(mongo.Pipeline{}, pipelineFilter, pipelineCount)
cursorList, err := c.coll.Aggregate(ctx, pipelineList)


@ -18,7 +18,7 @@ func (c *Coll[TData]) FindOneAndUpdate(ctx context.Context, filterQuery bson.M,
Build()
}
return c.decodeSingle(ctx, mongoRes)
return c.decodeSingleOrRequery(ctx, mongoRes)
}
func (c *Coll[TData]) UpdateOne(ctx context.Context, filterQuery bson.M, updateQuery bson.M) error {
@ -81,5 +81,5 @@ func (c *Coll[TData]) FindOneAndReplace(ctx context.Context, filterQuery bson.M,
Build()
}
return c.decodeSingle(ctx, mongoRes)
return c.decodeSingleOrRequery(ctx, mongoRes)
}