2023-06-06 21:33:49 +02:00
|
|
|
package wmo
|
|
|
|
|
|
|
|
import (
|
|
|
|
"context"
|
|
|
|
"go.mongodb.org/mongo-driver/bson"
|
2023-06-22 15:07:06 +02:00
|
|
|
"go.mongodb.org/mongo-driver/mongo"
|
2023-06-06 21:33:49 +02:00
|
|
|
ct "gogs.mikescher.com/BlackForestBytes/goext/cursortoken"
|
2023-08-21 15:08:35 +02:00
|
|
|
"gogs.mikescher.com/BlackForestBytes/goext/exerr"
|
2024-01-09 08:51:46 +01:00
|
|
|
"gogs.mikescher.com/BlackForestBytes/goext/langext"
|
2023-06-06 21:33:49 +02:00
|
|
|
)
|
|
|
|
|
|
|
|
// List returns one page of documents matching filter, using cursor-token pagination.
//
// Behavior visible in this function:
//   - inTok.Mode == CTMEnd short-circuits: empty page, End token.
//   - pageSize != nil && *pageSize == 0 short-circuits: empty page, inTok echoed unchanged.
//   - pageSize == nil disables paging entirely: all matching documents are decoded and
//     the out-token is ct.End().
//
// The aggregation pipeline is assembled in order: the filter's query stages, the
// pagination stages from createPaginationPipeline ($match/$sort/$limit), the
// collection's extraModPipeline hooks, and — if needsDoubleSort reports true —
// a trailing re-sort stage (doubleSortPipeline).
func (c *Coll[TData]) List(ctx context.Context, filter ct.Filter, pageSize *int, inTok ct.CursorToken) ([]TData, ct.CursorToken, error) {
	if inTok.Mode == ct.CTMEnd {
		return make([]TData, 0), ct.End(), nil
	}

	if pageSize != nil && *pageSize == 0 {
		return make([]TData, 0), inTok, nil // fast track, we return an empty list and do not advance the cursor token
	}

	// default pagination: sort by _id ASC on both primary and secondary key;
	// a non-nil filter may override both the query and the pagination fields
	pipeline := mongo.Pipeline{}
	pf1 := "_id"
	pd1 := ct.SortASC
	pf2 := "_id"
	pd2 := ct.SortASC

	if filter != nil {
		pipeline = filter.FilterQuery(ctx)
		pf1, pd1, pf2, pd2 = filter.Pagination(ctx)
	}

	sortPrimary := pf1
	sortDirPrimary := pd1
	sortSecondary := &pf2
	sortDirSecondary := &pd2

	// identical primary/secondary fields would produce a redundant tie-breaker — drop the secondary
	if pf1 == pf2 {
		sortSecondary = nil
		sortDirSecondary = nil
	}

	paginationPipeline, doubleSortPipeline, err := createPaginationPipeline(c, inTok, sortPrimary, sortDirPrimary, sortSecondary, sortDirSecondary, pageSize)
	if err != nil {
		return nil, ct.CursorToken{}, exerr.
			Wrap(err, "failed to create pagination").
			WithType(exerr.TypeCursorTokenDecode).
			Str("collection", c.Name()).
			Any("inTok", inTok).
			Any("sortPrimary", sortPrimary).
			Any("sortDirPrimary", sortDirPrimary).
			Any("sortSecondary", sortSecondary).
			Any("sortDirSecondary", sortDirSecondary).
			Any("pageSize", pageSize).
			Build()
	}

	pipeline = append(pipeline, paginationPipeline...)

	// collection-level pipeline hooks run after pagination stages
	for _, ppl := range c.extraModPipeline {
		pipeline = langext.ArrConcat(pipeline, ppl(ctx))
	}

	// the extra stages may have destroyed the sort order — re-apply it if the collection says so
	if c.needsDoubleSort(ctx) {
		pipeline = langext.ArrConcat(pipeline, doubleSortPipeline)
	}

	cursor, err := c.coll.Aggregate(ctx, pipeline)
	if err != nil {
		return nil, ct.CursorToken{}, exerr.Wrap(err, "mongo-aggregation failed").Any("pipeline", pipeline).Str("collection", c.Name()).Build()
	}

	defer func() { _ = cursor.Close(ctx) }()

	// fast branch
	if pageSize == nil {
		entries, err := c.decodeAll(ctx, cursor)
		if err != nil {
			return nil, ct.CursorToken{}, exerr.Wrap(err, "failed to all-decode entities").Build()
		}
		return entries, ct.End(), nil
	}

	// decode at most *pageSize entries (the pipeline queried pageSize+1 so we can
	// probe below whether another page exists)
	entities := make([]TData, 0, cursor.RemainingBatchLength())
	for (pageSize == nil || len(entities) != *pageSize) && cursor.Next(ctx) {
		var entry TData
		entry, err = c.decodeSingle(ctx, cursor)
		if err != nil {
			return nil, ct.CursorToken{}, exerr.Wrap(err, "failed to decode entity").Build()
		}
		entities = append(entities, entry)
	}

	// short page, or no further document behind the page boundary => this was the last page
	if pageSize == nil || len(entities) < *pageSize || !cursor.TryNext(ctx) {
		return entities, ct.End(), nil
	}

	last := entities[len(entities)-1]

	c.EnsureInitializedReflection(last)

	// the next-page token is derived from the last entity of this page
	nextToken, err := c.createToken(sortPrimary, sortDirPrimary, sortSecondary, sortDirSecondary, last, pageSize)
	if err != nil {
		return nil, ct.CursorToken{}, exerr.Wrap(err, "failed to create (out)-token").Build()
	}

	return entities, nextToken, nil
}
|
|
|
|
|
2023-11-08 18:30:30 +01:00
|
|
|
func (c *Coll[TData]) Count(ctx context.Context, filter ct.RawFilter) (int64, error) {
|
|
|
|
type countRes struct {
|
|
|
|
Count int64 `bson:"c"`
|
|
|
|
}
|
2023-06-07 11:28:07 +02:00
|
|
|
|
2024-06-14 17:24:59 +02:00
|
|
|
pipeline := filter.FilterQuery(ctx)
|
2023-06-06 21:33:49 +02:00
|
|
|
|
|
|
|
pipeline = append(pipeline, bson.D{{Key: "$count", Value: "c"}})
|
|
|
|
|
|
|
|
cursor, err := c.coll.Aggregate(ctx, pipeline)
|
|
|
|
if err != nil {
|
2023-08-21 15:08:35 +02:00
|
|
|
return 0, exerr.Wrap(err, "mongo-aggregation failed").Any("pipeline", pipeline).Str("collection", c.Name()).Build()
|
2023-06-06 21:33:49 +02:00
|
|
|
}
|
|
|
|
|
2024-06-28 18:37:02 +02:00
|
|
|
defer func() { _ = cursor.Close(ctx) }()
|
|
|
|
|
2023-06-06 21:33:49 +02:00
|
|
|
if cursor.Next(ctx) {
|
2023-06-07 11:28:07 +02:00
|
|
|
v := countRes{}
|
2023-06-06 21:33:49 +02:00
|
|
|
err = cursor.Decode(&v)
|
|
|
|
if err != nil {
|
2023-08-21 15:08:35 +02:00
|
|
|
return 0, exerr.Wrap(err, "failed to decode entity").Build()
|
2023-06-06 21:33:49 +02:00
|
|
|
}
|
|
|
|
return v.Count, nil
|
|
|
|
}
|
|
|
|
|
|
|
|
return 0, nil
|
|
|
|
}
|
2023-09-18 12:56:56 +02:00
|
|
|
|
|
|
|
func (c *Coll[TData]) ListWithCount(ctx context.Context, filter ct.Filter, pageSize *int, inTok ct.CursorToken) ([]TData, ct.CursorToken, int64, error) {
|
2023-09-18 12:57:27 +02:00
|
|
|
// NOTE: Possible optimization: Cache count in CursorToken, then fetch count only on first page.
|
2023-09-18 12:56:56 +02:00
|
|
|
count, err := c.Count(ctx, filter)
|
|
|
|
if err != nil {
|
|
|
|
return nil, ct.CursorToken{}, 0, err
|
|
|
|
}
|
|
|
|
|
|
|
|
data, token, err := c.List(ctx, filter, pageSize, inTok)
|
|
|
|
if err != nil {
|
|
|
|
return nil, ct.CursorToken{}, 0, err
|
|
|
|
}
|
|
|
|
return data, token, count, nil
|
|
|
|
}
|
2023-11-09 09:26:46 +01:00
|
|
|
|
2024-04-29 17:19:55 +02:00
|
|
|
func (c *Coll[TData]) ListAllIDs(ctx context.Context, filter ct.RawFilter) ([]string, error) {
|
|
|
|
type idObject struct {
|
|
|
|
ID string `bson:"_id"`
|
|
|
|
}
|
|
|
|
|
|
|
|
pipelineFilter := mongo.Pipeline{}
|
|
|
|
|
|
|
|
if filter != nil {
|
2024-06-14 17:24:59 +02:00
|
|
|
pipelineFilter = filter.FilterQuery(ctx)
|
2024-04-29 17:19:55 +02:00
|
|
|
}
|
|
|
|
|
|
|
|
extrModPipelineResolved := mongo.Pipeline{}
|
|
|
|
for _, ppl := range c.extraModPipeline {
|
|
|
|
extrModPipelineResolved = langext.ArrConcat(extrModPipelineResolved, ppl(ctx))
|
|
|
|
}
|
|
|
|
|
|
|
|
pipelineProjectIDs := mongo.Pipeline{}
|
|
|
|
pipelineProjectIDs = append(pipelineProjectIDs, bson.D{{Key: "$project", Value: bson.M{"_id": 1}}})
|
|
|
|
|
|
|
|
pipelineList := langext.ArrConcat(pipelineFilter, extrModPipelineResolved, pipelineProjectIDs)
|
|
|
|
|
|
|
|
cursorList, err := c.coll.Aggregate(ctx, pipelineList)
|
|
|
|
if err != nil {
|
|
|
|
return nil, exerr.Wrap(err, "mongo-aggregation failed").Any("pipeline", pipelineList).Str("collection", c.Name()).Build()
|
|
|
|
}
|
|
|
|
|
|
|
|
var res []idObject
|
|
|
|
|
2024-04-29 17:24:10 +02:00
|
|
|
err = cursorList.All(ctx, &res)
|
2024-04-29 17:19:55 +02:00
|
|
|
if err != nil {
|
|
|
|
return nil, exerr.Wrap(err, "failed to decode entities").Any("pipeline", pipelineList).Str("collection", c.Name()).Build()
|
|
|
|
}
|
|
|
|
|
|
|
|
return langext.ArrMap(res, func(v idObject) string { return v.ID }), nil
|
|
|
|
}
|
|
|
|
|
2024-04-23 16:12:17 +02:00
|
|
|
// createPaginationPipeline builds the aggregation stages that implement
// cursor-token pagination for List.
//
// It returns two pipelines:
//   - the main pipeline: an optional $match (skip everything at/before the
//     token position), a $sort on the primary (and optional secondary) field,
//     and — when pageSize is set — a $limit of pageSize+1 (the extra document
//     lets the caller probe whether a further page exists);
//   - a standalone $sort pipeline with the same sort document, which the
//     caller can append again after extra pipeline stages (see needsDoubleSort).
//
// The $match condition is an $or of:
//   - primary field strictly beyond the token's primary value (direction
//     depends on sortPrimary), and
//   - the tie-breaker: primary field equal to the token value AND the
//     secondary field strictly beyond the token's secondary value.
//
// Token modes: CTMStart emits no position condition, CTMNormal emits the
// condition above, CTMEnd emits an always-false $match; any other mode is an
// internal error.
func createPaginationPipeline[TData any](coll *Coll[TData], token ct.CursorToken, fieldPrimary string, sortPrimary ct.SortDirection, fieldSecondary *string, sortSecondary *ct.SortDirection, pageSize *int) ([]bson.D, []bson.D, error) {

	cond := bson.A{}
	sort := bson.D{}

	valuePrimary, err := coll.getTokenValueAsMongoType(token.ValuePrimary, fieldPrimary)
	if err != nil {
		return nil, nil, exerr.Wrap(err, "failed to get (primary) token-value as mongo-type").Build()
	}

	if sortPrimary == ct.SortASC {
		// We sort ASC on <field> - so we want all entries newer ($gt) than the $primary
		cond = append(cond, bson.M{fieldPrimary: bson.M{"$gt": valuePrimary}})
		sort = append(sort, bson.E{Key: fieldPrimary, Value: +1})
	} else if sortPrimary == ct.SortDESC {
		// We sort DESC on <field> - so we want all entries older ($lt) than the $primary
		cond = append(cond, bson.M{fieldPrimary: bson.M{"$lt": valuePrimary}})
		sort = append(sort, bson.E{Key: fieldPrimary, Value: -1})
	}

	// a secondary field equal to the primary would be a redundant tie-breaker — skip it
	if fieldSecondary != nil && sortSecondary != nil && *fieldSecondary != fieldPrimary {

		valueSecondary, err := coll.getTokenValueAsMongoType(token.ValueSecondary, *fieldSecondary)
		if err != nil {
			return nil, nil, exerr.Wrap(err, "failed to get (secondary) token-value as mongo-type").Build()
		}

		if *sortSecondary == ct.SortASC {

			// the conflict-resolution condition, for entries with the _same_ <field> as the $primary we take the ones with a greater $secondary (= newer)
			cond = append(cond, bson.M{"$and": bson.A{
				bson.M{fieldPrimary: valuePrimary},
				bson.M{*fieldSecondary: bson.M{"$gt": valueSecondary}},
			}})

			sort = append(sort, bson.E{Key: *fieldSecondary, Value: +1})

		} else if *sortSecondary == ct.SortDESC {

			// the conflict-resolution condition, for entries with the _same_ <field> as the $primary we take the ones with a smaller $secondary (= older)
			cond = append(cond, bson.M{"$and": bson.A{
				bson.M{fieldPrimary: valuePrimary},
				bson.M{*fieldSecondary: bson.M{"$lt": valueSecondary}},
			}})

			sort = append(sort, bson.E{Key: *fieldSecondary, Value: -1})

		}
	}

	pipeline := make([]bson.D, 0, 3)

	if token.Mode == ct.CTMStart {

		// no gt/lt condition

	} else if token.Mode == ct.CTMNormal {

		pipeline = append(pipeline, bson.D{{Key: "$match", Value: bson.M{"$or": cond}}})

	} else if token.Mode == ct.CTMEnd {

		// false
		pipeline = append(pipeline, bson.D{{Key: "$match", Value: bson.M{"$expr": bson.M{"$eq": bson.A{"1", "0"}}}}})

	} else {

		return nil, nil, exerr.New(exerr.TypeInternal, "unknown ct mode: "+string(token.Mode)).Any("token.Mode", token.Mode).Build()

	}

	pipeline = append(pipeline, bson.D{{Key: "$sort", Value: sort}})

	// standalone copy of the sort stage, for re-sorting after extra pipeline stages
	pipelineSort := mongo.Pipeline{bson.D{{Key: "$sort", Value: sort}}}

	if pageSize != nil {
		pipeline = append(pipeline, bson.D{{Key: "$limit", Value: int64(*pageSize + 1)}})
	}

	return pipeline, pipelineSort, nil
}
|