goext/wmo/queryList.go

package wmo

import (
	"context"

	"go.mongodb.org/mongo-driver/bson"
	"go.mongodb.org/mongo-driver/mongo"

	ct "gogs.mikescher.com/BlackForestBytes/goext/cursortoken"
	"gogs.mikescher.com/BlackForestBytes/goext/exerr"
	"gogs.mikescher.com/BlackForestBytes/goext/langext"
)
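
// List returns one page of documents matching `filter` together with a cursor token that
// can be passed into the next call to continue after the returned page.
// A nil inTok starts at the beginning; both key-sort tokens (ct.CTKeySort) and page-number
// tokens (ct.CTPaginated) are supported.
//
// Minimal usage sketch (assumes an existing Coll[TData] value `coll`; ending the loop on a
// short page is an assumption of this sketch, not an API guarantee):
//
//	pageSize := 50
//	var tok ct.CursorToken = ct.Start()
//	for {
//		page, next, err := coll.List(ctx, nil, &pageSize, tok)
//		if err != nil {
//			return err
//		}
//		// ... process page ...
//		if len(page) < pageSize {
//			break
//		}
//		tok = next
//	}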
func (c *Coll[TData]) List(ctx context.Context, filter ct.Filter, pageSize *int, inTok ct.CursorToken) ([]TData, ct.CursorToken, error) {
	if inTok == nil {
		inTok = ct.Start()
	}

	if ctks, ok := inTok.(ct.CTKeySort); ok {
		d, tok, err := c.listWithKSToken(ctx, filter, pageSize, ctks)
		if err != nil {
			return nil, ct.End(), err
		}
		return d, tok, nil
	} else if ctks, ok := inTok.(ct.CTPaginated); ok {
		d, tok, err := c.listWithPaginatedToken(ctx, filter, pageSize, ctks)
		if err != nil {
			return nil, ct.End(), err
		}
		return d, tok, nil
	} else {
		return nil, ct.End(), exerr.New(exerr.TypeCursorTokenDecode, "unknown ct type").Any("token", inTok).Type("tokenType", inTok).Build()
	}
}
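
// listWithKSToken implements List for key-sort cursor tokens: it applies the filter, sorts by
// the filter's primary/secondary pagination fields (falling back to _id), limits the query to
// pageSize+1 documents and derives the next cursor token from the last returned entity.
// With a nil pageSize all matching documents are returned in one go.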
func (c *Coll[TData]) listWithKSToken(ctx context.Context, filter ct.Filter, pageSize *int, inTok ct.CTKeySort) ([]TData, ct.CursorToken, error) {
	if inTok.Mode == ct.CTMEnd {
		return make([]TData, 0), ct.End(), nil
	}

	if pageSize != nil && *pageSize == 0 {
		return make([]TData, 0), inTok, nil // fast track, we return an empty list and do not advance the cursor token
	}

	pipeline := mongo.Pipeline{}
	pf1 := "_id"
	pd1 := ct.SortASC
	pf2 := "_id"
	pd2 := ct.SortASC

	if filter != nil {
		pipeline = filter.FilterQuery(ctx)
		pf1, pd1, pf2, pd2 = filter.Pagination(ctx)
	}

	sortPrimary := pf1
	sortDirPrimary := pd1
	sortSecondary := &pf2
	sortDirSecondary := &pd2

	if pf1 == pf2 {
		sortSecondary = nil
		sortDirSecondary = nil
	}

	paginationPipeline, doubleSortPipeline, err := createPaginationPipeline(c, inTok, sortPrimary, sortDirPrimary, sortSecondary, sortDirSecondary, pageSize)
	if err != nil {
		return nil, nil, exerr.
			Wrap(err, "failed to create pagination").
			WithType(exerr.TypeCursorTokenDecode).
			Str("collection", c.Name()).
			Any("inTok", inTok).
			Any("sortPrimary", sortPrimary).
			Any("sortDirPrimary", sortDirPrimary).
			Any("sortSecondary", sortSecondary).
			Any("sortDirSecondary", sortDirSecondary).
			Any("pageSize", pageSize).
			Build()
	}

	pipeline = append(pipeline, paginationPipeline...)

	for _, ppl := range c.extraModPipeline {
		pipeline = langext.ArrConcat(pipeline, ppl(ctx))
	}

	if c.needsDoubleSort(ctx) {
		pipeline = langext.ArrConcat(pipeline, doubleSortPipeline)
	}

	cursor, err := c.coll.Aggregate(ctx, pipeline)
	if err != nil {
		return nil, nil, exerr.Wrap(err, "mongo-aggregation failed").Any("pipeline", pipeline).Str("collection", c.Name()).Build()
	}
	defer func() { _ = cursor.Close(ctx) }()

	// fast branch: without a page-size we decode and return everything
	if pageSize == nil {
		entries, err := c.decodeAll(ctx, cursor)
		if err != nil {
			return nil, nil, exerr.Wrap(err, "failed to all-decode entities").Build()
		}
		return entries, ct.End(), nil
	}

	entities := make([]TData, 0, cursor.RemainingBatchLength())
	for (pageSize == nil || len(entities) != *pageSize) && cursor.Next(ctx) {
		var entry TData
		entry, err = c.decodeSingle(ctx, cursor)
		if err != nil {
			return nil, nil, exerr.Wrap(err, "failed to decode entity").Build()
		}
		entities = append(entities, entry)
	}

	// if we received fewer entries than requested, or the cursor has no further document, this was the last page
	if pageSize == nil || len(entities) < *pageSize || !cursor.TryNext(ctx) {
		return entities, ct.End(), nil
	}

	last := entities[len(entities)-1]

	c.EnsureInitializedReflection(last)

	nextToken, err := c.createToken(sortPrimary, sortDirPrimary, sortSecondary, sortDirSecondary, last, pageSize)
	if err != nil {
		return nil, nil, exerr.Wrap(err, "failed to create (out)-token").Build()
	}

	return entities, nextToken, nil
}
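
// listWithPaginatedToken implements List for page-number tokens via $skip/$limit.
// The returned token points at the following page, or is ct.PageEnd() once the current page
// came back smaller than the requested page-size (or no page-size was given).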
func (c *Coll[TData]) listWithPaginatedToken(ctx context.Context, filter ct.Filter, pageSize *int, inTok ct.CTPaginated) ([]TData, ct.CursorToken, error) {
	var err error

	page := inTok.Page
	if page < 0 {
		page = 1
	}

	pipelineSort := mongo.Pipeline{}
	pipelineFilter := mongo.Pipeline{}

	if filter != nil {
		pipelineFilter = filter.FilterQuery(ctx)
		pf1, pd1, pf2, pd2 := filter.Pagination(ctx)

		pipelineSort, err = createSortOnlyPipeline(pf1, pd1, &pf2, &pd2)
		if err != nil {
			return nil, nil, exerr.Wrap(err, "failed to create sort pipeline").Build()
		}
	}

	pipelinePaginate := mongo.Pipeline{}
	if pageSize != nil {
		pipelinePaginate = append(pipelinePaginate, bson.D{{Key: "$skip", Value: *pageSize * (page - 1)}})
		pipelinePaginate = append(pipelinePaginate, bson.D{{Key: "$limit", Value: *pageSize}})
	} else {
		page = 1
	}

	pipelineCount := mongo.Pipeline{}
	pipelineCount = append(pipelineCount, bson.D{{Key: "$count", Value: "count"}})

	extrModPipelineResolved := mongo.Pipeline{}
	for _, ppl := range c.extraModPipeline {
		extrModPipelineResolved = langext.ArrConcat(extrModPipelineResolved, ppl(ctx))
	}

	pipelineList := langext.ArrConcat(pipelineFilter, pipelineSort, pipelinePaginate, extrModPipelineResolved, pipelineSort)

	cursorList, err := c.coll.Aggregate(ctx, pipelineList)
	if err != nil {
		return nil, nil, exerr.Wrap(err, "mongo-aggregation failed").Any("pipeline", pipelineList).Str("collection", c.Name()).Build()
	}

	entities, err := c.decodeAll(ctx, cursorList)
	if err != nil {
		return nil, nil, exerr.Wrap(err, "failed to all-decode entities").Build()
	}

	tokOut := ct.Page(page + 1)
	if pageSize == nil || len(entities) < *pageSize {
		tokOut = ct.PageEnd()
	}

	return entities, tokOut, nil
}
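
// Count returns the number of documents matching `filter` via a $count aggregation stage.
// The filter is used as-is (there is no nil check); a query without any match yields 0.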
func (c *Coll[TData]) Count(ctx context.Context, filter ct.RawFilter) (int64, error) {
	type countRes struct {
		Count int64 `bson:"c"`
	}

	pipeline := filter.FilterQuery(ctx)
	pipeline = append(pipeline, bson.D{{Key: "$count", Value: "c"}})

	cursor, err := c.coll.Aggregate(ctx, pipeline)
	if err != nil {
		return 0, exerr.Wrap(err, "mongo-aggregation failed").Any("pipeline", pipeline).Str("collection", c.Name()).Build()
	}
	defer func() { _ = cursor.Close(ctx) }()

	if cursor.Next(ctx) {
		v := countRes{}
		err = cursor.Decode(&v)
		if err != nil {
			return 0, exerr.Wrap(err, "failed to decode entity").Build()
		}
		return v.Count, nil
	}

	return 0, nil
}
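
// ListWithCount combines List and Count in two separate queries and returns the page,
// the next cursor token and the total number of matching documents.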
func (c *Coll[TData]) ListWithCount(ctx context.Context, filter ct.Filter, pageSize *int, inTok ct.CursorToken) ([]TData, ct.CursorToken, int64, error) {
	// NOTE: Possible optimization: Cache count in CursorToken, then fetch count only on first page.

	count, err := c.Count(ctx, filter)
	if err != nil {
		return nil, nil, 0, err
	}

	data, token, err := c.List(ctx, filter, pageSize, inTok)
	if err != nil {
		return nil, nil, 0, err
	}

	return data, token, count, nil
}
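
// ListAllIDs applies the filter and the collection's extra modification pipelines, projects
// the result down to its _id values and returns them as a string slice (documents are
// expected to use string ids, per the `bson:"_id"` mapping below).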
func (c *Coll[TData]) ListAllIDs(ctx context.Context, filter ct.RawFilter) ([]string, error) {
	type idObject struct {
		ID string `bson:"_id"`
	}

	pipelineFilter := mongo.Pipeline{}
	if filter != nil {
		pipelineFilter = filter.FilterQuery(ctx)
	}

	extrModPipelineResolved := mongo.Pipeline{}
	for _, ppl := range c.extraModPipeline {
		extrModPipelineResolved = langext.ArrConcat(extrModPipelineResolved, ppl(ctx))
	}

	pipelineProjectIDs := mongo.Pipeline{}
	pipelineProjectIDs = append(pipelineProjectIDs, bson.D{{Key: "$project", Value: bson.M{"_id": 1}}})

	pipelineList := langext.ArrConcat(pipelineFilter, extrModPipelineResolved, pipelineProjectIDs)

	cursorList, err := c.coll.Aggregate(ctx, pipelineList)
	if err != nil {
		return nil, exerr.Wrap(err, "mongo-aggregation failed").Any("pipeline", pipelineList).Str("collection", c.Name()).Build()
	}

	var res []idObject
	err = cursorList.All(ctx, &res)
	if err != nil {
		return nil, exerr.Wrap(err, "failed to decode entities").Any("pipeline", pipelineList).Str("collection", c.Name()).Build()
	}

	return langext.ArrMap(res, func(v idObject) string { return v.ID }), nil
}
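
// createPaginationPipeline builds two pipeline fragments from a key-sort cursor token:
// the pagination pipeline ($match on the token values, $sort, and an optional $limit of
// pageSize+1) and a sort-only pipeline that the caller can re-apply after its extra
// modification stages when a double sort is needed.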
func createPaginationPipeline[TData any](coll *Coll[TData], token ct.CTKeySort, fieldPrimary string, sortPrimary ct.SortDirection, fieldSecondary *string, sortSecondary *ct.SortDirection, pageSize *int) ([]bson.D, []bson.D, error) {
	cond := bson.A{}
	sort := bson.D{}

	valuePrimary, err := coll.getTokenValueAsMongoType(token.ValuePrimary, fieldPrimary)
	if err != nil {
		return nil, nil, exerr.Wrap(err, "failed to get (primary) token-value as mongo-type").Build()
	}

	if sortPrimary == ct.SortASC {
		// We sort ASC on <field> - so we want all entries newer ($gt) than the $primary
		cond = append(cond, bson.M{fieldPrimary: bson.M{"$gt": valuePrimary}})
		sort = append(sort, bson.E{Key: fieldPrimary, Value: +1})
	} else if sortPrimary == ct.SortDESC {
		// We sort DESC on <field> - so we want all entries older ($lt) than the $primary
		cond = append(cond, bson.M{fieldPrimary: bson.M{"$lt": valuePrimary}})
		sort = append(sort, bson.E{Key: fieldPrimary, Value: -1})
	}

	if fieldSecondary != nil && sortSecondary != nil && *fieldSecondary != fieldPrimary {
		valueSecondary, err := coll.getTokenValueAsMongoType(token.ValueSecondary, *fieldSecondary)
		if err != nil {
			return nil, nil, exerr.Wrap(err, "failed to get (secondary) token-value as mongo-type").Build()
		}

		if *sortSecondary == ct.SortASC {
			// the conflict-resolution condition, for entries with the _same_ <field> as the $primary we take the ones with a greater $secondary (= newer)
			cond = append(cond, bson.M{"$and": bson.A{
				bson.M{"$or": bson.A{bson.M{fieldPrimary: valuePrimary}, bson.M{fieldPrimary: nil}, bson.M{fieldPrimary: bson.M{"$exists": false}}}},
				bson.M{*fieldSecondary: bson.M{"$gt": valueSecondary}},
			}})
			sort = append(sort, bson.E{Key: *fieldSecondary, Value: +1})
		} else if *sortSecondary == ct.SortDESC {
			// the conflict-resolution condition, for entries with the _same_ <field> as the $primary we take the ones with a smaller $secondary (= older)
			cond = append(cond, bson.M{"$and": bson.A{
				bson.M{"$or": bson.A{bson.M{fieldPrimary: valuePrimary}, bson.M{fieldPrimary: nil}, bson.M{fieldPrimary: bson.M{"$exists": false}}}},
				bson.M{*fieldSecondary: bson.M{"$lt": valueSecondary}},
			}})
			sort = append(sort, bson.E{Key: *fieldSecondary, Value: -1})
		}
	}

	pipeline := make([]bson.D, 0, 3)

	if token.Mode == ct.CTMStart {
		// no gt/lt condition
	} else if token.Mode == ct.CTMNormal {
		pipeline = append(pipeline, bson.D{{Key: "$match", Value: bson.M{"$or": cond}}})
	} else if token.Mode == ct.CTMEnd {
		// false
		pipeline = append(pipeline, bson.D{{Key: "$match", Value: bson.M{"$expr": bson.M{"$eq": bson.A{"1", "0"}}}}})
	} else {
		return nil, nil, exerr.New(exerr.TypeInternal, "unknown ct mode: "+string(token.Mode)).Any("token.Mode", token.Mode).Build()
	}

	pipeline = append(pipeline, bson.D{{Key: "$sort", Value: sort}})

	pipelineSort := mongo.Pipeline{bson.D{{Key: "$sort", Value: sort}}}

	if pageSize != nil {
		// fetch one document more than the page-size, so the caller can peek whether a further page exists
		pipeline = append(pipeline, bson.D{{Key: "$limit", Value: int64(*pageSize + 1)}})
	}

	return pipeline, pipelineSort, nil
}
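
// createSortOnlyPipeline builds a single $sort stage from the primary (and optional,
// distinct secondary) pagination field, mirroring the sort order used by
// createPaginationPipeline but without any cursor conditions.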
func createSortOnlyPipeline(fieldPrimary string, sortPrimary ct.SortDirection, fieldSecondary *string, sortSecondary *ct.SortDirection) ([]bson.D, error) {
	sort := bson.D{}

	if sortPrimary == ct.SortASC {
		// We sort ASC on <field>
		sort = append(sort, bson.E{Key: fieldPrimary, Value: +1})
	} else if sortPrimary == ct.SortDESC {
		// We sort DESC on <field>
		sort = append(sort, bson.E{Key: fieldPrimary, Value: -1})
	}

	if fieldSecondary != nil && sortSecondary != nil && *fieldSecondary != fieldPrimary {
		if *sortSecondary == ct.SortASC {
			sort = append(sort, bson.E{Key: *fieldSecondary, Value: +1})
		} else if *sortSecondary == ct.SortDESC {
			sort = append(sort, bson.E{Key: *fieldSecondary, Value: -1})
		}
	}

	pipelineSort := mongo.Pipeline{bson.D{{Key: "$sort", Value: sort}}}

	return pipelineSort, nil
}