package wmo

import (
	"context"

	"go.mongodb.org/mongo-driver/bson"
	"go.mongodb.org/mongo-driver/mongo"

	ct "gogs.mikescher.com/BlackForestBytes/goext/cursortoken"
	"gogs.mikescher.com/BlackForestBytes/goext/exerr"
)
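
// List returns the entities matching the given filter, one page per call.
// Pagination is driven by the cursor token: the token returned alongside a page is passed to the
// next call to continue after the last entity of that page. A nil pageSize disables paging and
// returns all matches at once; the returned token is ct.End() once no further entities exist.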
func (c *Coll[TData]) List(ctx context.Context, filter ct.Filter, pageSize *int, inTok ct.CursorToken) ([]TData, ct.CursorToken, error) {
	// an End token signals that the previous call already returned the final page
	if inTok.Mode == ct.CTMEnd {
		return make([]TData, 0), ct.End(), nil
	}

	pipeline := mongo.Pipeline{}
	pf1 := "_id"
	pd1 := ct.SortASC
	pf2 := "_id"
	pd2 := ct.SortASC

	if filter != nil {
		pipeline = filter.FilterQuery()
		pf1, pd1, pf2, pd2 = filter.Pagination()
	}

	sortPrimary := pf1
	sortDirPrimary := pd1
	sortSecondary := &pf2
	sortDirSecondary := &pd2

	// drop the secondary sort when it would only duplicate the primary sort field
	if pf1 == pf2 {
		sortSecondary = nil
		sortDirSecondary = nil
	}

	paginationPipeline, err := CreatePagination(c, inTok, sortPrimary, sortDirPrimary, sortSecondary, sortDirSecondary, pageSize)
	if err != nil {
		return nil, ct.CursorToken{}, exerr.
			Wrap(err, "failed to create pagination").
			WithType(exerr.TypeCursorTokenDecode).
			Str("collection", c.Name()).
			Any("inTok", inTok).
			Any("sortPrimary", sortPrimary).
			Any("sortDirPrimary", sortDirPrimary).
			Any("sortSecondary", sortSecondary).
			Any("sortDirSecondary", sortDirSecondary).
			Any("pageSize", pageSize).
			Build()
	}

	pipeline = append(pipeline, paginationPipeline...)

	cursor, err := c.coll.Aggregate(ctx, pipeline)
	if err != nil {
		return nil, ct.CursorToken{}, exerr.Wrap(err, "mongo-aggregation failed").Any("pipeline", pipeline).Str("collection", c.Name()).Build()
	}

	// fast branch: no page size requested, decode the whole result set at once and finish
	if pageSize == nil {
		entries, err := c.decodeAll(ctx, cursor)
		if err != nil {
			return nil, ct.CursorToken{}, exerr.Wrap(err, "failed to all-decode entities").Build()
		}
		return entries, ct.End(), nil
	}

	entities := make([]TData, 0, cursor.RemainingBatchLength())
	for (pageSize == nil || len(entities) != *pageSize) && cursor.Next(ctx) {
		var entry TData
		entry, err = c.decodeSingle(ctx, cursor)
		if err != nil {
			return nil, ct.CursorToken{}, exerr.Wrap(err, "failed to decode entity").Build()
		}
		entities = append(entities, entry)
	}

	// an incomplete page, or no document remaining behind it, means we reached the end of the collection
	if pageSize == nil || len(entities) < *pageSize || !cursor.TryNext(ctx) {
		return entities, ct.End(), nil
	}

	last := entities[len(entities)-1]

	c.EnsureInitializedReflection(last)

	// build the continuation token from the last entity of this page so the next call can resume after it
	nextToken, err := c.createToken(sortPrimary, sortDirPrimary, sortSecondary, sortDirSecondary, last, pageSize)
	if err != nil {
		return nil, ct.CursorToken{}, exerr.Wrap(err, "failed to create (out)-token").Build()
	}

	return entities, nextToken, nil
}
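
// countRes receives the single document produced by the $count stage in Count.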
type countRes struct {
	Count int64 `bson:"c"`
}
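
// Count returns the number of documents in this collection that match the given filter,
// using an aggregation pipeline that ends in a $count stage.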
func (c *Coll[TData]) Count(ctx context.Context, filter ct.Filter) (int64, error) {
	pipeline := filter.FilterQuery()

	pipeline = append(pipeline, bson.D{{Key: "$count", Value: "c"}})

	cursor, err := c.coll.Aggregate(ctx, pipeline)
	if err != nil {
		return 0, exerr.Wrap(err, "mongo-aggregation failed").Any("pipeline", pipeline).Str("collection", c.Name()).Build()
	}

	if cursor.Next(ctx) {
		v := countRes{}
		err = cursor.Decode(&v)
		if err != nil {
			return 0, exerr.Wrap(err, "failed to decode entity").Build()
		}
		return v.Count, nil
	}

	// the $count stage emits no document at all when nothing matched the filter
	return 0, nil
}
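
// ListWithCount behaves like List, but additionally returns the total number of matching
// documents across all pages (at the cost of an extra Count query per call).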
func (c *Coll[TData]) ListWithCount(ctx context.Context, filter ct.Filter, pageSize *int, inTok ct.CursorToken) ([]TData, ct.CursorToken, int64, error) {
	// NOTE: Possible optimization: Cache count in CursorToken, then fetch count only on first page.
	count, err := c.Count(ctx, filter)
	if err != nil {
		return nil, ct.CursorToken{}, 0, err
	}

	data, token, err := c.List(ctx, filter, pageSize, inTok)
	if err != nil {
		return nil, ct.CursorToken{}, 0, err
	}

	return data, token, count, nil
}