package wmo

import (
	"context"

	"go.mongodb.org/mongo-driver/bson"

	"gogs.mikescher.com/BlackForestBytes/goext/exerr"
)
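
// decodeSingle decodes a single document from dec into a TData value, using
// the collection's customDecoder if one is set (otherwise the default BSON
// decoder), and then applies all registered unmarshalHooks to the result.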
func (c *Coll[TData]) decodeSingle(ctx context.Context, dec Decodable) (TData, error) {
	var res TData
	var err error

	if c.customDecoder != nil {

		res, err = (*c.customDecoder)(ctx, dec)
		if err != nil {
			return *new(TData), exerr.Wrap(err, "failed to decode single entity with custom-decoder").Type("decoder", *c.customDecoder).NoLog().Build()
		}

	} else {

		err = dec.Decode(&res)
		if err != nil {
			return *new(TData), exerr.Wrap(err, "failed to decode single entity").Type("target-type", res).NoLog().Build()
		}

	}

	for _, hook := range c.unmarshalHooks {
		res = hook(res)
	}

	return res, nil
}
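
// decodeAll decodes all remaining documents in cursor into a []TData. If a
// customDecoder is set, every entry is decoded individually via cursor.Next;
// otherwise cursor.All is used. All unmarshalHooks are applied to each element.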
func (c *Coll[TData]) decodeAll(ctx context.Context, cursor Cursorable) ([]TData, error) {
	res := make([]TData, 0, cursor.RemainingBatchLength())

	if c.customDecoder != nil {

		for cursor.Next(ctx) {
			entry, err := (*c.customDecoder)(ctx, cursor)
			if err != nil {
				return nil, exerr.Wrap(err, "failed to decode entity with custom-decoder").Type("decoder", *c.customDecoder).Build()
			}
			res = append(res, entry)
		}

	} else {

		err := cursor.All(ctx, &res)
		if err != nil {
			return nil, exerr.Wrap(err, "failed to batch-decode entities").Type("target-type", res).Build()
		}

	}

	for i := 0; i < len(res); i++ {
		for _, hook := range c.unmarshalHooks {
			res[i] = hook(res[i])
		}
	}

	return res, nil
}
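
// decodeSingleOrRequery decodes a single document like decodeSingle, but if an
// extraModPipeline is configured it decodes only the _id and then re-queries
// the full document via findOneInternal, so that the pipeline is applied.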
func (c *Coll[TData]) decodeSingleOrRequery(ctx context.Context, dec Decodable) (TData, error) {
	if c.extraModPipeline == nil {

		// simple case, we can just decode the result and return it
		return c.decodeSingle(ctx, dec)

	} else {

		// annoying case: we have an extraModPipeline and need to re-query the document so that the extraModPipeline is applied...

		// decode only the _id of the raw result...
		type genDoc struct {
			ID any `bson:"_id"`
		}
		var res genDoc
		err := dec.Decode(&res)
		if err != nil {
			return *new(TData), exerr.Wrap(err, "failed to ID-decode entity").NoLog().Build()
		}

		// ...then re-fetch the full document via findOneInternal (which applies extraModPipeline)
		v, err := c.findOneInternal(ctx, bson.M{"_id": res.ID}, false)
		if err != nil {
			return *new(TData), exerr.Wrap(err, "failed to re-query entity").Any("_id", res.ID).NoLog().Build()
		}

		return *v, nil
	}
}