Add various deleted flags to entities | Add active to subscriptions | Add DeleteUser && DeleteChannel endpoints [skip-tests]

This commit is contained in:
Mike Schwörer 2025-04-13 16:12:15 +02:00
parent aac34ef738
commit 8c0f0e3e8f
Signed by: Mikescher
GPG Key ID: D3C7172E0A70F8CF
47 changed files with 2453 additions and 243 deletions

View File

@@ -20,7 +20,7 @@
- [ ] push navigation stack
- [ ] read + migrate old SharedPrefs (or not? - who uses SCN even??)
- [ ] Account-Page
-- [ ] Logout
+- [x] Logout
- [x] Send-page
- [ ] Still @ERROR on scn-init, but no logs? - better persist error (write in SharedPrefs at error_$date=txt ?), also perhaps print first error line in scn-init notification?

View File

@@ -503,7 +503,7 @@ class _AccountRootPageState extends State<AccountRootPage> {
}
void _deleteAccount() async {
-Toaster.info("Not Implemented", "Account Upgrading will be implemented in a later version"); // TODO
+// TODO
}
void _changeUsername() async {

View File

@@ -151,7 +151,7 @@ class _SendRootPageState extends State<SendRootPage> {
controller: _senderName,
decoration: const InputDecoration(
border: OutlineInputBorder(),
-labelText: 'SenderName',
+labelText: 'Sender',
),
),
),

View File

@@ -471,3 +471,76 @@ func (h APIHandler) ListChannelMessages(pctx ginext.PreContext) ginext.HTTPRespo
})
}
// DeleteChannel swaggerdoc
//
// @Summary delete a channel (including all messages, subscriptions, etc)
// @ID api-channels-delete
// @Tags API-v2
//
// @Param uid path string true "UserID"
// @Param cid path string true "ChannelID"
//
// @Success 200 {object} models.Channel
// @Failure 400 {object} ginresp.apiError "supplied values/parameters cannot be parsed / are invalid"
// @Failure 401 {object} ginresp.apiError "user is not authorized / has missing permissions"
// @Failure 404 {object} ginresp.apiError "channel not found"
// @Failure 500 {object} ginresp.apiError "internal server error"
//
// @Router /api/v2/users/{uid}/channels/{cid} [DELETE]
func (h APIHandler) DeleteChannel(pctx ginext.PreContext) ginext.HTTPResponse {
type uri struct {
UserID models.UserID `uri:"uid" binding:"entityid"`
ChannelID models.ChannelID `uri:"cid" binding:"entityid"`
}
var u uri
ctx, g, errResp := pctx.URI(&u).Start()
if errResp != nil {
return *errResp
}
defer ctx.Cancel()
return h.app.DoRequest(ctx, g, models.TLockReadWrite, func(ctx *logic.AppContext, finishSuccess func(r ginext.HTTPResponse) ginext.HTTPResponse) ginext.HTTPResponse {
if permResp := ctx.CheckPermissionUserAdmin(u.UserID); permResp != nil {
return *permResp
}
channel, err := h.database.GetChannel(ctx, u.UserID, u.ChannelID, true)
if errors.Is(err, sql.ErrNoRows) {
return ginresp.APIError(g, 404, apierr.CHANNEL_NOT_FOUND, "Channel not found", err)
}
if err != nil {
return ginresp.APIError(g, 500, apierr.DATABASE_ERROR, "Failed to query channel", err)
}
err = h.app.Database.Primary.DeleteChannel(ctx, u.ChannelID)
if err != nil {
return ginresp.APIError(g, 500, apierr.DATABASE_ERROR, "Failed to delete channel", err)
}
err = h.app.Database.Primary.DeleteDeliveriesOfChannel(ctx, u.ChannelID)
if err != nil {
return ginresp.APIError(g, 500, apierr.DATABASE_ERROR, "Failed to delete deliveries", err)
}
err = h.app.Database.Primary.DeleteMessagesOfChannel(ctx, u.ChannelID)
if err != nil {
return ginresp.APIError(g, 500, apierr.DATABASE_ERROR, "Failed to delete messages", err)
}
err = h.app.Database.Primary.DeleteSubscriptionsByChannel(ctx, u.ChannelID)
if err != nil {
return ginresp.APIError(g, 500, apierr.DATABASE_ERROR, "Failed to delete subscriptions", err)
}
err = h.app.Database.Primary.DeleteChannelFromKeyTokens(ctx, u.ChannelID)
if err != nil {
return ginresp.APIError(g, 500, apierr.DATABASE_ERROR, "Failed to update keytokens", err)
}
return finishSuccess(ginext.JSON(http.StatusOK, channel))
})
}
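
Seen from a client, the new endpoint is a plain DELETE request. The following is a minimal client-side sketch (not part of this commit), assuming the obvious net/http and fmt imports; the base URL and the "Authorization: SCN <key>" header format are assumptions, and an admin key of the channel owner is required.

// deleteChannel is a hypothetical client-side sketch (not part of this commit).
func deleteChannel(baseURL, adminKey, uid, cid string) error {
	req, err := http.NewRequest(http.MethodDelete, baseURL+"/api/v2/users/"+uid+"/channels/"+cid, nil)
	if err != nil {
		return err
	}
	req.Header.Set("Authorization", "SCN "+adminKey) // header format is an assumption
	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		return err
	}
	defer resp.Body.Close()
	if resp.StatusCode != http.StatusOK {
		return fmt.Errorf("delete channel failed: %s", resp.Status)
	}
	// on success the body contains the (now soft-deleted) models.Channel as JSON
	return nil
}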

View File

@@ -90,7 +90,7 @@ func (h APIHandler) ListMessages(pctx ginext.PreContext) ginext.HTTPResponse {
}
filter := models.MessageFilter{
-ConfirmedSubscriptionBy: langext.Ptr(userid),
+ConfirmedAndActiveSubscriptionBy: langext.Ptr(userid),
}
if len(q.Search) != 0 {

View File

@@ -430,6 +430,7 @@ func (h APIHandler) UpdateSubscription(pctx ginext.PreContext) ginext.HTTPRespon
}
type body struct {
Confirmed *bool `form:"confirmed"`
+Active *bool `form:"active"`
}
var u uri
@@ -460,6 +461,9 @@ func (h APIHandler) UpdateSubscription(pctx ginext.PreContext) ginext.HTTPRespon
}
if b.Confirmed != nil {
+// only channel-owner can confirm|unconfirm
if subscription.ChannelOwnerUserID != userid {
return ginresp.APIError(g, 401, apierr.USER_AUTH_FAILED, "You are not authorized for this action", nil)
}
@@ -469,6 +473,19 @@ func (h APIHandler) UpdateSubscription(pctx ginext.PreContext) ginext.HTTPRespon
}
}
+if b.Active != nil {
+// channel-owner AND subscriber can change active
+if subscription.SubscriberUserID != u.UserID && subscription.ChannelOwnerUserID != userid {
+return ginresp.APIError(g, 401, apierr.USER_AUTH_FAILED, "You are not authorized for this action", nil)
+}
+err = h.database.UpdateSubscriptionActive(ctx, u.SubscriptionID, *b.Active)
+if err != nil {
+return ginresp.APIError(g, 500, apierr.DATABASE_ERROR, "Failed to update subscription", err)
+}
+}
subscription, err = h.database.GetSubscription(ctx, u.SubscriptionID)
if err != nil {
return ginresp.APIError(g, 500, apierr.DATABASE_ERROR, "Failed to query subscription", err)

View File

@@ -274,3 +274,94 @@ func (h APIHandler) UpdateUser(pctx ginext.PreContext) ginext.HTTPResponse {
return finishSuccess(ginext.JSON(http.StatusOK, user.PreMarshal()))
})
}
// DeleteUser swaggerdoc
//
// @Summary (Self-)Deletes a user (including all entities - all messages, channels, clients, .....)
//
// @ID api-user-delete
// @Tags API-v2
//
// @Param uid path string true "UserID"
//
// @Success 200 {object} models.User
// @Failure 400 {object} ginresp.apiError "supplied values/parameters cannot be parsed / are invalid"
// @Failure 401 {object} ginresp.apiError "user is not authorized / has missing permissions"
// @Failure 404 {object} ginresp.apiError "user not found"
// @Failure 500 {object} ginresp.apiError "internal server error"
//
// @Router /api/v2/users/{uid} [DELETE]
func (h APIHandler) DeleteUser(pctx ginext.PreContext) ginext.HTTPResponse {
type uri struct {
UserID models.UserID `uri:"uid" binding:"entityid"`
}
var u uri
ctx, g, errResp := pctx.URI(&u).Start()
if errResp != nil {
return *errResp
}
defer ctx.Cancel()
return h.app.DoRequest(ctx, g, models.TLockReadWrite, func(ctx *logic.AppContext, finishSuccess func(r ginext.HTTPResponse) ginext.HTTPResponse) ginext.HTTPResponse {
if permResp := ctx.CheckPermissionUserAdmin(u.UserID); permResp != nil {
return *permResp
}
user, err := h.database.GetUser(ctx, u.UserID)
if errors.Is(err, sql.ErrNoRows) {
return ginresp.APIError(g, 404, apierr.USER_NOT_FOUND, "User not found", err)
}
if err != nil {
return ginresp.APIError(g, 500, apierr.DATABASE_ERROR, "Failed to query user", err)
}
err = h.app.Database.Primary.DeleteUser(ctx, u.UserID)
if err != nil {
return ginresp.APIError(g, 500, apierr.DATABASE_ERROR, "Failed to delete user", err)
}
err = h.app.Database.Primary.DeleteChannelsOfUser(ctx, u.UserID)
if err != nil {
return ginresp.APIError(g, 500, apierr.DATABASE_ERROR, "Failed to delete channels", err)
}
err = h.app.Database.Primary.DeleteClientsOfUser(ctx, u.UserID)
if err != nil {
return ginresp.APIError(g, 500, apierr.DATABASE_ERROR, "Failed to delete clients", err)
}
err = h.app.Database.Primary.DeleteDeliveriesOfUser(ctx, u.UserID)
if err != nil {
return ginresp.APIError(g, 500, apierr.DATABASE_ERROR, "Failed to delete deliveries", err)
}
err = h.app.Database.Primary.DeleteKeyTokensOfUser(ctx, u.UserID)
if err != nil {
return ginresp.APIError(g, 500, apierr.DATABASE_ERROR, "Failed to delete tokens", err)
}
err = h.app.Database.Primary.DeleteMessagesOfUserBySenderUserID(ctx, u.UserID)
if err != nil {
return ginresp.APIError(g, 500, apierr.DATABASE_ERROR, "Failed to delete messages", err)
}
err = h.app.Database.Primary.DeleteMessagesOfUserByChannelOwnerUserID(ctx, u.UserID)
if err != nil {
return ginresp.APIError(g, 500, apierr.DATABASE_ERROR, "Failed to delete messages", err)
}
err = h.app.Database.Primary.DeleteSubscriptionsOfUserBySubscriber(ctx, u.UserID)
if err != nil {
return ginresp.APIError(g, 500, apierr.DATABASE_ERROR, "Failed to delete subscriptions", err)
}
err = h.app.Database.Primary.DeleteSubscriptionsOfUserByChannelOwner(ctx, u.UserID)
if err != nil {
return ginresp.APIError(g, 500, apierr.DATABASE_ERROR, "Failed to delete subscriptions", err)
}
return finishSuccess(ginext.JSON(http.StatusOK, user.PreMarshal()))
})
}
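
Because every user query now carries a deleted=0 condition, a soft-deleted user is indistinguishable from a missing row. A hypothetical test-style sketch of that behaviour (not part of this commit; newTestApp and createTestUser are placeholder helpers, tctx stands for a db.TxContext):

// TestDeleteUserHidesUser is a hypothetical sketch, not part of this commit.
func TestDeleteUserHidesUser(t *testing.T) {
	app, tctx := newTestApp(t)       // placeholder helper
	uid := createTestUser(t, app)    // placeholder helper

	if err := app.Database.Primary.DeleteUser(tctx, uid); err != nil {
		t.Fatal(err)
	}

	// GetUser filters on deleted=0, so the soft-deleted row is no longer found
	_, err := app.Database.Primary.GetUser(tctx, uid)
	if !errors.Is(err, sql.ErrNoRows) {
		t.Fatalf("expected sql.ErrNoRows for soft-deleted user, got %v", err)
	}
}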

View File

@@ -125,6 +125,7 @@ func (r *Router) Init(e *ginext.GinWrapper) error {
apiv2.POST("/users").Handle(r.apiHandler.CreateUser)
apiv2.GET("/users/:uid").Handle(r.apiHandler.GetUser)
apiv2.PATCH("/users/:uid").Handle(r.apiHandler.UpdateUser)
+apiv2.DELETE("/users/:uid").Handle(r.apiHandler.DeleteUser)
apiv2.GET("/users/:uid/keys").Handle(r.apiHandler.ListUserKeys)
apiv2.POST("/users/:uid/keys").Handle(r.apiHandler.CreateUserKey)
@@ -143,6 +144,7 @@ func (r *Router) Init(e *ginext.GinWrapper) error {
apiv2.POST("/users/:uid/channels").Handle(r.apiHandler.CreateChannel)
apiv2.GET("/users/:uid/channels/:cid").Handle(r.apiHandler.GetChannel)
apiv2.PATCH("/users/:uid/channels/:cid").Handle(r.apiHandler.UpdateChannel)
+apiv2.DELETE("/users/:uid/channels/:cid").Handle(r.apiHandler.DeleteChannel)
apiv2.GET("/users/:uid/channels/:cid/messages").Handle(r.apiHandler.ListChannelMessages)
apiv2.GET("/users/:uid/channels/:cid/subscriptions").Handle(r.apiHandler.ListChannelSubscriptions)

View File

@@ -182,7 +182,7 @@ func (db *Database) migrateSingle(tctx *simplectx.SimpleContext, tx sq.Tx, schem
if schemaFrom == schemaTo-1 {
migSQL := db.schema[schemaTo].MigScript
-if migSQL == "" {
+if len(migSQL) == 0 {
return exerr.New(exerr.TypeInternal, fmt.Sprintf("missing %s migration from %d to %d", db.name, schemaFrom, schemaTo)).Build()
}
@@ -192,7 +192,7 @@ func (db *Database) migrateSingle(tctx *simplectx.SimpleContext, tx sq.Tx, schem
return exerr.New(exerr.TypeInternal, fmt.Sprintf("missing %s migration from %d to %d", db.name, schemaFrom, schemaTo)).Build()
}
-func (db *Database) migrateBySQL(tctx *simplectx.SimpleContext, tx sq.Tx, stmts string, currSchemaVers int, resultSchemVers int, resultHash string, post func(tctx *simplectx.SimpleContext, tx sq.Tx) error) error {
+func (db *Database) migrateBySQL(tctx *simplectx.SimpleContext, tx sq.Tx, stmts []string, currSchemaVers int, resultSchemVers int, resultHash string, post func(tctx *simplectx.SimpleContext, tx sq.Tx) error) error {
schemaHashMeta, err := db.ReadMetaString(tctx, "schema_hash")
if err != nil {
@@ -215,9 +215,13 @@ func (db *Database) migrateBySQL(tctx *simplectx.SimpleContext, tx sq.Tx, stmts
log.Info().Msgf("Upgrade schema from %d -> %d", currSchemaVers, resultSchemVers)
-_, err = tx.Exec(tctx, stmts, sq.PP{})
-if err != nil {
-return err
+for i, stmt := range stmts {
+log.Info().Msgf("SQL-Migration of [%s]: %d/%d", db.name, i+1, len(stmts))
+_, err := tx.Exec(tctx, stmt, sq.PP{})
+if err != nil {
+return err
+}
}
schemHashDBAfter, err := sq.HashSqliteDatabase(tctx, tx)

View File

@@ -13,7 +13,7 @@ func (db *Database) GetChannelByName(ctx db.TxContext, userid models.UserID, cha
return nil, err
}
-return sq.QuerySingleOpt[models.Channel](ctx, tx, "SELECT * FROM channels WHERE owner_user_id = :uid AND internal_name = :nam LIMIT 1", sq.PP{"uid": userid, "nam": chanName}, sq.SModeExtended, sq.Safe)
+return sq.QuerySingleOpt[models.Channel](ctx, tx, "SELECT * FROM channels WHERE owner_user_id = :uid AND internal_name = :nam AND deleted=0 LIMIT 1", sq.PP{"uid": userid, "nam": chanName}, sq.SModeExtended, sq.Safe)
}
func (db *Database) GetChannelByID(ctx db.TxContext, chanid models.ChannelID) (*models.Channel, error) {
@@ -22,7 +22,7 @@ func (db *Database) GetChannelByID(ctx db.TxContext, chanid models.ChannelID) (*
return nil, err
}
-return sq.QuerySingleOpt[models.Channel](ctx, tx, "SELECT * FROM channels WHERE channel_id = :cid LIMIT 1", sq.PP{"cid": chanid}, sq.SModeExtended, sq.Safe)
+return sq.QuerySingleOpt[models.Channel](ctx, tx, "SELECT * FROM channels WHERE channel_id = :cid AND deleted=0 LIMIT 1", sq.PP{"cid": chanid}, sq.SModeExtended, sq.Safe)
}
type CreateChanel struct {
@@ -49,6 +49,7 @@ func (db *Database) CreateChannel(ctx db.TxContext, userid models.UserID, dispNa
TimestampCreated: models.NowSCNTime(),
TimestampLastSent: nil,
MessagesSent: 0,
+Deleted: false,
}
_, err = sq.InsertSingle(ctx, tx, "channels", entity)
@@ -67,7 +68,7 @@ func (db *Database) ListChannelsByOwner(ctx db.TxContext, userid models.UserID,
order := " ORDER BY channels.timestamp_created ASC, channels.channel_id ASC "
-sql := "SELECT channels.*, sub.* FROM channels LEFT JOIN subscriptions AS sub ON channels.channel_id = sub.channel_id AND sub.subscriber_user_id = :subuid WHERE owner_user_id = :ouid" + order
+sql := "SELECT channels.*, sub.* FROM channels LEFT JOIN subscriptions AS sub ON channels.channel_id = sub.channel_id AND sub.subscriber_user_id = :subuid WHERE channels.deleted=0 AND owner_user_id = :ouid" + order
pp := sq.PP{
"ouid": userid,
@@ -90,9 +91,11 @@ func (db *Database) ListChannelsBySubscriber(ctx db.TxContext, userid models.Use
confCond = " AND sub.confirmed = 0"
}
+cond := "channels.deleted=0 AND sub.subscription_id IS NOT NULL " + confCond
order := " ORDER BY channels.timestamp_created ASC, channels.channel_id ASC "
-sql := "SELECT channels.*, sub.* FROM channels LEFT JOIN subscriptions AS sub on channels.channel_id = sub.channel_id AND sub.subscriber_user_id = :subuid WHERE sub.subscription_id IS NOT NULL " + confCond + order
+sql := "SELECT channels.*, sub.* " + "FROM channels LEFT JOIN subscriptions AS sub on channels.channel_id = sub.channel_id AND sub.subscriber_user_id = :subuid AND sub.deleted=0 WHERE " + cond + order
pp := sq.PP{
"subuid": userid,
@@ -114,9 +117,11 @@ func (db *Database) ListChannelsByAccess(ctx db.TxContext, userid models.UserID,
confCond = "OR (sub.subscription_id IS NOT NULL AND sub.confirmed = 0)"
}
+cond := "channels.deleted=0 AND (owner_user_id = :ouid " + confCond + ")"
order := " ORDER BY channels.timestamp_created ASC, channels.channel_id ASC "
-sql := "SELECT channels.*, sub.* FROM channels LEFT JOIN subscriptions AS sub on channels.channel_id = sub.channel_id AND sub.subscriber_user_id = :subuid WHERE owner_user_id = :ouid " + confCond + order
+sql := "SELECT channels.*, sub.* " + "FROM channels LEFT JOIN subscriptions AS sub on channels.channel_id = sub.channel_id AND sub.subscriber_user_id = :subuid AND sub.deleted=0 WHERE " + cond + order
pp := sq.PP{
"ouid": userid,
@@ -139,13 +144,14 @@ func (db *Database) GetChannel(ctx db.TxContext, userid models.UserID, channelid
selectors := "channels.*, sub.*"
-join := "LEFT JOIN subscriptions AS sub on channels.channel_id = sub.channel_id AND sub.subscriber_user_id = :subuid"
+join := "LEFT JOIN subscriptions AS sub on channels.channel_id = sub.channel_id AND sub.subscriber_user_id = :subuid AND sub.deleted=0"
cond := "channels.channel_id = :cid"
if enforceOwner {
cond = "owner_user_id = :ouid AND channels.channel_id = :cid"
params["ouid"] = userid
}
+cond += " AND channels.deleted=0"
sql := "SELECT " + selectors + " FROM channels " + join + " WHERE " + cond + " LIMIT 1"
@@ -160,7 +166,7 @@ func (db *Database) IncChannelMessageCounter(ctx db.TxContext, channel *models.C
now := time.Now()
-_, err = tx.Exec(ctx, "UPDATE channels SET messages_sent = messages_sent+1, timestamp_lastsent = :ts WHERE channel_id = :cid", sq.PP{
+_, err = tx.Exec(ctx, "UPDATE channels SET messages_sent = messages_sent+1, timestamp_lastsent = :ts WHERE channel_id = :cid AND deleted=0", sq.PP{
"ts": time2DB(now),
"cid": channel.ChannelID,
})
@@ -180,7 +186,7 @@ func (db *Database) UpdateChannelSubscribeKey(ctx db.TxContext, channelid models
return err
}
-_, err = tx.Exec(ctx, "UPDATE channels SET subscribe_key = :key WHERE channel_id = :cid", sq.PP{
+_, err = tx.Exec(ctx, "UPDATE channels SET subscribe_key = :key WHERE channel_id = :cid AND deleted=0", sq.PP{
"key": newkey,
"cid": channelid,
})
@@ -197,7 +203,7 @@ func (db *Database) UpdateChannelDisplayName(ctx db.TxContext, channelid models.
return err
}
-_, err = tx.Exec(ctx, "UPDATE channels SET display_name = :nam WHERE channel_id = :cid", sq.PP{
+_, err = tx.Exec(ctx, "UPDATE channels SET display_name = :nam WHERE channel_id = :cid AND deleted=0", sq.PP{
"nam": dispname,
"cid": channelid,
})
@@ -214,7 +220,7 @@ func (db *Database) UpdateChannelDescriptionName(ctx db.TxContext, channelid mod
return err
}
-_, err = tx.Exec(ctx, "UPDATE channels SET description_name = :nam WHERE channel_id = :cid", sq.PP{
+_, err = tx.Exec(ctx, "UPDATE channels SET description_name = :nam WHERE channel_id = :cid AND deleted=0", sq.PP{
"nam": descname,
"cid": channelid,
})
@@ -224,3 +230,31 @@ func (db *Database) UpdateChannelDescriptionName(ctx db.TxContext, channelid mod
return nil
}
func (db *Database) DeleteChannelsOfUser(ctx db.TxContext, userid models.UserID) error {
tx, err := ctx.GetOrCreateTransaction(db)
if err != nil {
return err
}
_, err = tx.Exec(ctx, "UPDATE channels SET deleted=1 WHERE owner_user_id = :uid AND deleted=0", sq.PP{"uid": userid})
if err != nil {
return err
}
return nil
}
func (db *Database) DeleteChannel(ctx db.TxContext, channelid models.ChannelID) error {
tx, err := ctx.GetOrCreateTransaction(db)
if err != nil {
return err
}
_, err = tx.Exec(ctx, "UPDATE channels SET deleted=1 WHERE channel_id = :cid AND deleted=0", sq.PP{"cid": channelid})
if err != nil {
return err
}
return nil
}

View File

@@ -21,6 +21,7 @@ func (db *Database) CreateClient(ctx db.TxContext, userid models.UserID, ctype m
AgentModel: agentModel,
AgentVersion: agentVersion,
Name: name,
+Deleted: false,
}
_, err = sq.InsertSingle(ctx, tx, "clients", entity)
@@ -159,3 +160,17 @@ func (db *Database) UpdateClientDescriptionName(ctx db.TxContext, clientid model
return nil
}
func (db *Database) DeleteClientsOfUser(ctx db.TxContext, userid models.UserID) error {
tx, err := ctx.GetOrCreateTransaction(db)
if err != nil {
return err
}
_, err = tx.Exec(ctx, "UPDATE clients SET deleted=1 WHERE user_id = :uid AND deleted=0", sq.PP{"uid": userid})
if err != nil {
return err
}
return nil
}

View File

@@ -182,7 +182,7 @@ func (db *Database) migrateSingle(tctx *simplectx.SimpleContext, tx sq.Tx, schem
if schemaFrom == schemaTo-1 {
migSQL := db.schema[schemaTo].MigScript
-if migSQL == "" {
+if len(migSQL) == 0 {
return exerr.New(exerr.TypeInternal, fmt.Sprintf("missing %s migration from %d to %d", db.name, schemaFrom, schemaTo)).Build()
}
@@ -192,7 +192,7 @@ func (db *Database) migrateSingle(tctx *simplectx.SimpleContext, tx sq.Tx, schem
return exerr.New(exerr.TypeInternal, fmt.Sprintf("missing %s migration from %d to %d", db.name, schemaFrom, schemaTo)).Build()
}
-func (db *Database) migrateBySQL(tctx *simplectx.SimpleContext, tx sq.Tx, stmts string, currSchemaVers int, resultSchemVers int, resultHash string, post func(tctx *simplectx.SimpleContext, tx sq.Tx) error) error {
+func (db *Database) migrateBySQL(tctx *simplectx.SimpleContext, tx sq.Tx, stmts []string, currSchemaVers int, resultSchemVers int, resultHash string, post func(tctx *simplectx.SimpleContext, tx sq.Tx) error) error {
schemaHashMeta, err := db.ReadMetaString(tctx, "schema_hash")
if err != nil {
@@ -215,9 +215,13 @@ func (db *Database) migrateBySQL(tctx *simplectx.SimpleContext, tx sq.Tx, stmts
log.Info().Msgf("Upgrade schema from %d -> %d", currSchemaVers, resultSchemVers)
-_, err = tx.Exec(tctx, stmts, sq.PP{})
-if err != nil {
-return err
+for i, stmt := range stmts {
+log.Info().Msgf("SQL-Migration of [%s]: %d/%d", db.name, i+1, len(stmts))
+_, err := tx.Exec(tctx, stmt, sq.PP{})
+if err != nil {
+return err
+}
}
schemHashDBAfter, err := sq.HashSqliteDatabase(tctx, tx)

View File

@@ -29,6 +29,7 @@ func (db *Database) CreateRetryDelivery(ctx db.TxContext, client models.Client,
RetryCount: 0,
NextDelivery: models.NewSCNTimePtr(&next),
FCMMessageID: nil,
+Deleted: false,
}
_, err = sq.InsertSingle(ctx, tx, "deliveries", entity)
@@ -58,6 +59,7 @@ func (db *Database) CreateSuccessDelivery(ctx db.TxContext, client models.Client
RetryCount: 0,
NextDelivery: nil,
FCMMessageID: langext.Ptr(fcmDelivID),
+Deleted: false,
}
_, err = sq.InsertSingle(ctx, tx, "deliveries", entity)
@@ -74,7 +76,7 @@ func (db *Database) ListRetrieableDeliveries(ctx db.TxContext, pageSize int) ([]
return nil, err
}
-return sq.QueryAll[models.Delivery](ctx, tx, "SELECT * FROM deliveries WHERE status = 'RETRY' AND next_delivery < :next ORDER BY next_delivery ASC LIMIT :lim", sq.PP{
+return sq.QueryAll[models.Delivery](ctx, tx, "SELECT * FROM deliveries WHERE status = 'RETRY' AND next_delivery < :next AND deleted=0 ORDER BY next_delivery ASC LIMIT :lim", sq.PP{
"next": time2DB(time.Now()),
"lim": pageSize,
}, sq.SModeExtended, sq.Safe)
@@ -86,7 +88,7 @@ func (db *Database) SetDeliverySuccess(ctx db.TxContext, delivery models.Deliver
return err
}
-_, err = tx.Exec(ctx, "UPDATE deliveries SET status = 'SUCCESS', next_delivery = NULL, retry_count = :rc, timestamp_finalized = :ts, fcm_message_id = :fcm WHERE delivery_id = :did", sq.PP{
+_, err = tx.Exec(ctx, "UPDATE deliveries SET status = 'SUCCESS', next_delivery = NULL, retry_count = :rc, timestamp_finalized = :ts, fcm_message_id = :fcm WHERE delivery_id = :did AND deleted=0", sq.PP{
"rc": delivery.RetryCount + 1,
"ts": time2DB(time.Now()),
"fcm": fcmDelivID,
@@ -105,7 +107,7 @@ func (db *Database) SetDeliveryFailed(ctx db.TxContext, delivery models.Delivery
return err
}
-_, err = tx.Exec(ctx, "UPDATE deliveries SET status = 'FAILED', next_delivery = NULL, retry_count = :rc, timestamp_finalized = :ts WHERE delivery_id = :did",
+_, err = tx.Exec(ctx, "UPDATE deliveries SET status = 'FAILED', next_delivery = NULL, retry_count = :rc, timestamp_finalized = :ts WHERE delivery_id = :did AND deleted=0",
sq.PP{
"rc": delivery.RetryCount + 1,
"ts": time2DB(time.Now()),
@@ -124,7 +126,7 @@ func (db *Database) SetDeliveryRetry(ctx db.TxContext, delivery models.Delivery)
return err
}
-_, err = tx.Exec(ctx, "UPDATE deliveries SET status = 'RETRY', next_delivery = :next, retry_count = :rc WHERE delivery_id = :did", sq.PP{
+_, err = tx.Exec(ctx, "UPDATE deliveries SET status = 'RETRY', next_delivery = :next, retry_count = :rc WHERE delivery_id = :did AND deleted=0", sq.PP{
"next": time2DB(scn.NextDeliveryTimestamp(time.Now())),
"rc": delivery.RetryCount + 1,
"did": delivery.DeliveryID,
@@ -142,7 +144,7 @@ func (db *Database) CancelPendingDeliveries(ctx db.TxContext, messageID models.M
return err
}
-_, err = tx.Exec(ctx, "UPDATE deliveries SET status = 'FAILED', next_delivery = NULL, timestamp_finalized = :ts WHERE message_id = :mid AND status = 'RETRY'", sq.PP{
+_, err = tx.Exec(ctx, "UPDATE deliveries SET status = 'FAILED', next_delivery = NULL, timestamp_finalized = :ts WHERE message_id = :mid AND status = 'RETRY' AND deleted=0", sq.PP{
"ts": time.Now(),
"mid": messageID,
})
@@ -152,3 +154,31 @@ func (db *Database) CancelPendingDeliveries(ctx db.TxContext, messageID models.M
return nil
}
func (db *Database) DeleteDeliveriesOfUser(ctx db.TxContext, userid models.UserID) error {
tx, err := ctx.GetOrCreateTransaction(db)
if err != nil {
return err
}
_, err = tx.Exec(ctx, "UPDATE deliveries SET deleted=1 WHERE receiver_user_id = :uid AND deleted=0", sq.PP{"uid": userid})
if err != nil {
return err
}
return nil
}
func (db *Database) DeleteDeliveriesOfChannel(ctx db.TxContext, channelid models.ChannelID) error {
tx, err := ctx.GetOrCreateTransaction(db)
if err != nil {
return err
}
_, err = tx.Exec(ctx, "UPDATE deliveries SET deleted=1 WHERE (SELECT channel_id FROM messages WHERE messages.message_id = deliveries.message_id) = :cid AND deleted=0", sq.PP{"cid": channelid})
if err != nil {
return err
}
return nil
}

View File

@@ -26,6 +26,7 @@ func (db *Database) CreateKeyToken(ctx db.TxContext, name string, owner models.U
Token: token,
Permissions: permissions,
MessagesSent: 0,
+Deleted: false,
}
_, err = sq.InsertSingle(ctx, tx, "keytokens", entity)
@@ -42,7 +43,7 @@ func (db *Database) ListKeyTokens(ctx db.TxContext, ownerID models.UserID) ([]mo
return nil, err
}
-return sq.QueryAll[models.KeyToken](ctx, tx, "SELECT * FROM keytokens WHERE owner_user_id = :uid ORDER BY keytokens.timestamp_created DESC, keytokens.keytoken_id ASC", sq.PP{"uid": ownerID}, sq.SModeExtended, sq.Safe)
+return sq.QueryAll[models.KeyToken](ctx, tx, "SELECT * FROM keytokens WHERE owner_user_id = :uid AND deleted=0 ORDER BY keytokens.timestamp_created DESC, keytokens.keytoken_id ASC", sq.PP{"uid": ownerID}, sq.SModeExtended, sq.Safe)
}
func (db *Database) GetKeyToken(ctx db.TxContext, userid models.UserID, keyTokenid models.KeyTokenID) (models.KeyToken, error) {
@@ -51,7 +52,7 @@ func (db *Database) GetKeyToken(ctx db.TxContext, userid models.UserID, keyToken
return models.KeyToken{}, err
}
-return sq.QuerySingle[models.KeyToken](ctx, tx, "SELECT * FROM keytokens WHERE owner_user_id = :uid AND keytoken_id = :cid LIMIT 1", sq.PP{
+return sq.QuerySingle[models.KeyToken](ctx, tx, "SELECT * FROM keytokens WHERE owner_user_id = :uid AND keytoken_id = :cid AND deleted=0 LIMIT 1", sq.PP{
"uid": userid,
"cid": keyTokenid,
}, sq.SModeExtended, sq.Safe)
@@ -63,7 +64,7 @@ func (db *Database) GetKeyTokenByID(ctx db.TxContext, keyTokenid models.KeyToken
return models.KeyToken{}, err
}
-return sq.QuerySingle[models.KeyToken](ctx, tx, "SELECT * FROM keytokens WHERE keytoken_id = :cid LIMIT 1", sq.PP{"cid": keyTokenid}, sq.SModeExtended, sq.Safe)
+return sq.QuerySingle[models.KeyToken](ctx, tx, "SELECT * FROM keytokens WHERE keytoken_id = :cid AND deleted=0 LIMIT 1", sq.PP{"cid": keyTokenid}, sq.SModeExtended, sq.Safe)
}
func (db *Database) GetKeyTokenByToken(ctx db.TxContext, key string) (*models.KeyToken, error) {
@@ -72,7 +73,7 @@ func (db *Database) GetKeyTokenByToken(ctx db.TxContext, key string) (*models.Ke
return nil, err
}
-return sq.QuerySingleOpt[models.KeyToken](ctx, tx, "SELECT * FROM keytokens WHERE token = :key LIMIT 1", sq.PP{"key": key}, sq.SModeExtended, sq.Safe)
+return sq.QuerySingleOpt[models.KeyToken](ctx, tx, "SELECT * FROM keytokens WHERE token = :key AND deleted=0 LIMIT 1", sq.PP{"key": key}, sq.SModeExtended, sq.Safe)
}
func (db *Database) DeleteKeyToken(ctx db.TxContext, keyTokenid models.KeyTokenID) error {
@@ -81,7 +82,7 @@ func (db *Database) DeleteKeyToken(ctx db.TxContext, keyTokenid models.KeyTokenI
return err
}
-_, err = tx.Exec(ctx, "DELETE FROM keytokens WHERE keytoken_id = :tid", sq.PP{"tid": keyTokenid})
+_, err = tx.Exec(ctx, "UPDATE keytokens SET deleted=1 WHERE keytoken_id = :tid AND deleted=0", sq.PP{"tid": keyTokenid})
if err != nil {
return err
}
@@ -95,7 +96,7 @@ func (db *Database) UpdateKeyTokenName(ctx db.TxContext, keyTokenid models.KeyTo
return err
}
-_, err = tx.Exec(ctx, "UPDATE keytokens SET name = :nam WHERE keytoken_id = :tid", sq.PP{
+_, err = tx.Exec(ctx, "UPDATE keytokens SET name = :nam WHERE keytoken_id = :tid AND deleted=0", sq.PP{
"nam": name,
"tid": keyTokenid,
})
@@ -112,7 +113,7 @@ func (db *Database) UpdateKeyTokenPermissions(ctx db.TxContext, keyTokenid model
return err
}
-_, err = tx.Exec(ctx, "UPDATE keytokens SET permissions = :prm WHERE keytoken_id = :tid", sq.PP{
+_, err = tx.Exec(ctx, "UPDATE keytokens SET permissions = :prm WHERE keytoken_id = :tid AND deleted=0", sq.PP{
"tid": keyTokenid,
"prm": perm.String(),
})
@@ -129,7 +130,7 @@ func (db *Database) UpdateKeyTokenAllChannels(ctx db.TxContext, keyTokenid model
return err
}
-_, err = tx.Exec(ctx, "UPDATE keytokens SET all_channels = :all WHERE keytoken_id = :tid", sq.PP{
+_, err = tx.Exec(ctx, "UPDATE keytokens SET all_channels = :all WHERE keytoken_id = :tid AND deleted=0", sq.PP{
"tid": keyTokenid,
"all": bool2DB(allChannels),
})
@@ -146,7 +147,7 @@ func (db *Database) UpdateKeyTokenChannels(ctx db.TxContext, keyTokenid models.K
return err
}
-_, err = tx.Exec(ctx, "UPDATE keytokens SET channels = :cha WHERE keytoken_id = :tid", sq.PP{
+_, err = tx.Exec(ctx, "UPDATE keytokens SET channels = :cha WHERE keytoken_id = :tid AND deleted=0", sq.PP{
"tid": keyTokenid,
"cha": strings.Join(langext.ArrMap(channels, func(v models.ChannelID) string { return v.String() }), ";"),
})
@@ -165,7 +166,7 @@ func (db *Database) IncKeyTokenMessageCounter(ctx db.TxContext, keyToken *models
now := time.Now()
-_, err = tx.Exec(ctx, "UPDATE keytokens SET messages_sent = messages_sent+1, timestamp_lastused = :ts WHERE keytoken_id = :tid", sq.PP{
+_, err = tx.Exec(ctx, "UPDATE keytokens SET messages_sent = messages_sent+1, timestamp_lastused = :ts WHERE keytoken_id = :tid AND deleted=0", sq.PP{
"ts": time2DB(now),
"tid": keyToken.KeyTokenID,
})
@@ -185,7 +186,7 @@ func (db *Database) UpdateKeyTokenLastUsed(ctx db.TxContext, keyTokenid models.K
return err
}
-_, err = tx.Exec(ctx, "UPDATE keytokens SET timestamp_lastused = :ts WHERE keytoken_id = :tid", sq.PP{
+_, err = tx.Exec(ctx, "UPDATE keytokens SET timestamp_lastused = :ts WHERE keytoken_id = :tid AND deleted=0", sq.PP{
"ts": time2DB(time.Now()),
"tid": keyTokenid,
})
@@ -195,3 +196,46 @@ func (db *Database) UpdateKeyTokenLastUsed(ctx db.TxContext, keyTokenid models.K
return nil
}
func (db *Database) DeleteKeyTokensOfUser(ctx db.TxContext, userid models.UserID) error {
tx, err := ctx.GetOrCreateTransaction(db)
if err != nil {
return err
}
_, err = tx.Exec(ctx, "UPDATE keytokens SET deleted=1 WHERE owner_user_id = :uid AND deleted=0", sq.PP{"uid": userid})
if err != nil {
return err
}
return nil
}
func (db *Database) DeleteChannelFromKeyTokens(ctx db.TxContext, channelid models.ChannelID) error {
tx, err := ctx.GetOrCreateTransaction(db)
if err != nil {
return err
}
tok, err := sq.QueryAll[models.KeyToken](ctx, tx, "SELECT * FROM keytokens WHERE instr(channels, :cid) AND deleted=0", sq.PP{"cid": channelid}, sq.SModeExtended, sq.Safe)
if err != nil {
return err
}
for _, t := range tok {
newChanList := models.ChannelIDArr(langext.ArrRemove(t.Channels, channelid))
chanListDB, err := newChanList.MarshalToDB(newChanList)
if err != nil {
return err
}
_, err = tx.Exec(ctx, "UPDATE keytokens SET channels=:clist WHERE owner_user_id = :uid AND deleted=0", sq.PP{"clist": chanListDB})
if err != nil {
return err
}
}
return nil
}

View File

@@ -15,7 +15,7 @@ func (db *Database) GetMessageByUserMessageID(ctx db.TxContext, usrMsgId string)
return nil, err
}
-return sq.QuerySingleOpt[models.Message](ctx, tx, "SELECT * FROM messages WHERE usr_message_id = :umid LIMIT 1", sq.PP{"umid": usrMsgId}, sq.SModeExtended, sq.Safe)
+return sq.QuerySingleOpt[models.Message](ctx, tx, "SELECT * FROM messages WHERE usr_message_id = :umid LIMIT 1", sq.PP{"umid": usrMsgId}, sq.SModeExtended, sq.Safe) // no deleted=0 check!
}
func (db *Database) GetMessage(ctx db.TxContext, scnMessageID models.MessageID, allowDeleted bool) (models.Message, error) {
@@ -45,6 +45,7 @@ func (db *Database) CreateMessage(ctx db.TxContext, senderUserID models.UserID,
SenderUserID: senderUserID,
ChannelInternalName: channel.InternalName,
ChannelID: channel.ChannelID,
+ChannelOwnerUserID: channel.OwnerUserID,
SenderIP: senderIP,
SenderName: senderName,
TimestampReal: models.NowSCNTime(),
@@ -164,3 +165,45 @@ func (db *Database) CountMessages(ctx db.TxContext, filter models.MessageFilter)
return countRes, nil
}
func (db *Database) DeleteMessagesOfUserBySenderUserID(ctx db.TxContext, userid models.UserID) error {
tx, err := ctx.GetOrCreateTransaction(db)
if err != nil {
return err
}
_, err = tx.Exec(ctx, "UPDATE messages SET deleted=1 WHERE sender_user_id = :uid AND deleted=0", sq.PP{"uid": userid})
if err != nil {
return err
}
return nil
}
func (db *Database) DeleteMessagesOfUserByChannelOwnerUserID(ctx db.TxContext, userid models.UserID) error {
tx, err := ctx.GetOrCreateTransaction(db)
if err != nil {
return err
}
_, err = tx.Exec(ctx, "UPDATE messages SET deleted=1 WHERE channel_owner_user_id = :uid AND deleted=0", sq.PP{"uid": userid})
if err != nil {
return err
}
return nil
}
func (db *Database) DeleteMessagesOfChannel(ctx db.TxContext, channelid models.ChannelID) error {
tx, err := ctx.GetOrCreateTransaction(db)
if err != nil {
return err
}
_, err = tx.Exec(ctx, "UPDATE messages SET deleted=1 WHERE channel_id = :cid AND deleted=0", sq.PP{"cid": channelid})
if err != nil {
return err
}
return nil
}

View File

@@ -17,7 +17,7 @@ func (db *Database) ListSenderNames(ctx db.TxContext, userid models.UserID, incl
prepParams := sq.PP{"uid": userid}
if includeForeignSubscribed {
-sqlStr = "SELECT sender_name AS name, MAX(timestamp_real) AS ts_last, MIN(timestamp_real) AS ts_first, COUNT(*) AS count FROM messages LEFT JOIN subscriptions AS subs on messages.channel_id = subs.channel_id WHERE (subs.subscriber_user_id = :uid AND subs.confirmed = 1) AND sender_NAME NOT NULL GROUP BY sender_name ORDER BY ts_last DESC"
+sqlStr = "SELECT sender_name AS name, MAX(timestamp_real) AS ts_last, MIN(timestamp_real) AS ts_first, COUNT(*) AS count FROM messages LEFT JOIN subscriptions AS subs ON messages.channel_id = subs.channel_id AND subs.deleted=0 WHERE (subs.subscriber_user_id = :uid AND subs.confirmed = 1) AND sender_NAME NOT NULL GROUP BY sender_name ORDER BY ts_last DESC"
} else {
sqlStr = "SELECT sender_name AS name, MAX(timestamp_real) AS ts_last, MIN(timestamp_real) AS ts_first, COUNT(*) AS count FROM messages WHERE sender_user_id = :uid AND sender_NAME NOT NULL GROUP BY sender_name ORDER BY ts_last DESC"
}

View File

@@ -20,6 +20,8 @@ func (db *Database) CreateSubscription(ctx db.TxContext, subscriberUID models.Us
ChannelInternalName: channel.InternalName,
TimestampCreated: models.NowSCNTime(),
Confirmed: confirmed,
+Active: true,
+Deleted: false,
}
_, err = sq.InsertSingle(ctx, tx, "subscriptions", entity)
@@ -40,7 +42,7 @@ func (db *Database) ListSubscriptions(ctx db.TxContext, filter models.Subscripti
orderClause := " ORDER BY subscriptions.timestamp_created DESC, subscriptions.subscription_id DESC "
-sqlQuery := "SELECT " + "subscriptions.*" + " FROM subscriptions " + filterJoin + " WHERE ( " + filterCond + " ) " + orderClause
+sqlQuery := "SELECT " + "subscriptions.*" + " FROM subscriptions " + filterJoin + " WHERE ( " + filterCond + " ) AND subscriptions.deleted=0 " + orderClause
return sq.QueryAll[models.Subscription](ctx, tx, sqlQuery, prepParams, sq.SModeExtended, sq.Safe)
}
@@ -51,7 +53,7 @@ func (db *Database) GetSubscription(ctx db.TxContext, subid models.SubscriptionI
return models.Subscription{}, err
}
-return sq.QuerySingle[models.Subscription](ctx, tx, "SELECT * FROM subscriptions WHERE subscription_id = :sid LIMIT 1", sq.PP{"sid": subid}, sq.SModeExtended, sq.Safe)
+return sq.QuerySingle[models.Subscription](ctx, tx, "SELECT * FROM subscriptions WHERE subscription_id = :sid AND deleted=0 LIMIT 1", sq.PP{"sid": subid}, sq.SModeExtended, sq.Safe)
}
func (db *Database) GetSubscriptionBySubscriber(ctx db.TxContext, subscriberId models.UserID, channelId models.ChannelID) (*models.Subscription, error) {
@@ -60,7 +62,7 @@ func (db *Database) GetSubscriptionBySubscriber(ctx db.TxContext, subscriberId m
return nil, err
}
-return sq.QuerySingleOpt[models.Subscription](ctx, tx, "SELECT * FROM subscriptions WHERE subscriber_user_id = :suid AND channel_id = :cid LIMIT 1", sq.PP{
+return sq.QuerySingleOpt[models.Subscription](ctx, tx, "SELECT * FROM subscriptions WHERE subscriber_user_id = :suid AND channel_id = :cid AND deleted=0 LIMIT 1", sq.PP{
"suid": subscriberId,
"cid": channelId,
}, sq.SModeExtended, sq.Safe)
@@ -72,7 +74,7 @@ func (db *Database) DeleteSubscription(ctx db.TxContext, subid models.Subscripti
return err
}
-_, err = tx.Exec(ctx, "DELETE FROM subscriptions WHERE subscription_id = :sid", sq.PP{"sid": subid})
+_, err = tx.Exec(ctx, "UPDATE subscriptions SET deleted=1 WHERE subscription_id = :sid AND deleted=0", sq.PP{"sid": subid})
if err != nil {
return err
}
@@ -86,7 +88,7 @@ func (db *Database) UpdateSubscriptionConfirmed(ctx db.TxContext, subscriptionID
return err
}
-_, err = tx.Exec(ctx, "UPDATE subscriptions SET confirmed = :conf WHERE subscription_id = :sid", sq.PP{
+_, err = tx.Exec(ctx, "UPDATE subscriptions SET confirmed = :conf WHERE subscription_id = :sid AND deleted=0", sq.PP{
"conf": confirmed,
"sid": subscriptionID,
})
@@ -96,3 +98,62 @@ func (db *Database) UpdateSubscriptionConfirmed(ctx db.TxContext, subscriptionID
return nil
}
func (db *Database) UpdateSubscriptionActive(ctx db.TxContext, subscriptionID models.SubscriptionID, active bool) error {
tx, err := ctx.GetOrCreateTransaction(db)
if err != nil {
return err
}
_, err = tx.Exec(ctx, "UPDATE subscriptions SET active = :act WHERE subscription_id = :sid AND deleted=0", sq.PP{
"act": active,
"sid": subscriptionID,
})
if err != nil {
return err
}
return nil
}
func (db *Database) DeleteSubscriptionsOfUserByChannelOwner(ctx db.TxContext, userid models.UserID) error {
tx, err := ctx.GetOrCreateTransaction(db)
if err != nil {
return err
}
_, err = tx.Exec(ctx, "UPDATE subscriptions SET deleted=1 WHERE channel_owner_user_id = :uid AND deleted=0", sq.PP{"uid": userid})
if err != nil {
return err
}
return nil
}
func (db *Database) DeleteSubscriptionsOfUserBySubscriber(ctx db.TxContext, userid models.UserID) error {
tx, err := ctx.GetOrCreateTransaction(db)
if err != nil {
return err
}
_, err = tx.Exec(ctx, "UPDATE subscriptions SET deleted=1 WHERE subscriber_user_id = :uid AND deleted=0", sq.PP{"uid": userid})
if err != nil {
return err
}
return nil
}
func (db *Database) DeleteSubscriptionsByChannel(ctx db.TxContext, channelid models.ChannelID) error {
tx, err := ctx.GetOrCreateTransaction(db)
if err != nil {
return err
}
_, err = tx.Exec(ctx, "UPDATE subscriptions SET deleted=1 WHERE channel_id = :cid AND deleted=0", sq.PP{"cid": channelid})
if err != nil {
return err
}
return nil
}

View File

@@ -27,6 +27,7 @@ func (db *Database) CreateUser(ctx db.TxContext, protoken *string, username *str
IsPro: protoken != nil,
ProToken: protoken,
UserExtra: models.UserExtra{},
+Deleted: false,
}
entity.PreMarshal()
@@ -45,7 +46,7 @@ func (db *Database) ClearProTokens(ctx db.TxContext, protoken string) error {
return err
}
-_, err = tx.Exec(ctx, "UPDATE users SET is_pro=0, pro_token=NULL WHERE pro_token = :tok", sq.PP{"tok": protoken})
+_, err = tx.Exec(ctx, "UPDATE users SET is_pro=0, pro_token=NULL WHERE pro_token = :tok AND deleted=0", sq.PP{"tok": protoken})
if err != nil {
return err
}
@@ -59,7 +60,7 @@ func (db *Database) GetUser(ctx db.TxContext, userid models.UserID) (models.User
return models.User{}, err
}
-return sq.QuerySingle[models.User](ctx, tx, "SELECT * FROM users WHERE user_id = :uid LIMIT 1", sq.PP{"uid": userid}, sq.SModeExtended, sq.Safe)
+return sq.QuerySingle[models.User](ctx, tx, "SELECT * FROM users WHERE user_id = :uid AND deleted=0 LIMIT 1", sq.PP{"uid": userid}, sq.SModeExtended, sq.Safe)
}
func (db *Database) GetUserOpt(ctx db.TxContext, userid models.UserID) (*models.User, error) {
@@ -68,7 +69,7 @@ func (db *Database) GetUserOpt(ctx db.TxContext, userid models.UserID) (*models.
return nil, err
}
-return sq.QuerySingleOpt[models.User](ctx, tx, "SELECT * FROM users WHERE user_id = :uid LIMIT 1", sq.PP{"uid": userid}, sq.SModeExtended, sq.Safe)
+return sq.QuerySingleOpt[models.User](ctx, tx, "SELECT * FROM users WHERE user_id = :uid AND deleted=0 LIMIT 1", sq.PP{"uid": userid}, sq.SModeExtended, sq.Safe)
}
func (db *Database) UpdateUserUsername(ctx db.TxContext, userid models.UserID, username *string) error {
@@ -77,7 +78,7 @@ func (db *Database) UpdateUserUsername(ctx db.TxContext, userid models.UserID, u
return err
}
-_, err = tx.Exec(ctx, "UPDATE users SET username = :nam WHERE user_id = :uid", sq.PP{
+_, err = tx.Exec(ctx, "UPDATE users SET username = :nam WHERE user_id = :uid AND deleted=0", sq.PP{
"nam": username,
"uid": userid,
})
@@ -94,7 +95,7 @@ func (db *Database) UpdateUserProToken(ctx db.TxContext, userid models.UserID, p
return err
}
-_, err = tx.Exec(ctx, "UPDATE users SET pro_token = :tok, is_pro = :pro WHERE user_id = :uid", sq.PP{
+_, err = tx.Exec(ctx, "UPDATE users SET pro_token = :tok, is_pro = :pro WHERE user_id = :uid AND deleted=0", sq.PP{
"tok": protoken,
"pro": bool2DB(protoken != nil),
"uid": userid,
@@ -119,7 +120,7 @@ func (db *Database) IncUserMessageCounter(ctx db.TxContext, user *models.User) e
user.QuotaUsed = quota
user.QuotaUsedDay = langext.Ptr(scn.QuotaDayString())
-_, err = tx.Exec(ctx, "UPDATE users SET timestamp_lastsent = :ts, messages_sent = messages_sent+1, quota_used = :qu, quota_used_day = :qd WHERE user_id = :uid", sq.PP{
+_, err = tx.Exec(ctx, "UPDATE users SET timestamp_lastsent = :ts, messages_sent = messages_sent+1, quota_used = :qu, quota_used_day = :qd WHERE user_id = :uid AND deleted=0", sq.PP{
"ts": time2DB(now),
"qu": user.QuotaUsed,
"qd": user.QuotaUsedDay,
@@ -141,7 +142,7 @@ func (db *Database) UpdateUserLastRead(ctx db.TxContext, userid models.UserID) e
return err
}
-_, err = tx.Exec(ctx, "UPDATE users SET timestamp_lastread = :ts WHERE user_id = :uid", sq.PP{
+_, err = tx.Exec(ctx, "UPDATE users SET timestamp_lastread = :ts WHERE user_id = :uid AND deleted=0", sq.PP{
"ts": time2DB(time.Now()),
"uid": userid,
})
@@ -151,3 +152,17 @@ func (db *Database) UpdateUserLastRead(ctx db.TxContext, userid models.UserID) e
return nil
}
func (db *Database) DeleteUser(ctx db.TxContext, userid models.UserID) error {
tx, err := ctx.GetOrCreateTransaction(db)
if err != nil {
return err
}
_, err = tx.Exec(ctx, "UPDATE users SET deleted=1 WHERE user_id = :uid AND deleted=0", sq.PP{"uid": userid})
if err != nil {
return err
}
return nil
}

View File

@@ -182,7 +182,7 @@ func (db *Database) migrateSingle(tctx *simplectx.SimpleContext, tx sq.Tx, schem
if schemaFrom == schemaTo-1 {
migSQL := db.schema[schemaTo].MigScript
-if migSQL == "" {
+if len(migSQL) == 0 {
return exerr.New(exerr.TypeInternal, fmt.Sprintf("missing %s migration from %d to %d", db.name, schemaFrom, schemaTo)).Build()
}
@@ -192,7 +192,7 @@ func (db *Database) migrateSingle(tctx *simplectx.SimpleContext, tx sq.Tx, schem
return exerr.New(exerr.TypeInternal, fmt.Sprintf("missing %s migration from %d to %d", db.name, schemaFrom, schemaTo)).Build()
}
-func (db *Database) migrateBySQL(tctx *simplectx.SimpleContext, tx sq.Tx, stmts string, currSchemaVers int, resultSchemVers int, resultHash string, post func(tctx *simplectx.SimpleContext, tx sq.Tx) error) error {
+func (db *Database) migrateBySQL(tctx *simplectx.SimpleContext, tx sq.Tx, stmts []string, currSchemaVers int, resultSchemVers int, resultHash string, post func(tctx *simplectx.SimpleContext, tx sq.Tx) error) error {
schemaHashMeta, err := db.ReadMetaString(tctx, "schema_hash")
if err != nil {
@@ -215,9 +215,13 @@ func (db *Database) migrateBySQL(tctx *simplectx.SimpleContext, tx sq.Tx, stmts
log.Info().Msgf("Upgrade schema from %d -> %d", currSchemaVers, resultSchemVers)
-_, err = tx.Exec(tctx, stmts, sq.PP{})
-if err != nil {
-return err
+for i, stmt := range stmts {
+log.Info().Msgf("SQL-Migration of [%s]: %d/%d", db.name, i+1, len(stmts))
+_, err := tx.Exec(tctx, stmt, sq.PP{})
+if err != nil {
+return err
+}
}
schemHashDBAfter, err := sq.HashSqliteDatabase(tctx, tx)

View File

@@ -3,12 +3,13 @@ package schema
import (
"embed"
_ "embed"
+"strings"
)
type Def struct {
SQL string
Hash string
-MigScript string
+MigScript []string
}
//go:embed *.ddl
@@ -16,29 +17,30 @@ type Def struct {
var assets embed.FS
var PrimarySchema = map[int]Def{
-0: {"", "", ""},
-1: {readDDL("primary_1.ddl"), "f2b2847f32681a7178e405553beea4a324034915a0c5a5dc70b3c6abbcc852f2", ""},
-2: {readDDL("primary_2.ddl"), "07ed1449114416ed043084a30e0722a5f97bf172161338d2f7106a8dfd387d0a", ""},
-3: {readDDL("primary_3.ddl"), "65c2125ad0e12d02490cf2275f0067ef3c62a8522edf9a35ee8aa3f3c09b12e8", ""},
+0: {"", "", nil},
+1: {readDDL("primary_1.ddl"), "f2b2847f32681a7178e405553beea4a324034915a0c5a5dc70b3c6abbcc852f2", nil},
+2: {readDDL("primary_2.ddl"), "07ed1449114416ed043084a30e0722a5f97bf172161338d2f7106a8dfd387d0a", nil},
+3: {readDDL("primary_3.ddl"), "65c2125ad0e12d02490cf2275f0067ef3c62a8522edf9a35ee8aa3f3c09b12e8", nil},
4: {readDDL("primary_4.ddl"), "cb022156ab0e7aea39dd0c985428c43cae7d60e41ca8e9e5a84c774b3019d2ca", readMig("primary_migration_3_4.sql")},
5: {readDDL("primary_5.ddl"), "9d6217ba4a3503cfe090f72569367f95a413bb14e9effe49ffeabbf255bce8dd", readMig("primary_migration_4_5.sql")},
6: {readDDL("primary_6.ddl"), "8e83d20bcd008082713f248ae8cd558335a37a37ce90bd8c86e782da640ee160", readMig("primary_migration_5_6.sql")},
7: {readDDL("primary_7.ddl"), "90d8dbc460afe025f9b74cda5c16bb8e58b178df275223bd2531907a8d8c36c3", readMig("primary_migration_6_7.sql")},
8: {readDDL("primary_8.ddl"), "746f6005c7a573b8816e5993ecd1d949fe2552b0134ba63bab8b4d5b2b5058ad", readMig("primary_migration_7_8.sql")},
+9: {readDDL("primary_9.ddl"), "0fd1eacb03364153b2a2096106b1a11cc48ae764db52c2896dab9725b55ed188", readMig("primary_migration_8_9.sql")},
}
var PrimarySchemaVersion = len(PrimarySchema) - 1
var RequestsSchema = map[int]Def{
-0: {"", "", ""},
-1: {readDDL("requests_1.ddl"), "ebb0a5748b605e8215437413b738279670190ca8159b6227cfc2aa13418b41e9", ""},
+0: {"", "", nil},
+1: {readDDL("requests_1.ddl"), "ebb0a5748b605e8215437413b738279670190ca8159b6227cfc2aa13418b41e9", nil},
}
var RequestsSchemaVersion = len(RequestsSchema) - 1
var LogsSchema = map[int]Def{
-0: {"", "", ""},
-1: {readDDL("logs_1.ddl"), "65fba477c04095effc3a8e1bb79fe7547b8e52e983f776f156266eddc4f201d7", ""},
+0: {"", "", nil},
+1: {readDDL("logs_1.ddl"), "65fba477c04095effc3a8e1bb79fe7547b8e52e983f776f156266eddc4f201d7", nil},
}
var LogsSchemaVersion = len(LogsSchema) - 1
@@ -51,10 +53,51 @@ func readDDL(name string) string {
return string(data)
}
-func readMig(name string) string {
+func readMig(name string) []string {
data, err := assets.ReadFile(name)
if err != nil {
panic(err)
}
-return string(data)
+return splitMigration(string(data))
}
func splitMigration(input string) []string {
arr := make([]string, 0)
acc := ""
waitForEnd := false
for _, line := range strings.Split(input, "\n") {
if strings.TrimSpace(line) == "" {
continue
}
if strings.HasPrefix(strings.TrimSpace(line), "--") {
continue
}
acc += line
if strings.HasSuffix(strings.TrimSpace(line), "BEGIN") || strings.HasPrefix(strings.TrimSpace(line), "BEGIN") {
waitForEnd = true
}
if strings.HasPrefix(strings.TrimSpace(line), "END") {
waitForEnd = false
}
if strings.HasSuffix(acc, ";") && !waitForEnd {
arr = append(arr, acc)
acc = ""
continue
} else {
acc += "\n"
}
}
if strings.TrimSpace(acc) != "" {
arr = append(arr, acc)
}
return arr
}
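
For clarity, a hedged illustration (not part of this commit) of what splitMigration produces: simple statements are split on their trailing semicolon, comment and blank lines are dropped, and anything between BEGIN and END (e.g. a trigger body) stays together as one statement, which migrateBySQL then executes one by one. The helper name and the migration text are invented.

// illustrateSplitMigration is a hypothetical helper living next to the schema package.
func illustrateSplitMigration() {
	mig := "-- add soft-delete flag\n" +
		"ALTER TABLE channels ADD COLUMN deleted INTEGER NOT NULL DEFAULT 0;\n" +
		"\n" +
		"UPDATE channels SET deleted = 0;\n"

	for i, stmt := range splitMigration(mig) {
		fmt.Printf("stmt %d: %s\n", i+1, stmt)
	}
	// prints the two statements separately, ready for the per-statement migration loop
}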

View File

@@ -0,0 +1,249 @@
CREATE TABLE users
(
user_id TEXT NOT NULL,
username TEXT NULL DEFAULT NULL,
timestamp_created INTEGER NOT NULL,
timestamp_lastread INTEGER NULL DEFAULT NULL,
timestamp_lastsent INTEGER NULL DEFAULT NULL,
messages_sent INTEGER NOT NULL DEFAULT '0',
quota_used INTEGER NOT NULL DEFAULT '0',
quota_used_day TEXT NULL DEFAULT NULL,
is_pro INTEGER CHECK(is_pro IN (0, 1)) NOT NULL DEFAULT 0,
pro_token TEXT NULL DEFAULT NULL,
deleted INTEGER CHECK(deleted IN (0, 1)) NOT NULL DEFAULT '0',
PRIMARY KEY (user_id)
) STRICT;
CREATE UNIQUE INDEX "idx_users_protoken" ON users (pro_token) WHERE pro_token IS NOT NULL AND deleted=0;
CREATE TABLE keytokens
(
keytoken_id TEXT NOT NULL,
timestamp_created INTEGER NOT NULL,
timestamp_lastused INTEGER NULL DEFAULT NULL,
name TEXT NOT NULL,
owner_user_id TEXT NOT NULL,
all_channels INTEGER CHECK(all_channels IN (0, 1)) NOT NULL,
channels TEXT NOT NULL,
token TEXT NOT NULL,
permissions TEXT NOT NULL,
messages_sent INTEGER NOT NULL DEFAULT '0',
deleted INTEGER CHECK(deleted IN (0, 1)) NOT NULL DEFAULT '0',
PRIMARY KEY (keytoken_id)
) STRICT;
CREATE UNIQUE INDEX "idx_keytokens_token" ON keytokens (token);
CREATE TABLE clients
(
client_id TEXT NOT NULL,
user_id TEXT NOT NULL,
type TEXT CHECK(type IN ('ANDROID','IOS','LINUX','MACOS','WINDOWS')) NOT NULL,
fcm_token TEXT NOT NULL,
name TEXT NULL,
timestamp_created INTEGER NOT NULL,
agent_model TEXT NOT NULL,
agent_version TEXT NOT NULL,
deleted INTEGER CHECK(deleted IN (0, 1)) NOT NULL DEFAULT '0',
PRIMARY KEY (client_id)
) STRICT;
CREATE INDEX "idx_clients_userid" ON clients (user_id);
CREATE INDEX "idx_clients_deleted" ON clients (deleted);
CREATE UNIQUE INDEX "idx_clients_fcmtoken" ON clients (fcm_token) WHERE deleted=0;
CREATE TABLE channels
(
channel_id TEXT NOT NULL,
owner_user_id TEXT NOT NULL,
internal_name TEXT NOT NULL,
display_name TEXT NOT NULL,
description_name TEXT NULL,
subscribe_key TEXT NOT NULL,
timestamp_created INTEGER NOT NULL,
timestamp_lastsent INTEGER NULL DEFAULT NULL,
messages_sent INTEGER NOT NULL DEFAULT '0',
deleted INTEGER CHECK(deleted IN (0, 1)) NOT NULL DEFAULT '0',
PRIMARY KEY (channel_id)
) STRICT;
CREATE UNIQUE INDEX "idx_channels_identity" ON channels (owner_user_id, internal_name) WHERE deleted=0;
CREATE TABLE subscriptions
(
subscription_id TEXT NOT NULL,
subscriber_user_id TEXT NOT NULL,
channel_owner_user_id TEXT NOT NULL,
channel_internal_name TEXT NOT NULL,
channel_id TEXT NOT NULL,
timestamp_created INTEGER NOT NULL,
confirmed INTEGER CHECK(confirmed IN (0, 1)) NOT NULL,
active INTEGER CHECK(active IN (0, 1)) NOT NULL,
deleted INTEGER CHECK(deleted IN (0, 1)) NOT NULL DEFAULT '0',
PRIMARY KEY (subscription_id)
) STRICT;
CREATE UNIQUE INDEX "idx_subscriptions_ref" ON subscriptions (subscriber_user_id, channel_owner_user_id, channel_internal_name) WHERE deleted=0;
CREATE INDEX "idx_subscriptions_chan" ON subscriptions (channel_id);
CREATE INDEX "idx_subscriptions_subuser" ON subscriptions (subscriber_user_id);
CREATE INDEX "idx_subscriptions_ownuser" ON subscriptions (channel_owner_user_id);
CREATE INDEX "idx_subscriptions_tsc" ON subscriptions (timestamp_created);
CREATE INDEX "idx_subscriptions_conf" ON subscriptions (confirmed);
CREATE TABLE messages
(
message_id TEXT NOT NULL,
sender_user_id TEXT NOT NULL,
channel_internal_name TEXT NOT NULL,
channel_id TEXT NOT NULL,
channel_owner_user_id TEXT NOT NULL,
sender_ip TEXT NOT NULL,
sender_name TEXT NULL,
timestamp_real INTEGER NOT NULL,
timestamp_client INTEGER NULL,
title TEXT NOT NULL,
content TEXT NULL,
priority INTEGER CHECK(priority IN (0, 1, 2)) NOT NULL,
usr_message_id TEXT NULL,
used_key_id TEXT NOT NULL,
deleted INTEGER CHECK(deleted IN (0, 1)) NOT NULL DEFAULT '0',
PRIMARY KEY (message_id)
) STRICT;
CREATE INDEX "idx_messages_channel" ON messages (channel_internal_name COLLATE BINARY);
CREATE INDEX "idx_messages_channel_nc" ON messages (channel_internal_name COLLATE NOCASE);
CREATE UNIQUE INDEX "idx_messages_idempotency" ON messages (sender_user_id, usr_message_id COLLATE BINARY);
CREATE INDEX "idx_messages_senderip" ON messages (sender_ip COLLATE BINARY);
CREATE INDEX "idx_messages_sendername" ON messages (sender_name COLLATE BINARY);
CREATE INDEX "idx_messages_sendername_nc" ON messages (sender_name COLLATE NOCASE);
CREATE INDEX "idx_messages_title" ON messages (title COLLATE BINARY);
CREATE INDEX "idx_messages_title_nc" ON messages (title COLLATE NOCASE);
CREATE INDEX "idx_messages_usedkey" ON messages (sender_user_id, used_key_id);
CREATE INDEX "idx_messages_deleted" ON messages (deleted);
CREATE VIRTUAL TABLE messages_fts USING fts5
(
channel_internal_name,
sender_name,
title,
content,
tokenize = unicode61,
content = 'messages',
content_rowid = 'rowid'
);
CREATE TRIGGER fts_insert AFTER INSERT ON messages BEGIN
INSERT INTO messages_fts (rowid, channel_internal_name, sender_name, title, content) VALUES (new.rowid, new.channel_internal_name, new.sender_name, new.title, new.content);
END;
CREATE TRIGGER fts_update AFTER UPDATE ON messages BEGIN
INSERT INTO messages_fts (messages_fts, rowid, channel_internal_name, sender_name, title, content) VALUES ('delete', old.rowid, old.channel_internal_name, old.sender_name, old.title, old.content);
INSERT INTO messages_fts ( rowid, channel_internal_name, sender_name, title, content) VALUES ( new.rowid, new.channel_internal_name, new.sender_name, new.title, new.content);
END;
CREATE TRIGGER fts_delete AFTER DELETE ON messages BEGIN
INSERT INTO messages_fts (messages_fts, rowid, channel_internal_name, sender_name, title, content) VALUES ('delete', old.rowid, old.channel_internal_name, old.sender_name, old.title, old.content);
END;
CREATE TABLE deliveries
(
delivery_id TEXT NOT NULL,
message_id TEXT NOT NULL,
receiver_user_id TEXT NOT NULL,
receiver_client_id TEXT NOT NULL,
timestamp_created INTEGER NOT NULL,
timestamp_finalized INTEGER NULL,
status TEXT CHECK(status IN ('RETRY','SUCCESS','FAILED')) NOT NULL,
retry_count INTEGER NOT NULL DEFAULT 0,
next_delivery INTEGER NULL DEFAULT NULL,
fcm_message_id TEXT NULL,
deleted INTEGER CHECK(deleted IN (0, 1)) NOT NULL DEFAULT '0',
PRIMARY KEY (delivery_id)
) STRICT;
CREATE INDEX "idx_deliveries_receiver" ON deliveries (message_id, receiver_client_id);
CREATE TABLE compat_ids
(
old INTEGER NOT NULL,
new TEXT NOT NULL,
type TEXT NOT NULL
) STRICT;
CREATE UNIQUE INDEX "idx_compatids_new" ON compat_ids (new);
CREATE UNIQUE INDEX "idx_compatids_old" ON compat_ids (old, type);
CREATE TABLE compat_acks
(
user_id TEXT NOT NULL,
message_id TEXT NOT NULL
) STRICT;
CREATE INDEX "idx_compatacks_userid" ON compat_acks (user_id);
CREATE UNIQUE INDEX "idx_compatacks_messageid" ON compat_acks (message_id);
CREATE UNIQUE INDEX "idx_compatacks_userid_messageid" ON compat_acks (user_id, message_id);
CREATE TABLE compat_clients
(
client_id TEXT NOT NULL
) STRICT;
CREATE UNIQUE INDEX "idx_compatclient_clientid" ON compat_clients (client_id);
CREATE TABLE `meta`
(
meta_key TEXT NOT NULL,
value_int INTEGER NULL,
value_txt TEXT NULL,
value_real REAL NULL,
value_blob BLOB NULL,
PRIMARY KEY (meta_key)
) STRICT;
INSERT INTO meta (meta_key, value_int) VALUES ('schema', 3)

View File

@ -0,0 +1,320 @@
-- Add deleted to channels ( migrate existing as '0' )
-- Add deleted to keytokens ( migrate existing as '0' )
-- Add deleted to subscriptions ( migrate existing as '0' )
-- Add deleted to users ( migrate existing as '0' )
-- Add deleted to deliveries ( migrate existing as '0' )
--
-- Add active to subscriptions ( migrate existing as '1' )
--
-- Add channel_owner_user_id to messages ( migrate existing by looking up channel )
--
------------------------------------------------------------------------------------------------------------------------
DROP INDEX "idx_users_protoken";
CREATE TABLE __new_users
(
user_id TEXT NOT NULL,
username TEXT NULL DEFAULT NULL,
timestamp_created INTEGER NOT NULL,
timestamp_lastread INTEGER NULL DEFAULT NULL,
timestamp_lastsent INTEGER NULL DEFAULT NULL,
messages_sent INTEGER NOT NULL DEFAULT '0',
quota_used INTEGER NOT NULL DEFAULT '0',
quota_used_day TEXT NULL DEFAULT NULL,
is_pro INTEGER CHECK(is_pro IN (0, 1)) NOT NULL DEFAULT 0,
pro_token TEXT NULL DEFAULT NULL,
deleted INTEGER CHECK(deleted IN (0, 1)) NOT NULL DEFAULT '0',
PRIMARY KEY (user_id)
) STRICT;
INSERT INTO __new_users
SELECT
user_id,
username,
timestamp_created,
timestamp_lastread,
timestamp_lastsent,
messages_sent,
quota_used,
quota_used_day,
is_pro,
pro_token,
0 AS deleted
FROM users;
DROP TABLE users;
ALTER TABLE __new_users RENAME TO users;
CREATE UNIQUE INDEX "idx_users_protoken" ON users (pro_token) WHERE pro_token IS NOT NULL AND deleted=0;
------------------------------------------------------------------------------------------------------------------------
DROP INDEX "idx_keytokens_token";
CREATE TABLE __new_keytokens
(
keytoken_id TEXT NOT NULL,
timestamp_created INTEGER NOT NULL,
timestamp_lastused INTEGER NULL DEFAULT NULL,
name TEXT NOT NULL,
owner_user_id TEXT NOT NULL,
all_channels INTEGER CHECK(all_channels IN (0, 1)) NOT NULL,
channels TEXT NOT NULL,
token TEXT NOT NULL,
permissions TEXT NOT NULL,
messages_sent INTEGER NOT NULL DEFAULT '0',
deleted INTEGER CHECK(deleted IN (0, 1)) NOT NULL DEFAULT '0',
PRIMARY KEY (keytoken_id)
) STRICT;
INSERT INTO __new_keytokens
SELECT
keytoken_id,
timestamp_created,
timestamp_lastused,
name,
owner_user_id,
all_channels,
channels,
token,
permissions,
messages_sent,
0 AS deleted
FROM keytokens;
DROP TABLE keytokens;
ALTER TABLE __new_keytokens RENAME TO keytokens;
CREATE UNIQUE INDEX "idx_keytokens_token" ON keytokens (token);
------------------------------------------------------------------------------------------------------------------------
DROP INDEX "idx_deliveries_receiver";
CREATE TABLE __new_deliveries
(
delivery_id TEXT NOT NULL,
message_id TEXT NOT NULL,
receiver_user_id TEXT NOT NULL,
receiver_client_id TEXT NOT NULL,
timestamp_created INTEGER NOT NULL,
timestamp_finalized INTEGER NULL,
status TEXT CHECK(status IN ('RETRY','SUCCESS','FAILED')) NOT NULL,
retry_count INTEGER NOT NULL DEFAULT 0,
next_delivery INTEGER NULL DEFAULT NULL,
fcm_message_id TEXT NULL,
deleted INTEGER CHECK(deleted IN (0, 1)) NOT NULL DEFAULT '0',
PRIMARY KEY (delivery_id)
) STRICT;
INSERT INTO __new_deliveries
SELECT delivery_id,
message_id,
receiver_user_id,
receiver_client_id,
timestamp_created,
timestamp_finalized,
status,
retry_count,
next_delivery,
fcm_message_id,
0 as deleted
FROM deliveries;
DROP TABLE deliveries;
ALTER TABLE __new_deliveries RENAME TO deliveries;
CREATE INDEX "idx_deliveries_receiver" ON deliveries (message_id, receiver_client_id);
------------------------------------------------------------------------------------------------------------------------
DROP INDEX "idx_messages_channel";
DROP INDEX "idx_messages_channel_nc";
DROP INDEX "idx_messages_idempotency";
DROP INDEX "idx_messages_senderip";
DROP INDEX "idx_messages_sendername";
DROP INDEX "idx_messages_sendername_nc";
DROP INDEX "idx_messages_title";
DROP INDEX "idx_messages_title_nc";
DROP INDEX "idx_messages_usedkey";
DROP INDEX "idx_messages_deleted";
CREATE TABLE __new_messages
(
message_id TEXT NOT NULL,
sender_user_id TEXT NOT NULL,
channel_internal_name TEXT NOT NULL,
channel_id TEXT NOT NULL,
channel_owner_user_id TEXT NOT NULL,
sender_ip TEXT NOT NULL,
sender_name TEXT NULL,
timestamp_real INTEGER NOT NULL,
timestamp_client INTEGER NULL,
title TEXT NOT NULL,
content TEXT NULL,
priority INTEGER CHECK(priority IN (0, 1, 2)) NOT NULL,
usr_message_id TEXT NULL,
used_key_id TEXT NOT NULL,
deleted INTEGER CHECK(deleted IN (0, 1)) NOT NULL DEFAULT '0',
PRIMARY KEY (message_id)
) STRICT;
INSERT INTO __new_messages
SELECT
m.message_id,
m.sender_user_id,
m.channel_internal_name,
m.channel_id,
c.owner_user_id,
m.sender_ip,
m.sender_name,
m.timestamp_real,
m.timestamp_client,
m.title,
m.content,
m.priority,
m.usr_message_id,
m.used_key_id,
m.deleted
FROM messages m
JOIN channels c ON m.channel_id = c.channel_id;
DROP TABLE messages;
ALTER TABLE __new_messages RENAME TO messages;
CREATE INDEX "idx_messages_channel" ON messages (channel_internal_name COLLATE BINARY);
CREATE INDEX "idx_messages_channel_nc" ON messages (channel_internal_name COLLATE NOCASE);
CREATE UNIQUE INDEX "idx_messages_idempotency" ON messages (sender_user_id, usr_message_id COLLATE BINARY);
CREATE INDEX "idx_messages_senderip" ON messages (sender_ip COLLATE BINARY);
CREATE INDEX "idx_messages_sendername" ON messages (sender_name COLLATE BINARY);
CREATE INDEX "idx_messages_sendername_nc" ON messages (sender_name COLLATE NOCASE);
CREATE INDEX "idx_messages_title" ON messages (title COLLATE BINARY);
CREATE INDEX "idx_messages_title_nc" ON messages (title COLLATE NOCASE);
CREATE INDEX "idx_messages_usedkey" ON messages (sender_user_id, used_key_id);
CREATE INDEX "idx_messages_deleted" ON messages (deleted);
DROP TRIGGER IF EXISTS fts_insert;
DROP TRIGGER IF EXISTS fts_update;
DROP TRIGGER IF EXISTS fts_delete;
DROP TABLE IF EXISTS messages_fts;
CREATE VIRTUAL TABLE messages_fts USING fts5
(
channel_internal_name,
sender_name,
title,
content,
tokenize = unicode61,
content = 'messages',
content_rowid = 'rowid'
);
CREATE TRIGGER fts_insert AFTER INSERT ON messages BEGIN
INSERT INTO messages_fts (rowid, channel_internal_name, sender_name, title, content) VALUES (new.rowid, new.channel_internal_name, new.sender_name, new.title, new.content);
END;
CREATE TRIGGER fts_update AFTER UPDATE ON messages BEGIN
INSERT INTO messages_fts (messages_fts, rowid, channel_internal_name, sender_name, title, content) VALUES ('delete', old.rowid, old.channel_internal_name, old.sender_name, old.title, old.content);
INSERT INTO messages_fts ( rowid, channel_internal_name, sender_name, title, content) VALUES ( new.rowid, new.channel_internal_name, new.sender_name, new.title, new.content);
END;
CREATE TRIGGER fts_delete AFTER DELETE ON messages BEGIN
INSERT INTO messages_fts (messages_fts, rowid, channel_internal_name, sender_name, title, content) VALUES ('delete', old.rowid, old.channel_internal_name, old.sender_name, old.title, old.content);
END;
INSERT INTO messages_fts (rowid, channel_internal_name, sender_name, title, content)
SELECT rowid, channel_internal_name, sender_name, title, content FROM messages;
------------------------------------------------------------------------------------------------------------------------
DROP INDEX "idx_channels_identity";
CREATE TABLE __new_channels
(
channel_id TEXT NOT NULL,
owner_user_id TEXT NOT NULL,
internal_name TEXT NOT NULL,
display_name TEXT NOT NULL,
description_name TEXT NULL,
subscribe_key TEXT NOT NULL,
timestamp_created INTEGER NOT NULL,
timestamp_lastsent INTEGER NULL DEFAULT NULL,
messages_sent INTEGER NOT NULL DEFAULT '0',
deleted INTEGER CHECK(deleted IN (0, 1)) NOT NULL DEFAULT '0',
PRIMARY KEY (channel_id)
) STRICT;
INSERT INTO __new_channels
SELECT
channel_id,
owner_user_id,
internal_name,
display_name,
description_name,
subscribe_key,
timestamp_created,
timestamp_lastsent,
messages_sent,
0 AS deleted
FROM channels;
DROP TABLE channels;
ALTER TABLE __new_channels RENAME TO channels;
CREATE UNIQUE INDEX "idx_channels_identity" ON channels (owner_user_id, internal_name) WHERE deleted=0;
------------------------------------------------------------------------------------------------------------------------
DROP INDEX "idx_subscriptions_ref";
DROP INDEX "idx_subscriptions_chan";
DROP INDEX "idx_subscriptions_subuser";
DROP INDEX "idx_subscriptions_ownuser";
DROP INDEX "idx_subscriptions_tsc";
DROP INDEX "idx_subscriptions_conf";
CREATE TABLE __new_subscriptions
(
subscription_id TEXT NOT NULL,
subscriber_user_id TEXT NOT NULL,
channel_owner_user_id TEXT NOT NULL,
channel_internal_name TEXT NOT NULL,
channel_id TEXT NOT NULL,
timestamp_created INTEGER NOT NULL,
confirmed INTEGER CHECK(confirmed IN (0, 1)) NOT NULL,
active INTEGER CHECK(active IN (0, 1)) NOT NULL,
deleted INTEGER CHECK(deleted IN (0, 1)) NOT NULL DEFAULT '0',
PRIMARY KEY (subscription_id)
) STRICT;
INSERT INTO __new_subscriptions
SELECT
subscription_id,
subscriber_user_id,
channel_owner_user_id,
channel_internal_name,
channel_id,
timestamp_created,
confirmed,
1 AS active,
0 AS deleted
FROM subscriptions;
DROP TABLE subscriptions;
ALTER TABLE __new_subscriptions RENAME TO subscriptions;
CREATE UNIQUE INDEX "idx_subscriptions_ref" ON subscriptions (subscriber_user_id, channel_owner_user_id, channel_internal_name) WHERE deleted=0;
CREATE INDEX "idx_subscriptions_chan" ON subscriptions (channel_id);
CREATE INDEX "idx_subscriptions_subuser" ON subscriptions (subscriber_user_id);
CREATE INDEX "idx_subscriptions_ownuser" ON subscriptions (channel_owner_user_id);
CREATE INDEX "idx_subscriptions_tsc" ON subscriptions (timestamp_created);
CREATE INDEX "idx_subscriptions_conf" ON subscriptions (confirmed);
------------------------------------------------------------------------------------------------------------------------

View File

@ -155,7 +155,7 @@ func (app *Application) SendMessage(g *gin.Context, ctx *AppContext, UserID *mod
return nil, langext.Ptr(ginresp.SendAPIError(g, 500, apierr.DATABASE_ERROR, hl.NONE, "Failed to create compat-id", err)) return nil, langext.Ptr(ginresp.SendAPIError(g, 500, apierr.DATABASE_ERROR, hl.NONE, "Failed to create compat-id", err))
} }
subFilter := models.SubscriptionFilter{ChannelID: langext.Ptr([]models.ChannelID{channel.ChannelID}), Confirmed: langext.PTrue} subFilter := models.SubscriptionFilter{ChannelID: langext.Ptr([]models.ChannelID{channel.ChannelID}), Confirmed: langext.PTrue, Active: langext.PTrue}
activeSubscriptions, err := app.Database.Primary.ListSubscriptions(ctx, subFilter) activeSubscriptions, err := app.Database.Primary.ListSubscriptions(ctx, subFilter)
if err != nil { if err != nil {
return nil, langext.Ptr(ginresp.SendAPIError(g, 500, apierr.DATABASE_ERROR, hl.NONE, "Failed to query subscriptions", err)) return nil, langext.Ptr(ginresp.SendAPIError(g, 500, apierr.DATABASE_ERROR, hl.NONE, "Failed to query subscriptions", err))

View File

@ -10,6 +10,7 @@ type Channel struct {
TimestampCreated SCNTime `db:"timestamp_created" json:"timestamp_created"` TimestampCreated SCNTime `db:"timestamp_created" json:"timestamp_created"`
TimestampLastSent *SCNTime `db:"timestamp_lastsent" json:"timestamp_lastsent"` TimestampLastSent *SCNTime `db:"timestamp_lastsent" json:"timestamp_lastsent"`
MessagesSent int `db:"messages_sent" json:"messages_sent"` MessagesSent int `db:"messages_sent" json:"messages_sent"`
Deleted bool `db:"deleted" json:"-"`
} }
type ChannelWithSubscription struct { type ChannelWithSubscription struct {

View File

@ -19,6 +19,7 @@ type Delivery struct {
RetryCount int `db:"retry_count" json:"retry_count"` RetryCount int `db:"retry_count" json:"retry_count"`
NextDelivery *SCNTime `db:"next_delivery" json:"next_delivery"` NextDelivery *SCNTime `db:"next_delivery" json:"next_delivery"`
FCMMessageID *string `db:"fcm_message_id" json:"fcm_message_id"` FCMMessageID *string `db:"fcm_message_id" json:"fcm_message_id"`
Deleted bool `db:"deleted" json:"-"`
} }
func (d Delivery) MaxRetryCount() int { func (d Delivery) MaxRetryCount() int {

View File

@ -5,7 +5,7 @@ package models
import "gogs.mikescher.com/BlackForestBytes/goext/langext" import "gogs.mikescher.com/BlackForestBytes/goext/langext"
import "gogs.mikescher.com/BlackForestBytes/goext/enums" import "gogs.mikescher.com/BlackForestBytes/goext/enums"
const ChecksumEnumGenerator = "a1b9c4807e1cec4ea2a8b19cd447aa4b47c13f8058a12470dff8eeec895ad8f8" // GoExtVersion: 0.0.513 const ChecksumEnumGenerator = "7585f93c9270c25db0e367d668827c6ae89b309b6fd2b3d1aaf7f7016b07e0b8" // GoExtVersion: 0.0.513
// ================================ ClientType ================================ // ================================ ClientType ================================
// //

View File

@ -15,7 +15,7 @@ import "reflect"
import "regexp" import "regexp"
import "strings" import "strings"
const ChecksumCharsetIDGenerator = "a1b9c4807e1cec4ea2a8b19cd447aa4b47c13f8058a12470dff8eeec895ad8f8" // GoExtVersion: 0.0.513 const ChecksumCharsetIDGenerator = "7585f93c9270c25db0e367d668827c6ae89b309b6fd2b3d1aaf7f7016b07e0b8" // GoExtVersion: 0.0.513
const idlen = 24 const idlen = 24

View File

@ -80,6 +80,7 @@ type KeyToken struct {
Token string `db:"token" json:"token" jsonfilter:"INCLUDE_TOKEN"` Token string `db:"token" json:"token" jsonfilter:"INCLUDE_TOKEN"`
Permissions TokenPermissionList `db:"permissions" json:"permissions"` Permissions TokenPermissionList `db:"permissions" json:"permissions"`
MessagesSent int `db:"messages_sent" json:"messages_sent"` MessagesSent int `db:"messages_sent" json:"messages_sent"`
Deleted bool `db:"deleted" json:"-"`
} }
type KeyTokenPreview struct { type KeyTokenPreview struct {

View File

@ -13,10 +13,11 @@ const (
type Message struct { type Message struct {
MessageID MessageID `db:"message_id" json:"message_id"` MessageID MessageID `db:"message_id" json:"message_id"`
SenderUserID UserID `db:"sender_user_id" json:"sender_user_id"` // user that sent the message (this is also the owner of the channel that contains it) SenderUserID UserID `db:"sender_user_id" json:"sender_user_id"` // user that sent the message (this is also the owner of the channel that contains it)
ChannelInternalName string `db:"channel_internal_name" json:"channel_internal_name"` ChannelInternalName string `db:"channel_internal_name" json:"channel_internal_name"` //
ChannelID ChannelID `db:"channel_id" json:"channel_id"` ChannelOwnerUserID UserID `db:"channel_owner_user_id" json:"channel_owner_user_id"` // user that owns the channel
SenderName *string `db:"sender_name" json:"sender_name"` ChannelID ChannelID `db:"channel_id" json:"channel_id"` //
SenderName *string `db:"sender_name" json:"sender_name"` //
SenderIP string `db:"sender_ip" json:"sender_ip"` SenderIP string `db:"sender_ip" json:"sender_ip"`
TimestampReal SCNTime `db:"timestamp_real" json:"-"` TimestampReal SCNTime `db:"timestamp_real" json:"-"`
TimestampClient *SCNTime `db:"timestamp_client" json:"-"` TimestampClient *SCNTime `db:"timestamp_client" json:"-"`

View File

@ -14,60 +14,59 @@ import (
) )
type MessageFilter struct { type MessageFilter struct {
ConfirmedSubscriptionBy *UserID ConfirmedAndActiveSubscriptionBy *UserID
SearchStringFTS *[]string SearchStringFTS *[]string
SearchStringPlain *[]string SearchStringPlain *[]string
Sender *[]UserID Sender *[]UserID
ChannelNameCS *[]string // case-sensitive ChannelNameCS *[]string // case-sensitive
ChannelNameCI *[]string // case-insensitive ChannelNameCI *[]string // case-insensitive
ChannelID *[]ChannelID ChannelID *[]ChannelID
SenderNameCS *[]string // case-sensitive SenderNameCS *[]string // case-sensitive
SenderNameCI *[]string // case-insensitive SenderNameCI *[]string // case-insensitive
HasSenderName *bool HasSenderName *bool
SenderIP *[]string SenderIP *[]string
TimestampCoalesce *time.Time TimestampCoalesce *time.Time
TimestampCoalesceAfter *time.Time TimestampCoalesceAfter *time.Time
TimestampCoalesceBefore *time.Time TimestampCoalesceBefore *time.Time
TimestampReal *time.Time TimestampReal *time.Time
TimestampRealAfter *time.Time TimestampRealAfter *time.Time
TimestampRealBefore *time.Time TimestampRealBefore *time.Time
TimestampClient *time.Time TimestampClient *time.Time
TimestampClientAfter *time.Time TimestampClientAfter *time.Time
TimestampClientBefore *time.Time TimestampClientBefore *time.Time
TitleCS *string // case-sensitive TitleCS *string // case-sensitive
TitleCI *string // case-insensitive TitleCI *string // case-insensitive
Priority *[]int Priority *[]int
UserMessageID *[]string UserMessageID *[]string
OnlyDeleted bool OnlyDeleted bool
IncludeDeleted bool IncludeDeleted bool
CompatAcknowledged *bool CompatAcknowledged *bool
UsedKeyID *[]KeyTokenID UsedKeyID *[]KeyTokenID
} }
func (f MessageFilter) SQL() (string, string, sq.PP, error) { func (f MessageFilter) SQL() (string, string, sq.PP, error) {
params := sq.PP{}
joinClause := "" joinClause := ""
if f.ConfirmedSubscriptionBy != nil { if f.ConfirmedAndActiveSubscriptionBy != nil {
joinClause += " LEFT JOIN subscriptions AS subs on messages.channel_id = subs.channel_id " joinClause += fmt.Sprintf(" LEFT JOIN subscriptions AS subs ON (messages.channel_id = subs.channel_id AND subs.subscriber_user_id = :%s AND subs.confirmed=1 AND subs.active=1 AND subs.deleted=0) ", params.Add(*f.ConfirmedAndActiveSubscriptionBy))
} }
if f.SearchStringFTS != nil { if f.SearchStringFTS != nil {
joinClause += " JOIN messages_fts AS mfts on (mfts.rowid = messages.rowid) " joinClause += " JOIN messages_fts AS mfts ON (mfts.rowid = messages.rowid) "
} }
sqlClauses := make([]string, 0) sqlClauses := make([]string, 0)
params := sq.PP{}
if f.OnlyDeleted { if f.OnlyDeleted {
sqlClauses = append(sqlClauses, "(deleted=1)") sqlClauses = append(sqlClauses, "(messages.deleted=1)")
} else if f.IncludeDeleted { } else if f.IncludeDeleted {
// nothing, return all // nothing, return all
} else { } else {
sqlClauses = append(sqlClauses, "(deleted=0)") // default sqlClauses = append(sqlClauses, "(messages.deleted=0)") // default
} }
if f.ConfirmedSubscriptionBy != nil { if f.ConfirmedAndActiveSubscriptionBy != nil {
sqlClauses = append(sqlClauses, fmt.Sprintf("(subs.subscriber_user_id = :%s AND subs.confirmed = 1)", params.Add(*f.ConfirmedSubscriptionBy))) sqlClauses = append(sqlClauses, "(subs.confirmed=1 AND subs.active=1 AND subs.deleted=0)")
} }
if f.Sender != nil { if f.Sender != nil {

View File

@ -8,11 +8,13 @@ package models
// (use keytokens for write-access) // (use keytokens for write-access)
type Subscription struct { type Subscription struct {
SubscriptionID SubscriptionID `db:"subscription_id" json:"subscription_id"` SubscriptionID SubscriptionID `db:"subscription_id" json:"subscription_id"` //
SubscriberUserID UserID `db:"subscriber_user_id" json:"subscriber_user_id"` SubscriberUserID UserID `db:"subscriber_user_id" json:"subscriber_user_id"` //
ChannelOwnerUserID UserID `db:"channel_owner_user_id" json:"channel_owner_user_id"` ChannelOwnerUserID UserID `db:"channel_owner_user_id" json:"channel_owner_user_id"` //
ChannelID ChannelID `db:"channel_id" json:"channel_id"` ChannelID ChannelID `db:"channel_id" json:"channel_id"` //
ChannelInternalName string `db:"channel_internal_name" json:"channel_internal_name"` ChannelInternalName string `db:"channel_internal_name" json:"channel_internal_name"` //
TimestampCreated SCNTime `db:"timestamp_created" json:"timestamp_created"` TimestampCreated SCNTime `db:"timestamp_created" json:"timestamp_created"` //
Confirmed bool `db:"confirmed" json:"confirmed"` Confirmed bool `db:"confirmed" json:"confirmed"` // Channel-Owner confirmed subscription
Active bool `db:"active" json:"active"` // Subscriber has activated the subscription (default)
Deleted bool `db:"deleted" json:"-"` //
} }

View File

@ -19,6 +19,7 @@ type SubscriptionFilter struct {
ChannelOwnerUserID2 *[]UserID // Used to filter <ChannelOwnerUserID> again ChannelOwnerUserID2 *[]UserID // Used to filter <ChannelOwnerUserID> again
ChannelID *[]ChannelID ChannelID *[]ChannelID
Confirmed *bool Confirmed *bool
Active *bool
SubscriberIsChannelOwner *bool SubscriberIsChannelOwner *bool
Timestamp *time.Time Timestamp *time.Time
TimestampAfter *time.Time TimestampAfter *time.Time
@ -33,6 +34,8 @@ func (f SubscriptionFilter) SQL() (string, string, sq.PP, error) {
params := sq.PP{} params := sq.PP{}
sqlClauses = append(sqlClauses, "(deleted=0)")
if f.AnyUserID != nil { if f.AnyUserID != nil {
sqlClauses = append(sqlClauses, fmt.Sprintf("(subscriber_user_id = :%s OR channel_owner_user_id = :%s)", params.Add(*f.AnyUserID), params.Add(*f.AnyUserID))) sqlClauses = append(sqlClauses, fmt.Sprintf("(subscriber_user_id = :%s OR channel_owner_user_id = :%s)", params.Add(*f.AnyUserID), params.Add(*f.AnyUserID)))
} }
@ -85,6 +88,14 @@ func (f SubscriptionFilter) SQL() (string, string, sq.PP, error) {
} }
} }
if f.Active != nil {
if *f.Active {
sqlClauses = append(sqlClauses, "(active=1)")
} else {
sqlClauses = append(sqlClauses, "(active=0)")
}
}
if f.SubscriberIsChannelOwner != nil { if f.SubscriberIsChannelOwner != nil {
if *f.SubscriberIsChannelOwner { if *f.SubscriberIsChannelOwner {
sqlClauses = append(sqlClauses, "(subscriber_user_id = channel_owner_user_id)") sqlClauses = append(sqlClauses, "(subscriber_user_id = channel_owner_user_id)")

View File

@ -15,6 +15,7 @@ type User struct {
QuotaUsedDay *string `db:"quota_used_day" json:"-"` QuotaUsedDay *string `db:"quota_used_day" json:"-"`
IsPro bool `db:"is_pro" json:"is_pro"` IsPro bool `db:"is_pro" json:"is_pro"`
ProToken *string `db:"pro_token" json:"-"` ProToken *string `db:"pro_token" json:"-"`
Deleted bool `db:"deleted" json:"-"`
UserExtra `db:"-"` // fields that are not in DB and are set on PreMarshal UserExtra `db:"-"` // fields that are not in DB and are set on PreMarshal
} }

View File

@ -39,3 +39,7 @@ func (d *TestSink) SendNotification(ctx context.Context, user models.User, clien
return key, "", nil return key, "", nil
} }
func (d *TestSink) Clear() {
d.Data = make([]SinkData, 0)
}

View File

@ -881,11 +881,6 @@
"name": "channel_id", "name": "channel_id",
"in": "query" "in": "query"
}, },
{
"type": "string",
"name": "filter",
"in": "query"
},
{ {
"type": "boolean", "type": "boolean",
"name": "has_sender", "name": "has_sender",
@ -910,6 +905,15 @@
"name": "priority", "name": "priority",
"in": "query" "in": "query"
}, },
{
"type": "array",
"items": {
"type": "string"
},
"collectionFormat": "csv",
"name": "search",
"in": "query"
},
{ {
"type": "array", "type": "array",
"items": { "items": {
@ -919,6 +923,15 @@
"name": "sender", "name": "sender",
"in": "query" "in": "query"
}, },
{
"type": "array",
"items": {
"type": "string"
},
"collectionFormat": "csv",
"name": "string_search",
"in": "query"
},
{ {
"type": "boolean", "type": "boolean",
"name": "trimmed", "name": "trimmed",
@ -1122,7 +1135,7 @@
"parameters": [ "parameters": [
{ {
"type": "string", "type": "string",
"description": "TokenKeyID", "description": "TokenKeyID (actual token || token-id)",
"name": "kid", "name": "kid",
"in": "path", "in": "path",
"required": true "required": true
@ -1342,12 +1355,11 @@
} }
}, },
"patch": { "patch": {
"description": "The body-values are optional, only send the ones you want to update",
"tags": [ "tags": [
"API-v2" "API-v2"
], ],
"summary": "(Partially) update a user", "summary": "(Self-)Deletes a user (including all entities - all messages, channels, clients, .....)",
"operationId": "api-user-update", "operationId": "api-user-delete",
"parameters": [ "parameters": [
{ {
"type": "string", "type": "string",
@ -1355,22 +1367,6 @@
"name": "uid", "name": "uid",
"in": "path", "in": "path",
"required": true "required": true
},
{
"description": "Change the username (send an empty string to clear it)",
"name": "username",
"in": "body",
"schema": {
"type": "string"
}
},
{
"description": "Send a verification of premium purchase",
"name": "pro_token",
"in": "body",
"schema": {
"type": "string"
}
} }
], ],
"responses": { "responses": {
@ -1581,8 +1577,8 @@
"tags": [ "tags": [
"API-v2" "API-v2"
], ],
"summary": "(Partially) update a channel", "summary": "delete a channel (including all messages, subscriptions, etc)",
"operationId": "api-channels-update", "operationId": "api-channels-delete",
"parameters": [ "parameters": [
{ {
"type": "string", "type": "string",
@ -1597,37 +1593,13 @@
"name": "cid", "name": "cid",
"in": "path", "in": "path",
"required": true "required": true
},
{
"description": "Send `true` to create a new subscribe_key",
"name": "subscribe_key",
"in": "body",
"schema": {
"type": "string"
}
},
{
"description": "Send `true` to create a new send_key",
"name": "send_key",
"in": "body",
"schema": {
"type": "string"
}
},
{
"description": "Change the cahnnel display-name (only chnages to lowercase/uppercase are allowed - internal_name must stay the same)",
"name": "display_name",
"in": "body",
"schema": {
"type": "string"
}
} }
], ],
"responses": { "responses": {
"200": { "200": {
"description": "OK", "description": "OK",
"schema": { "schema": {
"$ref": "#/definitions/models.ChannelWithSubscription" "$ref": "#/definitions/models.Channel"
} }
}, },
"400": { "400": {
@ -1666,11 +1638,6 @@
"summary": "List messages of a channel", "summary": "List messages of a channel",
"operationId": "api-channel-messages", "operationId": "api-channel-messages",
"parameters": [ "parameters": [
{
"type": "string",
"name": "filter",
"in": "query"
},
{ {
"type": "string", "type": "string",
"name": "next_page_token", "name": "next_page_token",
@ -3727,6 +3694,9 @@
"handler.UpdateSubscription.body": { "handler.UpdateSubscription.body": {
"type": "object", "type": "object",
"properties": { "properties": {
"active": {
"type": "boolean"
},
"confirmed": { "confirmed": {
"type": "boolean" "type": "boolean"
} }
@ -3862,6 +3832,42 @@
} }
} }
}, },
"models.Channel": {
"type": "object",
"properties": {
"channel_id": {
"type": "string"
},
"description_name": {
"description": "= DescriptionName, (optional), longer description text, initally nil",
"type": "string"
},
"display_name": {
"description": "= DisplayName, used for display purposes, can be changed, initially equals InternalName",
"type": "string"
},
"internal_name": {
"description": "= InternalName, used for sending, normalized, cannot be changed",
"type": "string"
},
"messages_sent": {
"type": "integer"
},
"owner_user_id": {
"type": "string"
},
"subscribe_key": {
"description": "can be nil, depending on endpoint",
"type": "string"
},
"timestamp_created": {
"type": "string"
},
"timestamp_lastsent": {
"type": "string"
}
}
},
"models.ChannelPreview": { "models.ChannelPreview": {
"type": "object", "type": "object",
"properties": { "properties": {
@ -3877,8 +3883,14 @@
"internal_name": { "internal_name": {
"type": "string" "type": "string"
}, },
"messages_sent": {
"type": "integer"
},
"owner_user_id": { "owner_user_id": {
"type": "string" "type": "string"
},
"subscription": {
"$ref": "#/definitions/models.Subscription"
} }
} }
}, },
@ -4069,6 +4081,10 @@
"channel_internal_name": { "channel_internal_name": {
"type": "string" "type": "string"
}, },
"channel_owner_user_id": {
"description": "user that owns the channel",
"type": "string"
},
"content": { "content": {
"type": "string" "type": "string"
}, },
@ -4125,6 +4141,10 @@
"models.Subscription": { "models.Subscription": {
"type": "object", "type": "object",
"properties": { "properties": {
"active": {
"description": "Subscriber has activated the subscription (default)",
"type": "boolean"
},
"channel_id": { "channel_id": {
"type": "string" "type": "string"
}, },
@ -4135,6 +4155,7 @@
"type": "string" "type": "string"
}, },
"confirmed": { "confirmed": {
"description": "Channel-Owner confirmed subscription",
"type": "boolean" "type": "boolean"
}, },
"subscriber_user_id": { "subscriber_user_id": {

View File

@ -421,6 +421,8 @@ definitions:
type: object type: object
handler.UpdateSubscription.body: handler.UpdateSubscription.body:
properties: properties:
active:
type: boolean
confirmed: confirmed:
type: boolean type: boolean
type: object type: object
@ -508,6 +510,33 @@ definitions:
uri: uri:
type: string type: string
type: object type: object
models.Channel:
properties:
channel_id:
type: string
description_name:
description: = DescriptionName, (optional), longer description text, initially
nil
type: string
display_name:
description: = DisplayName, used for display purposes, can be changed, initially
equals InternalName
type: string
internal_name:
description: = InternalName, used for sending, normalized, cannot be changed
type: string
messages_sent:
type: integer
owner_user_id:
type: string
subscribe_key:
description: can be nil, depending on endpoint
type: string
timestamp_created:
type: string
timestamp_lastsent:
type: string
type: object
models.ChannelPreview: models.ChannelPreview:
properties: properties:
channel_id: channel_id:
@ -518,8 +547,12 @@ definitions:
type: string type: string
internal_name: internal_name:
type: string type: string
messages_sent:
type: integer
owner_user_id: owner_user_id:
type: string type: string
subscription:
$ref: '#/definitions/models.Subscription'
type: object type: object
models.ChannelWithSubscription: models.ChannelWithSubscription:
properties: properties:
@ -650,6 +683,9 @@ definitions:
type: string type: string
channel_internal_name: channel_internal_name:
type: string type: string
channel_owner_user_id:
description: user that owns the channel
type: string
content: content:
type: string type: string
message_id: message_id:
@ -688,6 +724,9 @@ definitions:
type: object type: object
models.Subscription: models.Subscription:
properties: properties:
active:
description: Subscriber has activated the subscription (default)
type: boolean
channel_id: channel_id:
type: string type: string
channel_internal_name: channel_internal_name:
@ -695,6 +734,7 @@ definitions:
channel_owner_user_id: channel_owner_user_id:
type: string type: string
confirmed: confirmed:
description: Channel-Owner confirmed subscription
type: boolean type: boolean
subscriber_user_id: subscriber_user_id:
type: string type: string
@ -1418,9 +1458,6 @@ paths:
type: string type: string
name: channel_id name: channel_id
type: array type: array
- in: query
name: filter
type: string
- in: query - in: query
name: has_sender name: has_sender
type: boolean type: boolean
@ -1436,12 +1473,24 @@ paths:
type: integer type: integer
name: priority name: priority
type: array type: array
- collectionFormat: csv
in: query
items:
type: string
name: search
type: array
- collectionFormat: csv - collectionFormat: csv
in: query in: query
items: items:
type: string type: string
name: sender name: sender
type: array type: array
- collectionFormat: csv
in: query
items:
type: string
name: string_search
type: array
- in: query - in: query
name: trimmed name: trimmed
type: boolean type: boolean
@ -1580,7 +1629,7 @@ paths:
get: get:
operationId: api-tokenkeys-get-preview operationId: api-tokenkeys-get-preview
parameters: parameters:
- description: TokenKeyID - description: TokenKeyID (actual token || token-id)
in: path in: path
name: kid name: kid
required: true required: true
@ -1731,24 +1780,13 @@ paths:
tags: tags:
- API-v2 - API-v2
patch: patch:
description: The body-values are optional, only send the ones you want to update operationId: api-user-delete
operationId: api-user-update
parameters: parameters:
- description: UserID - description: UserID
in: path in: path
name: uid name: uid
required: true required: true
type: string type: string
- description: Change the username (send an empty string to clear it)
in: body
name: username
schema:
type: string
- description: Send a verification of premium purchase
in: body
name: pro_token
schema:
type: string
responses: responses:
"200": "200":
description: OK description: OK
@ -1770,7 +1808,8 @@ paths:
description: internal server error description: internal server error
schema: schema:
$ref: '#/definitions/ginresp.apiError' $ref: '#/definitions/ginresp.apiError'
summary: (Partially) update a user summary: (Self-)Deletes a user (including all entities - all messages, channels,
clients, .....)
tags: tags:
- API-v2 - API-v2
/api/v2/users/{uid}/channels: /api/v2/users/{uid}/channels:
@ -1895,7 +1934,7 @@ paths:
tags: tags:
- API-v2 - API-v2
patch: patch:
operationId: api-channels-update operationId: api-channels-delete
parameters: parameters:
- description: UserID - description: UserID
in: path in: path
@ -1907,27 +1946,11 @@ paths:
name: cid name: cid
required: true required: true
type: string type: string
- description: Send `true` to create a new subscribe_key
in: body
name: subscribe_key
schema:
type: string
- description: Send `true` to create a new send_key
in: body
name: send_key
schema:
type: string
- description: Change the cahnnel display-name (only chnages to lowercase/uppercase
are allowed - internal_name must stay the same)
in: body
name: display_name
schema:
type: string
responses: responses:
"200": "200":
description: OK description: OK
schema: schema:
$ref: '#/definitions/models.ChannelWithSubscription' $ref: '#/definitions/models.Channel'
"400": "400":
description: supplied values/parameters cannot be parsed / are invalid description: supplied values/parameters cannot be parsed / are invalid
schema: schema:
@ -1944,7 +1967,7 @@ paths:
description: internal server error description: internal server error
schema: schema:
$ref: '#/definitions/ginresp.apiError' $ref: '#/definitions/ginresp.apiError'
summary: (Partially) update a channel summary: delete a channel (including all messages, subscriptions, etc)
tags: tags:
- API-v2 - API-v2
/api/v2/users/{uid}/channels/{cid}/messages: /api/v2/users/{uid}/channels/{cid}/messages:
@ -1956,9 +1979,6 @@ paths:
By default we return long messages with a trimmed body, if trimmed=false is supplied we return full messages (this reduces the max page_size) By default we return long messages with a trimmed body, if trimmed=false is supplied we return full messages (this reduces the max page_size)
operationId: api-channel-messages operationId: api-channel-messages
parameters: parameters:
- in: query
name: filter
type: string
- in: query - in: query
name: next_page_token name: next_page_token
type: string type: string

View File

@ -1,13 +1,14 @@
package test package test
import ( import (
"blackforestbytes.com/simplecloudnotifier/api/apierr"
tt "blackforestbytes.com/simplecloudnotifier/test/util"
"fmt" "fmt"
"github.com/gin-gonic/gin"
"gogs.mikescher.com/BlackForestBytes/goext/langext"
"strings" "strings"
"testing" "testing"
"blackforestbytes.com/simplecloudnotifier/api/apierr"
tt "blackforestbytes.com/simplecloudnotifier/test/util"
"github.com/gin-gonic/gin"
"gogs.mikescher.com/BlackForestBytes/goext/langext"
) )
func TestCreateChannel(t *testing.T) { func TestCreateChannel(t *testing.T) {
@ -1233,3 +1234,94 @@ func TestChannelMessageCounter(t *testing.T) {
assertCounter(6, 1, 3) assertCounter(6, 1, 3)
} }
func TestDeleteChannel(t *testing.T) {
ws, baseUrl, stop := tt.StartSimpleWebserver(t)
defer stop()
// Initialize default data set
data := tt.InitDefaultData(t, ws)
// User 16 owns channels, User 1 will subscribe
user16 := data.User[16]
user1 := data.User[1]
// Find channel "Chan2" belonging to user 16
var chan2 tt.ChanData
for _, ch := range user16.Channels {
if ch.InternalName == "Chan2" {
chan2 = ch
break
}
}
tt.AssertNotEqual(t, "Channel Chan2 ID", "", chan2.ChannelID) // Ensure channel was found
// --- Subscribe User 1 to User 16's Chan2 ---
chanInfo := tt.RequestAuthGet[gin.H](t, user16.AdminKey, baseUrl, fmt.Sprintf("/api/v2/users/%s/channels/%s", user16.UID, chan2.ChannelID))
subKey := chanInfo["subscribe_key"].(string)
subReq := tt.RequestAuthPost[gin.H](t, user1.AdminKey, baseUrl, fmt.Sprintf("/api/v2/users/%s/subscriptions?chan_subscribe_key=%s", user1.UID, subKey), gin.H{
"channel_id": chan2.ChannelID, // Provide channel ID for subscription
})
subscriptionID := subReq["subscription_id"].(string)
// Confirm subscription by owner (user 16)
tt.RequestAuthPatch[gin.H](t, user16.AdminKey, baseUrl, fmt.Sprintf("/api/v2/users/%s/subscriptions/%s", user16.UID, subscriptionID), gin.H{
"confirmed": true,
})
// --- Pre-checks ---
// 1. Check channel exists
tt.RequestAuthGet[gin.H](t, user16.AdminKey, baseUrl, fmt.Sprintf("/api/v2/users/%s/channels/%s", user16.UID, chan2.ChannelID))
// 2. Check channel messages exist (assuming mglist type from previous tests)
type msg struct {
MessageId string `json:"message_id"`
}
type mglist struct {
Messages []msg `json:"messages"`
}
msgs := tt.RequestAuthGet[mglist](t, user16.AdminKey, baseUrl, fmt.Sprintf("/api/v2/users/%s/channels/%s/messages", user16.UID, chan2.ChannelID))
tt.AssertTrue(t, "pre-check messages exist", len(msgs.Messages) > 0)
// 3. Check subscription exists for User 1 (outgoing)
type subobj struct {
SubscriptionId string `json:"subscription_id"`
}
type sublist struct {
Subscriptions []subobj `json:"subscriptions"`
}
subs1 := tt.RequestAuthGet[sublist](t, user1.AdminKey, baseUrl, fmt.Sprintf("/api/v2/users/%s/subscriptions?direction=outgoing", user1.UID))
foundSub1 := langext.ArrAny(subs1.Subscriptions, func(v subobj) bool { return v.SubscriptionId == subscriptionID })
tt.AssertTrue(t, "pre-check user1 subs outgoing", foundSub1)
// 4. Check subscription exists for User 16 (incoming)
subs16 := tt.RequestAuthGet[sublist](t, user16.AdminKey, baseUrl, fmt.Sprintf("/api/v2/users/%s/subscriptions?direction=incoming", user16.UID))
foundSub16 := langext.ArrAny(subs16.Subscriptions, func(v subobj) bool { return v.SubscriptionId == subscriptionID })
tt.AssertTrue(t, "pre-check user16 subs incoming", foundSub16)
// --- Delete Channel ---
tt.RequestAuthDelete[tt.Void](t, user16.AdminKey, baseUrl, fmt.Sprintf("/api/v2/users/%s/channels/%s", user16.UID, chan2.ChannelID), nil)
// --- Post-checks ---
// 1. Check channel fetch fails
tt.RequestAuthGetShouldFail(t, user16.AdminKey, baseUrl, fmt.Sprintf("/api/v2/users/%s/channels/%s", user16.UID, chan2.ChannelID), 404, apierr.CHANNEL_NOT_FOUND)
// 2. Check channel messages fetch fails
tt.RequestAuthGetShouldFail(t, user16.AdminKey, baseUrl, fmt.Sprintf("/api/v2/users/%s/channels/%s/messages", user16.UID, chan2.ChannelID), 404, apierr.CHANNEL_NOT_FOUND)
// Check subscriber cannot fetch messages either
tt.RequestAuthGetShouldFail(t, user1.AdminKey, baseUrl, fmt.Sprintf("/api/v2/users/%s/channels/%s/messages", user16.UID, chan2.ChannelID), 404, apierr.CHANNEL_NOT_FOUND) // Auth fails because subscription is gone
// 3. Check subscription is gone for User 1
subs1After := tt.RequestAuthGet[sublist](t, user1.AdminKey, baseUrl, fmt.Sprintf("/api/v2/users/%s/subscriptions?direction=outgoing", user1.UID))
foundSub1After := langext.ArrAny(subs1After.Subscriptions, func(v subobj) bool { return v.SubscriptionId == subscriptionID })
tt.AssertEqual(t, "post-check user1 subs outgoing", false, foundSub1After)
// 4. Check subscription is gone for User 16
subs16After := tt.RequestAuthGet[sublist](t, user16.AdminKey, baseUrl, fmt.Sprintf("/api/v2/users/%s/subscriptions?direction=incoming", user16.UID))
foundSub16After := langext.ArrAny(subs16After.Subscriptions, func(v subobj) bool { return v.SubscriptionId == subscriptionID })
tt.AssertEqual(t, "post-check user16 subs incoming", false, foundSub16After)
}

View File

@ -304,6 +304,8 @@ func TestPrimaryDB_Migrate_from_3_to_latest(t *testing.T) {
tt.AssertAny(dbf3) tt.AssertAny(dbf3)
tt.AssertAny(conf) tt.AssertAny(conf)
schemavers := 3
{ {
url := fmt.Sprintf("file:%s", dbf1) url := fmt.Sprintf("file:%s", dbf1)
@ -312,8 +314,6 @@ func TestPrimaryDB_Migrate_from_3_to_latest(t *testing.T) {
qqdb := sq.NewDB(xdb, sq.DBOptions{}) qqdb := sq.NewDB(xdb, sq.DBOptions{})
schemavers := 3
dbschema := schema.PrimarySchema[schemavers] dbschema := schema.PrimarySchema[schemavers]
_, err = qqdb.Exec(ctx, dbschema.SQL, sq.PP{}) _, err = qqdb.Exec(ctx, dbschema.SQL, sq.PP{})
@ -351,7 +351,7 @@ func TestPrimaryDB_Migrate_from_3_to_latest(t *testing.T) {
schema1, err := db1.ReadSchema(tctx) schema1, err := db1.ReadSchema(tctx)
tt.TestFailIfErr(t, err) tt.TestFailIfErr(t, err)
tt.AssertEqual(t, "schema1", 3, schema1) tt.AssertEqual(t, "schema1", schemavers, schema1)
err = tctx.CommitTransaction() err = tctx.CommitTransaction()
tt.TestFailIfErr(t, err) tt.TestFailIfErr(t, err)
@ -400,6 +400,8 @@ func TestPrimaryDB_Migrate_from_4_to_latest(t *testing.T) {
tt.AssertAny(dbf3) tt.AssertAny(dbf3)
tt.AssertAny(conf) tt.AssertAny(conf)
schemavers := 4
{ {
url := fmt.Sprintf("file:%s", dbf1) url := fmt.Sprintf("file:%s", dbf1)
@ -408,8 +410,6 @@ func TestPrimaryDB_Migrate_from_4_to_latest(t *testing.T) {
qqdb := sq.NewDB(xdb, sq.DBOptions{}) qqdb := sq.NewDB(xdb, sq.DBOptions{})
schemavers := 4
dbschema := schema.PrimarySchema[schemavers] dbschema := schema.PrimarySchema[schemavers]
_, err = qqdb.Exec(ctx, dbschema.SQL, sq.PP{}) _, err = qqdb.Exec(ctx, dbschema.SQL, sq.PP{})
@ -447,7 +447,391 @@ func TestPrimaryDB_Migrate_from_4_to_latest(t *testing.T) {
schema1, err := db1.ReadSchema(tctx) schema1, err := db1.ReadSchema(tctx)
tt.TestFailIfErr(t, err) tt.TestFailIfErr(t, err)
tt.AssertEqual(t, "schema1", 4, schema1) tt.AssertEqual(t, "schema1", schemavers, schema1)
err = tctx.CommitTransaction()
tt.TestFailIfErr(t, err)
}
//================================================
{
err = db1.Migrate(ctx)
tt.TestFailIfErr(t, err)
}
//================================================
{
tctx := simplectx.CreateSimpleContext(ctx, nil)
schema2, err := db1.ReadSchema(tctx)
tt.TestFailIfErr(t, err)
tt.AssertEqual(t, "schema2", schema.PrimarySchemaVersion, schema2)
err = tctx.CommitTransaction()
tt.TestFailIfErr(t, err)
}
{
tctx := simplectx.CreateSimpleContext(ctx, nil)
schemHashDB, err := sq.HashSqliteDatabase(tctx, db1.DB())
tt.TestFailIfErr(t, err)
tt.AssertEqual(t, "schemHashDB", schema.PrimarySchema[schema.PrimarySchemaVersion].Hash, schemHashDB)
err = tctx.CommitTransaction()
tt.TestFailIfErr(t, err)
}
err = db1.Stop(ctx)
tt.TestFailIfErr(t, err)
}
}
func TestPrimaryDB_Migrate_from_5_to_latest(t *testing.T) {
dbf1, dbf2, dbf3, conf, stop := tt.StartSimpleTestspace(t)
defer stop()
ctx := context.Background()
tt.AssertAny(dbf1)
tt.AssertAny(dbf2)
tt.AssertAny(dbf3)
tt.AssertAny(conf)
schemavers := 5
{
url := fmt.Sprintf("file:%s", dbf1)
xdb, err := sqlx.Open("sqlite3", url)
tt.TestFailIfErr(t, err)
qqdb := sq.NewDB(xdb, sq.DBOptions{})
dbschema := schema.PrimarySchema[schemavers]
_, err = qqdb.Exec(ctx, dbschema.SQL, sq.PP{})
tt.TestFailIfErr(t, err)
_, err = qqdb.Exec(ctx, "INSERT INTO meta (meta_key, value_int) VALUES (:key, :val) ON CONFLICT(meta_key) DO UPDATE SET value_int = :val", sq.PP{
"key": "schema",
"val": schemavers,
})
_, err = qqdb.Exec(ctx, "INSERT INTO meta (meta_key, value_txt) VALUES (:key, :val) ON CONFLICT(meta_key) DO UPDATE SET value_txt = :val", sq.PP{
"key": "schema_hash",
"val": dbschema.Hash,
})
{
tctx := simplectx.CreateSimpleContext(ctx, nil)
schemHashDB, err := sq.HashSqliteDatabase(tctx, qqdb)
tt.TestFailIfErr(t, err)
tt.AssertEqual(t, "schemHashDB", dbschema.Hash, schemHashDB)
err = tctx.CommitTransaction()
tt.TestFailIfErr(t, err)
}
err = qqdb.Exit()
tt.TestFailIfErr(t, err)
}
{
db1, err := primary.NewPrimaryDatabase(conf)
tt.TestFailIfErr(t, err)
{
tctx := simplectx.CreateSimpleContext(ctx, nil)
schema1, err := db1.ReadSchema(tctx)
tt.TestFailIfErr(t, err)
tt.AssertEqual(t, "schema1", schemavers, schema1)
err = tctx.CommitTransaction()
tt.TestFailIfErr(t, err)
}
//================================================
{
err = db1.Migrate(ctx)
tt.TestFailIfErr(t, err)
}
//================================================
{
tctx := simplectx.CreateSimpleContext(ctx, nil)
schema2, err := db1.ReadSchema(tctx)
tt.TestFailIfErr(t, err)
tt.AssertEqual(t, "schema2", schema.PrimarySchemaVersion, schema2)
err = tctx.CommitTransaction()
tt.TestFailIfErr(t, err)
}
{
tctx := simplectx.CreateSimpleContext(ctx, nil)
schemHashDB, err := sq.HashSqliteDatabase(tctx, db1.DB())
tt.TestFailIfErr(t, err)
tt.AssertEqual(t, "schemHashDB", schema.PrimarySchema[schema.PrimarySchemaVersion].Hash, schemHashDB)
err = tctx.CommitTransaction()
tt.TestFailIfErr(t, err)
}
err = db1.Stop(ctx)
tt.TestFailIfErr(t, err)
}
}
func TestPrimaryDB_Migrate_from_6_to_latest(t *testing.T) {
dbf1, dbf2, dbf3, conf, stop := tt.StartSimpleTestspace(t)
defer stop()
ctx := context.Background()
tt.AssertAny(dbf1)
tt.AssertAny(dbf2)
tt.AssertAny(dbf3)
tt.AssertAny(conf)
schemavers := 6
{
url := fmt.Sprintf("file:%s", dbf1)
xdb, err := sqlx.Open("sqlite3", url)
tt.TestFailIfErr(t, err)
qqdb := sq.NewDB(xdb, sq.DBOptions{})
dbschema := schema.PrimarySchema[schemavers]
_, err = qqdb.Exec(ctx, dbschema.SQL, sq.PP{})
tt.TestFailIfErr(t, err)
_, err = qqdb.Exec(ctx, "INSERT INTO meta (meta_key, value_int) VALUES (:key, :val) ON CONFLICT(meta_key) DO UPDATE SET value_int = :val", sq.PP{
"key": "schema",
"val": schemavers,
})
_, err = qqdb.Exec(ctx, "INSERT INTO meta (meta_key, value_txt) VALUES (:key, :val) ON CONFLICT(meta_key) DO UPDATE SET value_txt = :val", sq.PP{
"key": "schema_hash",
"val": dbschema.Hash,
})
{
tctx := simplectx.CreateSimpleContext(ctx, nil)
schemHashDB, err := sq.HashSqliteDatabase(tctx, qqdb)
tt.TestFailIfErr(t, err)
tt.AssertEqual(t, "schemHashDB", dbschema.Hash, schemHashDB)
err = tctx.CommitTransaction()
tt.TestFailIfErr(t, err)
}
err = qqdb.Exit()
tt.TestFailIfErr(t, err)
}
{
db1, err := primary.NewPrimaryDatabase(conf)
tt.TestFailIfErr(t, err)
{
tctx := simplectx.CreateSimpleContext(ctx, nil)
schema1, err := db1.ReadSchema(tctx)
tt.TestFailIfErr(t, err)
tt.AssertEqual(t, "schema1", schemavers, schema1)
err = tctx.CommitTransaction()
tt.TestFailIfErr(t, err)
}
//================================================
{
err = db1.Migrate(ctx)
tt.TestFailIfErr(t, err)
}
//================================================
{
tctx := simplectx.CreateSimpleContext(ctx, nil)
schema2, err := db1.ReadSchema(tctx)
tt.TestFailIfErr(t, err)
tt.AssertEqual(t, "schema2", schema.PrimarySchemaVersion, schema2)
err = tctx.CommitTransaction()
tt.TestFailIfErr(t, err)
}
{
tctx := simplectx.CreateSimpleContext(ctx, nil)
schemHashDB, err := sq.HashSqliteDatabase(tctx, db1.DB())
tt.TestFailIfErr(t, err)
tt.AssertEqual(t, "schemHashDB", schema.PrimarySchema[schema.PrimarySchemaVersion].Hash, schemHashDB)
err = tctx.CommitTransaction()
tt.TestFailIfErr(t, err)
}
err = db1.Stop(ctx)
tt.TestFailIfErr(t, err)
}
}
func TestPrimaryDB_Migrate_from_7_to_latest(t *testing.T) {
dbf1, dbf2, dbf3, conf, stop := tt.StartSimpleTestspace(t)
defer stop()
ctx := context.Background()
tt.AssertAny(dbf1)
tt.AssertAny(dbf2)
tt.AssertAny(dbf3)
tt.AssertAny(conf)
schemavers := 7
{
url := fmt.Sprintf("file:%s", dbf1)
xdb, err := sqlx.Open("sqlite3", url)
tt.TestFailIfErr(t, err)
qqdb := sq.NewDB(xdb, sq.DBOptions{})
dbschema := schema.PrimarySchema[schemavers]
_, err = qqdb.Exec(ctx, dbschema.SQL, sq.PP{})
tt.TestFailIfErr(t, err)
_, err = qqdb.Exec(ctx, "INSERT INTO meta (meta_key, value_int) VALUES (:key, :val) ON CONFLICT(meta_key) DO UPDATE SET value_int = :val", sq.PP{
"key": "schema",
"val": schemavers,
})
_, err = qqdb.Exec(ctx, "INSERT INTO meta (meta_key, value_txt) VALUES (:key, :val) ON CONFLICT(meta_key) DO UPDATE SET value_txt = :val", sq.PP{
"key": "schema_hash",
"val": dbschema.Hash,
})
{
tctx := simplectx.CreateSimpleContext(ctx, nil)
schemHashDB, err := sq.HashSqliteDatabase(tctx, qqdb)
tt.TestFailIfErr(t, err)
tt.AssertEqual(t, "schemHashDB", dbschema.Hash, schemHashDB)
err = tctx.CommitTransaction()
tt.TestFailIfErr(t, err)
}
err = qqdb.Exit()
tt.TestFailIfErr(t, err)
}
{
db1, err := primary.NewPrimaryDatabase(conf)
tt.TestFailIfErr(t, err)
{
tctx := simplectx.CreateSimpleContext(ctx, nil)
schema1, err := db1.ReadSchema(tctx)
tt.TestFailIfErr(t, err)
tt.AssertEqual(t, "schema1", schemavers, schema1)
err = tctx.CommitTransaction()
tt.TestFailIfErr(t, err)
}
//================================================
{
err = db1.Migrate(ctx)
tt.TestFailIfErr(t, err)
}
//================================================
{
tctx := simplectx.CreateSimpleContext(ctx, nil)
schema2, err := db1.ReadSchema(tctx)
tt.TestFailIfErr(t, err)
tt.AssertEqual(t, "schema2", schema.PrimarySchemaVersion, schema2)
err = tctx.CommitTransaction()
tt.TestFailIfErr(t, err)
}
{
tctx := simplectx.CreateSimpleContext(ctx, nil)
schemHashDB, err := sq.HashSqliteDatabase(tctx, db1.DB())
tt.TestFailIfErr(t, err)
tt.AssertEqual(t, "schemHashDB", schema.PrimarySchema[schema.PrimarySchemaVersion].Hash, schemHashDB)
err = tctx.CommitTransaction()
tt.TestFailIfErr(t, err)
}
err = db1.Stop(ctx)
tt.TestFailIfErr(t, err)
}
}
func TestPrimaryDB_Migrate_from_8_to_latest(t *testing.T) {
dbf1, dbf2, dbf3, conf, stop := tt.StartSimpleTestspace(t)
defer stop()
ctx := context.Background()
tt.AssertAny(dbf1)
tt.AssertAny(dbf2)
tt.AssertAny(dbf3)
tt.AssertAny(conf)
schemavers := 8
{
url := fmt.Sprintf("file:%s", dbf1)
xdb, err := sqlx.Open("sqlite3", url)
tt.TestFailIfErr(t, err)
qqdb := sq.NewDB(xdb, sq.DBOptions{})
dbschema := schema.PrimarySchema[schemavers]
_, err = qqdb.Exec(ctx, dbschema.SQL, sq.PP{})
tt.TestFailIfErr(t, err)
_, err = qqdb.Exec(ctx, "INSERT INTO meta (meta_key, value_int) VALUES (:key, :val) ON CONFLICT(meta_key) DO UPDATE SET value_int = :val", sq.PP{
"key": "schema",
"val": schemavers,
})
_, err = qqdb.Exec(ctx, "INSERT INTO meta (meta_key, value_txt) VALUES (:key, :val) ON CONFLICT(meta_key) DO UPDATE SET value_txt = :val", sq.PP{
"key": "schema_hash",
"val": dbschema.Hash,
})
{
tctx := simplectx.CreateSimpleContext(ctx, nil)
schemHashDB, err := sq.HashSqliteDatabase(tctx, qqdb)
tt.TestFailIfErr(t, err)
tt.AssertEqual(t, "schemHashDB", dbschema.Hash, schemHashDB)
err = tctx.CommitTransaction()
tt.TestFailIfErr(t, err)
}
err = qqdb.Exit()
tt.TestFailIfErr(t, err)
}
{
db1, err := primary.NewPrimaryDatabase(conf)
tt.TestFailIfErr(t, err)
{
tctx := simplectx.CreateSimpleContext(ctx, nil)
schema1, err := db1.ReadSchema(tctx)
tt.TestFailIfErr(t, err)
tt.AssertEqual(t, "schema1", schemavers, schema1)
err = tctx.CommitTransaction() err = tctx.CommitTransaction()
tt.TestFailIfErr(t, err) tt.TestFailIfErr(t, err)

View File

@ -1,6 +1,8 @@
package test
import (
"database/sql"
"github.com/glebarez/go-sqlite"
"gogs.mikescher.com/BlackForestBytes/goext/exerr"
"gogs.mikescher.com/BlackForestBytes/goext/langext"
"os"
@@ -12,5 +14,9 @@ func TestMain(m *testing.M) {
exerr.Init(exerr.ErrorPackageConfigInit{ZeroLogErrTraces: langext.PFalse, ZeroLogAllTraces: langext.PFalse})
}
if !langext.InArray("sqlite3", sql.Drivers()) {
sqlite.RegisterAsSQLITE3()
}
os.Exit(m.Run())
}
@@ -1,15 +1,16 @@
package test
import (
"blackforestbytes.com/simplecloudnotifier/api/apierr"
"blackforestbytes.com/simplecloudnotifier/models"
tt "blackforestbytes.com/simplecloudnotifier/test/util"
"fmt"
"github.com/gin-gonic/gin"
"gogs.mikescher.com/BlackForestBytes/goext/langext"
"net/url"
"testing"
"time"
"blackforestbytes.com/simplecloudnotifier/api/apierr"
"blackforestbytes.com/simplecloudnotifier/models"
tt "blackforestbytes.com/simplecloudnotifier/test/util"
"github.com/gin-gonic/gin"
"gogs.mikescher.com/BlackForestBytes/goext/langext"
)
func TestSearchMessageFTSSimple(t *testing.T) {
@@ -901,3 +902,293 @@ func TestListMessagesStringSearch(t *testing.T) {
tt.AssertEqual(t, "msgList.filter["+testdata.Name+"].len", testdata.Count, msgList.TotalCount)
}
}
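// TestDeactivatedSubscriptionListMessages toggles the active and confirmed flags of a subscription and checks which messages are visible, both via the subscriber's /api/v2/messages list and via the channel's own message list.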
func TestDeactivatedSubscriptionListMessages(t *testing.T) {
ws, baseUrl, stop := tt.StartSimpleWebserver(t)
defer stop()
data := tt.InitDefaultData(t, ws)
type subobj struct {
ChannelId string `json:"channel_id"`
ChannelInternalName string `json:"channel_internal_name"`
ChannelOwnerUserId string `json:"channel_owner_user_id"`
Confirmed bool `json:"confirmed"`
Active bool `json:"active"`
SubscriberUserId string `json:"subscriber_user_id"`
SubscriptionId string `json:"subscription_id"`
TimestampCreated string `json:"timestamp_created"`
}
type msg struct {
Title string `json:"title"`
}
type mglist struct {
Messages []msg `json:"messages"`
}
user14 := data.User[14] // Subscriber
user15 := data.User[15] // Owner
chanName := "chan_other_accepted"
subscriptionID, channelID := tt.FindSubscriptionByChanName(t, baseUrl, user14, user15.UID, chanName)
gsub0 := tt.RequestAuthGet[subobj](t, user15.AdminKey, baseUrl, fmt.Sprintf("/api/v2/users/%s/subscriptions/%s", user15.UID, subscriptionID))
tt.AssertTrue(t, "gsub1.active", gsub0.Active)
tt.AssertTrue(t, "gsub1.confirmed", gsub0.Confirmed)
tt.RequestAuthPatch[gin.H](t, user14.AdminKey, baseUrl, fmt.Sprintf("/api/v2/users/%s/subscriptions/%s", user14.UID, subscriptionID), gin.H{"active": false})
gsub1 := tt.RequestAuthGet[subobj](t, user15.AdminKey, baseUrl, fmt.Sprintf("/api/v2/users/%s/subscriptions/%s", user15.UID, subscriptionID))
tt.AssertFalse(t, "gsub1.active", gsub1.Active)
tt.AssertTrue(t, "gsub1.confirmed", gsub1.Confirmed)
gsub2 := tt.RequestAuthGet[subobj](t, user14.AdminKey, baseUrl, fmt.Sprintf("/api/v2/users/%s/subscriptions/%s", user14.UID, subscriptionID))
tt.AssertFalse(t, "gsub2.active", gsub2.Active)
tt.AssertTrue(t, "gsub2.confirmed", gsub2.Confirmed)
newMessageTitle := langext.RandBase62(48)
tt.RequestPost[gin.H](t, baseUrl, "/", gin.H{
"key": user15.AdminKey,
"user_id": user15.UID,
"channel": chanName,
"title": newMessageTitle,
})
// subscription.active == false && subscription.confirmed == true
{
msgListSub := tt.RequestAuthGet[mglist](t, user14.AdminKey, baseUrl, "/api/v2/messages")
foundActivatedMessageSub := false
for _, m := range msgListSub.Messages {
if m.Title == newMessageTitle {
foundActivatedMessageSub = true
break
}
}
tt.AssertFalse(t, "foundActivatedMessageSub", foundActivatedMessageSub)
msgListDirect := tt.RequestAuthGet[mglist](t, user14.AdminKey, baseUrl, fmt.Sprintf("/api/v2/users/%s/channels/%s/messages", user15.UID, channelID))
foundActivatedMessageDirect := false
for _, m := range msgListDirect.Messages {
if m.Title == newMessageTitle {
foundActivatedMessageDirect = true
break
}
}
tt.AssertTrue(t, "foundActivatedMessageDirect", foundActivatedMessageDirect)
}
tt.RequestAuthPatch[gin.H](t, user14.AdminKey, baseUrl, fmt.Sprintf("/api/v2/users/%s/subscriptions/%s", user14.UID, subscriptionID), gin.H{"active": true})
// subscription.active == true && subscription.confirmed == true
{
msgListSub := tt.RequestAuthGet[mglist](t, user14.AdminKey, baseUrl, "/api/v2/messages")
foundActivatedMessageSub := false
for _, m := range msgListSub.Messages {
if m.Title == newMessageTitle {
foundActivatedMessageSub = true
break
}
}
tt.AssertTrue(t, "foundActivatedMessageSub", foundActivatedMessageSub)
msgListDirect := tt.RequestAuthGet[mglist](t, user14.AdminKey, baseUrl, fmt.Sprintf("/api/v2/users/%s/channels/%s/messages", user15.UID, channelID))
foundActivatedMessageDirect := false
for _, m := range msgListDirect.Messages {
if m.Title == newMessageTitle {
foundActivatedMessageDirect = true
break
}
}
tt.AssertTrue(t, "foundActivatedMessageDirect", foundActivatedMessageDirect)
}
tt.RequestAuthPatch[gin.H](t, user15.AdminKey, baseUrl, fmt.Sprintf("/api/v2/users/%s/subscriptions/%s", user15.UID, subscriptionID), gin.H{"confirmed": false})
// subscription.active == true && subscription.confirmed == false
{
msgListSub := tt.RequestAuthGet[mglist](t, user14.AdminKey, baseUrl, "/api/v2/messages")
foundActivatedMessageSub := false
for _, m := range msgListSub.Messages {
if m.Title == newMessageTitle {
foundActivatedMessageSub = true
break
}
}
tt.AssertFalse(t, "foundActivatedMessageSub", foundActivatedMessageSub)
tt.RequestAuthGetShouldFail(t, user14.AdminKey, baseUrl, fmt.Sprintf("/api/v2/users/%s/channels/%s/messages", user15.UID, channelID), 401, apierr.USER_AUTH_FAILED)
}
tt.RequestAuthPatch[gin.H](t, user14.AdminKey, baseUrl, fmt.Sprintf("/api/v2/users/%s/subscriptions/%s", user14.UID, subscriptionID), gin.H{"active": false})
// subscription.active == false && subscription.confirmed == false
{
msgListSub := tt.RequestAuthGet[mglist](t, user14.AdminKey, baseUrl, "/api/v2/messages")
foundActivatedMessageSub := false
for _, m := range msgListSub.Messages {
if m.Title == newMessageTitle {
foundActivatedMessageSub = true
break
}
}
tt.AssertFalse(t, "foundActivatedMessageSub", foundActivatedMessageSub)
tt.RequestAuthGetShouldFail(t, user14.AdminKey, baseUrl, fmt.Sprintf("/api/v2/users/%s/channels/%s/messages", user15.UID, channelID), 401, apierr.USER_AUTH_FAILED)
}
tt.RequestAuthPatch[gin.H](t, user15.AdminKey, baseUrl, fmt.Sprintf("/api/v2/users/%s/subscriptions/%s", user15.UID, subscriptionID), gin.H{"confirmed": true})
tt.RequestAuthPatch[gin.H](t, user14.AdminKey, baseUrl, fmt.Sprintf("/api/v2/users/%s/subscriptions/%s", user14.UID, subscriptionID), gin.H{"active": true})
// subscription.active == true && subscription.confirmed == true
{
msgListSub := tt.RequestAuthGet[mglist](t, user14.AdminKey, baseUrl, "/api/v2/messages")
foundActivatedMessageSub := false
for _, m := range msgListSub.Messages {
if m.Title == newMessageTitle {
foundActivatedMessageSub = true
break
}
}
tt.AssertTrue(t, "foundActivatedMessageSub", foundActivatedMessageSub)
msgListDirect := tt.RequestAuthGet[mglist](t, user14.AdminKey, baseUrl, fmt.Sprintf("/api/v2/users/%s/channels/%s/messages", user15.UID, channelID))
foundActivatedMessageDirect := false
for _, m := range msgListDirect.Messages {
if m.Title == newMessageTitle {
foundActivatedMessageDirect = true
break
}
}
tt.AssertTrue(t, "foundActivatedMessageDirect", foundActivatedMessageDirect)
}
tt.RequestAuthDelete[gin.H](t, user14.AdminKey, baseUrl, fmt.Sprintf("/api/v2/users/%s/subscriptions/%s", user14.UID, subscriptionID), gin.H{})
// subscription -> deleted
{
msgListSub := tt.RequestAuthGet[mglist](t, user14.AdminKey, baseUrl, "/api/v2/messages")
foundActivatedMessageSub := false
for _, m := range msgListSub.Messages {
if m.Title == newMessageTitle {
foundActivatedMessageSub = true
break
}
}
tt.AssertFalse(t, "foundActivatedMessageSub", foundActivatedMessageSub)
tt.RequestAuthGetShouldFail(t, user14.AdminKey, baseUrl, fmt.Sprintf("/api/v2/users/%s/channels/%s/messages", user15.UID, channelID), 401, apierr.USER_AUTH_FAILED)
}
}
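// TestActiveSubscriptionListMessages checks that a message sent to a channel with an active, confirmed subscription shows up in the subscriber's message list.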
func TestActiveSubscriptionListMessages(t *testing.T) {
ws, baseUrl, stop := tt.StartSimpleWebserver(t)
defer stop()
data := tt.InitDefaultData(t, ws)
type subobj struct {
ChannelId string `json:"channel_id"`
ChannelInternalName string `json:"channel_internal_name"`
ChannelOwnerUserId string `json:"channel_owner_user_id"`
Confirmed bool `json:"confirmed"`
Active bool `json:"active"`
SubscriberUserId string `json:"subscriber_user_id"`
SubscriptionId string `json:"subscription_id"`
TimestampCreated string `json:"timestamp_created"`
}
type msg struct {
Title string `json:"title"`
}
type mglist struct {
Messages []msg `json:"messages"`
}
user14 := data.User[14] // Subscriber
user15 := data.User[15] // Owner
chanName := "chan_other_accepted"
subscriptionID, _ := tt.FindSubscriptionByChanName(t, baseUrl, user14, user15.UID, chanName)
gsub1 := tt.RequestAuthGet[subobj](t, user15.AdminKey, baseUrl, fmt.Sprintf("/api/v2/users/%s/subscriptions/%s", user15.UID, subscriptionID))
tt.AssertTrue(t, "gsub1.active", gsub1.Active)
newMessageTitle := langext.RandBase62(48)
tt.RequestPost[gin.H](t, baseUrl, "/", gin.H{
"key": user15.AdminKey,
"user_id": user15.UID,
"channel": chanName,
"title": newMessageTitle,
})
{
msgListSub := tt.RequestAuthGet[mglist](t, user14.AdminKey, baseUrl, "/api/v2/messages")
foundActivatedMessageSub := false
for _, m := range msgListSub.Messages {
if m.Title == newMessageTitle {
foundActivatedMessageSub = true
break
}
}
tt.AssertTrue(t, "foundActivatedMessageSub", foundActivatedMessageSub)
}
}
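// TestUnconfirmedSubscriptionListMessages checks that messages from a channel with a still unconfirmed subscription request do not show up in the subscriber's message list.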
func TestUnconfirmedSubscriptionListMessages(t *testing.T) {
ws, baseUrl, stop := tt.StartSimpleWebserver(t)
defer stop()
data := tt.InitDefaultData(t, ws)
type subobj struct {
ChannelId string `json:"channel_id"`
ChannelInternalName string `json:"channel_internal_name"`
ChannelOwnerUserId string `json:"channel_owner_user_id"`
Confirmed bool `json:"confirmed"`
Active bool `json:"active"`
SubscriberUserId string `json:"subscriber_user_id"`
SubscriptionId string `json:"subscription_id"`
TimestampCreated string `json:"timestamp_created"`
}
type msg struct {
Title string `json:"title"`
}
type mglist struct {
Messages []msg `json:"messages"`
}
user14 := data.User[14] // Subscriber
user15 := data.User[15] // Owner
chanName := "chan_other_request"
subscriptionID, _ := tt.FindSubscriptionByChanName(t, baseUrl, user14, user15.UID, chanName)
gsub1 := tt.RequestAuthGet[subobj](t, user15.AdminKey, baseUrl, fmt.Sprintf("/api/v2/users/%s/subscriptions/%s", user15.UID, subscriptionID))
tt.AssertTrue(t, "gsub1.active", gsub1.Active)
tt.AssertFalse(t, "gsub1.confirmed", gsub1.Confirmed)
newMessageTitle := langext.RandBase62(48)
tt.RequestPost[gin.H](t, baseUrl, "/", gin.H{
"key": user15.AdminKey,
"user_id": user15.UID,
"channel": chanName,
"title": newMessageTitle,
})
msgList := tt.RequestAuthGet[mglist](t, user14.AdminKey, baseUrl, "/api/v2/messages")
foundActivatedMessage := false
for _, m := range msgList.Messages {
if m.Title == newMessageTitle {
foundActivatedMessage = true
break
}
}
tt.AssertFalse(t, "foundActivatedMessage", foundActivatedMessage)
}
@@ -33,6 +33,7 @@ func TestResponseChannel(t *testing.T) {
"channel_internal_name": "string",
"timestamp_created": "rfc3339",
"confirmed": "bool",
"active": "bool",
},
})
}
@@ -179,6 +180,7 @@ func TestResponseMessage(t *testing.T) {
"sender_user_id": "id",
"channel_internal_name": "string",
"channel_id": "id",
"channel_owner_user_id": "id",
"sender_name": "string",
"sender_ip": "string",
"timestamp": "rfc3339",
@@ -207,6 +209,7 @@ func TestResponseSubscription(t *testing.T) {
"channel_internal_name": "string",
"timestamp_created": "rfc3339",
"confirmed": "bool",
"active": "bool",
})
}
@@ -7,6 +7,7 @@ import (
tt "blackforestbytes.com/simplecloudnotifier/test/util"
"fmt"
"github.com/gin-gonic/gin"
"gogs.mikescher.com/BlackForestBytes/goext/langext"
"math/rand/v2"
"net/url"
"strings"
@@ -1807,3 +1808,176 @@ func TestSendWithPermissionSendKey(t *testing.T) {
func TestSendDeliveryRetry(t *testing.T) {
t.SkipNow() //TODO
}
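// TestDeactivatedSubscriptionReceiveMessage uses the push.TestSink to verify that push notifications are only delivered while the subscription is both active and confirmed, and no longer after it has been deactivated or deleted.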
func TestDeactivatedSubscriptionReceiveMessage(t *testing.T) {
ws, baseUrl, stop := tt.StartSimpleWebserver(t)
defer stop()
data := tt.InitDefaultData(t, ws)
type subobj struct {
ChannelId string `json:"channel_id"`
ChannelInternalName string `json:"channel_internal_name"`
ChannelOwnerUserId string `json:"channel_owner_user_id"`
Confirmed bool `json:"confirmed"`
Active bool `json:"active"`
SubscriberUserId string `json:"subscriber_user_id"`
SubscriptionId string `json:"subscription_id"`
TimestampCreated string `json:"timestamp_created"`
}
pusher := ws.Pusher.(*push.TestSink)
pusher.Clear()
user14 := data.User[14] // Subscriber
user15 := data.User[15] // Owner
chanName := "chan_other_accepted"
subscriptionID, _ := tt.FindSubscriptionByChanName(t, baseUrl, user14, user15.UID, chanName)
gsub0 := tt.RequestAuthGet[subobj](t, user15.AdminKey, baseUrl, fmt.Sprintf("/api/v2/users/%s/subscriptions/%s", user15.UID, subscriptionID))
tt.AssertTrue(t, "gsub0.active", gsub0.Active)
tt.AssertTrue(t, "gsub0.confirmed", gsub0.Confirmed)
tt.RequestAuthPatch[gin.H](t, user14.AdminKey, baseUrl, fmt.Sprintf("/api/v2/users/%s/subscriptions/%s", user14.UID, subscriptionID), gin.H{"active": false})
gsub1 := tt.RequestAuthGet[subobj](t, user15.AdminKey, baseUrl, fmt.Sprintf("/api/v2/users/%s/subscriptions/%s", user15.UID, subscriptionID))
tt.AssertFalse(t, "gsub1.active", gsub1.Active)
tt.AssertTrue(t, "gsub1.confirmed", gsub1.Confirmed)
// sub is active=false && confirmed=true
{
newMessageTitle := langext.RandBase62(48)
tt.RequestPost[gin.H](t, baseUrl, "/", gin.H{
"key": user15.AdminKey,
"user_id": user15.UID,
"channel": chanName,
"title": newMessageTitle,
})
pushObj := langext.ArrFirstOrNil(pusher.Data, func(d push.SinkData) bool { return d.Client.UserID.String() == user14.UID })
tt.AssertNil(t, "pushObj", pushObj)
pusher.Clear()
}
tt.RequestAuthPatch[gin.H](t, user14.AdminKey, baseUrl, fmt.Sprintf("/api/v2/users/%s/subscriptions/%s", user14.UID, subscriptionID), gin.H{"active": true})
gsub2 := tt.RequestAuthGet[subobj](t, user15.AdminKey, baseUrl, fmt.Sprintf("/api/v2/users/%s/subscriptions/%s", user15.UID, subscriptionID))
tt.AssertTrue(t, "gsub1.active", gsub2.Active)
tt.AssertTrue(t, "gsub1.confirmed", gsub2.Confirmed)
// sub is active=true && confirmed=true
{
newMessageTitle := langext.RandBase62(48)
msg1 := tt.RequestPost[gin.H](t, baseUrl, "/", gin.H{
"key": user15.AdminKey,
"user_id": user15.UID,
"channel": chanName,
"title": newMessageTitle,
})
pushObj := langext.ArrFirstOrNil(pusher.Data, func(d push.SinkData) bool { return d.Client.UserID.String() == user14.UID })
tt.AssertNotNil(t, "pushObj", pushObj)
tt.AssertStrRepEqual(t, "msg.title", newMessageTitle, pushObj.Message.Title)
tt.AssertStrRepEqual(t, "msg.content", nil, pushObj.Message.Content)
tt.AssertStrRepEqual(t, "msg.scn_msg_id", msg1["scn_msg_id"], pushObj.Message.MessageID)
tt.AssertStrRepEqual(t, "msg.scn_msg_id", user14.UID, pushObj.Client.UserID)
pusher.Clear()
}
tt.RequestAuthPatch[gin.H](t, user15.AdminKey, baseUrl, fmt.Sprintf("/api/v2/users/%s/subscriptions/%s", user15.UID, subscriptionID), gin.H{"confirmed": false})
gsub3 := tt.RequestAuthGet[subobj](t, user14.AdminKey, baseUrl, fmt.Sprintf("/api/v2/users/%s/subscriptions/%s", user14.UID, subscriptionID))
tt.AssertTrue(t, "gsub1.active", gsub3.Active)
tt.AssertFalse(t, "gsub1.confirmed", gsub3.Confirmed)
// sub is active=true && confirmed=false
{
newMessageTitle := langext.RandBase62(48)
tt.RequestPost[gin.H](t, baseUrl, "/", gin.H{
"key": user15.AdminKey,
"user_id": user15.UID,
"channel": chanName,
"title": newMessageTitle,
})
pushObj := langext.ArrFirstOrNil(pusher.Data, func(d push.SinkData) bool { return d.Client.UserID.String() == user14.UID })
tt.AssertNil(t, "pushObj", pushObj)
pusher.Clear()
}
tt.RequestAuthDelete[gin.H](t, user15.AdminKey, baseUrl, fmt.Sprintf("/api/v2/users/%s/subscriptions/%s", user15.UID, subscriptionID), gin.H{})
tt.RequestAuthGetShouldFail(t, user14.AdminKey, baseUrl, fmt.Sprintf("/api/v2/users/%s/subscriptions/%s", user14.UID, subscriptionID), 404, apierr.SUBSCRIPTION_NOT_FOUND)
tt.RequestAuthGetShouldFail(t, user15.AdminKey, baseUrl, fmt.Sprintf("/api/v2/users/%s/subscriptions/%s", user15.UID, subscriptionID), 404, apierr.SUBSCRIPTION_NOT_FOUND)
// sub is deleted
{
newMessageTitle := langext.RandBase62(48)
tt.RequestPost[gin.H](t, baseUrl, "/", gin.H{
"key": user15.AdminKey,
"user_id": user15.UID,
"channel": chanName,
"title": newMessageTitle,
})
pushObj := langext.ArrFirstOrNil(pusher.Data, func(d push.SinkData) bool { return d.Client.UserID.String() == user14.UID })
tt.AssertNil(t, "pushObj", pushObj)
pusher.Clear()
}
}
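// TestActiveSubscriptionReceiveMessage verifies that an active, confirmed subscription receives the push notification for a newly sent message.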
func TestActiveSubscriptionReceiveMessage(t *testing.T) {
ws, baseUrl, stop := tt.StartSimpleWebserver(t)
defer stop()
data := tt.InitDefaultData(t, ws)
type subobj struct {
ChannelId string `json:"channel_id"`
ChannelInternalName string `json:"channel_internal_name"`
ChannelOwnerUserId string `json:"channel_owner_user_id"`
Confirmed bool `json:"confirmed"`
Active bool `json:"active"`
SubscriberUserId string `json:"subscriber_user_id"`
SubscriptionId string `json:"subscription_id"`
TimestampCreated string `json:"timestamp_created"`
}
pusher := ws.Pusher.(*push.TestSink)
pusher.Clear()
user14 := data.User[14] // Subscriber
user15 := data.User[15] // Owner
chanName := "chan_other_accepted"
subscriptionID, _ := tt.FindSubscriptionByChanName(t, baseUrl, user14, user15.UID, chanName)
gsub1 := tt.RequestAuthGet[subobj](t, user15.AdminKey, baseUrl, fmt.Sprintf("/api/v2/users/%s/subscriptions/%s", user15.UID, subscriptionID))
tt.AssertTrue(t, "gsub1.active", gsub1.Active)
tt.AssertTrue(t, "gsub1.confirmed", gsub1.Confirmed)
newMessageTitle := langext.RandBase62(48)
msg1 := tt.RequestPost[gin.H](t, baseUrl, "/", gin.H{
"key": user15.AdminKey,
"user_id": user15.UID,
"channel": chanName,
"title": newMessageTitle,
})
pushObj := langext.ArrFirstOrNil(pusher.Data, func(d push.SinkData) bool { return d.Client.UserID.String() == user14.UID })
tt.AssertNotNil(t, "pushObj", pushObj)
tt.AssertStrRepEqual(t, "msg.title", newMessageTitle, pushObj.Message.Title)
tt.AssertStrRepEqual(t, "msg.content", nil, pushObj.Message.Content)
tt.AssertStrRepEqual(t, "msg.scn_msg_id", msg1["scn_msg_id"], pushObj.Message.MessageID)
tt.AssertStrRepEqual(t, "msg.scn_msg_id", user14.UID, pushObj.Client.UserID)
pusher.Clear()
}
@@ -1,12 +1,13 @@
package test
import (
"fmt"
"testing"
"blackforestbytes.com/simplecloudnotifier/api/apierr"
tt "blackforestbytes.com/simplecloudnotifier/test/util"
"fmt"
"github.com/gin-gonic/gin"
"gogs.mikescher.com/BlackForestBytes/goext/langext"
"testing"
)
func TestListSubscriptionsOfUser(t *testing.T) {
@@ -1240,3 +1241,106 @@ func TestCancelOutgoingSubscription(t *testing.T) {
tt.RequestAuthGetShouldFail(t, data1.AdminKey, baseUrl, fmt.Sprintf("/api/v2/users/%s/subscriptions/%s", data1.UID, sub1.SubscriptionId), 404, apierr.SUBSCRIPTION_NOT_FOUND)
}
}
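// TestSubscriptionDeactivate verifies that both the subscriber and the channel owner can toggle the active flag via PATCH and that both sides see the updated value.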
func TestSubscriptionDeactivate(t *testing.T) {
ws, baseUrl, stop := tt.StartSimpleWebserver(t)
defer stop()
data := tt.InitDefaultData(t, ws)
user14 := data.User[14]
user15 := data.User[15]
chanName := "chan_other_accepted"
subscriptionID, _ := tt.FindSubscriptionByChanName(t, baseUrl, user14, user15.UID, chanName)
type subobj struct {
SubscriptionId string `json:"subscription_id"`
Active bool `json:"active"`
}
initialSub := tt.RequestAuthGet[subobj](t, user14.AdminKey, baseUrl, fmt.Sprintf("/api/v2/users/%s/subscriptions/%s", user14.UID, subscriptionID))
tt.AssertTrue(t, "initialSub.Active", initialSub.Active)
// subscriber deactivates
{
tt.RequestAuthPatch[subobj](t, user14.AdminKey, baseUrl, fmt.Sprintf("/api/v2/users/%s/subscriptions/%s", user14.UID, subscriptionID), gin.H{
"active": false,
})
sub1 := tt.RequestAuthGet[subobj](t, user14.AdminKey, baseUrl, fmt.Sprintf("/api/v2/users/%s/subscriptions/%s", user14.UID, subscriptionID))
tt.AssertEqual(t, "sub1.Active", false, sub1.Active)
sub2 := tt.RequestAuthGet[subobj](t, user15.AdminKey, baseUrl, fmt.Sprintf("/api/v2/users/%s/subscriptions/%s", user15.UID, subscriptionID))
tt.AssertEqual(t, "sub2.Active", false, sub2.Active)
}
// subscriber activates
{
tt.RequestAuthPatch[subobj](t, user14.AdminKey, baseUrl, fmt.Sprintf("/api/v2/users/%s/subscriptions/%s", user14.UID, subscriptionID), gin.H{
"active": true,
})
sub1 := tt.RequestAuthGet[subobj](t, user14.AdminKey, baseUrl, fmt.Sprintf("/api/v2/users/%s/subscriptions/%s", user14.UID, subscriptionID))
tt.AssertEqual(t, "sub1.Active", true, sub1.Active)
sub2 := tt.RequestAuthGet[subobj](t, user15.AdminKey, baseUrl, fmt.Sprintf("/api/v2/users/%s/subscriptions/%s", user15.UID, subscriptionID))
tt.AssertEqual(t, "sub2.Active", true, sub2.Active)
}
// owner deactivates
{
tt.RequestAuthPatch[subobj](t, user15.AdminKey, baseUrl, fmt.Sprintf("/api/v2/users/%s/subscriptions/%s", user15.UID, subscriptionID), gin.H{
"active": false,
})
sub1 := tt.RequestAuthGet[subobj](t, user14.AdminKey, baseUrl, fmt.Sprintf("/api/v2/users/%s/subscriptions/%s", user14.UID, subscriptionID))
tt.AssertEqual(t, "sub1.Active", false, sub1.Active)
sub2 := tt.RequestAuthGet[subobj](t, user15.AdminKey, baseUrl, fmt.Sprintf("/api/v2/users/%s/subscriptions/%s", user15.UID, subscriptionID))
tt.AssertEqual(t, "sub2.Active", false, sub2.Active)
}
// owner activates
{
tt.RequestAuthPatch[subobj](t, user15.AdminKey, baseUrl, fmt.Sprintf("/api/v2/users/%s/subscriptions/%s", user15.UID, subscriptionID), gin.H{
"active": true,
})
sub1 := tt.RequestAuthGet[subobj](t, user14.AdminKey, baseUrl, fmt.Sprintf("/api/v2/users/%s/subscriptions/%s", user14.UID, subscriptionID))
tt.AssertEqual(t, "sub1.Active", true, sub1.Active)
sub2 := tt.RequestAuthGet[subobj](t, user15.AdminKey, baseUrl, fmt.Sprintf("/api/v2/users/%s/subscriptions/%s", user15.UID, subscriptionID))
tt.AssertEqual(t, "sub2.Active", true, sub2.Active)
}
}
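// TestSubscriptionActivate verifies that a previously deactivated subscription can be re-activated by the subscriber.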
func TestSubscriptionActivate(t *testing.T) {
ws, baseUrl, stop := tt.StartSimpleWebserver(t)
defer stop()
data := tt.InitDefaultData(t, ws)
user14 := data.User[14]
user15 := data.User[15]
chanName := "chan_other_accepted"
subscriptionID, _ := tt.FindSubscriptionByChanName(t, baseUrl, user14, user15.UID, chanName)
type subobj struct {
SubscriptionId string `json:"subscription_id"`
Active bool `json:"active"`
}
tt.RequestAuthPatch[subobj](t, user14.AdminKey, baseUrl, fmt.Sprintf("/api/v2/users/%s/subscriptions/%s", user14.UID, subscriptionID), gin.H{
"active": false,
})
deactivatedSub := tt.RequestAuthGet[subobj](t, user14.AdminKey, baseUrl, fmt.Sprintf("/api/v2/users/%s/subscriptions/%s", user14.UID, subscriptionID))
tt.AssertEqual(t, "deactivatedSub.Active", false, deactivatedSub.Active)
tt.RequestAuthPatch[subobj](t, user14.AdminKey, baseUrl, fmt.Sprintf("/api/v2/users/%s/subscriptions/%s", user14.UID, subscriptionID), gin.H{
"active": true,
})
finalSub := tt.RequestAuthGet[subobj](t, user14.AdminKey, baseUrl, fmt.Sprintf("/api/v2/users/%s/subscriptions/%s", user14.UID, subscriptionID))
tt.AssertTrue(t, "finalSub.Active", finalSub.Active)
}
@@ -185,8 +185,6 @@ func TestFailedUgradeUserToPro(t *testing.T) {
}
func TestDeleteUser(t *testing.T) {
t.SkipNow() // TODO DeleteUser Not implemented
_, baseUrl, stop := tt.StartSimpleWebserver(t)
defer stop()
@@ -199,15 +197,18 @@ func TestDeleteUser(t *testing.T) {
uid := fmt.Sprintf("%v", r0["user_id"])
admintok := r0["admin_key"].(string)
readtok := r0["read_key"].(string)
sendtok := r0["send_key"].(string)
tt.RequestAuthGet[gin.H](t, admintok, baseUrl, "/api/v2/users/"+uid)
tt.RequestAuthDeleteShouldFail(t, admintok, baseUrl, "/api/v2/users/"+uid, nil, 401, apierr.USER_AUTH_FAILED)
tt.RequestAuthDeleteShouldFail(t, readtok, baseUrl, "/api/v2/users/"+uid, nil, 401, apierr.USER_AUTH_FAILED)
tt.RequestAuthDeleteShouldFail(t, sendtok, baseUrl, "/api/v2/users/"+uid, nil, 401, apierr.USER_AUTH_FAILED)
tt.RequestAuthDelete[tt.Void](t, admintok, baseUrl, "/api/v2/users/"+uid, nil)
tt.RequestAuthGetShouldFail(t, admintok, baseUrl, "/api/v2/users/"+uid, 404, apierr.USER_NOT_FOUND)
tt.RequestAuthGetShouldFail(t, admintok, baseUrl, "/api/v2/users/"+uid, 401, apierr.USER_AUTH_FAILED)
}
func TestCreateProUser(t *testing.T) { func TestCreateProUser(t *testing.T) {
@@ -177,6 +177,14 @@ func AssertTrue(t *testing.T, key string, v bool) {
}
}
func AssertFalse(t *testing.T, key string, v bool) {
if v {
t.Errorf("AssertFalse(%s) failed", key)
t.Error(string(debug.Stack()))
t.FailNow()
}
}
func AssertNotDefault[T comparable](t *testing.T, key string, v T) {
if v == *new(T) {
t.Errorf("AssertNotDefault(%s) failed", key)
@@ -307,7 +315,7 @@ func AssertAny(v any) {
}
func AssertNil(t *testing.T, key string, v any) {
if v != nil {
if !langext.IsNil(v) {
t.Errorf("AssertNil(%s) failed - actual value:\n%+v", key, v)
t.Error(string(debug.Stack()))
t.FailNow()
@@ -315,7 +323,7 @@ func AssertNil(t *testing.T, key string, v any) {
}
func AssertNotNil(t *testing.T, key string, v any) {
if v == nil {
if langext.IsNil(v) {
t.Errorf("AssertNotNil(%s) failed", key)
t.Error(string(debug.Stack()))
t.FailNow()
@@ -467,8 +467,10 @@ func InitDefaultData(t *testing.T, ws *logic.Application) DefData {
users[i].Subscriptions = langext.ArrMap(r0.Subs, func(v ssub) string { return v.ID })
}
// Sub/Unsub for Users 12+13
// Sub/Unsub for Users 14+15
// - User 14 is not subscribed to (own) channel "chan_self_unsub"
// - User 14 has an unconfirmed request to User15's channel "chan_other_request"
// - User 14 has a confirmed+active subscription to User15's channel "chan_other_accepted"
{
doUnsubscribe(t, baseUrl, users[14], users[14], "chan_self_unsub")
doSubscribe(t, baseUrl, users[14], users[15], "chan_other_request")
@@ -0,0 +1,34 @@
package util
import (
"fmt"
"testing"
)
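// FindSubscriptionByChanName looks up the outgoing subscription of the given subscriber to the channel chanName owned by ownerUID and returns its subscription id and channel id; the test is failed if no matching subscription exists.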
func FindSubscriptionByChanName(t *testing.T, baseUrl string, subscriber Userdat, ownerUID string, chanName string) (subscriptionID string, channelID string) {
type subobj struct {
SubscriptionId string `json:"subscription_id"`
ChannelId string `json:"channel_id"`
ChannelInternalName string `json:"channel_internal_name"`
ChannelOwnerUserId string `json:"channel_owner_user_id"`
Confirmed bool `json:"confirmed"`
Active bool `json:"active"`
}
type sublist struct {
Subscriptions []subobj `json:"subscriptions"`
}
subs := RequestAuthGet[sublist](t, subscriber.AdminKey, baseUrl, fmt.Sprintf("/api/v2/users/%s/subscriptions?direction=outgoing", subscriber.UID))
for _, sub := range subs.Subscriptions {
if sub.ChannelOwnerUserId == ownerUID && sub.ChannelInternalName == chanName {
fullSub := RequestAuthGet[subobj](t, subscriber.AdminKey, baseUrl, fmt.Sprintf("/api/v2/users/%s/subscriptions/%s", subscriber.UID, sub.SubscriptionId))
if fullSub.ChannelOwnerUserId == ownerUID && fullSub.ChannelInternalName == chanName {
return fullSub.SubscriptionId, fullSub.ChannelId
}
}
}
t.Fatalf("Could not find subscription for user %s to channel %s owned by %s", subscriber.UID, chanName, ownerUID)
return "", ""
}