create migration script for old data

Mike Schwörer 2023-01-15 06:30:30 +01:00
parent 82bc887767
commit 89fd0dfed7
Signed by: Mikescher
GPG Key ID: D3C7172E0A70F8CF
34 changed files with 1617 additions and 58 deletions

androidExportReader/.gitignore vendored Normal file (+58 lines)

@ -0,0 +1,58 @@
# Created by https://www.toptal.com/developers/gitignore/api/java,gradle
# Edit at https://www.toptal.com/developers/gitignore?templates=java,gradle
### Java ###
# Compiled class file
*.class
# Log file
*.log
# BlueJ files
*.ctxt
# Mobile Tools for Java (J2ME)
.mtj.tmp/
# Package Files #
*.jar
*.war
*.nar
*.ear
*.zip
*.tar.gz
*.rar
# virtual machine crash logs, see http://www.java.com/en/download/help/error_hotspot.xml
hs_err_pid*
replay_pid*
### Gradle ###
.gradle
**/build/
!src/**/build/
# Ignore Gradle GUI config
gradle-app.setting
# Avoid ignoring Gradle wrapper jar file (.jar files are usually ignored)
!gradle-wrapper.jar
# Avoid ignoring Gradle wrapper properties
!gradle-wrapper.properties
# Cache of project
.gradletasknamecache
# Eclipse Gradle plugin generated files
# Eclipse Core
.project
# JDT-specific (Eclipse Java Development Tools)
.classpath
### Gradle Patch ###
# Java heap dump
*.hprof
# End of https://www.toptal.com/developers/gitignore/api/java,gradle

androidExportReader/.idea/.gitignore vendored Normal file (+8 lines)

@ -0,0 +1,8 @@
# Default ignored files
/shelf/
/workspace.xml
# Editor-based HTTP Client requests
/httpRequests/
# Datasource local storage ignored files
/dataSources/
/dataSources.local.xml


@ -0,0 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="CompilerConfiguration">
<bytecodeTargetLevel target="18" />
</component>
</project>


@ -0,0 +1,16 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="GradleSettings">
<option name="linkedExternalProjectsSettings">
<GradleProjectSettings>
<option name="distributionType" value="DEFAULT_WRAPPED" />
<option name="externalProjectPath" value="$PROJECT_DIR$" />
<option name="modules">
<set>
<option value="$PROJECT_DIR$" />
</set>
</option>
</GradleProjectSettings>
</option>
</component>
</project>


@ -0,0 +1,11 @@
<component name="InspectionProjectProfileManager">
<profile version="1.0">
<option name="myName" value="Project Default" />
<inspection_tool class="JavadocReference" enabled="true" level="WARNING" enabled_by_default="true" editorAttributes="WARNING_ATTRIBUTES" />
<inspection_tool class="SpellCheckingInspection" enabled="false" level="TYPO" enabled_by_default="false">
<option name="processCode" value="true" />
<option name="processLiterals" value="true" />
<option name="processComments" value="true" />
</inspection_tool>
</profile>
</component>


@ -0,0 +1,25 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="RemoteRepositoriesConfiguration">
<remote-repository>
<option name="id" value="central" />
<option name="name" value="Maven Central repository" />
<option name="url" value="https://repo1.maven.org/maven2" />
</remote-repository>
<remote-repository>
<option name="id" value="jboss.community" />
<option name="name" value="JBoss Community repository" />
<option name="url" value="https://repository.jboss.org/nexus/content/repositories/public/" />
</remote-repository>
<remote-repository>
<option name="id" value="MavenRepo" />
<option name="name" value="MavenRepo" />
<option name="url" value="https://repo.maven.apache.org/maven2/" />
</remote-repository>
<remote-repository>
<option name="id" value="maven" />
<option name="name" value="maven" />
<option name="url" value="https://jitpack.io" />
</remote-repository>
</component>
</project>


@ -0,0 +1,10 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="ExternalStorageConfigurationManager" enabled="true" />
<component name="FrameworkDetectionExcludesConfiguration">
<file type="web" url="file://$PROJECT_DIR$" />
</component>
<component name="ProjectRootManager" version="2" languageLevel="JDK_18" default="true" project-jdk-name="openjdk-18" project-jdk-type="JavaSDK">
<output url="file://$PROJECT_DIR$/out" />
</component>
</project>


@ -0,0 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="VcsDirectoryMappings">
<mapping directory="$PROJECT_DIR$/.." vcs="Git" />
</component>
</project>


@ -0,0 +1,47 @@
buildscript {
repositories {
gradlePluginPortal()
}
dependencies {
classpath 'gradle.plugin.com.github.johnrengelman:shadow:7.1.2'
}
}
plugins {
id 'java'
id("com.github.johnrengelman.shadow") version "7.1.2"
id 'application'
}
group 'com.blackforestbytes'
version '1.0-SNAPSHOT'
repositories {
mavenCentral()
maven { url "https://jitpack.io" }
}
application {
mainClass = 'com.blackforestbytes.Main'
}
jar {
manifest {
attributes 'Main-Class': application.mainClass
}
}
tasks.jar {
manifest.attributes["Main-Class"] = application.mainClass
}
dependencies {
implementation 'com.github.RalleYTN:SimpleJSON:2.1.1'
testImplementation 'org.junit.jupiter:junit-jupiter-api:5.8.1'
testRuntimeOnly 'org.junit.jupiter:junit-jupiter-engine:5.8.1'
}
test {
useJUnitPlatform()
}

Binary file not shown.


@ -0,0 +1,5 @@
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-7.5.1-bin.zip
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists

androidExportReader/gradlew vendored Executable file (+240 lines)

@ -0,0 +1,240 @@
#!/bin/sh
#
# Copyright © 2015-2021 the original authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
##############################################################################
#
# Gradle start up script for POSIX generated by Gradle.
#
# Important for running:
#
# (1) You need a POSIX-compliant shell to run this script. If your /bin/sh is
# noncompliant, but you have some other compliant shell such as ksh or
# bash, then to run this script, type that shell name before the whole
# command line, like:
#
# ksh Gradle
#
# Busybox and similar reduced shells will NOT work, because this script
# requires all of these POSIX shell features:
# * functions;
# * expansions «$var», «${var}», «${var:-default}», «${var+SET}»,
# «${var#prefix}», «${var%suffix}», and «$( cmd )»;
# * compound commands having a testable exit status, especially «case»;
# * various built-in commands including «command», «set», and «ulimit».
#
# Important for patching:
#
# (2) This script targets any POSIX shell, so it avoids extensions provided
# by Bash, Ksh, etc; in particular arrays are avoided.
#
# The "traditional" practice of packing multiple parameters into a
# space-separated string is a well documented source of bugs and security
# problems, so this is (mostly) avoided, by progressively accumulating
# options in "$@", and eventually passing that to Java.
#
# Where the inherited environment variables (DEFAULT_JVM_OPTS, JAVA_OPTS,
# and GRADLE_OPTS) rely on word-splitting, this is performed explicitly;
# see the in-line comments for details.
#
# There are tweaks for specific operating systems such as AIX, CygWin,
# Darwin, MinGW, and NonStop.
#
# (3) This script is generated from the Groovy template
# https://github.com/gradle/gradle/blob/master/subprojects/plugins/src/main/resources/org/gradle/api/internal/plugins/unixStartScript.txt
# within the Gradle project.
#
# You can find Gradle at https://github.com/gradle/gradle/.
#
##############################################################################
# Attempt to set APP_HOME
# Resolve links: $0 may be a link
app_path=$0
# Need this for daisy-chained symlinks.
while
APP_HOME=${app_path%"${app_path##*/}"} # leaves a trailing /; empty if no leading path
[ -h "$app_path" ]
do
ls=$( ls -ld "$app_path" )
link=${ls#*' -> '}
case $link in #(
/*) app_path=$link ;; #(
*) app_path=$APP_HOME$link ;;
esac
done
APP_HOME=$( cd "${APP_HOME:-./}" && pwd -P ) || exit
APP_NAME="Gradle"
APP_BASE_NAME=${0##*/}
# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"'
# Use the maximum available, or set MAX_FD != -1 to use that value.
MAX_FD=maximum
warn () {
echo "$*"
} >&2
die () {
echo
echo "$*"
echo
exit 1
} >&2
# OS specific support (must be 'true' or 'false').
cygwin=false
msys=false
darwin=false
nonstop=false
case "$( uname )" in #(
CYGWIN* ) cygwin=true ;; #(
Darwin* ) darwin=true ;; #(
MSYS* | MINGW* ) msys=true ;; #(
NONSTOP* ) nonstop=true ;;
esac
CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
# Determine the Java command to use to start the JVM.
if [ -n "$JAVA_HOME" ] ; then
if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
# IBM's JDK on AIX uses strange locations for the executables
JAVACMD=$JAVA_HOME/jre/sh/java
else
JAVACMD=$JAVA_HOME/bin/java
fi
if [ ! -x "$JAVACMD" ] ; then
die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi
else
JAVACMD=java
which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi
# Increase the maximum file descriptors if we can.
if ! "$cygwin" && ! "$darwin" && ! "$nonstop" ; then
case $MAX_FD in #(
max*)
MAX_FD=$( ulimit -H -n ) ||
warn "Could not query maximum file descriptor limit"
esac
case $MAX_FD in #(
'' | soft) :;; #(
*)
ulimit -n "$MAX_FD" ||
warn "Could not set maximum file descriptor limit to $MAX_FD"
esac
fi
# Collect all arguments for the java command, stacking in reverse order:
# * args from the command line
# * the main class name
# * -classpath
# * -D...appname settings
# * --module-path (only if needed)
# * DEFAULT_JVM_OPTS, JAVA_OPTS, and GRADLE_OPTS environment variables.
# For Cygwin or MSYS, switch paths to Windows format before running java
if "$cygwin" || "$msys" ; then
APP_HOME=$( cygpath --path --mixed "$APP_HOME" )
CLASSPATH=$( cygpath --path --mixed "$CLASSPATH" )
JAVACMD=$( cygpath --unix "$JAVACMD" )
# Now convert the arguments - kludge to limit ourselves to /bin/sh
for arg do
if
case $arg in #(
-*) false ;; # don't mess with options #(
/?*) t=${arg#/} t=/${t%%/*} # looks like a POSIX filepath
[ -e "$t" ] ;; #(
*) false ;;
esac
then
arg=$( cygpath --path --ignore --mixed "$arg" )
fi
# Roll the args list around exactly as many times as the number of
# args, so each arg winds up back in the position where it started, but
# possibly modified.
#
# NB: a `for` loop captures its iteration list before it begins, so
# changing the positional parameters here affects neither the number of
# iterations, nor the values presented in `arg`.
shift # remove old arg
set -- "$@" "$arg" # push replacement arg
done
fi
# Collect all arguments for the java command;
# * $DEFAULT_JVM_OPTS, $JAVA_OPTS, and $GRADLE_OPTS can contain fragments of
# shell script including quotes and variable substitutions, so put them in
# double quotes to make sure that they get re-expanded; and
# * put everything else in single quotes, so that it's not re-expanded.
set -- \
"-Dorg.gradle.appname=$APP_BASE_NAME" \
-classpath "$CLASSPATH" \
org.gradle.wrapper.GradleWrapperMain \
"$@"
# Stop when "xargs" is not available.
if ! command -v xargs >/dev/null 2>&1
then
die "xargs is not available"
fi
# Use "xargs" to parse quoted args.
#
# With -n1 it outputs one arg per line, with the quotes and backslashes removed.
#
# In Bash we could simply go:
#
# readarray ARGS < <( xargs -n1 <<<"$var" ) &&
# set -- "${ARGS[@]}" "$@"
#
# but POSIX shell has neither arrays nor command substitution, so instead we
# post-process each arg (as a line of input to sed) to backslash-escape any
# character that might be a shell metacharacter, then use eval to reverse
# that process (while maintaining the separation between arguments), and wrap
# the whole thing up as a single "set" statement.
#
# This will of course break if any of these variables contains a newline or
# an unmatched quote.
#
eval "set -- $(
printf '%s\n' "$DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS" |
xargs -n1 |
sed ' s~[^-[:alnum:]+,./:=@_]~\\&~g; ' |
tr '\n' ' '
)" '"$@"'
exec "$JAVACMD" "$@"

androidExportReader/gradlew.bat vendored Normal file (+91 lines)

@ -0,0 +1,91 @@
@rem
@rem Copyright 2015 the original author or authors.
@rem
@rem Licensed under the Apache License, Version 2.0 (the "License");
@rem you may not use this file except in compliance with the License.
@rem You may obtain a copy of the License at
@rem
@rem https://www.apache.org/licenses/LICENSE-2.0
@rem
@rem Unless required by applicable law or agreed to in writing, software
@rem distributed under the License is distributed on an "AS IS" BASIS,
@rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@rem See the License for the specific language governing permissions and
@rem limitations under the License.
@rem
@if "%DEBUG%"=="" @echo off
@rem ##########################################################################
@rem
@rem Gradle startup script for Windows
@rem
@rem ##########################################################################
@rem Set local scope for the variables with windows NT shell
if "%OS%"=="Windows_NT" setlocal
set DIRNAME=%~dp0
if "%DIRNAME%"=="" set DIRNAME=.
set APP_BASE_NAME=%~n0
set APP_HOME=%DIRNAME%
@rem Resolve any "." and ".." in APP_HOME to make it shorter.
for %%i in ("%APP_HOME%") do set APP_HOME=%%~fi
@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m"
@rem Find java.exe
if defined JAVA_HOME goto findJavaFromJavaHome
set JAVA_EXE=java.exe
%JAVA_EXE% -version >NUL 2>&1
if %ERRORLEVEL% equ 0 goto execute
echo.
echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.
goto fail
:findJavaFromJavaHome
set JAVA_HOME=%JAVA_HOME:"=%
set JAVA_EXE=%JAVA_HOME%/bin/java.exe
if exist "%JAVA_EXE%" goto execute
echo.
echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.
goto fail
:execute
@rem Setup the command line
set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
@rem Execute Gradle
"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %*
:end
@rem End local scope for the variables with windows NT shell
if %ERRORLEVEL% equ 0 goto mainEnd
:fail
rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
rem the _cmd.exe /c_ return code!
set EXIT_CODE=%ERRORLEVEL%
if %EXIT_CODE% equ 0 set EXIT_CODE=1
if not ""=="%GRADLE_EXIT_CONSOLE%" exit %EXIT_CODE%
exit /b %EXIT_CODE%
:mainEnd
if "%OS%"=="Windows_NT" endlocal
:omega


@ -0,0 +1,2 @@
rootProject.name = 'androidExportReader'


@ -0,0 +1,104 @@
package com.blackforestbytes;
import de.ralleytn.simple.json.JSONArray;
import de.ralleytn.simple.json.JSONFormatter;
import de.ralleytn.simple.json.JSONObject;
import java.io.ObjectInputStream;
import java.net.URI;
import java.nio.file.FileSystems;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;
public class Main {
@SuppressWarnings("unchecked")
public static void main(String[] args) {
if (args.length != 1) {
System.err.println("call with ./androidExportConvert scn_export.dat");
return;
}
try {
var path = FileSystems.getDefault().getPath(args[0]).normalize().toAbsolutePath().toUri().toURL();
ObjectInputStream stream = new ObjectInputStream(path.openStream());
Map<String, ?> d1 = new HashMap<>((Map<String, ?>)stream.readObject());
Map<String, ?> d2 = new HashMap<>((Map<String, ?>)stream.readObject());
Map<String, ?> d3 = new HashMap<>((Map<String, ?>)stream.readObject());
Map<String, ?> d4 = new HashMap<>((Map<String, ?>)stream.readObject());
stream.close();
JSONObject root = new JSONObject();
var subConfig = new JSONObject();
var subIAB = new JSONArray();
var subCMessageList = new JSONArray();
var subAcks = new JSONArray();
var subQueryLog = new JSONArray();
for (Map.Entry<String, ?> entry : d1.entrySet())
{
if (entry.getValue() instanceof String) subConfig.put(entry.getKey(), (String)entry.getValue());
if (entry.getValue() instanceof Boolean) subConfig.put(entry.getKey(), (Boolean)entry.getValue());
if (entry.getValue() instanceof Float) subConfig.put(entry.getKey(), (Float)entry.getValue());
if (entry.getValue() instanceof Integer) subConfig.put(entry.getKey(), (Integer)entry.getValue());
if (entry.getValue() instanceof Long) subConfig.put(entry.getKey(), (Long)entry.getValue());
if (entry.getValue() instanceof Set<?>) subConfig.put(entry.getKey(), ((Set<String>)entry.getValue()).toArray());
}
for (int i = 0; i < (Integer)d2.get("c"); i++) {
var obj = new JSONObject();
obj.put("key", d2.get("["+i+"]->key"));
obj.put("value", d2.get("["+i+"]->value"));
subIAB.add(obj);
}
for (int i = 0; i < (Integer)d3.get("message_count"); i++) {
if (d3.get("message["+i+"].scnid") == null)
throw new Exception("ONF");
var obj = new JSONObject();
obj.put("timestamp", d3.get("message["+i+"].timestamp"));
obj.put("title", d3.get("message["+i+"].title"));
obj.put("content", d3.get("message["+i+"].content"));
obj.put("priority", d3.get("message["+i+"].priority"));
obj.put("scnid", d3.get("message["+i+"].scnid"));
subCMessageList.add(obj);
}
subAcks.addAll(((Set<String>)d3.get("acks")).stream().map(p -> Long.decode("0x"+p)).toList());
for (int i = 0; i < (Integer)d4.get("history_count"); i++) {
if (d4.get("message["+(i+1000)+"].Name") == null)
throw new Exception("ONF");
var obj = new JSONObject();
obj.put("Level", d4.get("message["+(i+1000)+"].Level"));
obj.put("Timestamp", d4.get("message["+(i+1000)+"].Timestamp"));
obj.put("Name", d4.get("message["+(i+1000)+"].Name"));
obj.put("URL", d4.get("message["+(i+1000)+"].URL"));
obj.put("Response", d4.get("message["+(i+1000)+"].Response"));
obj.put("ResponseCode", d4.get("message["+(i+1000)+"].ResponseCode"));
obj.put("ExceptionString", d4.get("message["+(i+1000)+"].ExceptionString"));
subQueryLog.add(obj);
}
root.put("config", subConfig);
root.put("iab", subIAB);
root.put("cmessagelist", subCMessageList);
root.put("acks", subAcks);
root.put("querylog", subQueryLog);
System.out.println(new JSONFormatter().format(root.toString()));
} catch (Exception e) {
e.printStackTrace();
}
}
}


@ -5,6 +5,8 @@ _build
DOCKER_GIT_INFO
scn_export.dat
scn_export.json
##############


@ -5,7 +5,7 @@
<file url="PROJECT" dialect="SQLite" />
</component>
<component name="SqlResolveMappings">
<file url="file://$PROJECT_DIR$/db/impl/primary/database.go" scope="{&quot;node&quot;:{ &quot;@negative&quot;:&quot;1&quot;, &quot;group&quot;:{ &quot;@kind&quot;:&quot;root&quot;, &quot;node&quot;:{ &quot;name&quot;:{ &quot;@qname&quot;:&quot;b3228d61-4c36-41ce-803f-63bd80e198b3&quot; }, &quot;group&quot;:{ &quot;@kind&quot;:&quot;schema&quot;, &quot;node&quot;:{ &quot;name&quot;:{ &quot;@qname&quot;:&quot;schema_3.0.ddl&quot; } } } } } }}" />
<file url="PROJECT" scope="{&quot;node&quot;:{ &quot;@negative&quot;:&quot;1&quot;, &quot;group&quot;:{ &quot;@kind&quot;:&quot;root&quot;, &quot;node&quot;:{ &quot;name&quot;:{ &quot;@qname&quot;:&quot;b3228d61-4c36-41ce-803f-63bd80e198b3&quot; }, &quot;group&quot;:{ &quot;@kind&quot;:&quot;schema&quot;, &quot;node&quot;:{ &quot;name&quot;:{ &quot;@qname&quot;:&quot;schema_3.0.ddl&quot; } } } } } }}" />
<file url="file://$PROJECT_DIR$" scope="{&quot;node&quot;:{ &quot;@negative&quot;:&quot;1&quot;, &quot;group&quot;:{ &quot;@kind&quot;:&quot;root&quot;, &quot;node&quot;:{ &quot;name&quot;:{ &quot;@qname&quot;:&quot;b3228d61-4c36-41ce-803f-63bd80e198b3&quot; }, &quot;group&quot;:{ &quot;@kind&quot;:&quot;schema&quot;, &quot;node&quot;:{ &quot;name&quot;:{ &quot;@qname&quot;:&quot;schema_3.0.ddl&quot; } } } } } }}" />
<file url="PROJECT" scope="" />
</component>
</project>


@ -71,4 +71,7 @@ fmt:
test:
go test ./test/...
migrate:
CGO_ENABLED=1 go build -v -o _build/scn_migrate -tags "timetzdata sqlite_fts5 sqlite_foreign_keys" ./cmd/migrate
./_build/scn_migrate


@ -9,6 +9,8 @@
- finish tests (!)
- migration script for existing data
apply local deletion in (my) app
delete excessive dockerwatch messages (directly in db?)
- app-store link in HTML
@ -43,6 +45,8 @@
(or add another /kuma endpoint)
-> https://webhook.site/
- endpoint to list all servernames of user (distinct select)
#### PERSONAL
- in my script: use `srvname` for sendername


@ -7,6 +7,7 @@ import (
"errors"
"github.com/gin-gonic/gin"
"github.com/rs/zerolog/log"
"gogs.mikescher.com/BlackForestBytes/goext/rext"
"net/http"
"regexp"
"strings"
@ -14,15 +15,15 @@ import (
type WebsiteHandler struct {
app *logic.Application
rexTemplate *regexp.Regexp
rexConfig *regexp.Regexp
rexTemplate rext.Regex
rexConfig rext.Regex
}
func NewWebsiteHandler(app *logic.Application) WebsiteHandler {
return WebsiteHandler{
app: app,
rexTemplate: regexp.MustCompile("{{template\\|[A-Za-z0-9_\\-\\[\\].]+}}"),
rexConfig: regexp.MustCompile("{{config\\|[A-Za-z0-9_\\-.]+}}"),
rexTemplate: rext.W(regexp.MustCompile("{{template\\|[A-Za-z0-9_\\-\\[\\].]+}}")),
rexConfig: rext.W(regexp.MustCompile("{{config\\|[A-Za-z0-9_\\-.]+}}")),
}
}
@ -77,17 +78,19 @@ func (h WebsiteHandler) CSS(g *gin.Context) ginresp.HTTPResponse {
}
func (h WebsiteHandler) serveAsset(g *gin.Context, fn string, repl bool) ginresp.HTTPResponse {
data, err := website.Assets.ReadFile(fn)
_data, err := website.Assets.ReadFile(fn)
if err != nil {
return ginresp.Status(http.StatusNotFound)
}
data := string(_data)
if repl {
failed := false
data = h.rexTemplate.ReplaceAllFunc(data, func(match []byte) []byte {
data = h.rexTemplate.ReplaceAllFunc(data, func(match string) string {
prefix := len("{{template|")
suffix := len("}}")
fnSub := string(match[prefix : len(match)-suffix])
fnSub := match[prefix : len(match)-suffix]
fnSub = strings.ReplaceAll(fnSub, "[theme]", h.getTheme(g))
@ -96,23 +99,23 @@ func (h WebsiteHandler) serveAsset(g *gin.Context, fn string, repl bool) ginresp
log.Error().Str("templ", string(match)).Str("fnSub", fnSub).Str("source", fn).Msg("Failed to replace template")
failed = true
}
return subdata
return string(subdata)
})
if failed {
return ginresp.InternalError(errors.New("template replacement failed"))
}
data = h.rexConfig.ReplaceAllFunc(data, func(match []byte) []byte {
data = h.rexConfig.ReplaceAllFunc(data, func(match string) string {
prefix := len("{{config|")
suffix := len("}}")
cfgKey := match[prefix : len(match)-suffix]
cval, ok := h.getReplConfig(string(cfgKey))
cval, ok := h.getReplConfig(cfgKey)
if !ok {
log.Error().Str("templ", string(match)).Str("source", fn).Msg("Failed to replace config")
log.Error().Str("templ", match).Str("source", fn).Msg("Failed to replace config")
failed = true
}
return []byte(cval)
return cval
})
if failed {
return ginresp.InternalError(errors.New("config replacement failed"))
@ -138,7 +141,7 @@ func (h WebsiteHandler) serveAsset(g *gin.Context, fn string, repl bool) ginresp
mime = "image/svg+xml"
}
return ginresp.Data(http.StatusOK, mime, data)
return ginresp.Data(http.StatusOK, mime, []byte(data))
}
func (h WebsiteHandler) getReplConfig(key string) (string, bool) {


@ -0,0 +1,871 @@
package main
import (
scn "blackforestbytes.com/simplecloudnotifier"
"blackforestbytes.com/simplecloudnotifier/logic"
"blackforestbytes.com/simplecloudnotifier/models"
"bufio"
"context"
"encoding/json"
"fmt"
_ "github.com/go-sql-driver/mysql"
"github.com/jmoiron/sqlx"
"gogs.mikescher.com/BlackForestBytes/goext/langext"
"gogs.mikescher.com/BlackForestBytes/goext/rext"
"gogs.mikescher.com/BlackForestBytes/goext/sq"
"os"
"regexp"
"strings"
"time"
)
type OldUser struct {
UserId int64 `db:"user_id"`
UserKey string `db:"user_key"`
FcmToken *string `db:"fcm_token"`
MessagesSent int64 `db:"messages_sent"`
TimestampCreated time.Time `db:"timestamp_created"`
TimestampAccessed *time.Time `db:"timestamp_accessed"`
QuotaToday int64 `db:"quota_today"`
QuotaDay *time.Time `db:"quota_day"`
IsPro bool `db:"is_pro"`
ProToken *string `db:"pro_token"`
}
type OldMessage struct {
ScnMessageId int64 `db:"scn_message_id"`
SenderUserId int64 `db:"sender_user_id"`
TimestampReal time.Time `db:"timestamp_real"`
Ack []uint8 `db:"ack"`
Title string `db:"title"`
Content *string `db:"content"`
Priority int64 `db:"priority"`
Sendtime int64 `db:"sendtime"`
FcmMessageId *string `db:"fcm_message_id"`
UsrMessageId *string `db:"usr_message_id"`
}
type SCNExport struct {
Messages []SCNExportMessage `json:"cmessagelist"`
}
type SCNExportMessage struct {
MessageID int64 `json:"scnid"`
}
func main() {
ctx := context.Background()
conf, _ := scn.GetConfig("local-host")
conf.DBMain.File = ".run-data/migrate_main.sqlite3"
conf.DBMain.EnableLogger = false
if _, err := os.Stat(".run-data/migrate_main.sqlite3"); err == nil {
err = os.Remove(".run-data/migrate_main.sqlite3")
if err != nil {
panic(err)
}
}
if _, err := os.Stat(".run-data/migrate_main.sqlite3-shm"); err == nil {
err = os.Remove(".run-data/migrate_main.sqlite3-shm")
if err != nil {
panic(err)
}
}
if _, err := os.Stat(".run-data/migrate_main.sqlite3-wal"); err == nil {
err = os.Remove(".run-data/migrate_main.sqlite3-wal")
if err != nil {
panic(err)
}
}
sqlite, err := logic.NewDBPool(conf)
if err != nil {
panic(err)
}
err = sqlite.Migrate(ctx)
if err != nil {
panic(err)
}
connstr := os.Getenv("SQL_CONN_STR")
if connstr == "" {
scanner := bufio.NewScanner(os.Stdin)
fmt.Print("Enter DB URL [127.0.0.1:3306]: ")
scanner.Scan()
host := scanner.Text()
if host == "" {
host = "127.0.0.1:3306"
}
fmt.Print("Enter DB Username [root]: ")
scanner.Scan()
username := scanner.Text()
if username == "" {
username = "root"
}
fmt.Print("Enter DB Password []: ")
scanner.Scan()
pass := scanner.Text()
connstr = fmt.Sprintf("%s:%s@tcp(%s)", username, pass, host)
}
_dbold, err := sqlx.Open("mysql", connstr+"/simple_cloud_notifier?parseTime=true")
if err != nil {
panic(err)
}
dbold := sq.NewDB(_dbold)
rowsUser, err := dbold.Query(ctx, "SELECT * FROM users", sq.PP{})
if err != nil {
panic(err)
}
var export SCNExport
exfn, err := os.ReadFile("scn_export.json")
if err != nil {
panic(err)
}
err = json.Unmarshal(exfn, &export)
if err != nil {
panic(err)
}
appids := make(map[int64]int64)
for _, v := range export.Messages {
appids[v.MessageID] = v.MessageID
}
users := make([]OldUser, 0)
for rowsUser.Next() {
var u OldUser
err = rowsUser.StructScan(&u)
if err != nil {
panic(err)
}
users = append(users, u)
}
fmt.Printf("\n")
for _, v := range users {
fmt.Printf("========================================\n")
fmt.Printf(" MIGRATE USER %d\n", v.UserId)
fmt.Printf("========================================\n")
migrateUser(ctx, sqlite.Primary.DB(), dbold, v, appids)
fmt.Printf("========================================\n")
fmt.Printf("\n")
fmt.Printf("\n")
}
err = sqlite.Stop(context.Background())
if err != nil {
panic(err)
}
}
var rexTitleChannel = rext.W(regexp.MustCompile("^\\[(?P<channel>[A-Za-z\\-0-9_ ]+)] (?P<title>(.|\\r|\\n)+)$"))
var usedFCM = make(map[string]models.ClientID)
func migrateUser(ctx context.Context, dbnew sq.DB, dbold sq.DB, user OldUser, appids map[int64]int64) {
rowsMessages, err := dbold.Query(ctx, "SELECT * FROM messages WHERE sender_user_id = :uid ORDER BY timestamp_real ASC", sq.PP{"uid": user.UserId})
if err != nil {
panic(err)
}
messages := make([]OldMessage, 0)
for rowsMessages.Next() {
var m OldMessage
err = rowsMessages.StructScan(&m)
if err != nil {
panic(err)
}
messages = append(messages, m)
}
fmt.Printf("Found %d messages\n", len(messages))
userid := models.NewUserID()
fmt.Printf("New UserID: %s\n", userid)
readKey := scn.RandomAuthKey()
sendKey := scn.RandomAuthKey()
adminKey := user.UserKey
protoken := user.ProToken
if protoken != nil {
protoken = langext.Ptr("ANDROID|v1|" + *protoken)
}
_, err = dbnew.Exec(ctx, "INSERT INTO users (user_id, username, read_key, send_key, admin_key, is_pro, pro_token, timestamp_created) VALUES (:uid, :un, :rk, :sk, :ak, :pro, :tok, :ts)", sq.PP{
"uid": userid,
"un": nil,
"rk": readKey,
"sk": sendKey,
"ak": adminKey,
"pro": langext.Conditional(user.IsPro, 1, 0),
"tok": protoken,
"ts": user.TimestampCreated.UnixMilli(),
})
if err != nil {
panic(err)
}
_, err = dbnew.Exec(ctx, "INSERT INTO compat_ids (old, new, type) VALUES (:old, :new, :typ)", sq.PP{
"old": user.UserId,
"new": userid,
"typ": "userid",
})
if err != nil {
panic(err)
}
var clientid *models.ClientID = nil
if user.FcmToken != nil && *user.FcmToken != "BLACKLISTED" {
if _, ok := usedFCM[*user.FcmToken]; ok {
fmt.Printf("Skip Creating Client (fcm token reuse)\n")
} else {
_clientid := models.NewClientID()
_, err = dbnew.Exec(ctx, "INSERT INTO clients (client_id, user_id, type, fcm_token, timestamp_created, agent_model, agent_version) VALUES (:cid, :uid, :typ, :fcm, :ts, :am, :av)", sq.PP{
"cid": _clientid,
"uid": userid,
"typ": "ANDROID",
"fcm": *user.FcmToken,
"ts": user.TimestampCreated.UnixMilli(),
"am": "[migrated]",
"av": "[migrated]",
})
if err != nil {
panic(err)
}
fmt.Printf("Created Client %s\n", _clientid)
clientid = &_clientid
usedFCM[*user.FcmToken] = _clientid
}
}
mainChannelID := models.NewChannelID()
_, err = dbnew.Exec(ctx, "INSERT INTO channels (channel_id, owner_user_id, display_name, internal_name, description_name, subscribe_key, send_key, timestamp_created) VALUES (:cid, :ouid, :dnam, :inam, :hnam, :subkey, :sendkey, :ts)", sq.PP{
"cid": mainChannelID,
"ouid": userid,
"dnam": "main",
"inam": "main",
"hnam": nil,
"subkey": scn.RandomAuthKey(),
"sendkey": scn.RandomAuthKey(),
"ts": user.TimestampCreated.UnixMilli(),
})
if err != nil {
panic(err)
}
fmt.Printf("Created (Main) Channel [%s]: %s\n", "main", mainChannelID)
_, err = dbnew.Exec(ctx, "INSERT INTO subscriptions (subscription_id, subscriber_user_id, channel_owner_user_id, channel_internal_name, channel_id, timestamp_created, confirmed) VALUES (:sid, :suid, :ouid, :cnam, :cid, :ts, :conf)", sq.PP{
"sid": models.NewSubscriptionID(),
"suid": user.UserId,
"ouid": user.UserId,
"cnam": "main",
"cid": mainChannelID,
"ts": user.TimestampCreated.UnixMilli(),
"conf": true,
})
if err != nil {
panic(err)
}
channelMap := make(map[string]models.ChannelID)
lastTitle := ""
lastChannel := models.NewChannelID()
lastContent := langext.Ptr("")
lastSendername := langext.Ptr("")
lastTimestamp := time.Time{}
for _, oldmessage := range messages {
messageid := models.NewMessageID()
title := oldmessage.Title
channelInternalName := "main"
channelID := mainChannelID
if oldmessage.UsrMessageId != nil && strings.TrimSpace(*oldmessage.UsrMessageId) == "" {
oldmessage.UsrMessageId = nil
}
if match, ok := rexTitleChannel.MatchFirst(title); ok {
chanNameTitle := match.GroupByName("channel").Value()
if strings.HasPrefix(chanNameTitle, "VBOARD ERROR") {
chanNameTitle = "VBOARD-ERROR"
}
if chanNameTitle != "status" {
title = match.GroupByName("title").Value()
dummyApp := logic.Application{}
dispName := dummyApp.NormalizeChannelDisplayName(chanNameTitle)
intName := dummyApp.NormalizeChannelInternalName(chanNameTitle)
if v, ok := channelMap[intName]; ok {
channelID = v
channelInternalName = intName
} else {
channelID = models.NewChannelID()
channelInternalName = intName
_, err = dbnew.Exec(ctx, "INSERT INTO channels (channel_id, owner_user_id, display_name, internal_name, description_name, subscribe_key, send_key, timestamp_created) VALUES (:cid, :ouid, :dnam, :inam, :hnam, :subkey, :sendkey, :ts)", sq.PP{
"cid": channelID,
"ouid": userid,
"dnam": dispName,
"inam": intName,
"hnam": nil,
"subkey": scn.RandomAuthKey(),
"sendkey": scn.RandomAuthKey(),
"ts": oldmessage.TimestampReal.UnixMilli(),
})
if err != nil {
panic(err)
}
_, err = dbnew.Exec(ctx, "INSERT INTO subscriptions (subscription_id, subscriber_user_id, channel_owner_user_id, channel_internal_name, channel_id, timestamp_created, confirmed) VALUES (:sid, :suid, :ouid, :cnam, :cid, :ts, :conf)", sq.PP{
"sid": models.NewSubscriptionID(),
"suid": user.UserId,
"ouid": user.UserId,
"cnam": intName,
"cid": channelID,
"ts": oldmessage.TimestampReal.UnixMilli(),
"conf": true,
})
if err != nil {
panic(err)
}
channelMap[intName] = channelID
fmt.Printf("Auto Created Channel [%s]: %s\n", dispName, channelID)
}
}
}
sendername := determineSenderName(user, oldmessage, title, oldmessage.Content, channelInternalName)
if lastTitle == title && channelID == lastChannel &&
langext.PtrEquals(lastContent, oldmessage.Content) &&
langext.PtrEquals(lastSendername, sendername) && oldmessage.TimestampReal.Sub(lastTimestamp) < 5*time.Second {
lastTitle = title
lastChannel = channelID
lastContent = oldmessage.Content
lastSendername = sendername
lastTimestamp = oldmessage.TimestampReal
fmt.Printf("Skip message [%d] \"%s\" (fast-duplicate)\n", oldmessage.ScnMessageId, oldmessage.Title)
continue
}
var sendTimeMillis *int64 = nil
if oldmessage.Sendtime > 0 && (oldmessage.Sendtime*1000) != oldmessage.TimestampReal.UnixMilli() {
sendTimeMillis = langext.Ptr(oldmessage.Sendtime * 1000)
}
if user.UserId == 56 && oldmessage.ScnMessageId >= 15729 {
if _, ok := appids[oldmessage.ScnMessageId]; !ok {
lastTitle = title
lastChannel = channelID
lastContent = oldmessage.Content
lastSendername = sendername
lastTimestamp = oldmessage.TimestampReal
fmt.Printf("Skip message [%d] \"%s\" (locally deleted in app)\n", oldmessage.ScnMessageId, oldmessage.Title)
continue
}
}
pp := sq.PP{
"mid": messageid,
"suid": userid,
"ouid": user.UserId,
"cnam": channelInternalName,
"cid": channelID,
"tsr": oldmessage.TimestampReal.UnixMilli(),
"tsc": sendTimeMillis,
"tit": title,
"cnt": oldmessage.Content,
"prio": oldmessage.Priority,
"umid": oldmessage.UsrMessageId,
"ip": "",
"snam": sendername,
}
_, err = dbnew.Exec(ctx, "INSERT INTO messages (message_id, sender_user_id, owner_user_id, channel_internal_name, channel_id, timestamp_real, timestamp_client, title, content, priority, usr_message_id, sender_ip, sender_name) VALUES (:mid, :suid, :ouid, :cnam, :cid, :tsr, :tsc, :tit, :cnt, :prio, :umid, :ip, :snam)", pp)
if err != nil {
jv, _ := json.MarshalIndent(pp, "", " ")
fmt.Printf("%s", string(jv))
panic(err)
}
_, err = dbnew.Exec(ctx, "INSERT INTO compat_ids (old, new, type) VALUES (:old, :new, :typ)", sq.PP{
"old": oldmessage.ScnMessageId,
"new": messageid,
"typ": "messageid",
})
if err != nil {
panic(err)
}
if len(oldmessage.Ack) == 1 && oldmessage.Ack[0] == 1 {
if clientid != nil {
_, err = dbnew.Exec(ctx, "INSERT INTO deliveries (delivery_id, message_id, receiver_user_id, receiver_client_id, timestamp_created, timestamp_finalized, status, fcm_message_id, next_delivery) VALUES (:did, :mid, :ruid, :rcid, :tsc, :tsf, :stat, :fcm, :next)", sq.PP{
"did": models.NewDeliveryID(),
"mid": messageid,
"ruid": user.UserId,
"rcid": *clientid,
"tsc": oldmessage.TimestampReal.UnixMilli(),
"tsf": oldmessage.TimestampReal.UnixMilli(),
"stat": models.DeliveryStatusSuccess,
"fcm": *user.FcmToken,
"next": nil,
})
if err != nil {
panic(err)
}
}
} else if len(oldmessage.Ack) == 1 && oldmessage.Ack[0] == 0 {
if clientid != nil {
_, err = dbnew.Exec(ctx, "INSERT INTO deliveries (delivery_id, message_id, receiver_user_id, receiver_client_id, timestamp_created, timestamp_finalized, status, fcm_message_id, next_delivery) VALUES (:did, :mid, :ruid, :rcid, :tsc, :tsf, :stat, :fcm, :next)", sq.PP{
"did": models.NewDeliveryID(),
"mid": messageid,
"ruid": user.UserId,
"rcid": *clientid,
"tsc": oldmessage.TimestampReal.UnixMilli(),
"tsf": oldmessage.TimestampReal.UnixMilli(),
"stat": models.DeliveryStatusFailed,
"fcm": *user.FcmToken,
"next": nil,
})
if err != nil {
panic(err)
}
fmt.Printf("Create failed-delivery for message %d (no ack)\n", oldmessage.ScnMessageId)
}
} else {
panic("cannot parse ack")
}
lastTitle = title
lastChannel = channelID
lastContent = oldmessage.Content
lastSendername = sendername
lastTimestamp = oldmessage.TimestampReal
}
}
func determineSenderName(user OldUser, oldmessage OldMessage, title string, content *string, channame string) *string {
if user.UserId != 56 {
return nil
}
if channame == "t-ctrl" {
return langext.Ptr("sbox")
}
if channame == "torr" {
return langext.Ptr("sbox")
}
if channame == "yt-dl" {
return langext.Ptr("mscom")
}
if channame == "ncc-upload" {
return langext.Ptr("mscom")
}
if channame == "cron" {
if strings.Contains(title, "error on bfb") {
return langext.Ptr("bfb")
}
if strings.Contains(title, "error on mscom") {
return langext.Ptr("mscom")
}
if strings.Contains(title, "error on niflheim-3") {
return langext.Ptr("niflheim-3")
}
if strings.Contains(*content, "on mscom") {
return langext.Ptr("mscom")
}
if strings.Contains(*content, "on bfb") {
return langext.Ptr("bfb")
}
if strings.Contains(*content, "gogitmirror_cron") {
return langext.Ptr("mscom")
}
if strings.Contains(*content, "comic_downloader") {
return langext.Ptr("mscom")
}
}
if channame == "sshguard" {
if strings.Contains(*content, "logged in to mscom") {
return langext.Ptr("mscom")
}
if strings.Contains(*content, "logged in to bfb") {
return langext.Ptr("bfb")
}
if strings.Contains(*content, "logged in to statussrv") {
return langext.Ptr("statussrv")
}
}
if channame == "docker-watch" {
if strings.Contains(title, "on plantafelstaging") {
return langext.Ptr("plantafelstaging")
}
if strings.Contains(title, "@ mscom") {
return langext.Ptr("mscom")
}
if strings.Contains(title, "@ bfb") {
return langext.Ptr("bfb")
}
if strings.Contains(*content, "registry.blackforestbytes.com/scn_server:latest") {
return langext.Ptr("bfb")
}
if strings.Contains(*content, "archivebox/archivebox:latest") {
return langext.Ptr("mscom")
}
if strings.Contains(*content, "antoniomika/sish:latest") {
return langext.Ptr("mscom")
}
if strings.Contains(*content, "binwiederhier/ntfy:latest") {
return langext.Ptr("mscom")
}
if strings.Contains(*content, "registry.blackforestbytes.com/kgserver:latest") {
return langext.Ptr("mscom")
}
if strings.Contains(*content, "registry.blackforestbytes.com/mikescher/kgserver:latest") {
return langext.Ptr("mscom")
}
if strings.Contains(*content, "jenkins/jenkins:lts") {
return langext.Ptr("mscom")
}
if strings.Contains(*content, "mikescher/youtube-dl-viewer:latest") {
return langext.Ptr("mscom")
}
if strings.Contains(*content, "etherpad/etherpad:latest") {
return langext.Ptr("mscom")
}
if strings.Contains(*content, "teamcity_agent") {
return langext.Ptr("bfb")
}
if strings.Contains(*content, "teamcity_server") {
return langext.Ptr("bfb")
}
if strings.Contains(*content, "registry.blackforestbytes.com/inoshop/") {
return langext.Ptr("inoshop")
}
if strings.Contains(*content, "inopart_mongo_") {
return langext.Ptr("inoshop")
}
if strings.Contains(*content, "Image: wkk_") {
return langext.Ptr("wkk")
}
if strings.Contains(*content, "registry.blackforestbytes.com/holz100") {
return langext.Ptr("bfb")
}
if strings.Contains(*content, "registry.blackforestbytes.com/bewirto") {
return langext.Ptr("bfb-testserver")
}
if strings.Contains(*content, "registry.blackforestbytes.com/bfb-website") {
return langext.Ptr("bfb")
}
if strings.Contains(*content, "registry.blackforestbytes.com/bfb/website") {
return langext.Ptr("bfb")
}
if strings.Contains(*content, "registry.blackforestbytes.com/psycho/backend") {
return langext.Ptr("bfb-testserver")
}
if strings.Contains(*content, "registry.blackforestbytes.com/vereinsboard") {
return langext.Ptr("bfb")
}
if strings.Contains(*content, "registry.blackforestbytes.com/isiproject") {
return langext.Ptr("bfb")
}
if strings.Contains(*content, "registry.blackforestbytes.com/ar-app-supportchat-server") {
return langext.Ptr("bfb")
}
if strings.Contains(*content, "registry.blackforestbytes.com/planitec/ar-app-supportchat-server") {
return langext.Ptr("bfb")
}
if strings.Contains(*content, "docker_registry") {
return langext.Ptr("bfb")
}
if strings.Contains(*content, "registry.blackforestbytes.com/balu") && strings.Contains(*content, "prod") {
return langext.Ptr("lbxprod")
}
if strings.Contains(*content, "registry.blackforestbytes.com/balu") && strings.Contains(*content, "dev") {
return langext.Ptr("lbxdev")
}
if strings.Contains(*content, "Server: bfb-testserver") {
return langext.Ptr("bfb-testserver")
}
if strings.Contains(*content, "wptest_") {
return langext.Ptr("bfb")
}
if strings.Contains(title, "balu-db") {
return langext.Ptr("lbprod")
}
}
if channame == "certbot" {
if strings.Contains(title, "Update cert_badennet_main") {
return langext.Ptr("bfb-testserver")
}
if strings.Contains(title, "Update cert_badennet_main") {
return langext.Ptr("bfb-testserver")
}
if strings.Contains(title, "Update cert_bfbugs_main") {
return langext.Ptr("bfb-testserver")
}
if strings.Contains(title, "Update bfbugs_0001") {
return langext.Ptr("bfb-testserver")
}
if strings.Contains(title, "Update inoshop_bfb") {
return langext.Ptr("inoshop")
}
if strings.Contains(title, "Update cert_bfb_0001") {
return langext.Ptr("mscom")
}
if strings.Contains(title, "Update cert_bugkultur_0001") {
return langext.Ptr("mscom")
}
if strings.Contains(title, "Update cert_public_0001") {
return langext.Ptr("mscom")
}
if strings.Contains(title, "Update cert_korbers_0001") {
return langext.Ptr("mscom")
}
if strings.Contains(title, "Update cert_wkk_staging_external") {
return langext.Ptr("wkk")
}
if strings.Contains(title, "Update cert_wkk_production_external") {
return langext.Ptr("wkk")
}
if strings.Contains(title, "Update cert_wkk_develop_external") {
return langext.Ptr("wkk")
}
if strings.Contains(title, "Update cert_wkk_internal") {
return langext.Ptr("wkk")
}
if strings.Contains(title, "Update bfb_de_wildcard") {
return langext.Ptr("bfb")
}
if strings.Contains(title, "Update cannonconquest") {
return langext.Ptr("bfb")
}
if strings.Contains(title, "Update isiproject_wildcard") {
return langext.Ptr("bfb")
}
if strings.Contains(title, "Update vereinsboard_demo") {
return langext.Ptr("bfb")
}
if strings.Contains(title, "Update vereinsboard_wildcard") {
return langext.Ptr("bfb")
}
if strings.Contains(title, "Update cert_bewirto_main") {
return langext.Ptr("bfb-testserver")
}
if strings.Contains(title, "Update cert_badennet_main") {
return langext.Ptr("bfb-testserver")
}
if strings.Contains(title, "Update cert_mampfkultur_main") {
return langext.Ptr("bfb-testserver")
}
if strings.Contains(title, "Update cert_psycho_main") {
return langext.Ptr("bfb-testserver")
}
if strings.Contains(*content, "DNS:*.blackforestbytes.com") {
return langext.Ptr("bfb")
}
if strings.Contains(*content, "DNS:*.mikescher.com") {
return langext.Ptr("mscom")
}
if strings.Contains(title, "plantafel-digital.de") {
return langext.Ptr("plan-web-prod")
}
if strings.Contains(title, "plantafeldev.de") {
return langext.Ptr("plantafeldev")
}
if strings.Contains(title, "plantafelstaging.de") {
return langext.Ptr("plantafeldev")
}
if strings.Contains(*content, "DNS:*.plantafeldev.de") {
return langext.Ptr("plantafeldev")
}
if strings.Contains(*content, "plantafel-digital.de") {
return langext.Ptr("plan-web-prod")
}
if strings.Contains(*content, "plantafeldev.de") {
return langext.Ptr("plantafeldev")
}
if strings.Contains(*content, "plantafelstaging.de") {
return langext.Ptr("plantafeldev")
}
}
if channame == "space-warning" {
if title == "bfb" {
return langext.Ptr("bfb")
}
if title == "mscom" {
return langext.Ptr("mscom")
}
if title == "plan-web-prod" {
return langext.Ptr("plan-web-prod")
}
if title == "statussrv" {
return langext.Ptr("statussrv")
}
}
if channame == "srv-backup" {
if strings.Contains(*content, "Server: bfb-testserver") {
return langext.Ptr("bfb-testserver")
}
if strings.Contains(*content, "Server: bfb") {
return langext.Ptr("bfb")
}
if strings.Contains(*content, "Server: mscom") {
return langext.Ptr("mscom")
}
if strings.Contains(*content, "Server: statussrv") {
return langext.Ptr("statussrv")
}
}
if title == "[status] Updating uptime-kuma image" {
return langext.Ptr("statussrv")
}
if channame == "omv-backup" {
return langext.Ptr("omv")
}
if channame == "omv-rcheck" {
return langext.Ptr("omv")
}
if channame == "tfin" {
return langext.Ptr("sbox")
}
if channame == "vboard-error" {
return langext.Ptr("bfb")
}
if channame == "vboard" {
return langext.Ptr("bfb")
}
if channame == "cubox" {
return langext.Ptr("cubox")
}
if channame == "sys" {
if strings.Contains(title, "h2896063") {
return langext.Ptr("mscom")
}
if strings.Contains(title, "h2516246") {
return langext.Ptr("mscom")
}
if strings.Contains(title, "h2770024") {
return langext.Ptr("bfb")
}
if strings.Contains(title, "Reboot plan-web-prod") {
return langext.Ptr("plan-web-prod")
}
if strings.Contains(title, "Reboot mikescher.com") {
return langext.Ptr("mscom")
}
if strings.Contains(title, "Reboot blackforestbytes.com") {
return langext.Ptr("bfb")
}
if strings.Contains(title, "Reboot plan-web-dev") {
return langext.Ptr("plan-web-dev")
}
if strings.Contains(title, "Reboot plan-web-staging") {
return langext.Ptr("plan-web-staging")
}
if strings.Contains(title, "Reboot virmach-01") {
return langext.Ptr("statussrv")
}
if strings.Contains(title, "Reboot wkk-1") {
return langext.Ptr("wkk")
}
if strings.Contains(title, "Reboot lbxprod") {
return langext.Ptr("lbxprod")
}
}
if channame == "yt-tvc" {
return langext.Ptr("mscom")
}
if channame == "gdapi" {
return langext.Ptr("bfb")
}
if channame == "ttrss" {
return langext.Ptr("mscom")
}
if title == "NCC Upload failed" || title == "NCC Upload successful" {
return langext.Ptr("mscom")
}
if oldmessage.ScnMessageId == 7975 {
return langext.Ptr("mscom")
}
if strings.Contains(title, "bfbackup job") {
return langext.Ptr("bfbackup")
}
if strings.Contains(title, "Repo migration of /volume1") {
return langext.Ptr("bfbackup")
}
//fmt.Printf("Failed to determine sender of [%d] '%s' '%s'\n", oldmessage.ScnMessageId, oldmessage.Title, langext.Coalesce(oldmessage.Content, "<NULL>"))
fmt.Printf("Failed to determine sender of [%d] '%s'\n", oldmessage.ScnMessageId, oldmessage.Title)
return nil
}


@ -55,6 +55,7 @@ type DBConfig struct {
CheckForeignKeys bool `env:"CHECKFOREIGNKEYS"`
SingleConn bool `env:"SINGLECONNECTION"`
BusyTimeout time.Duration `env:"BUSYTIMEOUT"`
EnableLogger bool `env:"ENABLELOGGER"`
}
var Conf Config
@ -78,6 +79,7 @@ var configLocHost = func() Config {
ConnMaxLifetime: 60 * time.Minute,
ConnMaxIdleTime: 60 * time.Minute,
BusyTimeout: 100 * time.Millisecond,
EnableLogger: true,
},
DBRequests: DBConfig{
File: ".run-data/loc_requests.sqlite3",
@ -90,6 +92,7 @@ var configLocHost = func() Config {
ConnMaxLifetime: 60 * time.Minute,
ConnMaxIdleTime: 60 * time.Minute,
BusyTimeout: 500 * time.Millisecond,
EnableLogger: true,
},
DBLogs: DBConfig{
File: ".run-data/loc_logs.sqlite3",
@ -102,6 +105,7 @@ var configLocHost = func() Config {
ConnMaxLifetime: 60 * time.Minute,
ConnMaxIdleTime: 60 * time.Minute,
BusyTimeout: 500 * time.Millisecond,
EnableLogger: true,
},
RequestTimeout: 16 * time.Second,
RequestMaxRetry: 8,
@ -147,6 +151,7 @@ var configLocDocker = func() Config {
ConnMaxLifetime: 60 * time.Minute,
ConnMaxIdleTime: 60 * time.Minute,
BusyTimeout: 100 * time.Millisecond,
EnableLogger: true,
},
DBRequests: DBConfig{
File: "/data/docker_scn_requests.sqlite3",
@ -159,6 +164,7 @@ var configLocDocker = func() Config {
ConnMaxLifetime: 60 * time.Minute,
ConnMaxIdleTime: 60 * time.Minute,
BusyTimeout: 500 * time.Millisecond,
EnableLogger: true,
},
DBLogs: DBConfig{
File: "/data/docker_scn_logs.sqlite3",
@ -171,6 +177,7 @@ var configLocDocker = func() Config {
ConnMaxLifetime: 60 * time.Minute,
ConnMaxIdleTime: 60 * time.Minute,
BusyTimeout: 500 * time.Millisecond,
EnableLogger: true,
},
RequestTimeout: 16 * time.Second,
RequestMaxRetry: 8,
@ -215,6 +222,7 @@ var configDev = func() Config {
ConnMaxLifetime: 60 * time.Minute,
ConnMaxIdleTime: 60 * time.Minute,
BusyTimeout: 100 * time.Millisecond,
EnableLogger: true,
},
DBRequests: DBConfig{
File: "/data/scn_requests.sqlite3",
@ -227,6 +235,7 @@ var configDev = func() Config {
ConnMaxLifetime: 60 * time.Minute,
ConnMaxIdleTime: 60 * time.Minute,
BusyTimeout: 500 * time.Millisecond,
EnableLogger: true,
},
DBLogs: DBConfig{
File: "/data/scn_logs.sqlite3",
@ -239,6 +248,7 @@ var configDev = func() Config {
ConnMaxLifetime: 60 * time.Minute,
ConnMaxIdleTime: 60 * time.Minute,
BusyTimeout: 500 * time.Millisecond,
EnableLogger: true,
},
RequestTimeout: 16 * time.Second,
RequestMaxRetry: 8,
@ -283,6 +293,7 @@ var configStag = func() Config {
ConnMaxLifetime: 60 * time.Minute,
ConnMaxIdleTime: 60 * time.Minute,
BusyTimeout: 100 * time.Millisecond,
EnableLogger: true,
},
DBRequests: DBConfig{
File: "/data/scn_requests.sqlite3",
@ -295,6 +306,7 @@ var configStag = func() Config {
ConnMaxLifetime: 60 * time.Minute,
ConnMaxIdleTime: 60 * time.Minute,
BusyTimeout: 500 * time.Millisecond,
EnableLogger: true,
},
DBLogs: DBConfig{
File: "/data/scn_logs.sqlite3",
@ -307,6 +319,7 @@ var configStag = func() Config {
ConnMaxLifetime: 60 * time.Minute,
ConnMaxIdleTime: 60 * time.Minute,
BusyTimeout: 500 * time.Millisecond,
EnableLogger: true,
},
RequestTimeout: 16 * time.Second,
RequestMaxRetry: 8,
@ -351,6 +364,7 @@ var configProd = func() Config {
ConnMaxLifetime: 60 * time.Minute,
ConnMaxIdleTime: 60 * time.Minute,
BusyTimeout: 100 * time.Millisecond,
EnableLogger: true,
},
DBRequests: DBConfig{
File: "/data/scn_requests.sqlite3",
@ -363,6 +377,7 @@ var configProd = func() Config {
ConnMaxLifetime: 60 * time.Minute,
ConnMaxIdleTime: 60 * time.Minute,
BusyTimeout: 500 * time.Millisecond,
EnableLogger: true,
},
DBLogs: DBConfig{
File: "/data/scn_logs.sqlite3",
@ -375,6 +390,7 @@ var configProd = func() Config {
ConnMaxLifetime: 60 * time.Minute,
ConnMaxIdleTime: 60 * time.Minute,
BusyTimeout: 500 * time.Millisecond,
EnableLogger: true,
},
RequestTimeout: 16 * time.Second,
RequestMaxRetry: 8,


@ -6,6 +6,8 @@ import (
)
type DatabaseImpl interface {
DB() sq.DB
Migrate(ctx context.Context) error
Ping(ctx context.Context) error
BeginTx(ctx context.Context) (sq.Tx, error)


@ -81,7 +81,7 @@ func (l DBLogger) PostExec(txID *uint16, sqlOriginal string, sqlReal string, par
}
func fmtSQLPrint(sql string) string {
if strings.Contains(sql, ";") {
if strings.Contains(sql, ";") && len(sql) > 1024 {
return "(...multi...)"
}


@ -6,6 +6,7 @@ import (
"fmt"
"github.com/rs/zerolog/log"
"gogs.mikescher.com/BlackForestBytes/goext/langext"
"gogs.mikescher.com/BlackForestBytes/goext/rext"
"gogs.mikescher.com/BlackForestBytes/goext/sq"
"regexp"
"strings"
@ -37,7 +38,7 @@ type DBPreprocessor struct {
cacheQuery map[string]string
}
var regexAlias = regexp.MustCompile("([A-Za-z_\\-0-9]+)\\s+AS\\s+([A-Za-z_\\-0-9]+)")
var regexAlias = rext.W(regexp.MustCompile("([A-Za-z_\\-0-9]+)\\s+AS\\s+([A-Za-z_\\-0-9]+)"))
func NewDBPreprocessor(db sq.DB) (*DBPreprocessor, error) {
@ -146,8 +147,8 @@ func (pp *DBPreprocessor) PreQuery(ctx context.Context, txID *uint16, sql *strin
newsel := make([]string, 0)
aliasMap := make(map[string]string)
for _, v := range regexAlias.FindAllStringSubmatch(sqlOriginal, idxFrom+len(" FROM")) {
aliasMap[strings.TrimSpace(v[2])] = strings.TrimSpace(v[1])
for _, v := range regexAlias.MatchAll(sqlOriginal) {
aliasMap[strings.TrimSpace(v.GroupByIndex(1).Value())] = strings.TrimSpace(v.GroupByIndex(2).Value())
}
for _, expr := range split {


@ -42,7 +42,9 @@ func NewLogsDatabase(cfg server.Config) (*Database, error) {
qqdb := sq.NewDB(xdb)
qqdb.AddListener(dbtools.DBLogger{})
if conf.EnableLogger {
qqdb.AddListener(dbtools.DBLogger{})
}
pp, err := dbtools.NewDBPreprocessor(qqdb)
if err != nil {
@ -56,6 +58,10 @@ func NewLogsDatabase(cfg server.Config) (*Database, error) {
return scndb, nil
}
func (db *Database) DB() sq.DB {
return db.db
}
func (db *Database) Migrate(ctx context.Context) error {
ctx, cancel := context.WithTimeout(context.Background(), 24*time.Second)
defer cancel()


@ -91,11 +91,12 @@ func (db *Database) CreateChannel(ctx TxContext, userid models.UserID, dispName
channelid := models.NewChannelID()
_, err = tx.Exec(ctx, "INSERT INTO channels (channel_id, owner_user_id, display_name, internal_name, subscribe_key, send_key, timestamp_created) VALUES (:cid, :ouid, :dnam, :inam, :subkey, :sendkey, :ts)", sq.PP{
_, err = tx.Exec(ctx, "INSERT INTO channels (channel_id, owner_user_id, display_name, internal_name, description_name, subscribe_key, send_key, timestamp_created) VALUES (:cid, :ouid, :dnam, :inam, :hnam, :subkey, :sendkey, :ts)", sq.PP{
"cid": channelid,
"ouid": userid,
"dnam": dispName,
"inam": intName,
"hnam": nil,
"subkey": subscribeKey,
"sendkey": sendKey,
"ts": time2DB(now),


@ -42,7 +42,9 @@ func NewPrimaryDatabase(cfg server.Config) (*Database, error) {
qqdb := sq.NewDB(xdb)
qqdb.AddListener(dbtools.DBLogger{})
if conf.EnableLogger {
qqdb.AddListener(dbtools.DBLogger{})
}
pp, err := dbtools.NewDBPreprocessor(qqdb)
if err != nil {
@ -56,6 +58,10 @@ func NewPrimaryDatabase(cfg server.Config) (*Database, error) {
return scndb, nil
}
func (db *Database) DB() sq.DB {
return db.db
}
func (db *Database) Migrate(ctx context.Context) error {
ctx, cancel := context.WithTimeout(context.Background(), 24*time.Second)
defer cancel()


@ -42,7 +42,9 @@ func NewRequestsDatabase(cfg server.Config) (*Database, error) {
qqdb := sq.NewDB(xdb)
qqdb.AddListener(dbtools.DBLogger{})
if conf.EnableLogger {
qqdb.AddListener(dbtools.DBLogger{})
}
pp, err := dbtools.NewDBPreprocessor(qqdb)
if err != nil {
@ -56,6 +58,10 @@ func NewRequestsDatabase(cfg server.Config) (*Database, error) {
return scndb, nil
}
func (db *Database) DB() sq.DB {
return db.db
}
func (db *Database) Migrate(ctx context.Context) error {
ctx, cancel := context.WithTimeout(context.Background(), 24*time.Second)
defer cancel()


@ -4,10 +4,12 @@ go 1.19
require (
github.com/gin-gonic/gin v1.8.1
github.com/go-playground/validator/v10 v10.10.0
github.com/go-sql-driver/mysql v1.6.0
github.com/jmoiron/sqlx v1.3.5
github.com/mattn/go-sqlite3 v1.14.16
github.com/rs/zerolog v1.28.0
gogs.mikescher.com/BlackForestBytes/goext v0.0.56
gogs.mikescher.com/BlackForestBytes/goext v0.0.59
gopkg.in/loremipsum.v1 v1.1.0
)
@ -15,7 +17,6 @@ require (
github.com/gin-contrib/sse v0.1.0 // indirect
github.com/go-playground/locales v0.14.0 // indirect
github.com/go-playground/universal-translator v0.18.0 // indirect
github.com/go-playground/validator/v10 v10.10.0 // indirect
github.com/goccy/go-json v0.9.7 // indirect
github.com/google/go-cmp v0.5.9 // indirect
github.com/json-iterator/go v1.1.12 // indirect


@ -73,14 +73,14 @@ github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/
github.com/ugorji/go v1.2.7/go.mod h1:nF9osbDWLy6bDVv/Rtoh6QgnvNDpmCalQV5urGCCS6M=
github.com/ugorji/go/codec v1.2.7 h1:YPXUKf7fYbp/y8xloBqZOw2qaVggbfwMlI8WM3wZUJ0=
github.com/ugorji/go/codec v1.2.7/go.mod h1:WGN1fab3R1fzQlVQTkfxVtIBhWDRqOviHU95kRgeqEY=
gogs.mikescher.com/BlackForestBytes/goext v0.0.49 h1:Ro62ZyJW22elAJKT0XlY94LzAv0dVuiI2m0/Hp1xLgk=
gogs.mikescher.com/BlackForestBytes/goext v0.0.49/go.mod h1:ZEXyKUr8t0EKdPN1FYdk0klY7N8OwXxipGE9lWgpVE8=
gogs.mikescher.com/BlackForestBytes/goext v0.0.50 h1:WuhfxFVyywR7J4+hSTTW/wE87aFbGk7q22TGYusPg0s=
gogs.mikescher.com/BlackForestBytes/goext v0.0.50/go.mod h1:ZEXyKUr8t0EKdPN1FYdk0klY7N8OwXxipGE9lWgpVE8=
gogs.mikescher.com/BlackForestBytes/goext v0.0.55 h1:mzX/s+EBhnaRbiz3+6iwDJyJFS0F+jkbssiLDr9eJYY=
gogs.mikescher.com/BlackForestBytes/goext v0.0.55/go.mod h1:ZEXyKUr8t0EKdPN1FYdk0klY7N8OwXxipGE9lWgpVE8=
gogs.mikescher.com/BlackForestBytes/goext v0.0.56 h1:nl+2mP3BmkeB3kT6zFNXqYkOLc3JnFF3m8QwhxZJf2A=
gogs.mikescher.com/BlackForestBytes/goext v0.0.56/go.mod h1:ZEXyKUr8t0EKdPN1FYdk0klY7N8OwXxipGE9lWgpVE8=
gogs.mikescher.com/BlackForestBytes/goext v0.0.57 h1:R5M0Y+4kS6v5GtsXcHlDBYbcfenj1nOmAaNj4XQUous=
gogs.mikescher.com/BlackForestBytes/goext v0.0.57/go.mod h1:ZEXyKUr8t0EKdPN1FYdk0klY7N8OwXxipGE9lWgpVE8=
gogs.mikescher.com/BlackForestBytes/goext v0.0.58 h1:W53yfHhpFQS13zgtzCjfJQ42WG0OORa+kQWKrp+W73Q=
gogs.mikescher.com/BlackForestBytes/goext v0.0.58/go.mod h1:ZEXyKUr8t0EKdPN1FYdk0klY7N8OwXxipGE9lWgpVE8=
gogs.mikescher.com/BlackForestBytes/goext v0.0.59 h1:3bHSjqgty9yp0EIyqwGAb06ZS7bLvm806zRj6j+WOEE=
gogs.mikescher.com/BlackForestBytes/goext v0.0.59/go.mod h1:ZEXyKUr8t0EKdPN1FYdk0klY7N8OwXxipGE9lWgpVE8=
golang.org/x/crypto v0.0.0-20210711020723-a769d52b0f97/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
golang.org/x/crypto v0.4.0 h1:UVQgzMY87xqpKNgb+kDsll2Igd33HszWHFLmpaRMq/8=
golang.org/x/crypto v0.4.0/go.mod h1:3quD/ATkf6oY+rnes5c3ExXTbLc8mueNue5/DoinL80=


@ -12,8 +12,8 @@ import (
"github.com/gin-gonic/gin/binding"
"github.com/rs/zerolog/log"
"gogs.mikescher.com/BlackForestBytes/goext/langext"
"gogs.mikescher.com/BlackForestBytes/goext/rext"
"gogs.mikescher.com/BlackForestBytes/goext/syncext"
"math/rand"
"net"
"net/http"
"os"
@ -24,9 +24,9 @@ import (
"time"
)
var rexWhitespaceStart = regexp.MustCompile("^\\s+")
var rexWhitespaceEnd = regexp.MustCompile("\\s+$")
var rexWhitespaceStart = rext.W(regexp.MustCompile("^\\s+"))
var rexWhitespaceEnd = rext.W(regexp.MustCompile("\\s+$"))
var rexNormalizeUsername = rext.W(regexp.MustCompile("[^[:alnum:]\\-_ ]"))
type Application struct {
Config scn.Config
@ -154,12 +154,7 @@ func (app *Application) Run() {
}
func (app *Application) GenerateRandomAuthKey() string {
charset := "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz"
k := ""
for i := 0; i < 64; i++ {
k += string(charset[rand.Int()%len(charset)])
}
return k
return scn.RandomAuthKey()
}
func (app *Application) QuotaMax(ispro bool) int {
@ -171,6 +166,10 @@ func (app *Application) QuotaMax(ispro bool) int {
}
func (app *Application) VerifyProToken(ctx *AppContext, token string) (bool, error) {
if strings.HasPrefix(token, "ANDROID|v1|") {
subToken := token[len("ANDROID|v1|"):]
return app.VerifyAndroidProToken(ctx, subToken)
}
if strings.HasPrefix(token, "ANDROID|v2|") {
subToken := token[len("ANDROID|v2|"):]
return app.VerifyAndroidProToken(ctx, subToken)
@ -319,8 +318,8 @@ func (app *Application) GetOrCreateChannel(ctx *AppContext, userid models.UserID
func (app *Application) NormalizeChannelDisplayName(v string) string {
v = strings.TrimSpace(v)
v = rexWhitespaceStart.ReplaceAllString(v, "")
v = rexWhitespaceEnd.ReplaceAllString(v, "")
v = rexWhitespaceStart.RemoveAll(v)
v = rexWhitespaceEnd.RemoveAll(v)
return v
}
@ -328,17 +327,15 @@ func (app *Application) NormalizeChannelDisplayName(v string) string {
func (app *Application) NormalizeChannelInternalName(v string) string {
v = strings.TrimSpace(v)
v = strings.ToLower(v)
v = rexWhitespaceStart.ReplaceAllString(v, "")
v = rexWhitespaceEnd.ReplaceAllString(v, "")
v = rexWhitespaceStart.RemoveAll(v)
v = rexWhitespaceEnd.RemoveAll(v)
return v
}
func (app *Application) NormalizeUsername(v string) string {
rex := regexp.MustCompile("[^[:alnum:]\\-_ ]")
v = strings.TrimSpace(v)
v = rex.ReplaceAllString(v, "")
v = rexNormalizeUsername.RemoveAll(v)
return v
}


@ -7,6 +7,7 @@ import (
"github.com/go-playground/validator/v10"
"github.com/rs/zerolog/log"
"gogs.mikescher.com/BlackForestBytes/goext/langext"
"gogs.mikescher.com/BlackForestBytes/goext/rext"
"math/big"
"reflect"
"regexp"
@ -19,7 +20,7 @@ type EntityID interface {
Prefix() string
Raw() string
CheckString() string
Regex() *regexp.Regexp
Regex() rext.Regex
}
const idlen = 24
@ -51,8 +52,8 @@ var (
regexRequestID = generateRegex(prefixRequestID)
)
func generateRegex(prefix string) *regexp.Regexp {
return regexp.MustCompile(fmt.Sprintf("^%s[%s]{%d}[%s]{%d}$", prefix, idCharset, idlen-len(prefix)-checklen, idCharset, checklen))
func generateRegex(prefix string) rext.Regex {
return rext.W(regexp.MustCompile(fmt.Sprintf("^%s[%s]{%d}[%s]{%d}$", prefix, idCharset, idlen-len(prefix)-checklen, idCharset, checklen)))
}
func generateCharsetMap() []int {
@ -179,7 +180,7 @@ func (id UserID) CheckString() string {
return getCheckString(prefixUserID, string(id))
}
func (id UserID) Regex() *regexp.Regexp {
func (id UserID) Regex() rext.Regex {
return regexUserID
}
@ -211,7 +212,7 @@ func (id ChannelID) CheckString() string {
return getCheckString(prefixChannelID, string(id))
}
func (id ChannelID) Regex() *regexp.Regexp {
func (id ChannelID) Regex() rext.Regex {
return regexChannelID
}
@ -243,7 +244,7 @@ func (id DeliveryID) CheckString() string {
return getCheckString(prefixDeliveryID, string(id))
}
func (id DeliveryID) Regex() *regexp.Regexp {
func (id DeliveryID) Regex() rext.Regex {
return regexDeliveryID
}
@ -275,7 +276,7 @@ func (id MessageID) CheckString() string {
return getCheckString(prefixMessageID, string(id))
}
func (id MessageID) Regex() *regexp.Regexp {
func (id MessageID) Regex() rext.Regex {
return regexMessageID
}
@ -307,7 +308,7 @@ func (id SubscriptionID) CheckString() string {
return getCheckString(prefixSubscriptionID, string(id))
}
func (id SubscriptionID) Regex() *regexp.Regexp {
func (id SubscriptionID) Regex() rext.Regex {
return regexSubscriptionID
}
@ -339,7 +340,7 @@ func (id ClientID) CheckString() string {
return getCheckString(prefixClientID, string(id))
}
func (id ClientID) Regex() *regexp.Regexp {
func (id ClientID) Regex() rext.Regex {
return regexClientID
}
@ -371,6 +372,6 @@ func (id RequestID) CheckString() string {
return getCheckString(prefixRequestID, string(id))
}
func (id RequestID) Regex() *regexp.Regexp {
func (id RequestID) Regex() rext.Regex {
return regexRequestID
}


@ -2,6 +2,7 @@ package server
import (
"gogs.mikescher.com/BlackForestBytes/goext/timeext"
"math/rand"
"time"
)
@ -12,3 +13,12 @@ func QuotaDayString() string {
func NextDeliveryTimestamp(now time.Time) time.Time {
return now.Add(5 * time.Second)
}
func RandomAuthKey() string {
charset := "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz"
k := ""
for i := 0; i < 64; i++ {
k += string(charset[rand.Int()%len(charset)])
}
return k
}