Compare commits

...

1 Commits
master ... 9.3

Author SHA1 Message Date
Tamas Soltesz 41c22f589b
backport: logs to otel (#1163)
* backport: logs to otel

fix: add implementationDependencies.json dependencies

chore: build version and changelog

fix: add missing config and devConfig entries

* fix: for backport 9.3

* fix: add oauth provider for test runner

* fix: use the defined createdTime

---------

Co-authored-by: Sattvik Chakravarthy <sattvik@gmail.com>
2025-08-18 14:00:37 +05:30
50 changed files with 2150 additions and 990 deletions

View File

@ -1,57 +0,0 @@
# CI test image: Ubuntu 16.04 with MySQL server and OpenJDK 12 / 15.0.1
# (the CI scripts switch between the two JDKs with update-alternatives).
FROM ubuntu:16.04

# Avoid interactive debconf prompts during package installation.
ENV DEBIAN_FRONTEND=noninteractive

RUN apt-get update && apt-get upgrade -y
RUN apt-get install -y build-essential git-core wget jq curl unzip

# Pre-seed the MySQL root password so mysql-server installs non-interactively.
RUN echo "mysql-server mysql-server/root_password password root" | debconf-set-selections
RUN echo "mysql-server mysql-server/root_password_again password root" | debconf-set-selections
# apt-get rather than apt: apt's CLI is not stable for scripted use.
RUN apt-get install -y mysql-server
# Give the mysql user a home inside the data dir and pre-create the socket dir.
RUN usermod -d /var/lib/mysql/ mysql
RUN mkdir /var/run/mysqld

# Helper that fixes ownership and starts mysqld; invoked by the test scripts.
ADD ./runMySQL.sh /runMySQL.sh
RUN chmod +x /runMySQL.sh

# Install OpenJDK 12.0.2 and 15.0.1 side by side under /usr/java, removing
# the tarballs after extraction to keep the image smaller.
RUN mkdir /usr/java
RUN wget https://download.java.net/java/GA/jdk12.0.2/e482c34c86bd4bf8b56c0b35558996b9/10/GPL/openjdk-12.0.2_linux-x64_bin.tar.gz \
 && tar -xzf openjdk-12.0.2_linux-x64_bin.tar.gz -C /usr/java \
 && rm openjdk-12.0.2_linux-x64_bin.tar.gz
RUN wget https://download.java.net/java/GA/jdk15.0.1/51f4f36ad4ef43e39d0dfdbaf6549e32/9/GPL/openjdk-15.0.1_linux-x64_bin.tar.gz \
 && tar -xzf openjdk-15.0.1_linux-x64_bin.tar.gz -C /usr/java \
 && rm openjdk-15.0.1_linux-x64_bin.tar.gz

# Export both JDK locations via /etc/profile for login shells (the later
# JAVA_HOME/PATH lines win, matching the original layering order).
RUN echo 'JAVA_HOME=/usr/java/jdk-12.0.2' >> /etc/profile
RUN echo 'PATH=$PATH:$HOME/bin:$JAVA_HOME/bin' >> /etc/profile
RUN echo 'JAVA_HOME=/usr/java/jdk-15.0.1' >> /etc/profile
RUN echo 'PATH=$PATH:$HOME/bin:$JAVA_HOME/bin' >> /etc/profile
RUN echo 'export JAVA_HOME' >> /etc/profile
RUN echo 'export JRE_HOME' >> /etc/profile
RUN echo 'export PATH' >> /etc/profile

# Default java/javac to JDK 12 (priority 1); CI scripts register JDK 15
# with a higher priority when they need it.
RUN update-alternatives --install "/usr/bin/java" "java" "/usr/java/jdk-12.0.2/bin/java" 1
RUN update-alternatives --install "/usr/bin/javac" "javac" "/usr/java/jdk-12.0.2/bin/javac" 1

View File

@ -1,57 +0,0 @@
# CI test image: Ubuntu 18.04 with MySQL server and OpenJDK 12 / 15.0.1
# (the CI scripts switch between the two JDKs with update-alternatives).
FROM ubuntu:18.04

# Avoid interactive debconf prompts during package installation.
ENV DEBIAN_FRONTEND=noninteractive

RUN apt-get update && apt-get upgrade -y
RUN apt-get install -y build-essential git-core wget jq curl unzip

# Pre-seed the MySQL root password so mysql-server installs non-interactively.
RUN echo "mysql-server mysql-server/root_password password root" | debconf-set-selections
RUN echo "mysql-server mysql-server/root_password_again password root" | debconf-set-selections
# apt-get rather than apt: apt's CLI is not stable for scripted use.
RUN apt-get install -y mysql-server
# Give the mysql user a home inside the data dir and pre-create the socket dir.
RUN usermod -d /var/lib/mysql/ mysql
RUN mkdir /var/run/mysqld

# Helper that fixes ownership and starts mysqld; invoked by the test scripts.
ADD ./runMySQL.sh /runMySQL.sh
RUN chmod +x /runMySQL.sh

# Install OpenJDK 12.0.2 and 15.0.1 side by side under /usr/java, removing
# the tarballs after extraction to keep the image smaller.
RUN mkdir /usr/java
RUN wget https://download.java.net/java/GA/jdk12.0.2/e482c34c86bd4bf8b56c0b35558996b9/10/GPL/openjdk-12.0.2_linux-x64_bin.tar.gz \
 && tar -xzf openjdk-12.0.2_linux-x64_bin.tar.gz -C /usr/java \
 && rm openjdk-12.0.2_linux-x64_bin.tar.gz
RUN wget https://download.java.net/java/GA/jdk15.0.1/51f4f36ad4ef43e39d0dfdbaf6549e32/9/GPL/openjdk-15.0.1_linux-x64_bin.tar.gz \
 && tar -xzf openjdk-15.0.1_linux-x64_bin.tar.gz -C /usr/java \
 && rm openjdk-15.0.1_linux-x64_bin.tar.gz

# Export both JDK locations via /etc/profile for login shells (the later
# JAVA_HOME/PATH lines win, matching the original layering order).
RUN echo 'JAVA_HOME=/usr/java/jdk-12.0.2' >> /etc/profile
RUN echo 'PATH=$PATH:$HOME/bin:$JAVA_HOME/bin' >> /etc/profile
RUN echo 'JAVA_HOME=/usr/java/jdk-15.0.1' >> /etc/profile
RUN echo 'PATH=$PATH:$HOME/bin:$JAVA_HOME/bin' >> /etc/profile
RUN echo 'export JAVA_HOME' >> /etc/profile
RUN echo 'export JRE_HOME' >> /etc/profile
RUN echo 'export PATH' >> /etc/profile

# Default java/javac to JDK 12 (priority 1); CI scripts register JDK 15
# with a higher priority when they need it.
RUN update-alternatives --install "/usr/bin/java" "java" "/usr/java/jdk-12.0.2/bin/java" 1
RUN update-alternatives --install "/usr/bin/javac" "javac" "/usr/java/jdk-12.0.2/bin/javac" 1

View File

@ -1,57 +0,0 @@
# CI test image: Ubuntu 22.04 with MySQL server and OpenJDK 12 / 15.0.1
# (the CI scripts switch between the two JDKs with update-alternatives).
FROM ubuntu:22.04

# Avoid interactive debconf prompts during package installation.
ENV DEBIAN_FRONTEND=noninteractive

RUN apt-get update && apt-get upgrade -y
RUN apt-get install -y build-essential git-core wget jq curl unzip

# Pre-seed the MySQL root password so mysql-server installs non-interactively.
RUN echo "mysql-server mysql-server/root_password password root" | debconf-set-selections
RUN echo "mysql-server mysql-server/root_password_again password root" | debconf-set-selections
# apt-get rather than apt: apt's CLI is not stable for scripted use.
RUN apt-get install -y mysql-server
# Give the mysql user a home inside the data dir; on 22.04 the package may
# already create the socket dir, so guard the mkdir.
RUN usermod -d /var/lib/mysql/ mysql
RUN [ -d /var/run/mysqld ] || mkdir -p /var/run/mysqld

# Helper that fixes ownership and starts mysqld; invoked by the test scripts.
ADD ./runMySQL.sh /runMySQL.sh
RUN chmod +x /runMySQL.sh

# Install OpenJDK 12.0.2 and 15.0.1 side by side under /usr/java, removing
# the tarballs after extraction to keep the image smaller.
RUN mkdir /usr/java
RUN wget https://download.java.net/java/GA/jdk12.0.2/e482c34c86bd4bf8b56c0b35558996b9/10/GPL/openjdk-12.0.2_linux-x64_bin.tar.gz \
 && tar -xzf openjdk-12.0.2_linux-x64_bin.tar.gz -C /usr/java \
 && rm openjdk-12.0.2_linux-x64_bin.tar.gz
RUN wget https://download.java.net/java/GA/jdk15.0.1/51f4f36ad4ef43e39d0dfdbaf6549e32/9/GPL/openjdk-15.0.1_linux-x64_bin.tar.gz \
 && tar -xzf openjdk-15.0.1_linux-x64_bin.tar.gz -C /usr/java \
 && rm openjdk-15.0.1_linux-x64_bin.tar.gz

# Export both JDK locations via /etc/profile for login shells (the later
# JAVA_HOME/PATH lines win, matching the original layering order).
RUN echo 'JAVA_HOME=/usr/java/jdk-12.0.2' >> /etc/profile
RUN echo 'PATH=$PATH:$HOME/bin:$JAVA_HOME/bin' >> /etc/profile
RUN echo 'JAVA_HOME=/usr/java/jdk-15.0.1' >> /etc/profile
RUN echo 'PATH=$PATH:$HOME/bin:$JAVA_HOME/bin' >> /etc/profile
RUN echo 'export JAVA_HOME' >> /etc/profile
RUN echo 'export JRE_HOME' >> /etc/profile
RUN echo 'export PATH' >> /etc/profile

# Default java/javac to JDK 12 (priority 1); CI scripts register JDK 15
# with a higher priority when they need it.
RUN update-alternatives --install "/usr/bin/java" "java" "/usr/java/jdk-12.0.2/bin/java" 1
RUN update-alternatives --install "/usr/bin/javac" "javac" "/usr/java/jdk-12.0.2/bin/javac" 1

View File

@ -1,95 +0,0 @@
# CircleCI pipeline for supertokens-core: runs the test suite once per DB
# plugin (sqlite/mongodb/postgresql/mysql) and marks the release as passed
# when all four succeed. As presented the file had its indentation stripped
# (invalid YAML); structure reconstructed to canonical CircleCI 2.1 layout.
version: 2.1
orbs:
  slack: circleci/slack@3.4.2
jobs:
  test:
    docker:
      - image: rishabhpoddar/supertokens_core_testing
      - image: rishabhpoddar/oauth-server-cicd
      - image: mongo
        environment:
          MONGO_INITDB_ROOT_USERNAME: root
          MONGO_INITDB_ROOT_PASSWORD: root
    resource_class: large
    parallelism: 4
    parameters:
      plugin:
        type: string
    steps:
      - checkout
      - run: mkdir ~/junit
      # Raise MySQL and Postgres connection limits for the parallel test runs.
      - run: echo $'\n[mysqld]\ncharacter_set_server=utf8mb4\nmax_connections=10000' >> /etc/mysql/mysql.cnf
      - run: apt-get update && apt-get -y -q install postgresql-9.5 postgresql-client-9.5 postgresql-contrib-9.5 sudo
      - run: echo "host all all 0.0.0.0/0 md5" >> /etc/postgresql/9.5/main/pg_hba.conf
      - run: echo "listen_addresses='*'" >> /etc/postgresql/9.5/main/postgresql.conf
      - run: sed -i 's/^#*\s*max_connections\s*=.*/max_connections = 10000/' /etc/postgresql/9.5/main/postgresql.conf
      - run: (cd .circleci/ && ./doTests.sh << parameters.plugin >>)
      - store_test_results:
          path: ~/junit
      - slack/status
  mark-passed:
    docker:
      - image: rishabhpoddar/supertokens_core_testing
    steps:
      - checkout
      - run: (cd .circleci && ./markPassed.sh)
      - slack/status
workflows:
  version: 2
  tagged-build:
    jobs:
      - test:
          plugin: sqlite
          name: test-sqlite
          context:
            - slack-notification
          filters:
            tags:
              only: /dev-v[0-9]+(\.[0-9]+)*/
            branches:
              only: /test-cicd\/.*/
      - test:
          plugin: mongodb
          name: test-mongodb
          context:
            - slack-notification
          filters:
            tags:
              only: /dev-v[0-9]+(\.[0-9]+)*/
            branches:
              only: /test-cicd\/.*/
      - test:
          plugin: postgresql
          name: test-postgresql
          context:
            - slack-notification
          filters:
            tags:
              only: /dev-v[0-9]+(\.[0-9]+)*/
            branches:
              only: /test-cicd\/.*/
      - test:
          plugin: mysql
          name: test-mysql
          context:
            - slack-notification
          filters:
            tags:
              only: /dev-v[0-9]+(\.[0-9]+)*/
            branches:
              only: /test-cicd\/.*/
      # Only runs on dev tags (branches ignored) after every test job passes.
      - mark-passed:
          context:
            - slack-notification
          filters:
            tags:
              only: /dev-v[0-9]+(\.[0-9]+)*/
            branches:
              ignore: /.*/
          requires:
            - test-sqlite
            - test-mongodb
            - test-postgresql
            - test-mysql

View File

@ -1,260 +0,0 @@
# doTests.sh <plugin-name>
# CI driver: for every supported plugin-interface version, clone
# supertokens-root, assemble core + plugin-interface (+ the matching DB
# plugin unless testing sqlite), run the test suite, then smoke-test the
# packaged CLI build. Requires SUPERTOKENS_API_KEY in the environment.
# Run from inside .circleci/ (paths like ../build.gradle assume that).

# Remove the temp file produced by getPluginInterfaceExactVersions.sh.
function cleanup {
if test -f "pluginInterfaceExactVersionsOutput"; then
rm pluginInterfaceExactVersionsOutput
fi
}
trap cleanup EXIT
cleanup
pluginToTest=$1
# Fetch the list of DB plugins pinned for the FREE plan from the API.
pinnedDBJson=$(curl -s -X GET \
'https://api.supertokens.io/0/plugin/pinned?planType=FREE' \
-H 'api-version: 0')
pinnedDBLength=$(echo "$pinnedDBJson" | jq ".plugins | length")
pinnedDBArray=$(echo "$pinnedDBJson" | jq ".plugins")
echo "got pinned dbs..."
pluginInterfaceJson=$(cat ../pluginInterfaceSupported.json)
pluginInterfaceLength=$(echo "$pluginInterfaceJson" | jq ".versions | length")
pluginInterfaceArray=$(echo "$pluginInterfaceJson" | jq ".versions")
echo "got plugin interface relations"
coreDriverJson=$(cat ../coreDriverInterfaceSupported.json)
coreDriverArray=$(echo "$coreDriverJson" | jq ".versions")
echo "got core driver relations"
# Resolve each supported plugin-interface X.Y to an exact tag/version; the
# results land in ./pluginInterfaceExactVersionsOutput, one JSON per line.
./getPluginInterfaceExactVersions.sh "$pluginInterfaceLength" "$pluginInterfaceArray"
if [[ $? -ne 0 ]]
then
echo "all plugin interfaces found... failed. exiting!"
exit 1
else
echo "all plugin interfaces found..."
fi
# get core version
coreVersion=$(cat ../build.gradle | grep -e "version =" -e "version=")
# Extract the value between the first pair of double quotes on that line.
while IFS='"' read -ra ADDR; do
counter=0
for i in "${ADDR[@]}"; do
if [ $counter == 1 ]
then
coreVersion=$i
fi
counter=$(($counter+1))
done
done <<< "$coreVersion"
# Register this core version and its supported interfaces with the API.
responseStatus=$(curl -s -o /dev/null -w "%{http_code}" -X PUT \
https://api.supertokens.io/0/core \
-H 'Content-Type: application/json' \
-H 'api-version: 0' \
-d "{
\"password\": \"$SUPERTOKENS_API_KEY\",
\"planType\":\"FREE\",
\"version\":\"$coreVersion\",
\"pluginInterfaces\": $pluginInterfaceArray,
\"coreDriverInterfaces\": $coreDriverArray
}")
if [ "$responseStatus" -ne "200" ]
then
echo "failed core PUT API status code: $responseStatus. Exiting!"
exit 1
fi
mkdir -p ~/junit
someTestsRan=false
# Read one resolved plugin-interface version per line on fd 10 so that
# commands inside the loop cannot consume the loop's input.
while read -u 10 line
do
if [[ $line = "" ]]; then
continue
fi
i=0
currTag=$(echo "$line" | jq .tag)
currTag=$(echo "$currTag" | tr -d '"')
currVersion=$(echo "$line" | jq .version)
currVersion=$(echo "$currVersion" | tr -d '"')
# X.Y prefix of the plugin-interface version.
piX=$(cut -d'.' -f1 <<<"$currVersion")
piY=$(cut -d'.' -f2 <<<"$currVersion")
piVersion="$piX.$piY"
# Iterate over all pinned DBs but only act on the one this shard tests.
while [ $i -lt "$pinnedDBLength" ]; do
someTestsRan=true
currPinnedDb=$(echo "$pinnedDBArray" | jq ".[$i]")
currPinnedDb=$(echo "$currPinnedDb" | tr -d '"')
i=$((i+1))
if [[ $currPinnedDb == $pluginToTest ]]
then
echo ""
echo ""
echo ""
echo ""
echo ""
echo "===== testing $currPinnedDb with plugin-interface $currVersion ====="
echo ""
echo ""
echo ""
echo ""
echo ""
# sqlite ships in-memory with the core: no external plugin to resolve
# and no DB server to start.
if [[ $currPinnedDb == "sqlite" ]]
then
# shellcheck disable=SC2034
continue=1
else
# Resolve the plugin X.Y compatible with this plugin-interface version.
response=$(curl -s -X GET \
"https://api.supertokens.io/0/plugin-interface/dependency/plugin/latest?password=$SUPERTOKENS_API_KEY&planType=FREE&mode=DEV&version=$piVersion&pluginName=$currPinnedDb" \
-H 'api-version: 0')
if [[ $(echo "$response" | jq .plugin) == "null" ]]
then
echo "fetching latest X.Y version for $currPinnedDb given plugin-interface X.Y version: $piVersion gave response: $response"
exit 1
fi
pinnedDbVersionX2=$(echo $response | jq .plugin | tr -d '"')
# Then resolve that X.Y to the exact latest X.Y.Z release and its git tag.
response=$(curl -s -X GET \
"https://api.supertokens.io/0/plugin/latest?password=$SUPERTOKENS_API_KEY&planType=FREE&mode=DEV&version=$pinnedDbVersionX2&name=$currPinnedDb" \
-H 'api-version: 0')
if [[ $(echo "$response" | jq .tag) == "null" ]]
then
echo "fetching latest X.Y.Z version for $currPinnedDb, X.Y version: $pinnedDbVersionX2 gave response: $response"
exit 1
fi
pinnedDbVersionTag=$(echo "$response" | jq .tag | tr -d '"')
pinnedDbVersion=$(echo "$response" | jq .version | tr -d '"')
./startDb.sh "$currPinnedDb"
fi
# Assemble a fresh supertokens-root workspace two levels up.
cd ../../
git clone git@github.com:supertokens/supertokens-root.git
cd supertokens-root
rm gradle.properties
# The build needs JDK 15; priority 2 outranks the image default (JDK 12).
update-alternatives --install "/usr/bin/java" "java" "/usr/java/jdk-15.0.1/bin/java" 2
update-alternatives --install "/usr/bin/javac" "javac" "/usr/java/jdk-15.0.1/bin/javac" 2
coreX=$(cut -d'.' -f1 <<<"$coreVersion")
coreY=$(cut -d'.' -f2 <<<"$coreVersion")
# modules.txt tells loadModules which repos/branches to assemble.
if [[ $currPinnedDb == "sqlite" ]]
then
echo -e "core,$coreX.$coreY\nplugin-interface,$piVersion" > modules.txt
else
echo -e "core,$coreX.$coreY\nplugin-interface,$piVersion\n$currPinnedDb-plugin,$pinnedDbVersionX2" > modules.txt
fi
./loadModules
# Pin each checked-out module to the exact version under test.
cd supertokens-core
git checkout dev-v$coreVersion
cd ../supertokens-plugin-interface
git checkout $currTag
if [[ $currPinnedDb == "sqlite" ]]
then
# shellcheck disable=SC2034
continue=1
else
cd ../supertokens-$currPinnedDb-plugin
git checkout $pinnedDbVersionTag
fi
cd ../
echo $SUPERTOKENS_API_KEY > apiPassword
# Run the full test suite; keep the exit code so results are copied first.
./startTestingEnv --cicd
TEST_EXIT_CODE=$?
# Copy JUnit XML results out before judging the exit code, so CircleCI can
# display failures too.
if [ -d ~/junit ]
then
echo "Copying output from core"
cp ~/supertokens-root/supertokens-core/build/test-results/test/*.xml ~/junit/
if [[ $pluginToTest != "sqlite" ]]
then
echo "Copying output from plugin"
cp ~/supertokens-root/supertokens-$pluginToTest-plugin/build/test-results/test/*.xml ~/junit/
fi
fi
if [[ $TEST_EXIT_CODE -ne 0 ]]
then
echo ""
echo ""
echo ""
echo ""
echo ""
echo "===== testing $currPinnedDb with plugin-interface $currVersion FAILED ====="
echo ""
echo ""
echo ""
echo ""
echo ""
cat logs/*
cd ../project/
echo "test failed... exiting!"
exit 1
fi
echo ""
echo ""
echo ""
echo ""
echo ""
echo "===== testing $currPinnedDb with plugin-interface $currVersion SUCCEEDED ====="
echo ""
echo ""
echo ""
echo ""
echo ""
cd ..
rm -rf supertokens-root
# For real DBs, also download the packaged app build and smoke-test the CLI.
if [[ $currPinnedDb == "sqlite" ]]
then
# shellcheck disable=SC2034
continue=1
else
curl -o supertokens.zip -s -X GET \
"https://api.supertokens.io/0/app/download?pluginName=$currPinnedDb&os=linux&mode=DEV&binary=FREE&targetCore=$coreVersion&targetPlugin=$pinnedDbVersion" \
-H 'api-version: 0'
unzip supertokens.zip -d .
rm supertokens.zip
cd supertokens
../project/.circleci/testCli.sh
if [[ $? -ne 0 ]]
then
echo "cli testing failed... exiting!"
exit 1
fi
cd ../
fi
rm -rf supertokens
cd project/.circleci
# Stop the DB server that startDb.sh brought up (nothing to stop for sqlite).
if [[ $currPinnedDb == "sqlite" ]]
then
# shellcheck disable=SC2034
continue=1
else
./stopDb.sh $currPinnedDb
fi
fi
done
done 10<pluginInterfaceExactVersionsOutput
# Guard against an empty pinned-DB list silently passing the build.
if [[ $someTestsRan = "true" ]]
then
echo "tests ran successfully"
else
echo "no test ran"
exit 1
fi

View File

@ -1,19 +0,0 @@
# args: <length of array> <array like ["0.0", "0.1"]>
# For each supported plugin-interface X.Y version, ask the supertokens API for
# the newest matching exact release (dev-vX.Y.* / vX.Y.*) and append the JSON
# response to ./pluginInterfaceExactVersionsOutput, one response per line.
# Exits non-zero if any version cannot be resolved.
touch pluginInterfaceExactVersionsOutput
i=0
while [ "$i" -lt "$1" ]; do
    # $(...) instead of legacy backticks; quote expansions to survive spaces.
    currVersion=$(echo "$2" | jq ".[$i]")
    currVersion=$(echo "$currVersion" | tr -d '"')
    i=$((i+1))
    # now we have the current version like 0.0.
    # We now have to find something that matches dev-v0.0.* or v0.0.*
    response=$(curl -s -X GET \
    "https://api.supertokens.io/0/plugin-interface/latest?password=$SUPERTOKENS_API_KEY&planType=FREE&mode=DEV&version=$currVersion" \
    -H 'api-version: 0')
    if [[ $(echo "$response" | jq .tag) == "null" ]]
    then
        echo "$response"
        exit 1
    fi
    # Deliberately unquoted: word splitting flattens any newlines in the JSON
    # so each response occupies exactly one line (doTests.sh reads line-wise).
    echo $response >> pluginInterfaceExactVersionsOutput
done

View File

@ -1,29 +0,0 @@
# Mark the current core version as having passed testing by calling the
# supertokens /core PATCH API. The version is the text between the first pair
# of double quotes on the "version" line of ../build.gradle.
coreVersion=$(grep -e "version =" -e "version=" ../build.gradle)
# Split the matched line on double quotes; field index 1 is the version.
while IFS='"' read -ra fields; do
    idx=0
    for field in "${fields[@]}"; do
        if [ $idx == 1 ]
        then
            coreVersion=$field
        fi
        idx=$(($idx+1))
    done
done <<< "$coreVersion"
echo "calling /core PATCH to make testing passed"
responseStatus=$(curl -s -o /dev/null -w "%{http_code}" -X PATCH \
https://api.supertokens.io/0/core \
-H 'Content-Type: application/json' \
-H 'api-version: 0' \
-d "{
\"password\": \"$SUPERTOKENS_API_KEY\",
\"planType\":\"FREE\",
\"version\":\"$coreVersion\",
\"testPassed\": true
}")
# Anything other than HTTP 200 fails the job.
if [ "$responseStatus" -ne "200" ]
then
    echo "patch api failed"
    exit 1
fi

View File

@ -1 +0,0 @@
# Fix ownership of the MySQL data and socket directories (the image changes
# the mysql user's home to /var/lib/mysql/), then start the server.
chown -R mysql:mysql /var/lib/mysql /var/run/mysqld && service mysql start

View File

@ -1,113 +0,0 @@
# startDb.sh <mysql|postgresql>
# Start the requested database server and create the "supertokens" database
# plus st0..st50, the per-worker databases used by the parallel test suites.
# The 102 copy-pasted CREATE DATABASE lines are replaced by loops that issue
# the exact same statements.
case $1 in
mysql)
    (cd / && ./runMySQL.sh)
    mysql -u root --password=root -e "CREATE DATABASE supertokens;"
    # One database per parallel test worker (st0..st50).
    for n in $(seq 0 50); do
        mysql -u root --password=root -e "CREATE DATABASE st$n;"
    done
    ;;
postgresql)
    /etc/init.d/postgresql start
    # Create a superuser matching the unix user the tests connect as.
    sudo -u postgres psql --command "CREATE USER root WITH SUPERUSER PASSWORD 'root';"
    createdb
    psql -c "create database supertokens;"
    # One database per parallel test worker (st0..st50).
    for n in $(seq 0 50); do
        psql -c "create database st$n;"
    done
esac

View File

@ -1,8 +0,0 @@
# stopDb.sh <mysql|postgresql>
# Stop the database service that startDb.sh brought up; unknown arguments
# (including sqlite, which has no server) are a no-op.
if [ "$1" = "mysql" ]; then
    service mysql stop
elif [ "$1" = "postgresql" ]; then
    service postgresql stop
fi

View File

@ -1,72 +0,0 @@
# inside supertokens downloaded zip
# Smoke-test the CLI of a downloaded supertokens release: install it, start an
# instance, reconfigure the DB credentials, start a second instance, hit both
# /hello endpoints, then stop and uninstall. Any checked step failing aborts
# with the standard message.

# Exit with the standard CLI-test failure message when $1 (an exit code)
# is non-zero. Replaces eight identical copy-pasted if-blocks.
failIfError () {
    if [[ $1 -ne 0 ]]
    then
        echo "cli testing failed... exiting!"
        exit 1
    fi
}

./install
failIfError $?
supertokens start --port=8888
failIfError $?
supertokens list
failIfError $?
# Point the default config at the CI database containers and disable telemetry.
sed -i 's/# mysql_user:/mysql_user: root/g' /usr/lib/supertokens/config.yaml
sed -i 's/# mysql_password:/mysql_password: root/g' /usr/lib/supertokens/config.yaml
sed -i 's/# mongodb_connection_uri:/mongodb_connection_uri: mongodb:\/\/root:root@localhost:27017/g' /usr/lib/supertokens/config.yaml
sed -i 's/# disable_telemetry:/disable_telemetry: true/g' /usr/lib/supertokens/config.yaml
# NOTE: the second start's exit code is deliberately not checked (matches the
# original behaviour); the following "list" verifies the process state instead.
supertokens start --port=8889
supertokens list
failIfError $?
curl http://localhost:8889/hello
failIfError $?
curl http://localhost:8888/hello
failIfError $?
supertokens stop
failIfError $?
supertokens uninstall
failIfError $?

View File

@ -0,0 +1,56 @@
import json
import os
import sys
import http.client


def register_core_version(supertokens_api_key, core_version, plugin_interface_array, core_driver_array):
    """Register a core version with the supertokens API via PUT /0/core.

    Exits the process with status 1 when the API does not return HTTP 200.
    The connection is closed even on the failure path.
    """
    print("Core Version: ", core_version)
    print("Plugin Interface Array: ", plugin_interface_array)
    print("Core Driver Array: ", core_driver_array)
    conn = http.client.HTTPSConnection("api.supertokens.io")
    try:
        payload = {
            "password": supertokens_api_key,
            "planType": "FREE",
            "version": core_version,
            "pluginInterfaces": plugin_interface_array,
            "coreDriverInterfaces": core_driver_array
        }
        headers = {
            'Content-Type': 'application/json',
            'api-version': '0'
        }
        conn.request("PUT", "/0/core", json.dumps(payload), headers)
        response = conn.getresponse()
        if response.status != 200:
            print(f"failed core PUT API status code: {response.status}. Exiting!")
            # sys.exit instead of the builtin exit(), which is only meant
            # for interactive sessions.
            sys.exit(1)
    finally:
        conn.close()


def read_core_version():
    """Return the version string from ./build.gradle (strips quotes).

    Raises an Exception when no "version =" line is found.
    """
    with open('build.gradle', 'r') as file:
        for line in file:
            if 'version =' in line:
                return line.split('=')[1].strip().strip("'\"")
    raise Exception("Could not find version in build.gradle")


def main():
    core_version = read_core_version()
    with open('pluginInterfaceSupported.json', 'r') as fd:
        plugin_interface_array = json.load(fd)['versions']
    with open('coreDriverInterfaceSupported.json', 'r') as fd:
        core_driver_array = json.load(fd)['versions']
    register_core_version(
        supertokens_api_key=os.environ.get("SUPERTOKENS_API_KEY"),
        core_version=core_version,
        plugin_interface_array=plugin_interface_array,
        core_driver_array=core_driver_array,
    )


# Guarded so importing the module (e.g. for tests) has no side effects;
# CI invokes this file directly, which is unchanged.
if __name__ == '__main__':
    main()

View File

@ -0,0 +1,68 @@
import json
import os
import subprocess
import sys
import http.client


def register_plugin_version(supertokens_api_key, plugin_version, plugin_interface_array, plugin_name):
    """Register a plugin version with the supertokens API via PUT /0/plugin.

    Exits the process with status 1 (after printing the response body) when
    the API does not return HTTP 200. The connection is closed either way.
    """
    print("Plugin Version: ", plugin_version)
    print("Plugin Interface Array: ", plugin_interface_array)
    print("Plugin Name: ", plugin_name)
    conn = http.client.HTTPSConnection("api.supertokens.io")
    try:
        payload = {
            "password": supertokens_api_key,
            "planType": "FREE",
            "version": plugin_version,
            "pluginInterfaces": plugin_interface_array,
            "name": plugin_name
        }
        headers = {
            'Content-Type': 'application/json',
            'api-version': '0'
        }
        conn.request("PUT", "/0/plugin", json.dumps(payload), headers)
        response = conn.getresponse()
        if response.status != 200:
            print(f"failed plugin PUT API status code: {response.status}. Exiting!")
            print(f"response: {str(response.read())}")
            sys.exit(1)
    finally:
        conn.close()


def read_plugin_version():
    """Return the version string from ./build.gradle (strips quotes).

    Raises an Exception when no "version =" line is found.
    """
    with open('build.gradle', 'r') as file:
        for line in file:
            if 'version =' in line:
                return line.split('=')[1].strip().strip("'\"")
    raise Exception("Could not find version in build.gradle")


def check_if_tag_exists(tag):
    """Return True when the local git repo has the given tag."""
    try:
        result = subprocess.run(['git', 'tag', '-l', tag], capture_output=True, text=True)
        return tag in result.stdout
    except subprocess.CalledProcessError:
        print(f"Error checking for tag {tag}")
        return False


def main():
    plugin_version = read_plugin_version()
    with open('pluginInterfaceSupported.json', 'r') as fd:
        plugin_interface_array = json.load(fd)['versions']
    # Only register versions that have actually been dev-tagged; exiting 0
    # keeps the CI job green for untagged builds (original behaviour).
    dev_tag = f"dev-v{plugin_version}"
    if not check_if_tag_exists(dev_tag):
        print(f"Tag {dev_tag} does not exist. Exiting!")
        sys.exit(0)
    register_plugin_version(
        supertokens_api_key=os.environ.get("SUPERTOKENS_API_KEY"),
        plugin_version=plugin_version,
        plugin_interface_array=plugin_interface_array,
        plugin_name=os.environ.get("PLUGIN_NAME")
    )


# Guarded so importing the module (e.g. for tests) has no side effects;
# CI invokes this file directly, which is unchanged.
if __name__ == '__main__':
    main()

39
.github/helpers/release-docker.sh vendored Normal file
View File

@ -0,0 +1,39 @@
#!/bin/bash
# Republish a multi-arch Docker image: pull each per-platform variant of
# <source-image:tag>, retag and push it as <target-image:tag>-<arch>, then
# assemble and push a multi-arch manifest under <target-image:tag>.
set -e

# Check for required arguments
if [ "$#" -ne 2 ]; then
    echo "Usage: $0 <source-image:tag> <target-image:tag>"
    exit 1
fi

SOURCE_IMAGE="$1"
TARGET_IMAGE="$2"

# Platforms to support
PLATFORMS=("linux/amd64" "linux/arm64")
TEMP_IMAGES=()

# Pull, retag, and push platform-specific images.
for PLATFORM in "${PLATFORMS[@]}"; do
    # Quote all expansions so the script is robust under shellcheck rules.
    ARCH=$(echo "$PLATFORM" | cut -d'/' -f2)
    TEMP_TAG="${TARGET_IMAGE}-${ARCH}"
    TEMP_IMAGES+=("$TEMP_TAG")

    echo "Pulling $SOURCE_IMAGE for $PLATFORM..."
    docker pull --platform "$PLATFORM" "$SOURCE_IMAGE"

    echo "Tagging as $TEMP_TAG..."
    docker tag "$SOURCE_IMAGE" "$TEMP_TAG"

    echo "Pushing $TEMP_TAG..."
    docker push "$TEMP_TAG"
done

# Create and push manifest for multi-arch image
echo "Creating and pushing multi-arch manifest for $TARGET_IMAGE..."
docker manifest create "$TARGET_IMAGE" "${TEMP_IMAGES[@]}"
docker manifest push "$TARGET_IMAGE"

echo "✅ Multi-arch image pushed as $TARGET_IMAGE"

55
.github/helpers/wait-for-docker.py vendored Normal file
View File

@ -0,0 +1,55 @@
import http.client
import json
import time
import os
import sys

# Repository whose workflow runs we poll.
REPO = "supertokens/supertokens-core"
# Commit SHA and workflow name identifying the run to wait for.
SHA = os.environ.get("GITHUB_SHA")
NAME = os.environ.get("WORKFLOW_NAME", "Publish Dev Docker Image")

# Overall deadline (seconds) for the docker build to finish.
TIMEOUT_SECONDS = 600
st = time.time()


def get_latest_actions():
    """Return True when the matching workflow run has completed successfully.

    Returns False while the run is still queued/in progress. Exits the
    process when the GitHub API request fails, when no run matches
    SHA + NAME, or when the run completed unsuccessfully.
    """
    conn = http.client.HTTPSConnection("api.github.com")
    try:
        url = f"/repos/{REPO}/actions/runs"
        headers = {"User-Agent": "Python-http.client"}
        conn.request("GET", url, headers=headers)
        response = conn.getresponse()
        if response.status != 200:
            print(f"Failed to fetch workflow runs: {response.status} {response.reason}")
            sys.exit(1)
        runs = json.loads(response.read())['workflow_runs']
    finally:
        conn.close()
    # Find the run for our commit and workflow name.
    run = next((r for r in runs if r['head_sha'] == SHA and r['name'] == NAME), None)
    if run is None:
        print("No matching workflow run found.")
        sys.exit(1)
    if run["status"] == "completed":
        if run["conclusion"] == "success":
            print("Workflow completed successfully.")
            return True
        print(f"Workflow failed with conclusion: {run['conclusion']}")
        sys.exit(1)
    # Still queued or in progress. (The original had an unreachable
    # time.sleep(30) after its return; the poll interval lives in main().)
    return False


def main():
    while not get_latest_actions():
        print("Waiting for the latest actions to complete...")
        time.sleep(10)
        if time.time() - st > TIMEOUT_SECONDS:
            print("Timed out waiting for the latest actions.")
            sys.exit(1)


if __name__ == '__main__':
    main()

107
.github/workflows/add-dev-tag.yml vendored Normal file
View File

@ -0,0 +1,107 @@
# Manually-dispatched workflow that adds dev-v* git tags to core,
# plugin-interface and the postgresql plugin ahead of a release.
# As presented the file had its indentation stripped (invalid YAML);
# structure reconstructed. Also fixes: postgresql-plugin-version was the
# only input missing an explicit `type` field.
name: Add dev tags for release

on:
  workflow_dispatch:
    inputs:
      core-version:
        description: 'Core version'
        required: true
        type: string
      plugin-interface-version:
        description: 'Plugin interface version'
        required: true
        type: string
      new-release-for-plugin-interface:
        description: 'New release for plugin interface'
        required: true
        type: boolean
      postgresql-plugin-version:
        description: 'Postgres plugin version'
        required: true
        type: string
      new-release-for-postgresql-plugin:
        description: 'New release for postgres plugin'
        required: true
        type: boolean

jobs:
  # Resolves which branch of each dependent repo matches the requested versions.
  dependency-branches:
    name: Dependency Branches
    environment: publish
    runs-on: ubuntu-latest
    outputs:
      branches: ${{ steps.result.outputs.branches }}
    steps:
      - uses: actions/checkout@v4
      - uses: supertokens/get-core-dependencies-action@main
        id: result
        with:
          run-for: add-dev-tag
          core-version: ${{ github.event.inputs.core-version }}
          plugin-interface-version: ${{ github.event.inputs.plugin-interface-version }}
          postgresql-plugin-version: ${{ github.event.inputs.postgresql-plugin-version }}

  add-dev-tag:
    environment: publish
    runs-on: ubuntu-latest
    needs: dependency-branches
    steps:
      - name: Set up JDK 15.0.1
        uses: actions/setup-java@v2
        with:
          java-version: 15.0.1
          distribution: zulu
      - uses: actions/checkout@v2
        with:
          repository: supertokens/supertokens-root
          path: ./supertokens-root
          ref: for_jdk_15_releases
      - name: Checkout supertokens-core
        run: |
          cd supertokens-root
          git clone https://${{ secrets.GH_TOKEN }}@github.com/supertokens/supertokens-core.git
          cd supertokens-core
          git checkout ${{ fromJson(needs.dependency-branches.outputs.branches)['core'] }}
      - name: Checkout supertokens-plugin-interface
        run: |
          cd supertokens-root
          git clone https://${{ secrets.GH_TOKEN }}@github.com/supertokens/supertokens-plugin-interface.git
          cd supertokens-plugin-interface
          git checkout ${{ fromJson(needs.dependency-branches.outputs.branches)['plugin-interface'] }}
      - name: Checkout supertokens-postgresql-plugin
        run: |
          cd supertokens-root
          git clone https://${{ secrets.GH_TOKEN }}@github.com/supertokens/supertokens-postgresql-plugin.git
          cd supertokens-postgresql-plugin
          git checkout ${{ fromJson(needs.dependency-branches.outputs.branches)['postgresql'] }}
      - name: Load Modules
        run: |
          cd supertokens-root
          echo "core,master
          plugin-interface,master
          postgresql-plugin,master
          " > modules.txt
          cat modules.txt
          ./loadModules
      - name: Setup test env
        run: cd supertokens-root && ./utils/setupTestEnv --local
      - name: Git config
        run: |
          git config --global user.name "Supertokens Bot"
          git config --global user.email "<>"
      - name: Add dev tag to plugin interface
        if: ${{ github.event.inputs.new-release-for-plugin-interface == 'true' }}
        run: |
          echo "Adding dev tag to plugin interface"
          cd supertokens-root/supertokens-plugin-interface
          ./addDevTag
      - name: Add dev tag to postgres plugin
        if: ${{ github.event.inputs.new-release-for-postgresql-plugin == 'true' }}
        run: |
          echo "Adding dev tag to postgres plugin"
          cd supertokens-root/supertokens-postgresql-plugin
          ./addDevTag
      - name: Add dev tag to core
        run: |
          echo "Adding dev tag to core"
          cd supertokens-root/supertokens-core
          ./addDevTag

153
.github/workflows/dev-tag.yml vendored Normal file
View File

@ -0,0 +1,153 @@
# Runs release checks whenever a release branch (e.g. "9.3") or a dev-* tag is
# pushed: registers new core/plugin versions with the SuperTokens API, runs the
# unit and stress tests, then marks the versions as having passed testing.
name: Checks for release
on:
  push:
    branches:
      - '[0-9]+.[0-9]+'
    tags:
      - 'dev-*'
jobs:
  # Resolves the dependency versions/branches (core, plugin-interface, plugins)
  # for this commit; downstream jobs read them via fromJson(...).
  dependency-versions:
    name: Dependency Versions
    runs-on: ubuntu-latest
    outputs:
      versions: ${{ steps.result.outputs.versions }}
      branches: ${{ steps.result.outputs.branches }}
    steps:
      - uses: actions/checkout@v4
      - uses: supertokens/get-core-dependencies-action@main
        with:
          run-for: PR
        id: result
  # Registers the new core version with the SuperTokens API.
  new-core-version:
    environment: publish
    name: New core version
    runs-on: ubuntu-latest
    needs: [dependency-versions]
    steps:
      - name: Checkout
        uses: actions/checkout@v4
      - name: Set up Python 3.11
        uses: actions/setup-python@v4
        with:
          python-version: '3.11'
      - name: Run script
        env:
          SUPERTOKENS_API_KEY: ${{ secrets.SUPERTOKENS_API_KEY }}
        run: |
          python .github/helpers/register-new-core-version.py
  # Registers a new version for each supported database plugin.
  new-plugin-versions:
    environment: publish
    name: New plugin versions
    runs-on: ubuntu-latest
    needs: [dependency-versions]
    strategy:
      fail-fast: false
      matrix:
        plugin:
          - postgresql
          # no longer supported
          # - mysql
          # - mongodb
    steps:
      - name: Checkout
        uses: actions/checkout@v4
      - name: Checkout
        uses: actions/checkout@v4
        with:
          path: ./supertokens-plugin
          repository: supertokens/supertokens-${{ matrix.plugin }}-plugin
          ref: ${{ fromJson(needs.dependency-versions.outputs.branches)[matrix.plugin] }}
          # full history + tags so the registration script can inspect versions
          fetch-depth: 0
          fetch-tags: true
      - name: Run script
        env:
          SUPERTOKENS_API_KEY: ${{ secrets.SUPERTOKENS_API_KEY }}
          PLUGIN_NAME: ${{ matrix.plugin }}
        run: |
          cd supertokens-plugin
          python ../.github/helpers/register-new-plugin-version.py
  unit-tests:
    name: Run unit tests
    needs: [new-core-version, new-plugin-versions]
    uses: ./.github/workflows/unit-test.yml
  # Blocks until the Docker image for this SHA has been built, then derives the
  # image tag from the git ref.
  wait-for-docker:
    name: Wait for Docker
    runs-on: ubuntu-latest
    needs: [new-core-version, new-plugin-versions]
    outputs:
      tag: ${{ steps.set_tag.outputs.TAG }}
    steps:
      - uses: actions/setup-python@v4
        with:
          python-version: '3.11'
      - name: Checkout
        uses: actions/checkout@v4
      - name: Wait for Docker build
        env:
          SHA: ${{ github.sha }}
        run: |
          python .github/helpers/wait-for-docker.py
      - name: set tag
        id: set_tag
        # Strips "refs/heads/" and replaces remaining "/" with "_".
        # NOTE(review): for tag pushes (dev-*) the ref is "refs/tags/...", which
        # this does not strip — confirm that is intended.
        run: |
          echo "TAG=${GITHUB_REF}" | sed 's/refs\/heads\///g' | sed 's/\//_/g' >> $GITHUB_OUTPUT
  stress-tests:
    needs: [wait-for-docker]
    uses: ./.github/workflows/stress-tests.yml
    with:
      tag: ${{ needs.wait-for-docker.outputs.tag }}
  # Marks each version as test-passed via the SuperTokens API; the "sqlite"
  # matrix entry stands in for the core itself (it hits the /core endpoint).
  mark-as-passed:
    environment: publish
    needs: [dependency-versions, unit-tests, stress-tests]
    name: Mark as passed
    runs-on: ubuntu-latest
    strategy:
      fail-fast: false
      matrix:
        plugin:
          - sqlite
          - postgresql
          # no longer supported
          # - mysql
          # - mongodb
    steps:
      - name: Mark plugin as passed
        if: matrix.plugin != 'sqlite' && fromJson(needs.dependency-versions.outputs.versions)[matrix.plugin] != ''
        uses: muhfaris/request-action@main
        with:
          url: https://api.supertokens.io/0/plugin
          method: PATCH
          headers: |
            {
              "Content-Type": "application/json",
              "api-version": "0"
            }
          body: |
            {
              "password": "${{ secrets.SUPERTOKENS_API_KEY }}",
              "version": "${{ fromJson(needs.dependency-versions.outputs.versions)[matrix.plugin] }}",
              "planType": "FREE",
              "name": "${{ matrix.plugin }}",
              "testPassed": true
            }
      - name: Mark core as passed
        if: matrix.plugin == 'sqlite' && fromJson(needs.dependency-versions.outputs.versions)['core'] != ''
        uses: muhfaris/request-action@main
        with:
          url: https://api.supertokens.io/0/core
          method: PATCH
          headers: |
            {
              "Content-Type": "application/json",
              "api-version": "0"
            }
          body: |
            {
              "password": "${{ secrets.SUPERTOKENS_API_KEY }}",
              "version": "${{ fromJson(needs.dependency-versions.outputs.versions)['core'] }}",
              "planType": "FREE",
              "testPassed": true
            }

148
.github/workflows/do-release.yml vendored Normal file
View File

@ -0,0 +1,148 @@
# Manually-dispatched release workflow: retags the already-built dev Docker
# image as the released image and pushes release tags to the core / plugin
# repositories via supertokens-root's ./addReleaseTag scripts.
name: Do Release
on:
  workflow_dispatch:
    inputs:
      core-version:
        description: 'Core version'
        required: true
        type: string
      plugin-interface-version:
        description: 'Plugin interface version'
        required: true
        type: string
      new-release-for-plugin-interface:
        description: 'New release for plugin interface'
        required: true
        type: boolean
      postgresql-plugin-version:
        description: 'Postgres plugin version'
        required: true
        # fix: was missing — every sibling input declares its type explicitly
        type: string
      new-release-for-postgresql-plugin:
        description: 'New release for postgres plugin'
        required: true
        type: boolean
      is-latest-release:
        description: 'Is this the latest release?'
        required: true
        type: boolean
jobs:
  # Maps the requested versions to the branches they live on.
  dependency-branches:
    name: Dependency Branches
    environment: publish
    runs-on: ubuntu-latest
    outputs:
      branches: ${{ steps.result.outputs.branches }}
      versions: ${{ steps.result.outputs.versions }}
    steps:
      - uses: actions/checkout@v4
      - uses: supertokens/get-core-dependencies-action@main
        id: result
        with:
          run-for: add-dev-tag
          core-version: ${{ github.event.inputs.core-version }}
          plugin-interface-version: ${{ github.event.inputs.plugin-interface-version }}
          postgresql-plugin-version: ${{ github.event.inputs.postgresql-plugin-version }}
  # Retags the dev image as the release image: major, minor, exact version,
  # and optionally "latest".
  release-docker:
    environment: publish
    name: Release Docker
    runs-on: ubuntu-latest
    needs: dependency-branches
    steps:
      - name: Checkout
        uses: actions/checkout@v2
      - name: Set up JDK 15.0.1
        uses: actions/setup-java@v2
        with:
          java-version: 15.0.1
          distribution: zulu
      - name: Login to Docker Hub
        uses: docker/login-action@v3
        with:
          username: ${{ vars.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}
      - name: Tag and Push Docker Image
        run: |
          tag=${{ github.event.inputs.core-version }}
          major=$(echo $tag | cut -d. -f1)
          minor=$(echo $tag | cut -d. -f1,2)
          bash .github/helpers/release-docker.sh supertokens/supertokens-dev-postgresql:$minor supertokens/supertokens-postgresql:$major
          bash .github/helpers/release-docker.sh supertokens/supertokens-dev-postgresql:$minor supertokens/supertokens-postgresql:$minor
          bash .github/helpers/release-docker.sh supertokens/supertokens-dev-postgresql:$minor supertokens/supertokens-postgresql:$tag
          if [ "${{ github.event.inputs.is-latest-release }}" == "true" ]; then
            bash .github/helpers/release-docker.sh supertokens/supertokens-dev-postgresql:$minor supertokens/supertokens-postgresql:latest
          fi
  # Clones supertokens-root plus the dependency repos on their release branches
  # and runs ./addReleaseTag in each repo that is part of this release.
  add-release-tag:
    environment: publish
    runs-on: ubuntu-latest
    needs: [dependency-branches, release-docker]
    steps:
      - name: Set up JDK 15.0.1
        uses: actions/setup-java@v2
        with:
          java-version: 15.0.1
          distribution: zulu
      - uses: actions/checkout@v2
        with:
          repository: supertokens/supertokens-root
          path: ./supertokens-root
          ref: for_jdk_15_releases
      - name: Checkout supertokens-core
        run: |
          cd supertokens-root
          git clone https://${{ secrets.GH_TOKEN }}@github.com/supertokens/supertokens-core.git
          cd supertokens-core
          git checkout ${{ fromJson(needs.dependency-branches.outputs.branches)['core'] }}
      - name: Checkout supertokens-plugin-interface
        run: |
          cd supertokens-root
          git clone https://${{ secrets.GH_TOKEN }}@github.com/supertokens/supertokens-plugin-interface.git
          cd supertokens-plugin-interface
          git checkout ${{ fromJson(needs.dependency-branches.outputs.branches)['plugin-interface'] }}
      - name: Checkout supertokens-postgresql-plugin
        run: |
          cd supertokens-root
          git clone https://${{ secrets.GH_TOKEN }}@github.com/supertokens/supertokens-postgresql-plugin.git
          cd supertokens-postgresql-plugin
          git checkout ${{ fromJson(needs.dependency-branches.outputs.branches)['postgresql'] }}
      # The release scripts read credentials from these files in supertokens-root.
      - name: Add release password
        run: |
          cd supertokens-root
          echo "${{ secrets.SUPERTOKENS_API_KEY }}" > releasePassword
          echo "${{ secrets.SUPERTOKENS_API_KEY }}" > apiPassword
      - name: Load Modules
        run: |
          cd supertokens-root
          echo "core,master
          plugin-interface,master
          postgresql-plugin,master
          " > modules.txt
          cat modules.txt
          ./loadModules
      - name: Setup test env
        run: cd supertokens-root && ./utils/setupTestEnv --local
      - name: Git config
        run: |
          git config --global user.name "Supertokens Bot"
          git config --global user.email "<>"
      - name: Add release tag to plugin interface
        if: ${{ github.event.inputs.new-release-for-plugin-interface == 'true' }}
        run: |
          echo "Adding release tag to plugin interface"
          cd supertokens-root/supertokens-plugin-interface
          ./addReleaseTag
      - name: Add release tag to postgres plugin
        if: ${{ github.event.inputs.new-release-for-postgresql-plugin == 'true' }}
        run: |
          echo "Adding release tag to postgres plugin"
          cd supertokens-root/supertokens-postgresql-plugin
          ./addReleaseTag
      - name: Add release tag to core
        run: |
          echo "Adding release tag to core"
          cd supertokens-root/supertokens-core
          ./addReleaseTag

View File

@ -1,15 +0,0 @@
name: "Enforcing changelog in PRs Workflow"
on:
pull_request:
types: [ opened, synchronize, reopened, ready_for_review, labeled, unlabeled ]
jobs:
# Enforces the update of a changelog file on every pull request
changelog:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: dangoslen/changelog-enforcer@v2
with:
changeLogPath: 'CHANGELOG.md'
skipLabels: 'Skip-Changelog'

View File

@ -1,20 +0,0 @@
name: "Lint PR Title"
on:
pull_request:
types:
- opened
- reopened
- edited
- synchronize
jobs:
pr-title:
name: Lint PR title
runs-on: ubuntu-latest
steps:
- uses: amannn/action-semantic-pull-request@v3
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
validateSingleCommit: true

27
.github/workflows/pr-checks.yml vendored Normal file
View File

@ -0,0 +1,27 @@
# PR-level checks: semantic PR title lint, changelog enforcement, and the
# reusable unit-test workflow.
name: PR Checks
on:
  pull_request:
    types: [ opened, synchronize, reopened, ready_for_review, labeled, unlabeled ]
jobs:
  pr-title:
    name: Lint PR title
    runs-on: ubuntu-latest
    steps:
      - uses: amannn/action-semantic-pull-request@v3
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          validateSingleCommit: true
  changelog:
    name: Enforce Changelog
    runs-on: ubuntu-latest
    steps:
      # fix: checkout@v2 runs on a deprecated Node runtime; the other workflows
      # added in this change already use v4.
      - uses: actions/checkout@v4
      - uses: dangoslen/changelog-enforcer@v2
        with:
          changeLogPath: 'CHANGELOG.md'
          skipLabels: 'Skip-Changelog'
  unit-tests:
    name: Run unit tests
    uses: ./.github/workflows/unit-test.yml

102
.github/workflows/publish-dev-docker.yml vendored Normal file
View File

@ -0,0 +1,102 @@
# Builds and pushes a multi-arch dev Docker image for every branch push and
# dev-* tag, one image per supported database plugin.
name: Publish Dev Docker Image
on:
  push:
    branches:
      - "**"
    tags:
      - 'dev-*'
jobs:
  # Resolves which branches of plugin-interface / plugins match this core branch.
  dependency-branches:
    name: Dependency Branches
    runs-on: ubuntu-latest
    outputs:
      branches: ${{ steps.result.outputs.branches }}
    steps:
      - uses: actions/checkout@v4
      - uses: supertokens/get-core-dependencies-action@main
        id: result
        with:
          run-for: PR
  docker:
    name: Docker
    runs-on: ubuntu-latest
    needs: dependency-branches
    outputs:
      tag: ${{ steps.set_tag.outputs.TAG }}
    strategy:
      fail-fast: false
      matrix:
        plugin:
          - postgresql
          # no longer supported
          # - mysql
          # - mongodb
    steps:
      - name: Set up JDK 15.0.1
        uses: actions/setup-java@v2
        with:
          java-version: 15.0.1
          distribution: zulu
      - uses: actions/checkout@v2
        with:
          repository: supertokens/supertokens-root
          path: ./supertokens-root
          ref: for_jdk_15_releases
      - uses: actions/checkout@v2
        with:
          path: ./supertokens-root/supertokens-core
      - uses: actions/checkout@v2
        with:
          repository: supertokens/supertokens-plugin-interface
          path: ./supertokens-root/supertokens-plugin-interface
          ref: ${{ fromJson(needs.dependency-branches.outputs.branches)['plugin-interface'] }}
      # sqlite ships inside the core, so it has no separate plugin repository
      - uses: actions/checkout@v2
        if: matrix.plugin != 'sqlite'
        with:
          repository: supertokens/supertokens-${{ matrix.plugin }}-plugin
          path: ./supertokens-root/supertokens-${{ matrix.plugin }}-plugin
          ref: ${{ fromJson(needs.dependency-branches.outputs.branches)[matrix.plugin] }}
      - name: Load Modules
        run: |
          cd supertokens-root
          echo "core,master
          plugin-interface,master
          ${{ matrix.plugin }}-plugin,master
          " > modules.txt
          cat modules.txt
          ./loadModules
      - name: Setup test env
        run: cd supertokens-root && ./utils/setupTestEnv --local
      # The image config is the core config with the plugin config appended.
      - name: Generate config file
        run: |
          cd supertokens-root
          touch config_temp.yaml
          cat supertokens-core/config.yaml >> config_temp.yaml
          cat supertokens-${{ matrix.plugin }}-plugin/config.yaml >> config_temp.yaml
          mv config_temp.yaml config.yaml
      - name: set tag
        id: set_tag
        # Strips "refs/heads/" and replaces remaining "/" with "_" so the git
        # ref can be used as a Docker tag.
        run: |
          echo "TAG=${GITHUB_REF}" | sed 's/refs\/heads\///g' | sed 's/\//_/g' >> $GITHUB_OUTPUT
      - name: Login to Docker Hub
        uses: docker/login-action@v3
        with:
          username: ${{ vars.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}
      - name: Set up QEMU
        uses: docker/setup-qemu-action@v3
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3
      - name: Build and push
        uses: docker/build-push-action@v6
        with:
          push: true
          context: ./supertokens-root
          tags: supertokens/supertokens-dev-${{ matrix.plugin }}:${{ steps.set_tag.outputs.TAG }}
          file: ./supertokens-root/supertokens-${{ matrix.plugin }}-plugin/.github/helpers/docker/Dockerfile
          platforms: linux/amd64,linux/arm64

47
.github/workflows/stress-tests.yml vendored Normal file
View File

@ -0,0 +1,47 @@
# Reusable stress-test workflow: brings up the core via docker compose using
# the given dev image tag and runs the one-million-users load test.
name: Stress Tests
on:
  workflow_call:
    inputs:
      tag:
        description: 'Docker image tag to use'
        required: true
        type: string
jobs:
  stress-tests:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - name: Set up Node.js
        uses: actions/setup-node@v3
        with:
          node-version: '20'
      - name: Install dependencies
        run: |
          cd stress-tests
          npm install
      # Points docker-compose.yml at the dev image built for this ref.
      # NOTE(review): the sed assumes the compose file references the bare image
      # name "supertokens/supertokens-postgresql" without a tag — confirm,
      # otherwise the result would carry a double tag.
      - name: Update Docker image in compose
        run: |
          cd stress-tests
          sed -i 's|supertokens/supertokens-postgresql|supertokens/supertokens-dev-postgresql:${{ inputs.tag }}|' docker-compose.yml
          cat docker-compose.yml
      - name: Bring up the services
        run: |
          cd stress-tests
          docker compose up -d
      - name: Generate user jsons
        run: |
          cd stress-tests
          npm run generate-users
      - name: Run one million users test
        id: one-million-users
        run: |
          cd stress-tests
          npm run one-million-users | tee stress-tests.log
      # Renders stress-tests/stats.json as a markdown table in the job summary.
      - name: Display Test Statistics
        run: |
          echo "## Stress Test Results" >> $GITHUB_STEP_SUMMARY
          echo "| Test | Duration |" >> $GITHUB_STEP_SUMMARY
          echo "|------|----------|" >> $GITHUB_STEP_SUMMARY
          jq -r '.measurements[] | "| \(.title) | \(.formatted) |"' stress-tests/stats.json >> $GITHUB_STEP_SUMMARY

View File

@ -1,24 +0,0 @@
name: "Check if \"Run tests\" action succeeded"
on:
pull_request:
types:
- opened
- reopened
- edited
- synchronize
jobs:
pr-run-test-action:
name: Check if "Run tests" action succeeded
timeout-minutes: 60
concurrency:
group: ${{ github.head_ref }}
cancel-in-progress: true
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: node install
run: cd ./.github/helpers && npm i
- name: Calling github API
run: cd ./.github/helpers && GITHUB_TOKEN=${{ github.token }} REPO=${{ github.repository }} RUN_ID=${{ github.run_id }} BRANCH=${{ github.head_ref }} JOB_ID=${{ github.job }} SOURCE_OWNER=${{ github.event.pull_request.head.repo.owner.login }} CURRENT_SHA=${{ github.event.pull_request.head.sha }} node node_modules/github-workflow-helpers/test-pass-check-pr.js

View File

@ -1,37 +0,0 @@
name: "Run tests"
on:
workflow_dispatch:
inputs:
pluginRepoOwnerName:
description: 'supertokens-plugin-interface repo owner name'
default: supertokens
required: true
pluginInterfaceBranch:
description: 'supertokens-plugin-interface repos branch name'
default: master
required: true
jobs:
test_job:
name: Run tests
timeout-minutes: 60
runs-on: ubuntu-latest
container: rishabhpoddar/supertokens_core_testing
steps:
- uses: actions/checkout@v2
- name: Cloning supertokens-root
run: cd ../ && git clone https://github.com/supertokens/supertokens-root.git
- name: Update Java 1
run: update-alternatives --install "/usr/bin/java" "java" "/usr/java/jdk-15.0.1/bin/java" 2
- name: Update Java 2
run: update-alternatives --install "/usr/bin/javac" "javac" "/usr/java/jdk-15.0.1/bin/javac" 2
- name: Modifying modules.txt in supertokens-root
run: cd ../supertokens-root && echo "core,master\nplugin-interface,${{ github.event.inputs.pluginInterfaceBranch }},${{ github.event.inputs.pluginRepoOwnerName }}" > modules.txt
- name: Contents of modules.txt
run: cat ../supertokens-root/modules.txt
- name: Running loadModules in supertokens-root
run: cd ../supertokens-root && ./loadModules
- name: Copying current supertokens-core branch into supertokens-root
run: cd ../supertokens-root && rm -rf ./supertokens-core && cp -r ../supertokens-core ./
- name: Building and running tests
run: cd ../supertokens-root && ./startTestingEnv

126
.github/workflows/unit-test.yml vendored Normal file
View File

@ -0,0 +1,126 @@
# Reusable unit-test workflow: splits the Java test suite across N parallel
# runners for each database plugin (sqlite = in-memory, postgresql = external).
name: Unit Tests
on:
  workflow_call:
env:
  total-runners: 12
jobs:
  # Resolves which branches of plugin-interface / plugins match this core branch.
  dependency-branches:
    name: Dependency Branches
    runs-on: ubuntu-latest
    outputs:
      branches: ${{ steps.result.outputs.branches }}
    steps:
      - uses: actions/checkout@v4
      - uses: supertokens/get-core-dependencies-action@main
        id: result
        with:
          run-for: PR
  # Produces the JSON list [0, 1, ..., total-runners - 1] consumed by the matrix.
  runner-indexes:
    runs-on: ubuntu-latest
    name: Generate runner indexes
    needs: dependency-branches
    outputs:
      json: ${{ steps.generate-index-list.outputs.json }}
    steps:
      - id: generate-index-list
        run: |
          MAX_INDEX=$((${{ env.total-runners }}-1))
          INDEX_LIST=$(seq 0 ${MAX_INDEX})
          INDEX_JSON=$(jq --null-input --compact-output '. |= [inputs]' <<< ${INDEX_LIST})
          # fix: "::set-output" is deprecated and disabled on current GitHub
          # runners; write the step output to $GITHUB_OUTPUT instead.
          echo "json=${INDEX_JSON}" >> "$GITHUB_OUTPUT"
  unit-tests:
    runs-on: ubuntu-latest
    name: "Unit tests: ${{ matrix.plugin }} plugin, runner #${{ matrix.runner-index }}"
    needs:
      - dependency-branches
      - runner-indexes
    strategy:
      fail-fast: false
      matrix:
        runner-index: ${{ fromJson(needs.runner-indexes.outputs.json) }}
        plugin:
          - sqlite
          - postgresql
    steps:
      - name: Set up JDK 15.0.1
        uses: actions/setup-java@v2
        with:
          java-version: 15.0.1
          distribution: zulu
      - uses: actions/checkout@v2
        with:
          repository: supertokens/supertokens-root
          path: ./supertokens-root
          ref: for_jdk_15_releases
      - uses: actions/checkout@v2
        with:
          path: ./supertokens-root/supertokens-core
      - uses: actions/checkout@v2
        with:
          repository: supertokens/supertokens-plugin-interface
          path: ./supertokens-root/supertokens-plugin-interface
          ref: ${{ fromJson(needs.dependency-branches.outputs.branches)['plugin-interface'] }}
      # sqlite ships inside the core, so it has no separate plugin repository
      - uses: actions/checkout@v2
        if: matrix.plugin != 'sqlite'
        with:
          repository: supertokens/supertokens-${{ matrix.plugin }}-plugin
          path: ./supertokens-root/supertokens-${{ matrix.plugin }}-plugin
          ref: ${{ fromJson(needs.dependency-branches.outputs.branches)[matrix.plugin] }}
      - name: Load Modules
        run: |
          cd supertokens-root
          echo "core,master
          plugin-interface,master
          ${{ matrix.plugin }}-plugin,master
          " > modules.txt
          cat modules.txt
          ./loadModules
      - name: Setup test env
        run: cd supertokens-root && ./utils/setupTestEnv --local
      - name: Start ${{ matrix.plugin }} server
        if: matrix.plugin != 'sqlite'
        run: cd supertokens-root/supertokens-${{ matrix.plugin }}-plugin && ./startDb.sh
      - name: Start oauth provider
        run: |
          docker run -d -p 4444:4444 -p 4445:4445 rishabhpoddar/oauth-server-cicd
      - uses: chaosaffe/split-tests@v1-alpha.1
        id: split-tests
        name: Split tests
        with:
          glob: 'supertokens-root/*/src/test/java/**/*.java'
          split-total: ${{ env.total-runners }}
          split-index: ${{ matrix.runner-index }}
      - run: 'echo "This runner will execute the following tests: ${{ steps.split-tests.outputs.test-suite }}"'
      - name: Run tests
        env:
          ST_PLUGIN_NAME: ${{ matrix.plugin }}
        # Builds a test.sh of the form "./gradlew test --tests A --tests B ..."
        # using backslash line continuations; the final empty echo terminates
        # the last continuation line.
        run: |
          cd supertokens-root
          echo "./gradlew test \\" > test.sh
          chmod +x test.sh
          IFS=' ' read -ra TESTS <<< "${{ steps.split-tests.outputs.test-suite }}"
          for test in "${TESTS[@]}"; do
            test_name="${test%.java}"
            test_name="${test_name#supertokens-root/supertokens-core/src/test/java/}"
            test_name="${test_name//\//.}"
            echo "  --tests $test_name \\" >> test.sh
          done
          echo "" >> test.sh
          echo "this is the test command:"
          cat test.sh
          echo "--------------------------------"
          ./test.sh
      - name: Publish Test Report
        uses: mikepenz/action-junit-report@v5
        if: always()
        with:
          report_paths: '**/build/test-results/test/TEST-*.xml'
          detailed_summary: true
          include_passed: false
          annotate_notice: true

View File

@ -7,6 +7,10 @@ to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
## [Unreleased]
## [9.3.2]
- Adds internal opentelemetry support for logging
## [9.3.1]
- Includes exception class name in 500 error message

View File

@ -19,8 +19,7 @@ compileTestJava { options.encoding = "UTF-8" }
// }
//}
version = "9.3.1"
version = "9.3.2"
repositories {
mavenCentral()
@ -38,9 +37,6 @@ dependencies {
// https://mvnrepository.com/artifact/com.fasterxml.jackson.core/jackson-core
implementation group: 'com.fasterxml.jackson.core', name: 'jackson-databind', version: '2.16.1'
// https://mvnrepository.com/artifact/ch.qos.logback/logback-classic
implementation group: 'ch.qos.logback', name: 'logback-classic', version: '1.4.14'
// https://mvnrepository.com/artifact/org.apache.tomcat.embed/tomcat-embed-core
implementation group: 'org.apache.tomcat.embed', name: 'tomcat-embed-core', version: '10.1.18'
@ -74,6 +70,19 @@ dependencies {
// https://mvnrepository.com/artifact/com.googlecode.libphonenumber/libphonenumber/
implementation group: 'com.googlecode.libphonenumber', name: 'libphonenumber', version: '8.13.25'
implementation platform("io.opentelemetry.instrumentation:opentelemetry-instrumentation-bom-alpha:2.17.0-alpha")
implementation("ch.qos.logback:logback-core:1.5.18")
implementation("ch.qos.logback:logback-classic:1.5.18")
// OpenTelemetry core
implementation("io.opentelemetry:opentelemetry-sdk")
implementation("io.opentelemetry:opentelemetry-exporter-otlp")
implementation("io.opentelemetry:opentelemetry-exporter-logging")
implementation("io.opentelemetry:opentelemetry-api")
implementation("io.opentelemetry.semconv:opentelemetry-semconv")
compileOnly project(":supertokens-plugin-interface")
testImplementation project(":supertokens-plugin-interface")

View File

@ -170,3 +170,7 @@ core_config_version: 0
# (Optional | Default: null) string value. The encryption key used for saving OAuth client secret on the database.
# oauth_client_secret_encryption_key:
# (OPTIONAL | Default: http://localhost:4317) string value. The URL of the OpenTelemetry collector to which the core
# will send telemetry data. This should be in the format http://<host>:<port> or https://<host>:<port>.
# otel_collector_connection_uri:

View File

@ -170,3 +170,7 @@ disable_telemetry: true
# (Optional | Default: null) string value. The encryption key used for saving OAuth client secret on the database.
# oauth_client_secret_encryption_key:
# (OPTIONAL | Default: http://localhost:4317) string value. The URL of the OpenTelemetry collector to which the core
# will send telemetry data. This should be in the format http://<host>:<port> or https://<host>:<port>.
# otel_collector_connection_uri:

View File

@ -1,120 +1,125 @@
{
"_comment": "Contains list of implementation dependencies URL for this project",
"list": [
{
"jar": "https://repo1.maven.org/maven2/com/google/code/gson/gson/2.3.1/gson-2.3.1.jar",
"name": "Gson 2.3.1",
"src": "https://repo1.maven.org/maven2/com/google/code/gson/gson/2.3.1/gson-2.3.1-sources.jar"
},
{
"jar": "https://repo1.maven.org/maven2/com/fasterxml/jackson/dataformat/jackson-dataformat-yaml/2.16.1/jackson-dataformat-yaml-2.16.1.jar",
"name": "Jackson Dataformat 2.16.1",
"src": "https://repo1.maven.org/maven2/com/fasterxml/jackson/dataformat/jackson-dataformat-yaml/2.16.1/jackson-dataformat-yaml-2.16.1-sources.jar"
},
{
"jar": "https://repo1.maven.org/maven2/org/yaml/snakeyaml/2.2/snakeyaml-2.2.jar",
"name": "SnakeYAML 2.2",
"src": "https://repo1.maven.org/maven2/org/yaml/snakeyaml/2.2/snakeyaml-2.2-sources.jar"
},
{
"jar": "https://repo1.maven.org/maven2/com/fasterxml/jackson/core/jackson-core/2.16.1/jackson-core-2.16.1.jar",
"name": "Jackson core 2.16.1",
"src": "https://repo1.maven.org/maven2/com/fasterxml/jackson/core/jackson-core/2.16.1/jackson-core-2.16.1-sources.jar"
},
{
"jar": "https://repo1.maven.org/maven2/com/fasterxml/jackson/core/jackson-databind/2.16.1/jackson-databind-2.16.1.jar",
"name": "Jackson databind 2.16.1",
"src": "https://repo1.maven.org/maven2/com/fasterxml/jackson/core/jackson-databind/2.16.1/jackson-databind-2.16.1-sources.jar"
},
{
"jar": "https://repo1.maven.org/maven2/com/fasterxml/jackson/core/jackson-annotations/2.16.1/jackson-annotations-2.16.1.jar",
"name": "Jackson annotation 2.16.1",
"src": "https://repo1.maven.org/maven2/com/fasterxml/jackson/core/jackson-annotations/2.16.1/jackson-annotations-2.16.1-sources.jar"
},
{
"jar": "https://repo1.maven.org/maven2/ch/qos/logback/logback-classic/1.4.14/logback-classic-1.4.14.jar",
"name": "Logback classic 1.4.14",
"src": "https://repo1.maven.org/maven2/ch/qos/logback/logback-classic/1.4.14/logback-classic-1.4.14-sources.jar"
},
{
"jar": "https://repo1.maven.org/maven2/ch/qos/logback/logback-core/1.4.14/logback-core-1.4.14.jar",
"name": "Logback core 1.4.14",
"src": "https://repo1.maven.org/maven2/ch/qos/logback/logback-core/1.4.14/logback-core-1.4.14-sources.jar"
},
{
"jar": "https://repo1.maven.org/maven2/org/slf4j/slf4j-api/2.0.7/slf4j-api-2.0.7.jar",
"name": "SLF4j API 2.0.7",
"src": "https://repo1.maven.org/maven2/org/slf4j/slf4j-api/2.0.7/slf4j-api-2.0.7-sources.jar"
},
{
"jar": "https://repo1.maven.org/maven2/org/apache/tomcat/tomcat-annotations-api/10.1.18/tomcat-annotations-api-10.1.18.jar",
"name": "Tomcat annotations API 10.1.18",
"src": "https://repo1.maven.org/maven2/org/apache/tomcat/tomcat-annotations-api/10.1.18/tomcat-annotations-api-10.1.18-sources.jar"
},
{
"jar": "https://repo1.maven.org/maven2/org/apache/tomcat/embed/tomcat-embed-core/10.1.18/tomcat-embed-core-10.1.18.jar",
"name": "Tomcat embed core API 10.1.1",
"src": "https://repo1.maven.org/maven2/org/apache/tomcat/embed/tomcat-embed-core/10.1.18/tomcat-embed-core-10.1.18-sources.jar"
},
{
"jar": "https://repo1.maven.org/maven2/com/google/code/findbugs/jsr305/3.0.2/jsr305-3.0.2.jar",
"name": "JSR305 3.0.2",
"src": "https://repo1.maven.org/maven2/com/google/code/findbugs/jsr305/3.0.2/jsr305-3.0.2-sources.jar"
},
{
"jar": "https://repo1.maven.org/maven2/org/jetbrains/annotations/13.0/annotations-13.0.jar",
"name": "JSR305 3.0.2",
"src": "https://repo1.maven.org/maven2/org/jetbrains/annotations/13.0/annotations-13.0-sources.jar"
},
{
"jar": "https://repo1.maven.org/maven2/org/xerial/sqlite-jdbc/3.45.1.0/sqlite-jdbc-3.45.1.0.jar",
"name": "SQLite JDBC Driver 3.45.1.0",
"src": "https://repo1.maven.org/maven2/org/xerial/sqlite-jdbc/3.45.1.0/sqlite-jdbc-3.45.1.0-sources.jar"
},
{
"jar": "https://repo1.maven.org/maven2/org/mindrot/jbcrypt/0.4/jbcrypt-0.4.jar",
"name": "JBCrypt 0.4",
"src": "https://repo1.maven.org/maven2/org/mindrot/jbcrypt/0.4/jbcrypt-0.4-sources.jar"
},
{
"jar": "https://repo1.maven.org/maven2/com/auth0/java-jwt/4.4.0/java-jwt-4.4.0.jar",
"name": "Auth0 Java JWT",
"src": "https://repo1.maven.org/maven2/com/auth0/java-jwt/4.4.0/java-jwt-4.4.0-sources.jar"
},
{
"jar": "https://repo1.maven.org/maven2/de/mkammerer/argon2-jvm/2.11/argon2-jvm-2.11.jar",
"name": "Argon2-jvm 2.11",
"src": "https://repo1.maven.org/maven2/de/mkammerer/argon2-jvm/2.11/argon2-jvm-2.11-sources.jar"
},
{
"jar": "https://repo1.maven.org/maven2/de/mkammerer/argon2-jvm-nolibs/2.11/argon2-jvm-nolibs-2.11.jar",
"name": "Argon2-jvm no libs 2.11",
"src": "https://repo1.maven.org/maven2/de/mkammerer/argon2-jvm-nolibs/2.11/argon2-jvm-nolibs-2.11-sources.jar"
},
{
"jar": "https://repo1.maven.org/maven2/net/java/dev/jna/jna/5.8.0/jna-5.8.0.jar",
"name": "JNA 5.8.0",
"src": "https://repo1.maven.org/maven2/net/java/dev/jna/jna/5.8.0/jna-5.8.0-sources.jar"
},
{
"jar": "https://repo1.maven.org/maven2/com/lambdaworks/scrypt/1.4.0/scrypt-1.4.0.jar",
"name": "Scrypt 1.4.0",
"src": "https://repo1.maven.org/maven2/com/lambdaworks/scrypt/1.4.0/scrypt-1.4.0-sources.jar"
},
{
"jar": "https://repo1.maven.org/maven2/com/eatthepath/java-otp/0.4.0/java-otp-0.4.0.jar",
"name": "Java OTP 0.4.0",
"src": "https://repo1.maven.org/maven2/com/eatthepath/java-otp/0.4.0/java-otp-0.4.0-sources.jar"
},
{
"jar": "https://repo1.maven.org/maven2/commons-codec/commons-codec/1.15/commons-codec-1.15.jar",
"name": "Commons Codec 1.15",
"src": "https://repo1.maven.org/maven2/commons-codec/commons-codec/1.15/commons-codec-1.15-sources.jar"
},
{
"jar": "https://repo1.maven.org/maven2/com/googlecode/libphonenumber/libphonenumber/8.13.25/libphonenumber-8.13.25.jar",
"name": "Libphonenumber 8.13.25",
"src": "https://repo1.maven.org/maven2/com/googlecode/libphonenumber/libphonenumber/8.13.25/libphonenumber-8.13.25-sources.jar"
}
]
"_comment": "Contains list of implementation dependencies URL for this project. This is a generated file, don't modify the contents by hand.",
"list": [
{
"jar":"https://repo.maven.apache.org/maven2/com/google/code/gson/gson/2.3.1/gson-2.3.1.jar",
"name":"gson 2.3.1",
"src":"https://repo.maven.apache.org/maven2/com/google/code/gson/gson/2.3.1/gson-2.3.1-sources.jar"
},
{
"jar":"https://repo.maven.apache.org/maven2/com/fasterxml/jackson/dataformat/jackson-dataformat-yaml/2.16.1/jackson-dataformat-yaml-2.16.1.jar",
"name":"jackson-dataformat-yaml 2.16.1",
"src":"https://repo.maven.apache.org/maven2/com/fasterxml/jackson/dataformat/jackson-dataformat-yaml/2.16.1/jackson-dataformat-yaml-2.16.1-sources.jar"
},
{
"jar":"https://repo.maven.apache.org/maven2/org/yaml/snakeyaml/2.2/snakeyaml-2.2.jar",
"name":"snakeyaml 2.2",
"src":"https://repo.maven.apache.org/maven2/org/yaml/snakeyaml/2.2/snakeyaml-2.2-sources.jar"
},
{
"jar":"https://repo.maven.apache.org/maven2/com/fasterxml/jackson/core/jackson-databind/2.16.1/jackson-databind-2.16.1.jar",
"name":"jackson-databind 2.16.1",
"src":"https://repo.maven.apache.org/maven2/com/fasterxml/jackson/core/jackson-databind/2.16.1/jackson-databind-2.16.1-sources.jar"
},
{
"jar":"https://repo.maven.apache.org/maven2/org/apache/tomcat/embed/tomcat-embed-core/10.1.18/tomcat-embed-core-10.1.18.jar",
"name":"tomcat-embed-core 10.1.18",
"src":"https://repo.maven.apache.org/maven2/org/apache/tomcat/embed/tomcat-embed-core/10.1.18/tomcat-embed-core-10.1.18-sources.jar"
},
{
"jar":"https://repo.maven.apache.org/maven2/org/apache/tomcat/tomcat-annotations-api/10.1.18/tomcat-annotations-api-10.1.18.jar",
"name":"tomcat-annotations-api 10.1.18",
"src":"https://repo.maven.apache.org/maven2/org/apache/tomcat/tomcat-annotations-api/10.1.18/tomcat-annotations-api-10.1.18-sources.jar"
},
{
"jar":"https://repo.maven.apache.org/maven2/com/google/code/findbugs/jsr305/3.0.2/jsr305-3.0.2.jar",
"name":"jsr305 3.0.2",
"src":"https://repo.maven.apache.org/maven2/com/google/code/findbugs/jsr305/3.0.2/jsr305-3.0.2-sources.jar"
},
{
"jar":"https://repo.maven.apache.org/maven2/org/xerial/sqlite-jdbc/3.45.1.0/sqlite-jdbc-3.45.1.0.jar",
"name":"sqlite-jdbc 3.45.1.0",
"src":"https://repo.maven.apache.org/maven2/org/xerial/sqlite-jdbc/3.45.1.0/sqlite-jdbc-3.45.1.0-sources.jar"
},
{
"jar":"https://repo.maven.apache.org/maven2/org/slf4j/slf4j-api/2.0.17/slf4j-api-2.0.17.jar",
"name":"slf4j-api 2.0.17",
"src":"https://repo.maven.apache.org/maven2/org/slf4j/slf4j-api/2.0.17/slf4j-api-2.0.17-sources.jar"
},
{
"jar":"https://repo.maven.apache.org/maven2/org/mindrot/jbcrypt/0.4/jbcrypt-0.4.jar",
"name":"jbcrypt 0.4",
"src":"https://repo.maven.apache.org/maven2/org/mindrot/jbcrypt/0.4/jbcrypt-0.4-sources.jar"
},
{
"jar":"https://repo.maven.apache.org/maven2/org/jetbrains/annotations/13.0/annotations-13.0.jar",
"name":"annotations 13.0",
"src":"https://repo.maven.apache.org/maven2/org/jetbrains/annotations/13.0/annotations-13.0-sources.jar"
},
{
"jar":"https://repo.maven.apache.org/maven2/de/mkammerer/argon2-jvm/2.11/argon2-jvm-2.11.jar",
"name":"argon2-jvm 2.11",
"src":"https://repo.maven.apache.org/maven2/de/mkammerer/argon2-jvm/2.11/argon2-jvm-2.11-sources.jar"
},
{
"jar":"https://repo.maven.apache.org/maven2/com/auth0/java-jwt/4.4.0/java-jwt-4.4.0.jar",
"name":"java-jwt 4.4.0",
"src":"https://repo.maven.apache.org/maven2/com/auth0/java-jwt/4.4.0/java-jwt-4.4.0-sources.jar"
},
{
"jar":"https://repo.maven.apache.org/maven2/com/lambdaworks/scrypt/1.4.0/scrypt-1.4.0.jar",
"name":"scrypt 1.4.0",
"src":"https://repo.maven.apache.org/maven2/com/lambdaworks/scrypt/1.4.0/scrypt-1.4.0-sources.jar"
},
{
"jar":"https://repo.maven.apache.org/maven2/com/eatthepath/java-otp/0.4.0/java-otp-0.4.0.jar",
"name":"java-otp 0.4.0",
"src":"https://repo.maven.apache.org/maven2/com/eatthepath/java-otp/0.4.0/java-otp-0.4.0-sources.jar"
},
{
"jar":"https://repo.maven.apache.org/maven2/commons-codec/commons-codec/1.15/commons-codec-1.15.jar",
"name":"commons-codec 1.15",
"src":"https://repo.maven.apache.org/maven2/commons-codec/commons-codec/1.15/commons-codec-1.15-sources.jar"
},
{
"jar":"https://repo.maven.apache.org/maven2/com/googlecode/libphonenumber/libphonenumber/8.13.25/libphonenumber-8.13.25.jar",
"name":"libphonenumber 8.13.25",
"src":"https://repo.maven.apache.org/maven2/com/googlecode/libphonenumber/libphonenumber/8.13.25/libphonenumber-8.13.25-sources.jar"
},
{
"jar":"https://repo.maven.apache.org/maven2/ch/qos/logback/logback-core/1.5.18/logback-core-1.5.18.jar",
"name":"logback-core 1.5.18",
"src":"https://repo.maven.apache.org/maven2/ch/qos/logback/logback-core/1.5.18/logback-core-1.5.18-sources.jar"
},
{
"jar":"https://repo.maven.apache.org/maven2/ch/qos/logback/logback-classic/1.5.18/logback-classic-1.5.18.jar",
"name":"logback-classic 1.5.18",
"src":"https://repo.maven.apache.org/maven2/ch/qos/logback/logback-classic/1.5.18/logback-classic-1.5.18-sources.jar"
},
{
"jar":"https://repo.maven.apache.org/maven2/io/opentelemetry/opentelemetry-api/1.51.0/opentelemetry-api-1.51.0.jar",
"name":"opentelemetry-api 1.51.0",
"src":"https://repo.maven.apache.org/maven2/io/opentelemetry/opentelemetry-api/1.51.0/opentelemetry-api-1.51.0-sources.jar"
},
{
"jar":"https://repo.maven.apache.org/maven2/io/opentelemetry/opentelemetry-exporter-logging/1.51.0/opentelemetry-exporter-logging-1.51.0.jar",
"name":"opentelemetry-exporter-logging 1.51.0",
"src":"https://repo.maven.apache.org/maven2/io/opentelemetry/opentelemetry-exporter-logging/1.51.0/opentelemetry-exporter-logging-1.51.0-sources.jar"
},
{
"jar":"https://repo.maven.apache.org/maven2/io/opentelemetry/opentelemetry-sdk/1.51.0/opentelemetry-sdk-1.51.0.jar",
"name":"opentelemetry-sdk 1.51.0",
"src":"https://repo.maven.apache.org/maven2/io/opentelemetry/opentelemetry-sdk/1.51.0/opentelemetry-sdk-1.51.0-sources.jar"
},
{
"jar":"https://repo.maven.apache.org/maven2/io/opentelemetry/opentelemetry-exporter-otlp/1.51.0/opentelemetry-exporter-otlp-1.51.0.jar",
"name":"opentelemetry-exporter-otlp 1.51.0",
"src":"https://repo.maven.apache.org/maven2/io/opentelemetry/opentelemetry-exporter-otlp/1.51.0/opentelemetry-exporter-otlp-1.51.0-sources.jar"
},
{
"jar":"https://repo.maven.apache.org/maven2/io/opentelemetry/semconv/opentelemetry-semconv/1.34.0/opentelemetry-semconv-1.34.0.jar",
"name":"opentelemetry-semconv 1.34.0",
"src":"https://repo.maven.apache.org/maven2/io/opentelemetry/semconv/opentelemetry-semconv/1.34.0/opentelemetry-semconv-1.34.0-sources.jar"
}
]
}

View File

@ -41,6 +41,7 @@ import io.supertokens.pluginInterface.exceptions.InvalidConfigException;
import io.supertokens.pluginInterface.exceptions.StorageQueryException;
import io.supertokens.pluginInterface.multitenancy.TenantIdentifier;
import io.supertokens.storageLayer.StorageLayer;
import io.supertokens.telemetry.TelemetryProvider;
import io.supertokens.version.Version;
import io.supertokens.webserver.Webserver;
import org.jetbrains.annotations.TestOnly;
@ -159,6 +160,8 @@ public class Main {
Logging.info(this, TenantIdentifier.BASE_TENANT, "Completed config.yaml loading.", true);
TelemetryProvider.initialize(this);
// loading storage layer
try {
StorageLayer.initPrimary(this, CLIOptions.get(this).getInstallationPath() + "plugin/",
@ -427,6 +430,7 @@ public class Main {
StorageLayer.close(this);
removeDotStartedFileForThisProcess();
Logging.stopLogging(this);
TelemetryProvider.closeTelemetry(this);
// uncomment this when you want to confirm that processes are actually shut.
// printRunningThreadNames();

View File

@ -344,6 +344,14 @@ public class CoreConfig {
@IgnoreForAnnotationCheck
private boolean isNormalizedAndValid = false;
@ConfigYamlOnly
@JsonProperty
@ConfigDescription(
"The URL of the OpenTelemetry collector to which the core will send telemetry data. " +
"This should be in the format http://<host>:<port> or https://<host>:<port>. (Default: " +
"http://localhost:4317)")
private String otel_collector_connection_uri = "http://localhost:4317";
@IgnoreForAnnotationCheck
private static boolean disableOAuthValidationForTest = false;
@ -579,6 +587,10 @@ public class CoreConfig {
return webserver_https_enabled;
}
public String getOtelCollectorConnectionURI() {
return otel_collector_connection_uri;
}
private String getConfigFileLocation(Main main) {
return new File(CLIOptions.get(main).getConfigFilePath() == null
? CLIOptions.get(main).getInstallationPath() + "config.yaml"

View File

@ -29,6 +29,7 @@ import io.supertokens.pluginInterface.Storage;
import io.supertokens.pluginInterface.multitenancy.TenantIdentifier;
import io.supertokens.pluginInterface.multitenancy.exceptions.TenantOrAppNotFoundException;
import io.supertokens.storageLayer.StorageLayer;
import io.supertokens.telemetry.TelemetryProvider;
import io.supertokens.utils.Utils;
import io.supertokens.version.Version;
import io.supertokens.webserver.Webserver;
@ -110,6 +111,7 @@ public class Logging extends ResourceDistributor.SingletonResource {
msg = prependTenantIdentifierToMessage(tenantIdentifier, msg);
if (getInstance(main) != null) {
getInstance(main).infoLogger.debug(msg);
TelemetryProvider.createLogEvent(main, tenantIdentifier, msg, "debug");
}
} catch (NullPointerException e) {
// sometimes logger.debug throws a null pointer exception...
@ -133,6 +135,8 @@ public class Logging extends ResourceDistributor.SingletonResource {
if (getInstance(main) != null) {
getInstance(main).infoLogger.info(msg);
}
TelemetryProvider.createLogEvent(main, tenantIdentifier, msg, "info");
} catch (NullPointerException ignored) {
}
}
@ -146,6 +150,8 @@ public class Logging extends ResourceDistributor.SingletonResource {
msg = prependTenantIdentifierToMessage(tenantIdentifier, msg);
if (getInstance(main) != null) {
getInstance(main).errorLogger.warn(msg);
TelemetryProvider.createLogEvent(main, tenantIdentifier, msg, "warn");
}
} catch (NullPointerException ignored) {
}
@ -166,7 +172,10 @@ public class Logging extends ResourceDistributor.SingletonResource {
err = err.trim();
err = prependTenantIdentifierToMessage(tenantIdentifier, err);
if (getInstance(main) != null) {
getInstance(main).errorLogger.error(err);
TelemetryProvider.createLogEvent(main, tenantIdentifier, err, "error");
}
if (toConsoleAsWell || getInstance(main) == null) {
systemErr(err);
@ -200,6 +209,9 @@ public class Logging extends ResourceDistributor.SingletonResource {
message = prependTenantIdentifierToMessage(tenantIdentifier, message);
if (getInstance(main) != null) {
getInstance(main).errorLogger.error(message);
TelemetryProvider
.createLogEvent(main, tenantIdentifier, message,
"error");
}
if (toConsoleAsWell || getInstance(main) == null) {
systemErr(message);

View File

@ -0,0 +1,167 @@
/*
* Copyright (c) 2025, VRAI Labs and/or its affiliates. All rights reserved.
*
* This software is licensed under the Apache License, Version 2.0 (the
* "License") as published by the Apache Software Foundation.
*
* You may not use this file except in compliance with the License. You may
* obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package io.supertokens.telemetry;
import io.opentelemetry.api.GlobalOpenTelemetry;
import io.opentelemetry.api.OpenTelemetry;
import io.opentelemetry.api.common.Attributes;
import io.opentelemetry.api.trace.Span;
import io.opentelemetry.api.trace.propagation.W3CTraceContextPropagator;
import io.opentelemetry.context.Context;
import io.opentelemetry.context.propagation.ContextPropagators;
import io.opentelemetry.exporter.otlp.logs.OtlpGrpcLogRecordExporter;
import io.opentelemetry.exporter.otlp.trace.OtlpGrpcSpanExporter;
import io.opentelemetry.sdk.OpenTelemetrySdk;
import io.opentelemetry.sdk.logs.SdkLoggerProvider;
import io.opentelemetry.sdk.logs.export.BatchLogRecordProcessor;
import io.opentelemetry.sdk.resources.Resource;
import io.opentelemetry.sdk.trace.SdkTracerProvider;
import io.opentelemetry.sdk.trace.export.SimpleSpanProcessor;
import io.supertokens.Main;
import io.supertokens.ResourceDistributor;
import io.supertokens.config.Config;
import io.supertokens.pluginInterface.multitenancy.TenantIdentifier;
import io.supertokens.pluginInterface.multitenancy.exceptions.TenantOrAppNotFoundException;
import org.jetbrains.annotations.TestOnly;
import java.util.concurrent.TimeUnit;
import static io.opentelemetry.semconv.ServiceAttributes.SERVICE_NAME;
public class TelemetryProvider extends ResourceDistributor.SingletonResource {

    private static final String RESOURCE_ID = "io.supertokens.telemetry.TelemetryProvider";

    // Process-wide OpenTelemetry handle; built once per TelemetryProvider.
    private final OpenTelemetry openTelemetry;

    private TelemetryProvider(Main main) {
        openTelemetry = initializeOpenTelemetry(main);
    }

    /**
     * Returns the provider registered for the base tenant, or null when
     * initialize() has not been called yet. Logging starts before telemetry
     * during core startup, so callers must tolerate null.
     */
    private static synchronized TelemetryProvider getInstance(Main main) {
        TelemetryProvider instance = null;
        try {
            instance = (TelemetryProvider) main.getResourceDistributor()
                    .getResource(TenantIdentifier.BASE_TENANT, RESOURCE_ID);
        } catch (TenantOrAppNotFoundException ignored) {
        }
        return instance;
    }

    /** Registers the telemetry provider on the base tenant. Call once at startup. */
    public static void initialize(Main main) {
        main.getResourceDistributor()
                .setResource(TenantIdentifier.BASE_TENANT, RESOURCE_ID, new TelemetryProvider(main));
    }

    /**
     * Records a log message as a short-lived span (named after the log level)
     * carrying a single "log" event with the message. No-op when telemetry is
     * not initialized; the original dereferenced getInstance() unconditionally
     * and relied on callers swallowing the resulting NPE.
     */
    public static void createLogEvent(Main main, TenantIdentifier tenantIdentifier, String logMessage,
                                      String logLevel) {
        TelemetryProvider instance = getInstance(main);
        if (instance == null) {
            return;
        }
        instance.openTelemetry.getTracer("core-tracer")
                .spanBuilder(logLevel)
                .setParent(Context.current())
                .setAttribute("tenant.connectionUriDomain", tenantIdentifier.getConnectionUriDomain())
                .setAttribute("tenant.appId", tenantIdentifier.getAppId())
                .setAttribute("tenant.tenantId", tenantIdentifier.getTenantId())
                .startSpan()
                .addEvent("log",
                        Attributes.builder()
                                .put("message", logMessage)
                                .build(),
                        System.currentTimeMillis(), TimeUnit.MILLISECONDS)
                .end();
    }

    /**
     * Starts (and makes current) a span tagged with the tenant identifiers.
     * Returns null when telemetry is not initialized; endSpan/addEventToSpan
     * are null-safe, so callers need no extra checks.
     */
    public static Span startSpan(Main main, TenantIdentifier tenantIdentifier, String spanName) {
        TelemetryProvider instance = getInstance(main);
        if (instance == null) {
            return null;
        }
        Span span = instance.openTelemetry.getTracer("core-tracer")
                .spanBuilder(spanName)
                .setParent(Context.current())
                .setAttribute("tenant.connectionUriDomain", tenantIdentifier.getConnectionUriDomain())
                .setAttribute("tenant.appId", tenantIdentifier.getAppId())
                .setAttribute("tenant.tenantId", tenantIdentifier.getTenantId())
                .startSpan();
        span.makeCurrent(); // Set the span as the current context
        return span;
    }

    /** Ends the span if non-null; returns it for chaining. */
    public static Span endSpan(Span span) {
        if (span != null) {
            span.end();
        }
        return span;
    }

    /** Adds a timestamped event to the span if non-null; returns it for chaining. */
    public static Span addEventToSpan(Span span, String eventName, Attributes attributes) {
        if (span != null) {
            span.addEvent(eventName, attributes, System.currentTimeMillis(), TimeUnit.MILLISECONDS);
        }
        return span;
    }

    /**
     * Builds an OpenTelemetry SDK exporting spans (simple processor) and logs
     * (batch processor) over OTLP/gRPC to the collector configured by
     * otel_collector_connection_uri. Reuses the existing SDK if one was
     * already created for this process.
     */
    private static OpenTelemetry initializeOpenTelemetry(Main main) {
        if (getInstance(main) != null && getInstance(main).openTelemetry != null) {
            return getInstance(main).openTelemetry; // already initialized
        }
        Resource resource = Resource.getDefault().toBuilder()
                .put(SERVICE_NAME, "supertokens-core")
                .build();

        String collectorUri = Config.getBaseConfig(main).getOtelCollectorConnectionURI();

        SdkTracerProvider sdkTracerProvider =
                SdkTracerProvider.builder()
                        .setResource(resource)
                        .addSpanProcessor(SimpleSpanProcessor.create(OtlpGrpcSpanExporter.builder()
                                .setEndpoint(collectorUri) // otel collector
                                .build()))
                        .build();

        OpenTelemetrySdk sdk =
                OpenTelemetrySdk.builder()
                        .setTracerProvider(sdkTracerProvider)
                        .setPropagators(ContextPropagators.create(W3CTraceContextPropagator.getInstance()))
                        .setLoggerProvider(
                                SdkLoggerProvider.builder()
                                        .setResource(resource)
                                        .addLogRecordProcessor(
                                                BatchLogRecordProcessor.builder(
                                                                OtlpGrpcLogRecordExporter.builder()
                                                                        .setEndpoint(collectorUri)
                                                                        .build())
                                                        .build())
                                        .build())
                        .build();

        // Shutdown hook flushes and closes exporters even on abnormal exit.
        Runtime.getRuntime().addShutdownHook(new Thread(sdk::close));

        return sdk;
    }

    @TestOnly
    public static void resetForTest() {
        GlobalOpenTelemetry.resetForTest();
    }

    /** Closes the SDK (flushing pending telemetry). No-op when never initialized. */
    public static void closeTelemetry(Main main) {
        TelemetryProvider instance = getInstance(main);
        if (instance == null) {
            return;
        }
        OpenTelemetry telemetry = instance.openTelemetry;
        if (telemetry instanceof OpenTelemetrySdk) {
            ((OpenTelemetrySdk) telemetry).close();
        }
    }
}

View File

@ -23,6 +23,7 @@ import io.supertokens.config.CoreConfig;
import io.supertokens.pluginInterface.PluginInterfaceTesting;
import io.supertokens.pluginInterface.useridmapping.UserIdMapping;
import io.supertokens.storageLayer.StorageLayer;
import io.supertokens.telemetry.TelemetryProvider;
import io.supertokens.test.httpRequest.HttpRequestForTesting;
import io.supertokens.test.httpRequest.HttpResponseException;
import io.supertokens.useridmapping.UserIdType;
@ -72,6 +73,8 @@ public abstract class Utils extends Mockito {
} catch (Exception ignored) {
}
TelemetryProvider.resetForTest();
} catch (Exception e) {
e.printStackTrace();
}

View File

@ -110,6 +110,7 @@ public class RefreshTokenTest {
TestingProcess process = TestingProcessManager.start(args);
assertNotNull(process.checkOrWaitForEvent(PROCESS_STATE.STARTED));
long createdTime = System.currentTimeMillis();
TokenInfo tokenInfo = RefreshToken.createNewRefreshToken(process.getProcess(), "sessionHandle", "userId",
"parentRefreshTokenHash1", "antiCsrfToken");
@ -131,9 +132,9 @@ public class RefreshTokenTest {
assertEquals("antiCsrfToken", infoFromToken.antiCsrfToken);
assertNull(infoFromToken.parentRefreshTokenHash2);
assertSame(infoFromToken.type, TYPE.FREE_OPTIMISED);
// -5000 for some grace period for creation and checking above
assertTrue(tokenInfo.expiry > System.currentTimeMillis()
+ Config.getConfig(process.getProcess()).getRefreshTokenValidityInMillis() - 5000);
assertTrue(tokenInfo.expiry >= createdTime
+ Config.getConfig(process.getProcess()).getRefreshTokenValidityInMillis());
process.kill();
assertNotNull(process.checkOrWaitForEvent(PROCESS_STATE.STOPPED));

55
stress-tests/.gitignore vendored Normal file
View File

@ -0,0 +1,55 @@
# Dependencies
node_modules/
npm-debug.log*
yarn-debug.log*
yarn-error.log*
package-lock.json
yarn.lock
# Environment variables
.env
.env.local
.env.*.local
# Build output
dist/
build/
out/
# Logs
logs/
*.log
# IDE and editor files
.idea/
.vscode/
*.swp
*.swo
.DS_Store
Thumbs.db
# Testing
coverage/
.nyc_output/
# Temporary files
*.tmp
*.temp
.cache/
# Optional npm cache directory
.npm
# Optional eslint cache
.eslintcache
# Optional REPL history
.node_repl_history
# Output of 'npm pack'
*.tgz
# Yarn Integrity file
.yarn-integrity
users/

8
stress-tests/.prettierrc Normal file
View File

@ -0,0 +1,8 @@
{
"semi": true,
"trailingComma": "es5",
"singleQuote": true,
"printWidth": 100,
"tabWidth": 2,
"useTabs": false
}

View File

@ -0,0 +1,66 @@
version: '3'
services:
# Note: If you are assigning a custom name to your db service on the line below, make sure it does not contain underscores
db:
image: 'postgres:latest'
environment:
POSTGRES_USER: supertokens
POSTGRES_PASSWORD: supertokens
POSTGRES_DB: supertokens
command: postgres -c shared_preload_libraries='pg_stat_statements' -c pg_stat_statements.track=all -c max_connections=1000 -c shared_buffers=1GB -c synchronous_commit=off -c wal_buffers=16MB -c checkpoint_timeout=30min -c max_wal_size=4GB
ports:
- 5432:5432
networks:
- app_network
restart: unless-stopped
healthcheck:
test: ['CMD', 'pg_isready', '-U', 'supertokens', '-d', 'supertokens']
interval: 5s
timeout: 5s
retries: 5
supertokens:
image: supertokens/supertokens-postgresql
# platform: linux/amd64
depends_on:
db:
condition: service_healthy
ports:
- 3567:3567
environment:
POSTGRESQL_CONNECTION_URI: "postgresql://supertokens:supertokens@db:5432/supertokens"
PASSWORD_HASHING_ALG: "ARGON2"
ARGON2_ITERATIONS: 1
ARGON2_MEMORY_KB: 8
ARGON2_PARALLELISM: 1
ARGON2_HASHING_POOL_SIZE: 8
API_KEYS: "qwertyuiopasdfghjklzxcvbnm"
BULK_MIGRATION_PARALLELISM: "4"
BULK_MIGRATION_BATCH_SIZE: "500"
networks:
- app_network
restart: unless-stopped
healthcheck:
test: >
bash -c 'exec 3<>/dev/tcp/127.0.0.1/3567 && echo -e "GET /hello HTTP/1.1\r\nhost: 127.0.0.1:3567\r\nConnection: close\r\n\r\n" >&3 && cat <&3 | grep "Hello"'
interval: 10s
timeout: 5s
retries: 5
pghero:
image: ankane/pghero
environment:
DATABASE_URL: "postgres://supertokens:supertokens@db:5432/supertokens"
ports:
- 8080:8080
networks:
- app_network
depends_on:
- db
restart: unless-stopped
networks:
app_network:
driver: bridge

27
stress-tests/package.json Normal file
View File

@ -0,0 +1,27 @@
{
"name": "stress-tests",
"version": "1.0.0",
"description": "Stress tests for SuperTokens",
"main": "dist/index.js",
"scripts": {
"build": "tsc",
"start": "node dist/index.js",
"generate-users": "rm -rf users && mkdir -p users && ts-node src/oneMillionUsers/generateUsers.ts",
"one-million-users": "ts-node src/oneMillionUsers/index.ts",
"format": "prettier --write \"**/*.{ts,js,json}\""
},
"keywords": [],
"author": "",
"license": "ISC",
"devDependencies": {
"@types/node": "^20.11.24",
"prettier": "^3.5.3",
"ts-node": "^10.9.2",
"typescript": "^5.3.3"
},
"dependencies": {
"@types/uuid": "^10.0.0",
"supertokens-node": "21.1.2",
"uuid": "^11.1.0"
}
}

View File

@ -0,0 +1,143 @@
import * as fs from 'fs';
// License key used by the stress tests to enable paid features on the core.
export const LICENSE_FOR_TEST =
  'E1yITHflaFS4BPm7n0bnfFCjP4sJoTERmP0J=kXQ5YONtALeGnfOOe2rf2QZ0mfOh0aO3pBqfF-S0jb0ABpat6pySluTpJO6jieD6tzUOR1HrGjJO=50Ob3mHi21tQH1';

/**
 * Returns connection details for the SuperTokens core used by the stress
 * tests. Currently a fixed local deployment (see docker-compose.yml).
 */
export async function createStInstanceForTest() {
  return {
    deployment_id: '1234567890',
    core_url: 'http://localhost:3567',
    api_key: 'qwertyuiopasdfghjklzxcvbnm',
  };
}

/** Tears down a test deployment. No-op: the local instance is managed externally. */
export async function deleteStInstance(deploymentId: string) {
  // Nothing to clean up for the fixed local instance.
}
/** Formats a millisecond duration as "Ns" under a minute, else "Nm Ns" (whole seconds). */
export const formatTime = (ms: number): string => {
  const totalSeconds = Math.floor(ms / 1000);
  if (totalSeconds >= 60) {
    return `${Math.floor(totalSeconds / 60)}m ${totalSeconds % 60}s`;
  }
  return `${totalSeconds}s`;
};
/**
 * Runs `work(0..count-1)` split across `numberOfBatches` concurrent batches,
 * logging throughput every 5 seconds until every batch has finished.
 * Results are returned flattened in batch order.
 */
export const workInBatches = async <T>(
  count: number,
  numberOfBatches: number,
  work: (idx: number) => Promise<T>
): Promise<T[]> => {
  const perBatch = Math.ceil(count / numberOfBatches);
  const startedAt = Date.now();
  let completed = 0;
  let pendingBatches = numberOfBatches;

  // Processes one contiguous slice of indices sequentially.
  const runBatch = async (batchIdx: number): Promise<T[]> => {
    const from = batchIdx * perBatch;
    const to = Math.min(from + perBatch, count);
    const collected: T[] = [];
    for (let i = from; i < to; i++) {
      collected.push(await work(i));
      completed++;
    }
    pendingBatches--;
    return collected;
  };

  // Periodic progress reporter; resolves (to an empty slice) once all
  // batches are done so Promise.all below can complete.
  const reportProgress = async (): Promise<T[]> => {
    while (pendingBatches > 0) {
      await new Promise((resolve) => setTimeout(resolve, 5000));
      const now = Date.now();
      console.log(
        `    Progress: Time=${formatTime(now - startedAt)}, Completed=${completed}, Throughput=${Math.round((completed / (now - startedAt)) * 10000) / 10}/s`
      );
    }
    return [];
  };

  const tasks: Promise<T[]>[] = [];
  for (let b = 0; b < numberOfBatches; b++) {
    tasks.push(runBatch(b));
  }
  tasks.push(reportProgress());

  return (await Promise.all(tasks)).flat();
};
/**
 * Installs the test license key on the core via `PUT /ee/license`.
 * Logs the core's response; throws on any non-2xx status or network error.
 */
export const setupLicense = async (coreUrl: string, apiKey: string) => {
  try {
    const response = await fetch(`${coreUrl}/ee/license`, {
      method: 'PUT',
      headers: {
        'Content-Type': 'application/json',
        'api-key': apiKey,
      },
      body: JSON.stringify({ licenseKey: LICENSE_FOR_TEST }),
    });
    if (!response.ok) {
      throw new Error(`Failed with status: ${response.status}`);
    }
    console.log('License response:', await response.text());
    console.log('License key set successfully');
  } catch (error) {
    console.error('Failed to set license key:', error);
    throw error;
  }
};
/**
 * Process-wide singleton collecting named timing measurements, persisted to
 * stats.json via writeToFile().
 */
export class StatsCollector {
  private static instance: StatsCollector;
  private measurements: { title: string; timeMs: number }[] = [];

  private constructor() {}

  /** Lazily creates and returns the singleton instance. */
  public static getInstance(): StatsCollector {
    if (StatsCollector.instance === undefined) {
      StatsCollector.instance = new StatsCollector();
    }
    return StatsCollector.instance;
  }

  /** Records one named duration in milliseconds. */
  public addMeasurement(title: string, timeMs: number) {
    this.measurements.push({ title, timeMs });
  }

  /** Returns all measurements recorded so far (live array). */
  public getStats() {
    return this.measurements;
  }

  /** Serializes measurements (with human-readable durations) to stats.json. */
  public writeToFile() {
    const stats = {
      measurements: this.measurements.map(({ title, timeMs }) => ({
        title,
        ms: timeMs,
        formatted: formatTime(timeMs),
      })),
      timestamp: new Date().toISOString(),
    };
    fs.writeFileSync('stats.json', JSON.stringify(stats, null, 2));
  }
}
/**
 * Awaits `fn`, logs how long it took, and records the duration in the global
 * StatsCollector. Returns fn's result unchanged.
 */
export const measureTime = async <T>(title: string, fn: () => Promise<T>): Promise<T> => {
  const startedAt = Date.now();
  const result = await fn();
  const elapsed = Date.now() - startedAt;
  console.log(`  ${title} took ${formatTime(elapsed)}`);
  StatsCollector.getInstance().addMeasurement(title, elapsed);
  return result;
};

View File

@ -0,0 +1,24 @@
import SuperTokens from 'supertokens-node';
import AccountLinking from 'supertokens-node/recipe/accountlinking';
import { measureTime, workInBatches } from '../common/utils';
/**
 * For each group, makes the first user primary and links every remaining
 * user in the group to it. Runs across 8 concurrent batches.
 */
export const doAccountLinking = async (
  users: { recipeUserId: string; email?: string; phoneNumber?: string }[][]
) => {
  console.log('\n\n2. Linking accounts');
  await measureTime('Linking accounts', async () => {
    await workInBatches(users.length, 8, async (idx) => {
      const group = users[idx]!;
      const primary = group[0];
      await AccountLinking.createPrimaryUser(
        SuperTokens.convertToRecipeUserId(primary.recipeUserId)
      );
      for (const secondary of group.slice(1)) {
        await AccountLinking.linkAccounts(
          SuperTokens.convertToRecipeUserId(secondary.recipeUserId),
          primary.recipeUserId
        );
      }
    });
  });
};

View File

@ -0,0 +1,18 @@
import SuperTokens from 'supertokens-node';
import UserRoles from 'supertokens-node/recipe/userroles';
import { measureTime, workInBatches } from '../common/utils';
/**
 * Creates the 'admin' role (permissions p1/p2) once, then assigns it to
 * every user on the 'public' tenant across 8 concurrent batches.
 */
export const addRoles = async (
  users: { recipeUserId: string; email?: string; phoneNumber?: string }[]
) => {
  console.log('\n\n4. Adding roles');
  await measureTime('Adding roles', async () => {
    await UserRoles.createNewRoleOrAddPermissions('admin', ['p1', 'p2']);
    await workInBatches(users.length, 8, async (idx) => {
      await UserRoles.addRoleToUser('public', users[idx]!.recipeUserId, 'admin');
    });
  });
};

View File

@ -0,0 +1,19 @@
import SuperTokens from 'supertokens-node';
import Session from 'supertokens-node/recipe/session';
import { measureTime, workInBatches } from '../common/utils';
/** Creates one session (without req/res) per user on the 'public' tenant. */
export const createSessions = async (
  users: { recipeUserId: string; email?: string; phoneNumber?: string }[]
) => {
  console.log('\n\n5. Creating sessions');
  await measureTime('Creating sessions', async () => {
    await workInBatches(users.length, 8, async (idx) => {
      const { recipeUserId } = users[idx]!;
      await Session.createNewSessionWithoutRequestResponse(
        'public',
        SuperTokens.convertToRecipeUserId(recipeUserId)
      );
    });
  });
};

View File

@ -0,0 +1,25 @@
import { measureTime, workInBatches } from '../common/utils';
import SuperTokens from 'supertokens-node';
/**
 * For ~50% of users, maps the SuperTokens user id to a freshly generated
 * 64-char lowercase external id and updates the in-memory record so later
 * steps use the external id.
 */
export const createUserIdMappings = async (
  users: { recipeUserId: string; email?: string; phoneNumber?: string }[]
) => {
  console.log('\n\n3. Create user id mappings');
  await measureTime('Create user id mappings', async () => {
    await workInBatches(users.length, 8, async (idx) => {
      const user = users[idx]!;
      if (Math.random() >= 0.5) {
        return; // leave this user unmapped
      }
      let externalId = '';
      for (let i = 0; i < 64; i++) {
        externalId += String.fromCharCode(97 + Math.floor(Math.random() * 26));
      }
      await SuperTokens.createUserIdMapping({
        superTokensUserId: user.recipeUserId,
        externalUserId: externalId,
      });
      user.recipeUserId = externalId;
    });
  });
};

View File

@ -0,0 +1,128 @@
import EmailPassword from 'supertokens-node/recipe/emailpassword';
import Passwordless from 'supertokens-node/recipe/passwordless';
import ThirdParty from 'supertokens-node/recipe/thirdparty';
import { workInBatches, measureTime } from '../common/utils';
// Total number of users created across the five cohorts below.
const TOTAL_USERS = 10000;

/**
 * Creates TOTAL_USERS/5 email-password users with random 64-char emails.
 * Entries are undefined for signups that did not return status OK.
 */
const createEmailPasswordUsers = async () => {
  console.log(`  Creating EmailPassword users...`);
  return await workInBatches(Math.floor(TOTAL_USERS / 5), 4, async (idx) => {
    const localPart = Array.from({ length: 64 }, () =>
      String.fromCharCode(97 + Math.floor(Math.random() * 26))
    ).join('');
    const email = `${localPart}@example.com`;
    const result = await EmailPassword.signUp('public', email, 'password');
    if (result.status === 'OK') {
      return { recipeUserId: result.user.id, email: email };
    }
  });
};
/** Creates TOTAL_USERS/5 passwordless users keyed by a random 64-char email. */
const createPasswordlessUsersWithEmail = async () => {
  console.log(`  Creating Passwordless users (with email)...`);
  return await workInBatches(Math.floor(TOTAL_USERS / 5), 4, async (idx) => {
    let email = '';
    for (let i = 0; i < 64; i++) {
      email += String.fromCharCode(97 + Math.floor(Math.random() * 26));
    }
    email += '@example.com';
    const result = await Passwordless.signInUp({ tenantId: 'public', email });
    if (result.status === 'OK') {
      return { recipeUserId: result.user.id, email };
    }
  });
};
/** Creates TOTAL_USERS/5 passwordless users keyed by a random +1 phone number. */
const createPasswordlessUsersWithPhone = async () => {
  console.log(`  Creating Passwordless users (with phone)...`);
  return await workInBatches(Math.floor(TOTAL_USERS / 5), 4, async (idx) => {
    const phoneNumber = `+1${Math.floor(Math.random() * 10000000000)}`;
    const result = await Passwordless.signInUp({ tenantId: 'public', phoneNumber });
    if (result.status === 'OK') {
      return { recipeUserId: result.user.id, phoneNumber };
    }
  });
};
/**
 * Creates TOTAL_USERS/5 third-party users for the given provider id with a
 * random email and provider user id; the email is created pre-verified.
 */
const createThirdPartyUsers = async (thirdPartyId: string) => {
  console.log(`  Creating ThirdParty (${thirdPartyId}) users...`);
  return await workInBatches(Math.floor(TOTAL_USERS / 5), 4, async (idx) => {
    const randomLowercase = (len: number) =>
      Array.from({ length: len }, () =>
        String.fromCharCode(97 + Math.floor(Math.random() * 26))
      ).join('');
    const email = `${randomLowercase(64)}@example.com`;
    const result = await ThirdParty.manuallyCreateOrUpdateUser(
      'public',
      thirdPartyId,
      randomLowercase(64),
      email,
      true
    );
    if (result.status === 'OK') {
      return { recipeUserId: result.user.id, email };
    }
  });
};
/** Orchestrates creation of all five user cohorts and returns them grouped by recipe. */
export const createUsers = async () => {
  console.log('\n\n1. Create one million users');

  const epUsers = await measureTime('Emailpassword users creation', createEmailPasswordUsers);
  const plessEmailUsers = await measureTime(
    'Passwordless users (with email) creation',
    createPasswordlessUsersWithEmail
  );
  const plessPhoneUsers = await measureTime(
    'Passwordless users (with phone) creation',
    createPasswordlessUsersWithPhone
  );
  const tpUsers1 = await measureTime('ThirdParty users (google) creation', () =>
    createThirdPartyUsers('google')
  );
  const tpUsers2 = await measureTime('ThirdParty users (facebook) creation', () =>
    createThirdPartyUsers('facebook')
  );

  return { epUsers, plessEmailUsers, plessPhoneUsers, tpUsers1, tpUsers2 };
};

View File

@ -0,0 +1,193 @@
import * as fs from 'fs';
import { v4 as uuidv4 } from 'uuid';
// Total users to generate, split into JSON files of USERS_PER_JSON each.
const USERS_TO_GENERATE = 1000000;
const USERS_PER_JSON = 10000;
// Number of JSON files that will be written.
const n = Math.floor(USERS_TO_GENERATE / USERS_PER_JSON);

// Module-wide registries guaranteeing uniqueness across all generated users.
const generatedEmails = new Set<string>();
const generatedPhoneNumbers = new Set<string>();
const generatedUserIds = new Set<string>();

// One login method entry in the generated bulk-import payload.
interface LoginMethod {
  tenantIds: string[];
  email: string;
  recipeId: string;
  passwordHash?: string;
  hashingAlgorithm?: string;
  thirdPartyId?: string;
  thirdPartyUserId?: string;
  phoneNumber?: string;
  isVerified: boolean;
  isPrimary: boolean;
  timeJoinedInMSSinceEpoch: number;
}

// One user entry in the generated bulk-import payload.
interface User {
  externalUserId: string;
  userRoles: Array<{
    role: string;
    tenantIds: string[];
  }>;
  loginMethods: LoginMethod[];
}
// Builds an emailpassword login method with a fixed argon2 hash.
// NOTE(review): timeJoined is uniform in [3 years-in-ms, Date.now()) since the
// epoch — presumably "a random time in the past" was intended; confirm.
function createEmailLoginMethod(email: string, tenantIds: string[]): LoginMethod {
  const threeYearsMs = 3 * 365 * 24 * 60 * 60 * 1000;
  return {
    tenantIds,
    email,
    recipeId: 'emailpassword',
    passwordHash: '$argon2d$v=19$m=12,t=3,p=1$aGI4enNvMmd0Zm0wMDAwMA$r6p7qbr6HD+8CD7sBi4HVw',
    hashingAlgorithm: 'argon2',
    isVerified: true,
    isPrimary: false,
    timeJoinedInMSSinceEpoch:
      Math.floor(Math.random() * (Date.now() - threeYearsMs)) + threeYearsMs,
  };
}
// Builds a google third-party login method; the provider user id is derived
// deterministically from the email via hashCode.
function createThirdPartyLoginMethod(email: string, tenantIds: string[]): LoginMethod {
  const threeYearsMs = 3 * 365 * 24 * 60 * 60 * 1000;
  return {
    tenantIds,
    recipeId: 'thirdparty',
    email,
    thirdPartyId: 'google',
    thirdPartyUserId: String(hashCode(email)),
    isVerified: true,
    isPrimary: false,
    timeJoinedInMSSinceEpoch:
      Math.floor(Math.random() * (Date.now() - threeYearsMs)) + threeYearsMs,
  };
}
// Builds a passwordless login method carrying both an email and a phone number.
function createPasswordlessLoginMethod(
  email: string,
  tenantIds: string[],
  phoneNumber: string
): LoginMethod {
  const threeYearsMs = 3 * 365 * 24 * 60 * 60 * 1000;
  return {
    tenantIds,
    email,
    recipeId: 'passwordless',
    phoneNumber,
    isVerified: true,
    isPrimary: false,
    timeJoinedInMSSinceEpoch:
      Math.floor(Math.random() * (Date.now() - threeYearsMs)) + threeYearsMs,
  };
}
// Java-style 31-based string hash over UTF-16 code units; `| 0` keeps the
// accumulator in signed 32-bit range (identical to the original `hash & hash`).
function hashCode(str: string): number {
  let hash = 0;
  for (let i = 0; i < str.length; i++) {
    hash = ((hash << 5) - hash + str.charCodeAt(i)) | 0;
  }
  return hash;
}
// Returns `length` characters drawn uniformly (with replacement) from `chars`.
function generateRandomString(length: number, chars: string): string {
  const picked: string[] = [];
  for (let i = 0; i < length; i++) {
    picked.push(chars.charAt(Math.floor(Math.random() * chars.length)));
  }
  return picked.join('');
}
// 24 random lowercase letters at example.com.
function generateRandomEmail(): string {
  const localPart = generateRandomString(24, 'abcdefghijklmnopqrstuvwxyz');
  return `${localPart}@example.com`;
}
// +91 followed by 10 random digits.
function generateRandomPhoneNumber(): string {
  const digits = generateRandomString(10, '0123456789');
  return `+91${digits}`;
}
/**
 * Generates one unique user for the bulk-import payload: always an
 * emailpassword login method, plus (independently, 50% chance each) a
 * third-party and a passwordless method sharing the same email. Exactly one
 * method is marked primary. Uniqueness of ids/emails/phones is enforced via
 * the module-level Sets.
 */
function genUser(): User {
  const tenantIds = ['public'];

  // Unique external user id.
  let userId = `e-${uuidv4()}`;
  while (generatedUserIds.has(userId)) {
    userId = `e-${uuidv4()}`;
  }
  generatedUserIds.add(userId);

  // Unique email shared by all of this user's login methods.
  let email = generateRandomEmail();
  while (generatedEmails.has(email)) {
    email = generateRandomEmail();
  }
  generatedEmails.add(email);

  // Always add email login method
  const loginMethods: LoginMethod[] = [createEmailLoginMethod(email, tenantIds)];

  // 50% chance to add third party login
  if (Math.random() < 0.5) {
    loginMethods.push(createThirdPartyLoginMethod(email, tenantIds));
  }

  // 50% chance to add passwordless login
  if (Math.random() < 0.5) {
    let phoneNumber = generateRandomPhoneNumber();
    while (generatedPhoneNumbers.has(phoneNumber)) {
      phoneNumber = generateRandomPhoneNumber();
    }
    generatedPhoneNumbers.add(phoneNumber);
    loginMethods.push(createPasswordlessLoginMethod(email, tenantIds, phoneNumber));
  }

  // The email method above guarantees loginMethods is non-empty, so the
  // original "if no methods were added" fallback was unreachable dead code
  // and has been removed.
  loginMethods[Math.floor(Math.random() * loginMethods.length)].isPrimary = true;

  return {
    externalUserId: userId,
    userRoles: [
      { role: 'role1', tenantIds: ['public'] },
      { role: 'role2', tenantIds: ['public'] },
    ],
    loginMethods,
  };
}
// Create users directory if it doesn't exist
if (!fs.existsSync('users')) {
  fs.mkdirSync('users');
}

// Write n files of USERS_PER_JSON users each, named users/users-0000.json,
// users/users-0001.json, ... for consumption by the bulk-import endpoint.
for (let i = 0; i < n; i++) {
  console.log(`Generating ${USERS_PER_JSON} users for ${i}`);
  const users: User[] = [];
  for (let j = 0; j < USERS_PER_JSON; j++) {
    users.push(genUser());
  }
  fs.writeFileSync(
    `users/users-${i.toString().padStart(4, '0')}.json`,
    JSON.stringify({ users }, null, 2)
  );
}

View File

@ -0,0 +1,149 @@
import {
createStInstanceForTest,
deleteStInstance,
setupLicense,
StatsCollector,
} from '../common/utils';
import SuperTokens from 'supertokens-node';
import EmailPassword from 'supertokens-node/recipe/emailpassword';
import Passwordless from 'supertokens-node/recipe/passwordless';
import ThirdParty from 'supertokens-node/recipe/thirdparty';
import UserRoles from 'supertokens-node/recipe/userroles';
import Session from 'supertokens-node/recipe/session';
import { createUsers } from './createUsers';
import { doAccountLinking } from './accountLinking';
import { createUserIdMappings } from './createUserIdMappings';
import { addRoles } from './addRoles';
import { createSessions } from './createSessions';
// Initializes the supertokens-node SDK against the given core, enabling the
// recipes this stress test exercises (emailpassword, passwordless,
// thirdparty with google/facebook, user roles, sessions).
function stInit(connectionURI: string, apiKey: string) {
  SuperTokens.init({
    appInfo: {
      appName: 'SuperTokens',
      apiDomain: 'http://localhost:3001',
      websiteDomain: 'http://localhost:3000',
      apiBasePath: '/auth',
      websiteBasePath: '/auth',
    },
    supertokens: { connectionURI, apiKey },
    recipeList: [
      EmailPassword.init(),
      Passwordless.init({
        contactMethod: 'EMAIL_OR_PHONE',
        flowType: 'USER_INPUT_CODE',
      }),
      ThirdParty.init({
        signInAndUpFeature: {
          providers: [
            { config: { thirdPartyId: 'google' } },
            { config: { thirdPartyId: 'facebook' } },
          ],
        },
      }),
      UserRoles.init(),
      Session.init(),
    ],
  });
}
/**
 * End-to-end stress run: create users, link accounts, create id mappings,
 * add roles, create sessions, then paginate and count all users. Writes
 * timing stats to stats.json on success; always tears down the deployment.
 */
async function main() {
  const deployment = await createStInstanceForTest();
  console.log(`Deployment created: ${deployment.core_url}`);

  try {
    stInit(deployment.core_url, deployment.api_key);
    await setupLicense(deployment.core_url, deployment.api_key);

    // 1. Create one million users
    const users = await createUsers();

    // Drop failed signups (undefined entries) up front. The original only
    // filtered before the id-mapping step, which let undefined entries be
    // non-null-asserted into the account-linking groups.
    const allUsers = [
      ...users.epUsers,
      ...users.plessEmailUsers,
      ...users.plessPhoneUsers,
      ...users.tpUsers1,
      ...users.tpUsers2,
    ].filter((user) => user !== undefined) as {
      recipeUserId: string;
      email?: string;
      phoneNumber?: string;
    }[];

    // Randomly partition users into groups of 1-5 accounts for linking.
    const remaining = [...allUsers];
    const usersToLink: { recipeUserId: string; email?: string; phoneNumber?: string }[][] = [];
    while (remaining.length > 0) {
      const groupSize = Math.min(Math.floor(Math.random() * 5 + 1), remaining.length);
      const group: { recipeUserId: string; email?: string; phoneNumber?: string }[] = [];
      for (let i = 0; i < groupSize; i++) {
        const pick = Math.floor(Math.random() * remaining.length);
        group.push(remaining[pick]!);
        remaining.splice(pick, 1);
      }
      usersToLink.push(group);
    }

    // 2. Link accounts
    await doAccountLinking(usersToLink);

    // 3. Create user id mappings (mutates recipeUserId in place for mapped users)
    await createUserIdMappings(allUsers);

    // 4. Add roles
    await addRoles(allUsers);

    // 5. Create sessions
    await createSessions(allUsers);

    // 6. List all users
    console.log('\n\n6. Listing all users');
    let userCount = 0;
    let paginationToken: string | undefined;
    do {
      const page = await SuperTokens.getUsersNewestFirst({
        tenantId: 'public',
        paginationToken,
      });
      userCount += page.users.length;
      paginationToken = page.nextPaginationToken;
    } while (paginationToken !== undefined);
    console.log(`Users count: ${userCount}`);

    // 7. Count users
    console.log('\n\n7. Count users');
    const total = await SuperTokens.getUserCount();
    console.log(`Users count: ${total}`);

    // Write stats to file
    StatsCollector.getInstance().writeToFile();
    console.log('\nStats written to stats.json');
  } catch (error) {
    console.error('An error occurred during execution:', error);
    throw error;
  } finally {
    await deleteStInstance(deployment.deployment_id);
  }
}

// Fail the process explicitly instead of leaving an unhandled rejection
// (the error itself is already logged inside main's catch block).
main().catch(() => {
  process.exitCode = 1;
});

View File

@ -0,0 +1,16 @@
{
"compilerOptions": {
"target": "es2016",
"module": "commonjs",
"lib": ["ES2020"],
"strict": true,
"esModuleInterop": true,
"skipLibCheck": true,
"forceConsistentCasingInFileNames": true,
"outDir": "./dist",
"rootDir": "./src",
"types": ["node"]
},
"include": ["src/**/*"],
"exclude": ["node_modules"]
}