Compare commits

...

5 Commits
master ... 7.0

Author | SHA1 | Message | Date

Sattvik Chakravarthy | 97913a1057 | fix: For backport release 7.0 (#1177) | 2025-08-12 08:21:56 +05:30
  * fix: workflows
  * fix: test
  * fix: stress tests
  * fix: account linking in stress tests

Tamas Soltesz | 476ac2b525 | backport: logs to otel (#1168) | 2025-08-11 16:33:14 +05:30
  fix: add implementationDependencies.json dependencies
  chore: build version and changelog
  fix: add missing config and devConfig entries
  fix: remove accidentally merged lines

rishabhpoddar | e4f22097fd | adding dev-v7.0.19 tag to this commit to ensure building | 2024-03-22 15:11:38 +05:30

Sattvik Chakravarthy | a690b4a818 | fix: test (#971) | 2024-03-22 12:21:52 +05:30

Sattvik Chakravarthy | ffefe748ea | fix: backports to core 7.0 (#969) | 2024-03-21 23:58:12 +05:30
73 changed files with 2958 additions and 1166 deletions

View File

@ -1,57 +0,0 @@
FROM ubuntu:16.04
RUN apt-get update && apt-get upgrade -y
RUN apt-get install build-essential -y
RUN echo "mysql-server mysql-server/root_password password root" | debconf-set-selections
RUN echo "mysql-server mysql-server/root_password_again password root" | debconf-set-selections
RUN apt install mysql-server -y
RUN usermod -d /var/lib/mysql/ mysql
RUN mkdir /var/run/mysqld
ADD ./runMySQL.sh /runMySQL.sh
RUN chmod +x /runMySQL.sh
RUN apt-get install -y git-core
RUN apt-get install -y wget
# Install OpenJDK 12
RUN wget https://download.java.net/java/GA/jdk12.0.2/e482c34c86bd4bf8b56c0b35558996b9/10/GPL/openjdk-12.0.2_linux-x64_bin.tar.gz
RUN mkdir /usr/java
RUN mv openjdk-12.0.2_linux-x64_bin.tar.gz /usr/java
RUN cd /usr/java && tar -xzvf openjdk-12.0.2_linux-x64_bin.tar.gz
RUN echo 'JAVA_HOME=/usr/java/jdk-12.0.2' >> /etc/profile
RUN echo 'PATH=$PATH:$HOME/bin:$JAVA_HOME/bin' >> /etc/profile
RUN apt-get install jq -y
RUN apt-get install curl -y
RUN apt-get install unzip -y
# Install OpenJDK 15.0.1
RUN wget https://download.java.net/java/GA/jdk15.0.1/51f4f36ad4ef43e39d0dfdbaf6549e32/9/GPL/openjdk-15.0.1_linux-x64_bin.tar.gz
RUN mv openjdk-15.0.1_linux-x64_bin.tar.gz /usr/java
RUN cd /usr/java && tar -xzvf openjdk-15.0.1_linux-x64_bin.tar.gz
RUN echo 'JAVA_HOME=/usr/java/jdk-15.0.1' >> /etc/profile
RUN echo 'PATH=$PATH:$HOME/bin:$JAVA_HOME/bin' >> /etc/profile
RUN echo 'export JAVA_HOME' >> /etc/profile
RUN echo 'export JRE_HOME' >> /etc/profile
RUN echo 'export PATH' >> /etc/profile
RUN update-alternatives --install "/usr/bin/java" "java" "/usr/java/jdk-12.0.2/bin/java" 1
RUN update-alternatives --install "/usr/bin/javac" "javac" "/usr/java/jdk-12.0.2/bin/javac" 1

View File

@ -1,57 +0,0 @@
FROM ubuntu:18.04
RUN apt-get update && apt-get upgrade -y
RUN apt-get install build-essential -y
RUN echo "mysql-server mysql-server/root_password password root" | debconf-set-selections
RUN echo "mysql-server mysql-server/root_password_again password root" | debconf-set-selections
RUN apt install mysql-server -y
RUN usermod -d /var/lib/mysql/ mysql
RUN mkdir /var/run/mysqld
ADD ./runMySQL.sh /runMySQL.sh
RUN chmod +x /runMySQL.sh
RUN apt-get install -y git-core
RUN apt-get install -y wget
# Install OpenJDK 12
RUN wget https://download.java.net/java/GA/jdk12.0.2/e482c34c86bd4bf8b56c0b35558996b9/10/GPL/openjdk-12.0.2_linux-x64_bin.tar.gz
RUN mkdir /usr/java
RUN mv openjdk-12.0.2_linux-x64_bin.tar.gz /usr/java
RUN cd /usr/java && tar -xzvf openjdk-12.0.2_linux-x64_bin.tar.gz
RUN echo 'JAVA_HOME=/usr/java/jdk-12.0.2' >> /etc/profile
RUN echo 'PATH=$PATH:$HOME/bin:$JAVA_HOME/bin' >> /etc/profile
RUN apt-get install jq -y
RUN apt-get install curl -y
RUN apt-get install unzip -y
# Install OpenJDK 15.0.1
RUN wget https://download.java.net/java/GA/jdk15.0.1/51f4f36ad4ef43e39d0dfdbaf6549e32/9/GPL/openjdk-15.0.1_linux-x64_bin.tar.gz
RUN mv openjdk-15.0.1_linux-x64_bin.tar.gz /usr/java
RUN cd /usr/java && tar -xzvf openjdk-15.0.1_linux-x64_bin.tar.gz
RUN echo 'JAVA_HOME=/usr/java/jdk-15.0.1' >> /etc/profile
RUN echo 'PATH=$PATH:$HOME/bin:$JAVA_HOME/bin' >> /etc/profile
RUN echo 'export JAVA_HOME' >> /etc/profile
RUN echo 'export JRE_HOME' >> /etc/profile
RUN echo 'export PATH' >> /etc/profile
RUN update-alternatives --install "/usr/bin/java" "java" "/usr/java/jdk-12.0.2/bin/java" 1
RUN update-alternatives --install "/usr/bin/javac" "javac" "/usr/java/jdk-12.0.2/bin/javac" 1

View File

@ -1,57 +0,0 @@
FROM ubuntu:22.04
RUN apt-get update && apt-get upgrade -y
RUN apt-get install build-essential -y
RUN echo "mysql-server mysql-server/root_password password root" | debconf-set-selections
RUN echo "mysql-server mysql-server/root_password_again password root" | debconf-set-selections
RUN apt install mysql-server -y
RUN usermod -d /var/lib/mysql/ mysql
RUN [ -d /var/run/mysqld ] || mkdir -p /var/run/mysqld
ADD ./runMySQL.sh /runMySQL.sh
RUN chmod +x /runMySQL.sh
RUN apt-get install -y git-core
RUN apt-get install -y wget
# Install OpenJDK 12
RUN wget https://download.java.net/java/GA/jdk12.0.2/e482c34c86bd4bf8b56c0b35558996b9/10/GPL/openjdk-12.0.2_linux-x64_bin.tar.gz
RUN mkdir /usr/java
RUN mv openjdk-12.0.2_linux-x64_bin.tar.gz /usr/java
RUN cd /usr/java && tar -xzvf openjdk-12.0.2_linux-x64_bin.tar.gz
RUN echo 'JAVA_HOME=/usr/java/jdk-12.0.2' >> /etc/profile
RUN echo 'PATH=$PATH:$HOME/bin:$JAVA_HOME/bin' >> /etc/profile
RUN apt-get install jq -y
RUN apt-get install curl -y
RUN apt-get install unzip -y
# Install OpenJDK 15.0.1
RUN wget https://download.java.net/java/GA/jdk15.0.1/51f4f36ad4ef43e39d0dfdbaf6549e32/9/GPL/openjdk-15.0.1_linux-x64_bin.tar.gz
RUN mv openjdk-15.0.1_linux-x64_bin.tar.gz /usr/java
RUN cd /usr/java && tar -xzvf openjdk-15.0.1_linux-x64_bin.tar.gz
RUN echo 'JAVA_HOME=/usr/java/jdk-15.0.1' >> /etc/profile
RUN echo 'PATH=$PATH:$HOME/bin:$JAVA_HOME/bin' >> /etc/profile
RUN echo 'export JAVA_HOME' >> /etc/profile
RUN echo 'export JRE_HOME' >> /etc/profile
RUN echo 'export PATH' >> /etc/profile
RUN update-alternatives --install "/usr/bin/java" "java" "/usr/java/jdk-12.0.2/bin/java" 1
RUN update-alternatives --install "/usr/bin/javac" "javac" "/usr/java/jdk-12.0.2/bin/javac" 1

View File

@ -1,90 +0,0 @@
version: 2.1
orbs:
slack: circleci/slack@3.4.2
jobs:
test:
docker:
- image: rishabhpoddar/supertokens_core_testing
- image: mongo
environment:
MONGO_INITDB_ROOT_USERNAME: root
MONGO_INITDB_ROOT_PASSWORD: root
resource_class: large
parameters:
plugin:
type: string
steps:
- checkout
- run: echo $'\n[mysqld]\ncharacter_set_server=utf8mb4\nmax_connections=10000' >> /etc/mysql/mysql.cnf
- run: apt-get -y -q install postgresql-9.5 postgresql-client-9.5 postgresql-contrib-9.5 sudo
- run: echo "host all all 0.0.0.0/0 md5" >> /etc/postgresql/9.5/main/pg_hba.conf
- run: echo "listen_addresses='*'" >> /etc/postgresql/9.5/main/postgresql.conf
- run: sed -i 's/^#*\s*max_connections\s*=.*/max_connections = 10000/' /etc/postgresql/9.5/main/postgresql.conf
- run: (cd .circleci/ && ./doTests.sh << parameters.plugin >>)
- slack/status
mark-passed:
docker:
- image: rishabhpoddar/supertokens_core_testing
steps:
- checkout
- run: (cd .circleci && ./markPassed.sh)
- slack/status
workflows:
version: 2
tagged-build:
jobs:
- test:
plugin: sqlite
name: test-sqlite
context:
- slack-notification
filters:
tags:
only: /dev-v[0-9]+(\.[0-9]+)*/
branches:
ignore: /.*/
- test:
plugin: mongodb
name: test-mongodb
context:
- slack-notification
filters:
tags:
only: /dev-v[0-9]+(\.[0-9]+)*/
branches:
ignore: /.*/
- test:
plugin: postgresql
name: test-postgresql
context:
- slack-notification
filters:
tags:
only: /dev-v[0-9]+(\.[0-9]+)*/
branches:
ignore: /.*/
- test:
plugin: mysql
name: test-mysql
context:
- slack-notification
filters:
tags:
only: /dev-v[0-9]+(\.[0-9]+)*/
branches:
ignore: /.*/
- mark-passed:
context:
- slack-notification
filters:
tags:
only: /dev-v[0-9]+(\.[0-9]+)*/
branches:
ignore: /.*/
requires:
- test-sqlite
- test-mongodb
- test-postgresql
- test-mysql

View File

@ -1,242 +0,0 @@
function cleanup {
if test -f "pluginInterfaceExactVersionsOutput"; then
rm pluginInterfaceExactVersionsOutput
fi
}
trap cleanup EXIT
cleanup
pluginToTest=$1
pinnedDBJson=$(curl -s -X GET \
'https://api.supertokens.io/0/plugin/pinned?planType=FREE' \
-H 'api-version: 0')
pinnedDBLength=$(echo "$pinnedDBJson" | jq ".plugins | length")
pinnedDBArray=$(echo "$pinnedDBJson" | jq ".plugins")
echo "got pinned dbs..."
pluginInterfaceJson=$(cat ../pluginInterfaceSupported.json)
pluginInterfaceLength=$(echo "$pluginInterfaceJson" | jq ".versions | length")
pluginInterfaceArray=$(echo "$pluginInterfaceJson" | jq ".versions")
echo "got plugin interface relations"
coreDriverJson=$(cat ../coreDriverInterfaceSupported.json)
coreDriverArray=$(echo "$coreDriverJson" | jq ".versions")
echo "got core driver relations"
./getPluginInterfaceExactVersions.sh "$pluginInterfaceLength" "$pluginInterfaceArray"
if [[ $? -ne 0 ]]
then
echo "all plugin interfaces found... failed. exiting!"
exit 1
else
echo "all plugin interfaces found..."
fi
# get core version
coreVersion=$(cat ../build.gradle | grep -e "version =" -e "version=")
while IFS='"' read -ra ADDR; do
counter=0
for i in "${ADDR[@]}"; do
if [ $counter == 1 ]
then
coreVersion=$i
fi
counter=$(($counter+1))
done
done <<< "$coreVersion"
responseStatus=$(curl -s -o /dev/null -w "%{http_code}" -X PUT \
https://api.supertokens.io/0/core \
-H 'Content-Type: application/json' \
-H 'api-version: 0' \
-d "{
\"password\": \"$SUPERTOKENS_API_KEY\",
\"planType\":\"FREE\",
\"version\":\"$coreVersion\",
\"pluginInterfaces\": $pluginInterfaceArray,
\"coreDriverInterfaces\": $coreDriverArray
}")
if [ "$responseStatus" -ne "200" ]
then
echo "failed core PUT API status code: $responseStatus. Exiting!"
exit 1
fi
someTestsRan=false
while read -u 10 line
do
if [[ $line = "" ]]; then
continue
fi
i=0
currTag=$(echo "$line" | jq .tag)
currTag=$(echo "$currTag" | tr -d '"')
currVersion=$(echo "$line" | jq .version)
currVersion=$(echo "$currVersion" | tr -d '"')
piX=$(cut -d'.' -f1 <<<"$currVersion")
piY=$(cut -d'.' -f2 <<<"$currVersion")
piVersion="$piX.$piY"
while [ $i -lt "$pinnedDBLength" ]; do
someTestsRan=true
currPinnedDb=$(echo "$pinnedDBArray" | jq ".[$i]")
currPinnedDb=$(echo "$currPinnedDb" | tr -d '"')
i=$((i+1))
if [[ $currPinnedDb == $pluginToTest ]]
then
echo ""
echo ""
echo ""
echo ""
echo ""
echo "===== testing $currPinnedDb with plugin-interface $currVersion ====="
echo ""
echo ""
echo ""
echo ""
echo ""
if [[ $currPinnedDb == "sqlite" ]]
then
# shellcheck disable=SC2034
continue=1
else
response=$(curl -s -X GET \
"https://api.supertokens.io/0/plugin-interface/dependency/plugin/latest?password=$SUPERTOKENS_API_KEY&planType=FREE&mode=DEV&version=$piVersion&pluginName=$currPinnedDb" \
-H 'api-version: 0')
if [[ $(echo "$response" | jq .plugin) == "null" ]]
then
echo "fetching latest X.Y version for $currPinnedDb given plugin-interface X.Y version: $piVersion gave response: $response"
exit 1
fi
pinnedDbVersionX2=$(echo $response | jq .plugin | tr -d '"')
response=$(curl -s -X GET \
"https://api.supertokens.io/0/plugin/latest?password=$SUPERTOKENS_API_KEY&planType=FREE&mode=DEV&version=$pinnedDbVersionX2&name=$currPinnedDb" \
-H 'api-version: 0')
if [[ $(echo "$response" | jq .tag) == "null" ]]
then
echo "fetching latest X.Y.Z version for $currPinnedDb, X.Y version: $pinnedDbVersionX2 gave response: $response"
exit 1
fi
pinnedDbVersionTag=$(echo "$response" | jq .tag | tr -d '"')
pinnedDbVersion=$(echo "$response" | jq .version | tr -d '"')
./startDb.sh "$currPinnedDb"
fi
cd ../../
git clone git@github.com:supertokens/supertokens-root.git
cd supertokens-root
update-alternatives --install "/usr/bin/java" "java" "/usr/java/jdk-15.0.1/bin/java" 2
update-alternatives --install "/usr/bin/javac" "javac" "/usr/java/jdk-15.0.1/bin/javac" 2
coreX=$(cut -d'.' -f1 <<<"$coreVersion")
coreY=$(cut -d'.' -f2 <<<"$coreVersion")
if [[ $currPinnedDb == "sqlite" ]]
then
echo -e "core,$coreX.$coreY\nplugin-interface,$piVersion" > modules.txt
else
echo -e "core,$coreX.$coreY\nplugin-interface,$piVersion\n$currPinnedDb-plugin,$pinnedDbVersionX2" > modules.txt
fi
./loadModules
cd supertokens-core
git checkout dev-v$coreVersion
cd ../supertokens-plugin-interface
git checkout $currTag
if [[ $currPinnedDb == "sqlite" ]]
then
# shellcheck disable=SC2034
continue=1
else
cd ../supertokens-$currPinnedDb-plugin
git checkout $pinnedDbVersionTag
fi
cd ../
echo $SUPERTOKENS_API_KEY > apiPassword
./startTestingEnv --cicd
if [[ $? -ne 0 ]]
then
echo ""
echo ""
echo ""
echo ""
echo ""
echo "===== testing $currPinnedDb with plugin-interface $currVersion FAILED ====="
echo ""
echo ""
echo ""
echo ""
echo ""
cat logs/*
cd ../project/
echo "test failed... exiting!"
exit 1
fi
echo ""
echo ""
echo ""
echo ""
echo ""
echo "===== testing $currPinnedDb with plugin-interface $currVersion SUCCEEDED ====="
echo ""
echo ""
echo ""
echo ""
echo ""
cd ../
rm -rf supertokens-root
if [[ $currPinnedDb == "sqlite" ]]
then
# shellcheck disable=SC2034
continue=1
else
curl -o supertokens.zip -s -X GET \
"https://api.supertokens.io/0/app/download?pluginName=$currPinnedDb&os=linux&mode=DEV&binary=FREE&targetCore=$coreVersion&targetPlugin=$pinnedDbVersion" \
-H 'api-version: 0'
unzip supertokens.zip -d .
rm supertokens.zip
cd supertokens
../project/.circleci/testCli.sh
if [[ $? -ne 0 ]]
then
echo "cli testing failed... exiting!"
exit 1
fi
cd ../
fi
rm -rf supertokens
cd project/.circleci
if [[ $currPinnedDb == "sqlite" ]]
then
# shellcheck disable=SC2034
continue=1
else
./stopDb.sh $currPinnedDb
fi
fi
done
done 10<pluginInterfaceExactVersionsOutput
if [[ $someTestsRan = "true" ]]
then
echo "tests ran successfully"
else
echo "no test ran"
exit 1
fi

View File

@ -1,19 +0,0 @@
# args: <length of array> <array like ["0.0", "0.1"]>
touch pluginInterfaceExactVersionsOutput
i=0
while [ $i -lt $1 ]; do
currVersion=`echo $2 | jq ".[$i]"`
currVersion=`echo $currVersion | tr -d '"'`
i=$((i+1))
# now we have the current version like 0.0.
# We now have to find something that matches dev-v0.0.* or v0.0.*
response=`curl -s -X GET \
"https://api.supertokens.io/0/plugin-interface/latest?password=$SUPERTOKENS_API_KEY&planType=FREE&mode=DEV&version=$currVersion" \
-H 'api-version: 0'`
if [[ `echo $response | jq .tag` == "null" ]]
then
echo $response
exit 1
fi
echo $response >> pluginInterfaceExactVersionsOutput
done

View File

@ -1,29 +0,0 @@
coreVersion=$(cat ../build.gradle | grep -e "version =" -e "version=")
while IFS='"' read -ra ADDR; do
counter=0
for i in "${ADDR[@]}"; do
if [ $counter == 1 ]
then
coreVersion=$i
fi
counter=$(($counter+1))
done
done <<< "$coreVersion"
echo "calling /core PATCH to make testing passed"
responseStatus=$(curl -s -o /dev/null -w "%{http_code}" -X PATCH \
https://api.supertokens.io/0/core \
-H 'Content-Type: application/json' \
-H 'api-version: 0' \
-d "{
\"password\": \"$SUPERTOKENS_API_KEY\",
\"planType\":\"FREE\",
\"version\":\"$coreVersion\",
\"testPassed\": true
}")
if [ "$responseStatus" -ne "200" ]
then
echo "patch api failed"
exit 1
fi

View File

@ -1 +0,0 @@
chown -R mysql:mysql /var/lib/mysql /var/run/mysqld && service mysql start

View File

@ -1,113 +0,0 @@
case $1 in
mysql)
(cd / && ./runMySQL.sh)
mysql -u root --password=root -e "CREATE DATABASE supertokens;"
mysql -u root --password=root -e "CREATE DATABASE st0;"
mysql -u root --password=root -e "CREATE DATABASE st1;"
mysql -u root --password=root -e "CREATE DATABASE st2;"
mysql -u root --password=root -e "CREATE DATABASE st3;"
mysql -u root --password=root -e "CREATE DATABASE st4;"
mysql -u root --password=root -e "CREATE DATABASE st5;"
mysql -u root --password=root -e "CREATE DATABASE st6;"
mysql -u root --password=root -e "CREATE DATABASE st7;"
mysql -u root --password=root -e "CREATE DATABASE st8;"
mysql -u root --password=root -e "CREATE DATABASE st9;"
mysql -u root --password=root -e "CREATE DATABASE st10;"
mysql -u root --password=root -e "CREATE DATABASE st11;"
mysql -u root --password=root -e "CREATE DATABASE st12;"
mysql -u root --password=root -e "CREATE DATABASE st13;"
mysql -u root --password=root -e "CREATE DATABASE st14;"
mysql -u root --password=root -e "CREATE DATABASE st15;"
mysql -u root --password=root -e "CREATE DATABASE st16;"
mysql -u root --password=root -e "CREATE DATABASE st17;"
mysql -u root --password=root -e "CREATE DATABASE st18;"
mysql -u root --password=root -e "CREATE DATABASE st19;"
mysql -u root --password=root -e "CREATE DATABASE st20;"
mysql -u root --password=root -e "CREATE DATABASE st21;"
mysql -u root --password=root -e "CREATE DATABASE st22;"
mysql -u root --password=root -e "CREATE DATABASE st23;"
mysql -u root --password=root -e "CREATE DATABASE st24;"
mysql -u root --password=root -e "CREATE DATABASE st25;"
mysql -u root --password=root -e "CREATE DATABASE st26;"
mysql -u root --password=root -e "CREATE DATABASE st27;"
mysql -u root --password=root -e "CREATE DATABASE st28;"
mysql -u root --password=root -e "CREATE DATABASE st29;"
mysql -u root --password=root -e "CREATE DATABASE st30;"
mysql -u root --password=root -e "CREATE DATABASE st31;"
mysql -u root --password=root -e "CREATE DATABASE st32;"
mysql -u root --password=root -e "CREATE DATABASE st33;"
mysql -u root --password=root -e "CREATE DATABASE st34;"
mysql -u root --password=root -e "CREATE DATABASE st35;"
mysql -u root --password=root -e "CREATE DATABASE st36;"
mysql -u root --password=root -e "CREATE DATABASE st37;"
mysql -u root --password=root -e "CREATE DATABASE st38;"
mysql -u root --password=root -e "CREATE DATABASE st39;"
mysql -u root --password=root -e "CREATE DATABASE st40;"
mysql -u root --password=root -e "CREATE DATABASE st41;"
mysql -u root --password=root -e "CREATE DATABASE st42;"
mysql -u root --password=root -e "CREATE DATABASE st43;"
mysql -u root --password=root -e "CREATE DATABASE st44;"
mysql -u root --password=root -e "CREATE DATABASE st45;"
mysql -u root --password=root -e "CREATE DATABASE st46;"
mysql -u root --password=root -e "CREATE DATABASE st47;"
mysql -u root --password=root -e "CREATE DATABASE st48;"
mysql -u root --password=root -e "CREATE DATABASE st49;"
mysql -u root --password=root -e "CREATE DATABASE st50;"
;;
postgresql)
/etc/init.d/postgresql start
sudo -u postgres psql --command "CREATE USER root WITH SUPERUSER PASSWORD 'root';"
createdb
psql -c "create database supertokens;"
psql -c "create database st0;"
psql -c "create database st1;"
psql -c "create database st2;"
psql -c "create database st3;"
psql -c "create database st4;"
psql -c "create database st5;"
psql -c "create database st6;"
psql -c "create database st7;"
psql -c "create database st8;"
psql -c "create database st9;"
psql -c "create database st10;"
psql -c "create database st11;"
psql -c "create database st12;"
psql -c "create database st13;"
psql -c "create database st14;"
psql -c "create database st15;"
psql -c "create database st16;"
psql -c "create database st17;"
psql -c "create database st18;"
psql -c "create database st19;"
psql -c "create database st20;"
psql -c "create database st21;"
psql -c "create database st22;"
psql -c "create database st23;"
psql -c "create database st24;"
psql -c "create database st25;"
psql -c "create database st26;"
psql -c "create database st27;"
psql -c "create database st28;"
psql -c "create database st29;"
psql -c "create database st30;"
psql -c "create database st31;"
psql -c "create database st32;"
psql -c "create database st33;"
psql -c "create database st34;"
psql -c "create database st35;"
psql -c "create database st36;"
psql -c "create database st37;"
psql -c "create database st38;"
psql -c "create database st39;"
psql -c "create database st40;"
psql -c "create database st41;"
psql -c "create database st42;"
psql -c "create database st43;"
psql -c "create database st44;"
psql -c "create database st45;"
psql -c "create database st46;"
psql -c "create database st47;"
psql -c "create database st48;"
psql -c "create database st49;"
psql -c "create database st50;"
esac

View File

@ -1,8 +0,0 @@
case $1 in
mysql)
service mysql stop
;;
postgresql)
service postgresql stop
;;
esac

View File

@ -1,72 +0,0 @@
# inside supertokens downloaded zip
./install
if [[ $? -ne 0 ]]
then
echo "cli testing failed... exiting!"
exit 1
fi
supertokens start --port=8888
if [[ $? -ne 0 ]]
then
echo "cli testing failed... exiting!"
exit 1
fi
supertokens list
if [[ $? -ne 0 ]]
then
echo "cli testing failed... exiting!"
exit 1
fi
sed -i 's/# mysql_user:/mysql_user: root/g' /usr/lib/supertokens/config.yaml
sed -i 's/# mysql_password:/mysql_password: root/g' /usr/lib/supertokens/config.yaml
sed -i 's/# mongodb_connection_uri:/mongodb_connection_uri: mongodb:\/\/root:root@localhost:27017/g' /usr/lib/supertokens/config.yaml
sed -i 's/# disable_telemetry:/disable_telemetry: true/g' /usr/lib/supertokens/config.yaml
supertokens start --port=8889
supertokens list
if [[ $? -ne 0 ]]
then
echo "cli testing failed... exiting!"
exit 1
fi
curl http://localhost:8889/hello
if [[ $? -ne 0 ]]
then
echo "cli testing failed... exiting!"
exit 1
fi
curl http://localhost:8888/hello
if [[ $? -ne 0 ]]
then
echo "cli testing failed... exiting!"
exit 1
fi
supertokens stop
if [[ $? -ne 0 ]]
then
echo "cli testing failed... exiting!"
exit 1
fi
supertokens uninstall
if [[ $? -ne 0 ]]
then
echo "cli testing failed... exiting!"
exit 1
fi

View File

@ -0,0 +1,56 @@
import json
import os
import http.client
def register_core_version(supertokens_api_key, core_version, plugin_interface_array, core_driver_array):
print("Core Version: ", core_version)
print("Plugin Interface Array: ", plugin_interface_array)
print("Core Driver Array: ", core_driver_array)
conn = http.client.HTTPSConnection("api.supertokens.io")
payload = {
"password": supertokens_api_key,
"planType": "FREE",
"version": core_version,
"pluginInterfaces": plugin_interface_array,
"coreDriverInterfaces": core_driver_array
}
headers = {
'Content-Type': 'application/json',
'api-version': '0'
}
conn.request("PUT", "/0/core", json.dumps(payload), headers)
response = conn.getresponse()
if response.status != 200:
print(f"failed core PUT API status code: {response.status}. Exiting!")
exit(1)
conn.close()
def read_core_version():
with open('build.gradle', 'r') as file:
for line in file:
if 'version =' in line:
return line.split('=')[1].strip().strip("'\"")
raise Exception("Could not find version in build.gradle")
core_version = read_core_version()
with open('pluginInterfaceSupported.json', 'r') as fd:
plugin_interface_array = json.load(fd)['versions']
with open('coreDriverInterfaceSupported.json', 'r') as fd:
core_driver_array = json.load(fd)['versions']
register_core_version(
supertokens_api_key=os.environ.get("SUPERTOKENS_API_KEY"),
core_version=core_version,
plugin_interface_array=plugin_interface_array,
core_driver_array=core_driver_array
)

View File

@ -0,0 +1,68 @@
import json
import os
import subprocess
import http.client
def register_plugin_version(supertokens_api_key, plugin_version, plugin_interface_array, plugin_name):
print("Plugin Version: ", plugin_version)
print("Plugin Interface Array: ", plugin_interface_array)
print("Plugin Name: ", plugin_name)
conn = http.client.HTTPSConnection("api.supertokens.io")
payload = {
"password": supertokens_api_key,
"planType": "FREE",
"version": plugin_version,
"pluginInterfaces": plugin_interface_array,
"name": plugin_name
}
headers = {
'Content-Type': 'application/json',
'api-version': '0'
}
conn.request("PUT", "/0/plugin", json.dumps(payload), headers)
response = conn.getresponse()
if response.status != 200:
print(f"failed plugin PUT API status code: {response.status}. Exiting!")
print(f"response: {str(response.read())}")
exit(1)
conn.close()
def read_plugin_version():
with open('build.gradle', 'r') as file:
for line in file:
if 'version =' in line:
return line.split('=')[1].strip().strip("'\"")
raise Exception("Could not find version in build.gradle")
plugin_version = read_plugin_version()
with open('pluginInterfaceSupported.json', 'r') as fd:
plugin_interface_array = json.load(fd)['versions']
def check_if_tag_exists(tag):
try:
result = subprocess.run(['git', 'tag', '-l', tag], capture_output=True, text=True)
return tag in result.stdout
except subprocess.CalledProcessError:
print(f"Error checking for tag {tag}")
return False
dev_tag = f"dev-v{plugin_version}"
if not check_if_tag_exists(dev_tag):
print(f"Tag {dev_tag} does not exist. Exiting!")
exit(0)
register_plugin_version(
supertokens_api_key=os.environ.get("SUPERTOKENS_API_KEY"),
plugin_version=plugin_version,
plugin_interface_array=plugin_interface_array,
plugin_name=os.environ.get("PLUGIN_NAME")
)

.github/helpers/release-docker.sh (new file, 39 lines)
View File

@ -0,0 +1,39 @@
#!/bin/bash
set -e
# Check for required arguments
if [ "$#" -ne 2 ]; then
echo "Usage: $0 <source-image:tag> <target-image:tag>"
exit 1
fi
SOURCE_IMAGE="$1"
TARGET_IMAGE="$2"
# Platforms to support
PLATFORMS=("linux/amd64" "linux/arm64")
TEMP_IMAGES=()
# Pull, retag, and push platform-specific images
for PLATFORM in "${PLATFORMS[@]}"; do
ARCH=$(echo $PLATFORM | cut -d'/' -f2)
TEMP_TAG="${TARGET_IMAGE}-${ARCH}"
TEMP_IMAGES+=("$TEMP_TAG")
echo "Pulling $SOURCE_IMAGE for $PLATFORM..."
docker pull --platform $PLATFORM "$SOURCE_IMAGE"
echo "Tagging as $TEMP_TAG..."
docker tag "$SOURCE_IMAGE" "$TEMP_TAG"
echo "Pushing $TEMP_TAG..."
docker push "$TEMP_TAG"
done
# Create and push manifest for multi-arch image
echo "Creating and pushing multi-arch manifest for $TARGET_IMAGE..."
docker manifest create "$TARGET_IMAGE" "${TEMP_IMAGES[@]}"
docker manifest push "$TARGET_IMAGE"
echo "✅ Multi-arch image pushed as $TARGET_IMAGE"

.github/helpers/wait-for-docker.py (new file, 55 lines)
View File

@ -0,0 +1,55 @@
import http.client
import json
import time
import os
import sys
REPO = "supertokens/supertokens-core"
SHA = os.environ.get("GITHUB_SHA")
NAME = os.environ.get("WORKFLOW_NAME", "Publish Dev Docker Image")
st = time.time()
def get_latest_actions():
conn = http.client.HTTPSConnection("api.github.com")
url = f"/repos/{REPO}/actions/runs"
headers = {"User-Agent": "Python-http.client"}
conn.request("GET", url, headers=headers)
response = conn.getresponse()
if response.status == 200:
data = response.read()
runs = json.loads(data)['workflow_runs']
found = False
for run in runs:
if run['head_sha'] == SHA and run['name'] == NAME:
found = True
break
if not found:
print("No matching workflow run found.")
sys.exit(1)
if run["status"] == "completed":
if run["conclusion"] == "success":
print("Workflow completed successfully.")
return True
else:
print(f"Workflow failed with conclusion: {run['conclusion']}")
sys.exit(1)
else:
print(f"Failed to fetch workflow runs: {response.status} {response.reason}")
sys.exit(1)
return False
time.sleep(30) # Wait for 30 seconds before checking
while not get_latest_actions():
print("Waiting for the latest actions to complete...")
time.sleep(10)
if time.time() - st > 600:
print("Timed out waiting for the latest actions.")
sys.exit(1)

.github/workflows/add-dev-tag.yml (new file, 107 lines)
View File

@ -0,0 +1,107 @@
name: Add dev tags for release
on:
workflow_dispatch:
inputs:
core-version:
description: 'Core version'
required: true
type: string
plugin-interface-version:
description: 'Plugin interface version'
required: true
type: string
new-release-for-plugin-interface:
description: 'New release for plugin interface'
required: true
type: boolean
postgresql-plugin-version:
description: 'Postgres plugin version'
required: true
new-release-for-postgresql-plugin:
description: 'New release for postgres plugin'
required: true
type: boolean
jobs:
dependency-branches:
name: Dependency Branches
environment: publish
runs-on: ubuntu-latest
outputs:
branches: ${{ steps.result.outputs.branches }}
steps:
- uses: actions/checkout@v4
- uses: supertokens/get-core-dependencies-action@main
id: result
with:
run-for: add-dev-tag
core-version: ${{ github.event.inputs.core-version }}
plugin-interface-version: ${{ github.event.inputs.plugin-interface-version }}
postgresql-plugin-version: ${{ github.event.inputs.postgresql-plugin-version }}
add-dev-tag:
environment: publish
runs-on: ubuntu-latest
needs: dependency-branches
steps:
- name: Set up JDK 15.0.1
uses: actions/setup-java@v2
with:
java-version: 15.0.1
distribution: zulu
- uses: actions/checkout@v2
with:
repository: supertokens/supertokens-root
path: ./supertokens-root
ref: for_jdk_15_releases
- name: Checkout supertokens-core
run: |
cd supertokens-root
git clone https://${{ secrets.GH_TOKEN }}@github.com/supertokens/supertokens-core.git
cd supertokens-core
git checkout ${{ fromJson(needs.dependency-branches.outputs.branches)['core'] }}
- name: Checkout supertokens-plugin-interface
run: |
cd supertokens-root
git clone https://${{ secrets.GH_TOKEN }}@github.com/supertokens/supertokens-plugin-interface.git
cd supertokens-plugin-interface
git checkout ${{ fromJson(needs.dependency-branches.outputs.branches)['plugin-interface'] }}
- name: Checkout supertokens-postgresql-plugin
run: |
cd supertokens-root
git clone https://${{ secrets.GH_TOKEN }}@github.com/supertokens/supertokens-postgresql-plugin.git
cd supertokens-postgresql-plugin
git checkout ${{ fromJson(needs.dependency-branches.outputs.branches)['postgresql'] }}
- name: Load Modules
run: |
cd supertokens-root
echo "core,master
plugin-interface,master
postgresql-plugin,master
" > modules.txt
cat modules.txt
./loadModules
- name: Setup test env
run: cd supertokens-root && ./utils/setupTestEnv --local
- name: Git config
run: |
git config --global user.name "Supertokens Bot"
git config --global user.email "<>"
- name: Add dev tag to plugin interface
if: ${{ github.event.inputs.new-release-for-plugin-interface == 'true' }}
run: |
echo "Adding dev tag to plugin interface"
cd supertokens-root/supertokens-plugin-interface
./addDevTag
- name: Add dev tag to postgres plugin
if: ${{ github.event.inputs.new-release-for-postgresql-plugin == 'true' }}
run: |
echo "Adding dev tag to postgres plugin"
cd supertokens-root/supertokens-postgresql-plugin
./addDevTag
- name: Add dev tag to core
run: |
echo "Adding dev tag to core"
cd supertokens-root/supertokens-core
./addDevTag

.github/workflows/dev-tag.yml (new file, 153 lines)
View File

@ -0,0 +1,153 @@
name: Checks for release
on:
push:
branches:
- '[0-9]+.[0-9]+'
tags:
- 'dev-*'
jobs:
dependency-versions:
name: Dependency Versions
runs-on: ubuntu-latest
outputs:
versions: ${{ steps.result.outputs.versions }}
branches: ${{ steps.result.outputs.branches }}
steps:
- uses: actions/checkout@v4
- uses: supertokens/get-core-dependencies-action@main
with:
run-for: PR
id: result
new-core-version:
environment: publish
name: New core version
runs-on: ubuntu-latest
needs: [dependency-versions]
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Set up Python 3.11
uses: actions/setup-python@v4
with:
python-version: '3.11'
- name: Run script
env:
SUPERTOKENS_API_KEY: ${{ secrets.SUPERTOKENS_API_KEY }}
run: |
python .github/helpers/register-new-core-version.py
new-plugin-versions:
environment: publish
name: New plugin versions
runs-on: ubuntu-latest
needs: [dependency-versions]
strategy:
fail-fast: false
matrix:
plugin:
- postgresql
# no longer supported
# - mysql
# - mongodb
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Checkout
uses: actions/checkout@v4
with:
path: ./supertokens-plugin
repository: supertokens/supertokens-${{ matrix.plugin }}-plugin
ref: ${{ fromJson(needs.dependency-versions.outputs.branches)[matrix.plugin] }}
fetch-depth: 0
fetch-tags: true
- name: Run script
env:
SUPERTOKENS_API_KEY: ${{ secrets.SUPERTOKENS_API_KEY }}
PLUGIN_NAME: ${{ matrix.plugin }}
run: |
cd supertokens-plugin
python ../.github/helpers/register-new-plugin-version.py
unit-tests:
name: Run unit tests
needs: [new-core-version, new-plugin-versions]
uses: ./.github/workflows/unit-test.yml
wait-for-docker:
name: Wait for Docker
runs-on: ubuntu-latest
needs: [new-core-version, new-plugin-versions]
outputs:
tag: ${{ steps.set_tag.outputs.TAG }}
steps:
- uses: actions/setup-python@v4
with:
python-version: '3.11'
- name: Checkout
uses: actions/checkout@v4
- name: Wait for Docker build
env:
SHA: ${{ github.sha }}
run: |
python .github/helpers/wait-for-docker.py
- name: set tag
id: set_tag
run: |
echo "TAG=${GITHUB_REF}" | sed 's/refs\/heads\///g' | sed 's/\//_/g' >> $GITHUB_OUTPUT
stress-tests:
needs: [wait-for-docker]
uses: ./.github/workflows/stress-tests.yml
with:
tag: ${{ needs.wait-for-docker.outputs.tag }}
mark-as-passed:
environment: publish
needs: [dependency-versions, unit-tests, stress-tests]
name: Mark as passed
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
plugin:
- sqlite
- postgresql
# no longer supported
# - mysql
# - mongodb
steps:
- name: Mark plugin as passed
if: matrix.plugin != 'sqlite' && fromJson(needs.dependency-versions.outputs.versions)[matrix.plugin] != ''
uses: muhfaris/request-action@main
with:
url: https://api.supertokens.io/0/plugin
method: PATCH
headers: |
{
"Content-Type": "application/json",
"api-version": "0"
}
body: |
{
"password": "${{ secrets.SUPERTOKENS_API_KEY }}",
"version": "${{ fromJson(needs.dependency-versions.outputs.versions)[matrix.plugin] }}",
"planType": "FREE",
"name": "${{ matrix.plugin }}",
"testPassed": true
}
- name: Mark core as passed
if: matrix.plugin == 'sqlite' && fromJson(needs.dependency-versions.outputs.versions)['core'] != ''
uses: muhfaris/request-action@main
with:
url: https://api.supertokens.io/0/core
method: PATCH
headers: |
{
"Content-Type": "application/json",
"api-version": "0"
}
body: |
{
"password": "${{ secrets.SUPERTOKENS_API_KEY }}",
"version": "${{ fromJson(needs.dependency-versions.outputs.versions)['core'] }}",
"planType": "FREE",
"testPassed": true
}

.github/workflows/do-release.yml (new file, 148 lines)
View File

@ -0,0 +1,148 @@
name: Do Release
on:
workflow_dispatch:
inputs:
core-version:
description: 'Core version'
required: true
type: string
plugin-interface-version:
description: 'Plugin interface version'
required: true
type: string
new-release-for-plugin-interface:
description: 'New release for plugin interface'
required: true
type: boolean
postgresql-plugin-version:
description: 'Postgres plugin version'
required: true
new-release-for-postgresql-plugin:
description: 'New release for postgres plugin'
required: true
type: boolean
is-latest-release:
description: 'Is this the latest release?'
required: true
type: boolean
jobs:
dependency-branches:
name: Dependency Branches
environment: publish
runs-on: ubuntu-latest
outputs:
branches: ${{ steps.result.outputs.branches }}
versions: ${{ steps.result.outputs.versions }}
steps:
- uses: actions/checkout@v4
- uses: supertokens/get-core-dependencies-action@main
id: result
with:
run-for: add-dev-tag
core-version: ${{ github.event.inputs.core-version }}
plugin-interface-version: ${{ github.event.inputs.plugin-interface-version }}
postgresql-plugin-version: ${{ github.event.inputs.postgresql-plugin-version }}
release-docker:
environment: publish
name: Release Docker
runs-on: ubuntu-latest
needs: dependency-branches
steps:
- name: Checkout
uses: actions/checkout@v2
- name: Set up JDK 15.0.1
uses: actions/setup-java@v2
with:
java-version: 15.0.1
distribution: zulu
- name: Login to Docker Hub
uses: docker/login-action@v3
with:
username: ${{ vars.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Tag and Push Docker Image
run: |
tag=${{ github.event.inputs.core-version }}
major=$(echo $tag | cut -d. -f1)
minor=$(echo $tag | cut -d. -f1,2)
bash .github/helpers/release-docker.sh supertokens/supertokens-dev-postgresql:$minor supertokens/supertokens-postgresql:$major
bash .github/helpers/release-docker.sh supertokens/supertokens-dev-postgresql:$minor supertokens/supertokens-postgresql:$minor
bash .github/helpers/release-docker.sh supertokens/supertokens-dev-postgresql:$minor supertokens/supertokens-postgresql:$tag
if [ "${{ github.event.inputs.is-latest-release }}" == "true" ]; then
bash .github/helpers/release-docker.sh supertokens/supertokens-dev-postgresql:$minor supertokens/supertokens-postgresql:latest
fi
add-release-tag:
environment: publish
runs-on: ubuntu-latest
needs: [dependency-branches, release-docker]
steps:
- name: Set up JDK 15.0.1
uses: actions/setup-java@v2
with:
java-version: 15.0.1
distribution: zulu
- uses: actions/checkout@v2
with:
repository: supertokens/supertokens-root
path: ./supertokens-root
ref: for_jdk_15_releases
- name: Checkout supertokens-core
run: |
cd supertokens-root
git clone https://${{ secrets.GH_TOKEN }}@github.com/supertokens/supertokens-core.git
cd supertokens-core
git checkout ${{ fromJson(needs.dependency-branches.outputs.branches)['core'] }}
- name: Checkout supertokens-plugin-interface
run: |
cd supertokens-root
git clone https://${{ secrets.GH_TOKEN }}@github.com/supertokens/supertokens-plugin-interface.git
cd supertokens-plugin-interface
git checkout ${{ fromJson(needs.dependency-branches.outputs.branches)['plugin-interface'] }}
- name: Checkout supertokens-postgresql-plugin
run: |
cd supertokens-root
git clone https://${{ secrets.GH_TOKEN }}@github.com/supertokens/supertokens-postgresql-plugin.git
cd supertokens-postgresql-plugin
git checkout ${{ fromJson(needs.dependency-branches.outputs.branches)['postgresql'] }}
- name: Add release password
run: |
cd supertokens-root
echo "${{ secrets.SUPERTOKENS_API_KEY }}" > releasePassword
echo "${{ secrets.SUPERTOKENS_API_KEY }}" > apiPassword
- name: Load Modules
run: |
cd supertokens-root
echo "core,master
plugin-interface,master
postgresql-plugin,master
" > modules.txt
cat modules.txt
./loadModules
- name: Setup test env
run: cd supertokens-root && ./utils/setupTestEnv --local
- name: Git config
run: |
git config --global user.name "Supertokens Bot"
git config --global user.email "<>"
- name: Add release tag to plugin interface
if: ${{ github.event.inputs.new-release-for-plugin-interface == 'true' }}
run: |
echo "Adding release tag to plugin interface"
cd supertokens-root/supertokens-plugin-interface
./addReleaseTag
- name: Add release tag to postgres plugin
if: ${{ github.event.inputs.new-release-for-postgresql-plugin == 'true' }}
run: |
echo "Adding release tag to postgres plugin"
cd supertokens-root/supertokens-postgresql-plugin
./addReleaseTag
- name: Add release tag to core
run: |
echo "Adding release tag to core"
cd supertokens-root/supertokens-core
./addReleaseTag

View File

@ -1,15 +0,0 @@
name: "Enforcing changelog in PRs Workflow"
on:
pull_request:
types: [opened, synchronize, reopened, ready_for_review, labeled, unlabeled]
jobs:
# Enforces the update of a changelog file on every pull request
changelog:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: dangoslen/changelog-enforcer@v2
with:
changeLogPath: 'CHANGELOG.md'
skipLabels: 'Skip-Changelog'

View File

@ -1,20 +0,0 @@
name: "Lint PR Title"
on:
pull_request:
types:
- opened
- reopened
- edited
- synchronize
jobs:
pr-title:
name: Lint PR title
runs-on: ubuntu-latest
steps:
- uses: amannn/action-semantic-pull-request@v3
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
validateSingleCommit: true

.github/workflows/pr-checks.yml (new file, 27 lines)
View File

@ -0,0 +1,27 @@
name: PR Checks
on:
pull_request:
types: [ opened, synchronize, reopened, ready_for_review, labeled, unlabeled ]
jobs:
pr-title:
name: Lint PR title
runs-on: ubuntu-latest
steps:
- uses: amannn/action-semantic-pull-request@v3
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
validateSingleCommit: true
changelog:
name: Enforce Changelog
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: dangoslen/changelog-enforcer@v2
with:
changeLogPath: 'CHANGELOG.md'
skipLabels: 'Skip-Changelog'
unit-tests:
name: Run unit tests
uses: ./.github/workflows/unit-test.yml

.github/workflows/publish-dev-docker.yml (new file, 102 lines)
View File

@ -0,0 +1,102 @@
name: Publish Dev Docker Image
on:
push:
branches:
- "**"
tags:
- 'dev-*'
jobs:
dependency-branches:
name: Dependency Branches
runs-on: ubuntu-latest
outputs:
branches: ${{ steps.result.outputs.branches }}
steps:
- uses: actions/checkout@v4
- uses: supertokens/get-core-dependencies-action@main
id: result
with:
run-for: PR
docker:
name: Docker
runs-on: ubuntu-latest
needs: dependency-branches
outputs:
tag: ${{ steps.set_tag.outputs.TAG }}
strategy:
fail-fast: false
matrix:
plugin:
- postgresql
# no longer supported
# - mysql
# - mongodb
steps:
- name: Set up JDK 15.0.1
uses: actions/setup-java@v2
with:
java-version: 15.0.1
distribution: zulu
- uses: actions/checkout@v2
with:
repository: supertokens/supertokens-root
path: ./supertokens-root
ref: for_jdk_15_releases
- uses: actions/checkout@v2
with:
path: ./supertokens-root/supertokens-core
- uses: actions/checkout@v2
with:
repository: supertokens/supertokens-plugin-interface
path: ./supertokens-root/supertokens-plugin-interface
ref: ${{ fromJson(needs.dependency-branches.outputs.branches)['plugin-interface'] }}
- uses: actions/checkout@v2
if: matrix.plugin != 'sqlite'
with:
repository: supertokens/supertokens-${{ matrix.plugin }}-plugin
path: ./supertokens-root/supertokens-${{ matrix.plugin }}-plugin
ref: ${{ fromJson(needs.dependency-branches.outputs.branches)[matrix.plugin] }}
- name: Load Modules
run: |
cd supertokens-root
echo "core,master
plugin-interface,master
${{ matrix.plugin }}-plugin,master
" > modules.txt
cat modules.txt
./loadModules
- name: Setup test env
run: cd supertokens-root && ./utils/setupTestEnv --local
- name: Generate config file
run: |
cd supertokens-root
touch config_temp.yaml
cat supertokens-core/config.yaml >> config_temp.yaml
cat supertokens-${{ matrix.plugin }}-plugin/config.yaml >> config_temp.yaml
mv config_temp.yaml config.yaml
- name: set tag
id: set_tag
run: |
echo "TAG=${GITHUB_REF}" | sed 's/refs\/heads\///g' | sed 's/\//_/g' >> $GITHUB_OUTPUT
-
name: Login to Docker Hub
uses: docker/login-action@v3
with:
username: ${{ vars.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Set up QEMU
uses: docker/setup-qemu-action@v3
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Build and push
uses: docker/build-push-action@v6
with:
push: true
context: ./supertokens-root
tags: supertokens/supertokens-dev-${{ matrix.plugin }}:${{ steps.set_tag.outputs.TAG }}
file: ./supertokens-root/supertokens-${{ matrix.plugin }}-plugin/.github/helpers/docker/Dockerfile
platforms: linux/amd64,linux/arm64
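The "set tag" step above turns the Git ref into a Docker-safe tag by stripping the `refs/heads/` prefix and replacing slashes with underscores. A quick local sketch of that transformation, using a made-up ref value:

```bash
# Simulate the "set tag" step outside of CI (sample ref, not a real branch).
GITHUB_REF="refs/heads/feat/7.0"
echo "TAG=${GITHUB_REF}" | sed 's/refs\/heads\///g' | sed 's/\//_/g'
# prints: TAG=feat_7.0
```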

.github/workflows/stress-tests.yml (new file, 47 lines)
View File

@ -0,0 +1,47 @@
name: Stress Tests
on:
workflow_call:
inputs:
tag:
description: 'Docker image tag to use'
required: true
type: string
jobs:
stress-tests:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Set up Node.js
uses: actions/setup-node@v3
with:
node-version: '20'
- name: Install dependencies
run: |
cd stress-tests
npm install
- name: Update Docker image in compose
run: |
cd stress-tests
sed -i 's|supertokens/supertokens-postgresql|supertokens/supertokens-dev-postgresql:${{ inputs.tag }}|' docker-compose.yml
cat docker-compose.yml
- name: Bring up the services
run: |
cd stress-tests
docker compose up -d
- name: Generate user jsons
run: |
cd stress-tests
npm run generate-users
- name: Run one million users test
id: one-million-users
run: |
cd stress-tests
npm run one-million-users | tee stress-tests.log
- name: Display Test Statistics
run: |
echo "## Stress Test Results" >> $GITHUB_STEP_SUMMARY
echo "| Test | Duration |" >> $GITHUB_STEP_SUMMARY
echo "|------|----------|" >> $GITHUB_STEP_SUMMARY
jq -r '.measurements[] | "| \(.title) | \(.formatted) |"' stress-tests/stats.json >> $GITHUB_STEP_SUMMARY
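The summary step above assumes `stress-tests/stats.json` contains a `measurements` array with `title` and `formatted` fields; that shape is inferred from the jq filter, and the values below are invented purely for illustration.

```bash
# Hypothetical stats.json shape implied by the jq filter in "Display Test Statistics".
cat > stats.json <<'EOF'
{"measurements": [{"title": "one million users", "formatted": "25m 3s"}]}
EOF
jq -r '.measurements[] | "| \(.title) | \(.formatted) |"' stats.json
# prints: | one million users | 25m 3s |
```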

View File

@ -1,24 +0,0 @@
name: "Check if \"Run tests\" action succeeded"
on:
pull_request:
types:
- opened
- reopened
- edited
- synchronize
jobs:
pr-run-test-action:
name: Check if "Run tests" action succeeded
timeout-minutes: 60
concurrency:
group: ${{ github.head_ref }}
cancel-in-progress: true
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: node install
run: cd ./.github/helpers && npm i
- name: Calling github API
run: cd ./.github/helpers && GITHUB_TOKEN=${{ github.token }} REPO=${{ github.repository }} RUN_ID=${{ github.run_id }} BRANCH=${{ github.head_ref }} JOB_ID=${{ github.job }} SOURCE_OWNER=${{ github.event.pull_request.head.repo.owner.login }} CURRENT_SHA=${{ github.event.pull_request.head.sha }} node node_modules/github-workflow-helpers/test-pass-check-pr.js

View File

@ -1,37 +0,0 @@
name: "Run tests"
on:
workflow_dispatch:
inputs:
pluginRepoOwnerName:
description: 'supertokens-plugin-interface repo owner name'
default: supertokens
required: true
pluginInterfaceBranch:
description: 'supertokens-plugin-interface repos branch name'
default: master
required: true
jobs:
test_job:
name: Run tests
timeout-minutes: 60
runs-on: ubuntu-latest
container: rishabhpoddar/supertokens_core_testing
steps:
- uses: actions/checkout@v2
- name: Cloning supertokens-root
run: cd ../ && git clone https://github.com/supertokens/supertokens-root.git
- name: Update Java 1
run: update-alternatives --install "/usr/bin/java" "java" "/usr/java/jdk-15.0.1/bin/java" 2
- name: Update Java 2
run: update-alternatives --install "/usr/bin/javac" "javac" "/usr/java/jdk-15.0.1/bin/javac" 2
- name: Modifying modules.txt in supertokens-root
run: cd ../supertokens-root && echo "core,master\nplugin-interface,${{ github.event.inputs.pluginInterfaceBranch }},${{ github.event.inputs.pluginRepoOwnerName }}" > modules.txt
- name: Contents of modules.txt
run: cat ../supertokens-root/modules.txt
- name: Running loadModules in supertokens-root
run: cd ../supertokens-root && ./loadModules
- name: Copying current supertokens-core branch into supertokens-root
run: cd ../supertokens-root && rm -rf ./supertokens-core && cp -r ../supertokens-core ./
- name: Building and running tests
run: cd ../supertokens-root && ./startTestingEnv

.github/workflows/unit-test.yml (new file, 123 lines)
View File

@ -0,0 +1,123 @@
name: Unit Tests
on:
workflow_call:
env:
total-runners: 12
jobs:
dependency-branches:
name: Dependency Branches
runs-on: ubuntu-latest
outputs:
branches: ${{ steps.result.outputs.branches }}
steps:
- uses: actions/checkout@v4
- uses: supertokens/get-core-dependencies-action@main
id: result
with:
run-for: PR
runner-indexes:
runs-on: ubuntu-latest
name: Generate runner indexes
needs: dependency-branches
outputs:
json: ${{ steps.generate-index-list.outputs.json }}
steps:
- id: generate-index-list
run: |
MAX_INDEX=$((${{ env.total-runners }}-1))
INDEX_LIST=$(seq 0 ${MAX_INDEX})
INDEX_JSON=$(jq --null-input --compact-output '. |= [inputs]' <<< ${INDEX_LIST})
echo "::set-output name=json::${INDEX_JSON}"
unit-tests:
runs-on: ubuntu-latest
name: "Unit tests: ${{ matrix.plugin }} plugin, runner #${{ matrix.runner-index }}"
needs:
- dependency-branches
- runner-indexes
strategy:
fail-fast: false
matrix:
runner-index: ${{ fromjson(needs.runner-indexes.outputs.json) }}
plugin:
- sqlite
- postgresql
steps:
- name: Set up JDK 15.0.1
uses: actions/setup-java@v2
with:
java-version: 15.0.1
distribution: zulu
- uses: actions/checkout@v2
with:
repository: supertokens/supertokens-root
path: ./supertokens-root
ref: for_jdk_15_releases
- uses: actions/checkout@v2
with:
path: ./supertokens-root/supertokens-core
- uses: actions/checkout@v2
with:
repository: supertokens/supertokens-plugin-interface
path: ./supertokens-root/supertokens-plugin-interface
ref: ${{ fromJson(needs.dependency-branches.outputs.branches)['plugin-interface'] }}
- uses: actions/checkout@v2
if: matrix.plugin != 'sqlite'
with:
repository: supertokens/supertokens-${{ matrix.plugin }}-plugin
path: ./supertokens-root/supertokens-${{ matrix.plugin }}-plugin
ref: ${{ fromJson(needs.dependency-branches.outputs.branches)[matrix.plugin] }}
- name: Load Modules
run: |
cd supertokens-root
echo "core,master
plugin-interface,master
${{ matrix.plugin }}-plugin,master
" > modules.txt
cat modules.txt
./loadModules
- name: Setup test env
run: cd supertokens-root && ./utils/setupTestEnv --local
- name: Start ${{ matrix.plugin }} server
if: matrix.plugin != 'sqlite'
run: cd supertokens-root/supertokens-${{ matrix.plugin }}-plugin && ./startDb.sh
- uses: chaosaffe/split-tests@v1-alpha.1
id: split-tests
name: Split tests
with:
glob: 'supertokens-root/*/src/test/java/**/*.java'
split-total: ${{ env.total-runners }}
split-index: ${{ matrix.runner-index }}
- run: 'echo "This runner will execute the following tests: ${{ steps.split-tests.outputs.test-suite }}"'
- name: Run tests
env:
ST_PLUGIN_NAME: ${{ matrix.plugin }}
run: |
cd supertokens-root
echo "./gradlew test \\" > test.sh
chmod +x test.sh
IFS=' ' read -ra TESTS <<< "${{ steps.split-tests.outputs.test-suite }}"
for test in "${TESTS[@]}"; do
test_name="${test%.java}"
test_name="${test_name#supertokens-root/supertokens-core/src/test/java/}"
test_name="${test_name//\//.}"
echo " --tests $test_name \\" >> test.sh
done
echo "" >> test.sh
echo "this is the test command:"
cat test.sh
echo "--------------------------------"
./test.sh
- name: Publish Test Report
uses: mikepenz/action-junit-report@v5
if: always()
with:
report_paths: '**/build/test-results/test/TEST-*.xml'
detailed_summary: true
include_passed: false
annotate_notice: true

View File

@ -5,6 +5,20 @@ All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), and this project adheres
to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
## [Unreleased]
## [7.0.20]
- Adds internal opentelemetry support for logging
## [7.0.19] - 2024-03-21
- Fixes userIdMapping queries
- Fixes issue with session creation for users with userIdMapping and accounts linked
- Fixes active users tracking while linking accounts
- Adds a new required `useDynamicSigningKey` field to the request body of `RefreshSessionAPI`
- This enables smooth switching between `useDynamicAccessTokenSigningKey` settings by allowing refresh calls to
change the signing key type of a session
## [7.0.18] - 2024-02-19
- Fixes vulnerabilities in dependencies
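On the 7.0.19 entry above: the new `useDynamicSigningKey` field is sent in the session refresh request body. A rough sketch of such a call follows; the endpoint path, the `cdi-version` value, and the body fields other than `useDynamicSigningKey` are assumptions based on the core driver interface and may differ by version, so verify against the API spec before relying on them.

```bash
# Illustrative only: refresh a session while asking for a static signing key.
# Endpoint, cdi-version value and field names other than useDynamicSigningKey are assumed.
curl -s -X POST http://localhost:3567/recipe/session/refresh \
  -H 'Content-Type: application/json' \
  -H 'cdi-version: 4.0' \
  -d '{
        "refreshToken": "<refresh token from session creation>",
        "enableAntiCsrf": false,
        "useDynamicSigningKey": false
      }'
```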

View File

@ -1,155 +1,155 @@
# Contributing
We're so excited you're interested in helping with SuperTokens! We are happy to help you get started, even if you don't
have any previous open-source experience :blush:
## New to Open Source?
1. Take a look
at [How to Contribute to an Open Source Project on GitHub](https://egghead.io/courses/how-to-contribute-to-an-open-source-project-on-github)
2. Go through
the [SuperTokens Code of Conduct](https://github.com/supertokens/supertokens-core/blob/master/CODE_OF_CONDUCT.md)
## Where to ask Questions?
1. Check our [Github Issues](https://github.com/supertokens/supertokens-core/issues) to see if someone has already
answered your question.
2. Join our community on [Discord](https://supertokens.io/discord) and feel free to ask us your questions
As you gain experience with SuperTokens, please help answer other people's questions! :pray:
## What to Work On?
You can get started by taking a look at our [Github issues](https://github.com/supertokens/supertokens-core/issues)
If you find one that looks interesting and no one else is already working on it, comment in the issue that you are going
to work on it.
Please ask as many questions as you need, either directly in the issue or on [Discord](https://supertokens.io/discord).
We're happy to help!:raised_hands:
### Contributions that are ALWAYS welcome
1. More tests
2. Contributing to discussions that can be
found [here](https://github.com/supertokens/supertokens-core/issues?q=is%3Aissue+is%3Aopen+label%3Adiscussions)
3. Improved error messages
4. Educational content like blogs, videos, courses
## Development Setup
### With Gitpod
1. Navigate to the [supertokens-root](https://github.com/supertokens/supertokens-root) repository
2. Click on the `Open in Gitpod` button
### Local Setup Prerequisites
- OS: Linux or macOS. Or if using Windows, you need to use [wsl2](https://docs.microsoft.com/en-us/windows/wsl/about).
- JDK: openjdk 15.0.1. Installation instructions for Mac and Linux can be found
in [our wiki](https://github.com/supertokens/supertokens-core/wiki/Installing-OpenJDK-for-Mac-and-Linux)
- IDE: [IntelliJ](https://www.jetbrains.com/idea/download/)(recommended) or equivalent IDE
### Familiarize yourself with SuperTokens
1. [Architecture of SuperTokens](https://github.com/supertokens/supertokens-core/wiki/SuperTokens-Architecture)
2. [SuperTokens code and file structure overview](https://github.com/supertokens/supertokens-core/wiki/Code-and-file-structure-overview)
3. [Versioning methodology](https://github.com/supertokens/supertokens-core/wiki/Versioning,-git-and-releases)
### Project Setup
1. Fork the [supertokens-core](https://github.com/supertokens/supertokens-core) repository (**Skip this step if you are
NOT modifying supertokens-core**)
2. `git clone https://github.com/supertokens/supertokens-root.git`
3. `cd supertokens-root`
4. Open the `modules.txt` file in an editor (**Skip this step if you are NOT modifying supertokens-core**):
- The `modules.txt` file contains the core, plugin-interface, the type of plugin and their branches (versions)
- By default the `master` branch is used but you can change the branch depending on which version you want to modify
- The `sqlite-plugin` is used as the default plugin as it is an in-memory database and requires no setup
- [core](https://github.com/supertokens/supertokens-core)
- [plugin-interface](https://github.com/supertokens/supertokens-plugin-interface)
- Check the repository branches by clicking on the links listed above, click the branch tab and check for all
the available versions
- Add your github `username` separated by a ',' after `core,master` in `modules.txt`
- If, for example, your github `username` is `helloworld` then modules.txt should look like...
```
// put module name like module name,branch name,github username(if contributing with a forked repository) and then call ./loadModules script
core,master,helloworld
plugin-interface,master
sqlite-plugin,master
```
5. Run loadModules to clone the required repositories
`./loadModules`
## Modifying code
1. Open `supertokens-root` in your IDE
2. After gradle has imported all the dependencies you can start modifying the code
## Testing
### On your local machine
1. Navigate to the `supertokens-root` repository
2. Run all tests
`./startTestingEnv`
3. If all tests pass the terminal should display
- core tests:
![core tests passing](https://github.com/supertokens/supertokens-logo/blob/master/images/core-tests-passing.png)
- plugin tests:
![plugin tests passing](https://github.com/supertokens/supertokens-logo/blob/master/images/plugin-tests-passing.png)
### Using github actions
1. Go to the supertokens-core repo on github (or your forked version of it).
2. Navigate to the Actions tab.
3. Find the action named "Run tests" and navigate to it.
4. Click on the "Run workflow" button.
5. Set the config variables in the drop down:
- **supertokens-plugin-interface repo owner name**: If you have forked the supertokens-plugin-interface repo, then
set the value of this to your github username.
- **supertokens-plugin-interface repos branch name**: If the core version you are working on is compatible with a
plugin-interface version that is not in the master branch, then set the correct branch name in this value.
6. Click on "Run workflow".
## Running the core manually
1. Run `./startTestingEnv --wait` in a terminal, and keep it running
2. Then open `supertokens-root` in another terminal and run `cp ./temp/config.yaml .`
3. Then run `java -classpath "./core/*:./plugin-interface/*:./ee/*" io.supertokens.Main ./ DEV`. This will start the
core to listen on `http://localhost:3567`
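Once the core is up on `http://localhost:3567`, a quick liveness check is possible with the same `/hello` probe the CLI test script elsewhere in this diff uses:

```bash
# Expect a simple greeting response if the core started correctly.
curl http://localhost:3567/hello
```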
## Pull Request
1. Before submitting a pull request make sure all tests have passed
2. Reference the relevant issue or pull request and give a clear description of changes/features added when submitting a
pull request
3. Make sure the PR title follows [conventional commits](https://www.conventionalcommits.org/en/v1.0.0/) specification
## Install the supertokens CLI manually
1. Setup test env and keep it running
2. In `supertokens-root`, run `cp temp/config.yaml .`
3. On a different terminal, go to `supertokens-root` folder and
run `java -classpath "./cli/*" io.supertokens.cli.Main true install`
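After installing, the CLI can be exercised the same way `testCli.sh` in this compare view does; a short sanity-check sequence:

```bash
# Start a core on a custom port, confirm it is listed, then stop it.
supertokens start --port=8888
supertokens list
supertokens stop
```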
## SuperTokens Community
SuperTokens is made possible by a passionate team and a strong community of developers. If you have any questions or
would like to get more involved in the SuperTokens community you can check out:
- [Github Issues](https://github.com/supertokens/supertokens-core/issues)
- [Discord](https://supertokens.io/discord)
- [Twitter](https://twitter.com/supertokensio)
- or [email us](mailto:team@supertokens.io)
Additional resources you might find useful:
- [SuperTokens Docs](https://supertokens.io/docs/community/getting-started/installation)
- [Blog Posts](https://supertokens.io/blog/)
- [Development guideline for the backend and frontend recipes](https://github.com/supertokens/supertokens-core/wiki/Development-guideline-for-the-backend-and-frontend-recipes)
# Contributing
We're so excited you're interested in helping with SuperTokens! We are happy to help you get started, even if you don't
have any previous open-source experience :blush:
## New to Open Source?
1. Take a look
at [How to Contribute to an Open Source Project on GitHub](https://egghead.io/courses/how-to-contribute-to-an-open-source-project-on-github)
2. Go through
the [SuperTokens Code of Conduct](https://github.com/supertokens/supertokens-core/blob/master/CODE_OF_CONDUCT.md)
## Where to ask Questions?
1. Check our [Github Issues](https://github.com/supertokens/supertokens-core/issues) to see if someone has already
answered your question.
2. Join our community on [Discord](https://supertokens.io/discord) and feel free to ask us your questions.
As you gain experience with SuperTokens, please help answer other people's questions! :pray:
## What to Work On?
You can get started by taking a look at our [Github issues](https://github.com/supertokens/supertokens-core/issues).
If you find one that looks interesting and no one else is already working on it, comment in the issue that you are going
to work on it.
Please ask as many questions as you need, either directly in the issue or on [Discord](https://supertokens.io/discord).
We're happy to help! :raised_hands:
### Contributions that are ALWAYS welcome
1. More tests
2. Contributing to discussions that can be
found [here](https://github.com/supertokens/supertokens-core/issues?q=is%3Aissue+is%3Aopen+label%3Adiscussions)
3. Improved error messages
4. Educational content like blogs, videos, courses
## Development Setup
### With Gitpod
1. Navigate to the [supertokens-root](https://github.com/supertokens/supertokens-root) repository
2. Click on the `Open in Gitpod` button
### Local Setup Prerequisites
- OS: Linux or macOS. If you are using Windows, you need to use [wsl2](https://docs.microsoft.com/en-us/windows/wsl/about).
- JDK: openjdk 15.0.1. Installation instructions for Mac and Linux can be found
in [our wiki](https://github.com/supertokens/supertokens-core/wiki/Installing-OpenJDK-for-Mac-and-Linux)
- IDE: [IntelliJ](https://www.jetbrains.com/idea/download/) (recommended) or an equivalent IDE
### Familiarize yourself with SuperTokens
1. [Architecture of SuperTokens](https://github.com/supertokens/supertokens-core/wiki/SuperTokens-Architecture)
2. [SuperTokens code and file structure overview](https://github.com/supertokens/supertokens-core/wiki/Code-and-file-structure-overview)
3. [Versioning methodology](https://github.com/supertokens/supertokens-core/wiki/Versioning,-git-and-releases)
### Project Setup
1. Fork the [supertokens-core](https://github.com/supertokens/supertokens-core) repository (**Skip this step if you are
NOT modifying supertokens-core**)
2. `git clone https://github.com/supertokens/supertokens-root.git`
3. `cd supertokens-root`
4. Open the `modules.txt` file in an editor (**Skip this step if you are NOT modifying supertokens-core**):
- The `modules.txt` file lists the core, the plugin-interface, the type of plugin, and their branches (versions)
- By default, the `master` branch is used, but you can change the branch depending on which version you want to modify
- The `sqlite-plugin` is used as the default plugin as it is an in-memory database and requires no setup
- [core](https://github.com/supertokens/supertokens-core)
- [plugin-interface](https://github.com/supertokens/supertokens-plugin-interface)
- Check the repository branches by clicking on the links listed above, click the branch tab and check for all
the available versions
- Add your github `username` separated by a ',' after `core,master` in `modules.txt`
- If, for example, your github `username` is `helloworld`, then `modules.txt` should look like this:
```
// put module name like module name,branch name,github username(if contributing with a forked repository) and then call ./loadModules script
core,master,helloworld
plugin-interface,master
sqlite-plugin,master
```
5. Run loadModules to clone the required repositories
`./loadModules`
## Modifying code
1. Open `supertokens-root` in your IDE
2. After gradle has imported all the dependencies you can start modifying the code
## Testing
### On your local machine
1. Navigate to the `supertokens-root` repository
2. Run all tests
`./startTestEnv`
3. If all tests pass, the terminal should display:
- core tests:
![core tests passing](https://github.com/supertokens/supertokens-logo/blob/master/images/core-tests-passing.png)
- plugin tests:
![plugin tests passing](https://github.com/supertokens/supertokens-logo/blob/master/images/plugin-tests-passing.png)
### Using github actions
1. Go to the supertokens-core repo on github (or your forked version of it).
2. Navigate to the Actions tab.
3. Find the action named "Run tests" and navigate to it.
4. Click on the "Run workflow" button.
5. Set the config variables in the drop down:
- **supertokens-plugin-interface repo owner name**: If you have forked the supertokens-plugin-interface repo, then
set the value of this to your github username.
- **supertokens-plugin-interface repo's branch name**: If the core version you are working on is compatible with a
plugin-interface version that is not in the master branch, then set the correct branch name in this value.
6. Click on "Run workflow".
## Running the core manually
1. Run `startTestEnv --wait` in a terminal, and keep it running
2. Then open `supertokens-root` in another terminal and run `cp ./temp/config.yaml .`
3. Then run `java -classpath "./core/*:./plugin-interface/*:./ee/*" io.supertokens.Main ./ DEV`. This will start the
   core, which will listen on `http://localhost:3567`
## Pull Request
1. Before submitting a pull request, make sure all tests have passed
2. Reference the relevant issue or pull request and give a clear description of changes/features added when submitting a
pull request
3. Make sure the PR title follows the [conventional commits](https://www.conventionalcommits.org/en/v1.0.0/) specification
## Install the supertokens CLI manually
1. Set up the test environment and keep it running
2. In `supertokens-root`, run `cp temp/config.yaml .`
3. In a different terminal, go to the `supertokens-root` folder and
   run `java -classpath "./cli/*" io.supertokens.cli.Main true install`; a quick check of the installed CLI is sketched below
## SuperTokens Community
SuperTokens is made possible by a passionate team and a strong community of developers. If you have any questions or
would like to get more involved in the SuperTokens community, you can check out:
- [Github Issues](https://github.com/supertokens/supertokens-core/issues)
- [Discord](https://supertokens.io/discord)
- [Twitter](https://twitter.com/supertokensio)
- or [email us](mailto:team@supertokens.io)
Additional resources you might find useful:
- [SuperTokens Docs](https://supertokens.io/docs/community/getting-started/installation)
- [Blog Posts](https://supertokens.io/blog/)
- [Development guideline for the backend and frontend recipes](https://github.com/supertokens/supertokens-core/wiki/Development-guideline-for-the-backend-and-frontend-recipes)

View File

@ -19,8 +19,7 @@ compileTestJava { options.encoding = "UTF-8" }
// }
//}
version = "7.0.18"
version = "7.0.20"
repositories {
mavenCentral()
@ -38,9 +37,6 @@ dependencies {
// https://mvnrepository.com/artifact/com.fasterxml.jackson.core/jackson-core
implementation group: 'com.fasterxml.jackson.core', name: 'jackson-databind', version: '2.16.1'
// https://mvnrepository.com/artifact/ch.qos.logback/logback-classic
implementation group: 'ch.qos.logback', name: 'logback-classic', version: '1.4.14'
// https://mvnrepository.com/artifact/org.apache.tomcat.embed/tomcat-embed-core
implementation group: 'org.apache.tomcat.embed', name: 'tomcat-embed-core', version: '10.1.18'
@ -74,6 +70,20 @@ dependencies {
// https://mvnrepository.com/artifact/com.googlecode.libphonenumber/libphonenumber/
implementation group: 'com.googlecode.libphonenumber', name: 'libphonenumber', version: '8.13.25'
implementation platform("io.opentelemetry.instrumentation:opentelemetry-instrumentation-bom-alpha:2.17.0-alpha")
implementation("ch.qos.logback:logback-core:1.5.18")
implementation("ch.qos.logback:logback-classic:1.5.18")
// OpenTelemetry core
implementation("io.opentelemetry:opentelemetry-sdk")
implementation("io.opentelemetry:opentelemetry-exporter-otlp")
implementation("io.opentelemetry:opentelemetry-exporter-logging")
implementation("io.opentelemetry:opentelemetry-api")
implementation("io.opentelemetry.semconv:opentelemetry-semconv")
compileOnly project(":supertokens-plugin-interface")
testImplementation project(":supertokens-plugin-interface")

Binary file not shown.

View File

@ -151,3 +151,7 @@ core_config_version: 0
# (OPTIONAL | Default: null) string value. If specified, the supertokens service will only load the specified CUD even
# if there are more CUDs in the database and block all other CUDs from being used from this instance.
# supertokens_saas_load_only_cud:
# (OPTIONAL | Default: http://localhost:4317) string value. The URL of the OpenTelemetry collector to which the core
# will send telemetry data. This should be in the format http://<host>:<port> or https://<host>:<port>.
# otel_collector_connection_uri:
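For local development, something must be listening on that endpoint for exported telemetry to go anywhere. One way to run a collector locally (a sketch that assumes Docker and uses the public `otel/opentelemetry-collector` image; any OTLP-compatible collector works) is:
```
# expose the OTLP gRPC port matching the default otel_collector_connection_uri
docker run --rm -p 4317:4317 otel/opentelemetry-collector:latest
```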

View File

@ -151,3 +151,7 @@ disable_telemetry: true
# (OPTIONAL | Default: null) string value. If specified, the supertokens service will only load the specified CUD even
# if there are more CUDs in the database and block all other CUDs from being used from this instance.
# supertokens_saas_load_only_cud:
# (OPTIONAL | Default: http://localhost:4317) string value. The URL of the OpenTelemetry collector to which the core
# will send telemetry data. This should be in the format http://<host>:<port> or https://<host>:<port>.
# otel_collector_connection_uri:

Binary file not shown.

Binary file not shown.

View File

@ -1,120 +1,125 @@
{
"_comment": "Contains list of implementation dependencies URL for this project",
"list": [
{
"jar": "https://repo1.maven.org/maven2/com/google/code/gson/gson/2.3.1/gson-2.3.1.jar",
"name": "Gson 2.3.1",
"src": "https://repo1.maven.org/maven2/com/google/code/gson/gson/2.3.1/gson-2.3.1-sources.jar"
},
{
"jar": "https://repo1.maven.org/maven2/com/fasterxml/jackson/dataformat/jackson-dataformat-yaml/2.16.1/jackson-dataformat-yaml-2.16.1.jar",
"name": "Jackson Dataformat 2.16.1",
"src": "https://repo1.maven.org/maven2/com/fasterxml/jackson/dataformat/jackson-dataformat-yaml/2.16.1/jackson-dataformat-yaml-2.16.1-sources.jar"
},
{
"jar": "https://repo1.maven.org/maven2/org/yaml/snakeyaml/2.2/snakeyaml-2.2.jar",
"name": "SnakeYAML 2.2",
"src": "https://repo1.maven.org/maven2/org/yaml/snakeyaml/2.2/snakeyaml-2.2-sources.jar"
},
{
"jar": "https://repo1.maven.org/maven2/com/fasterxml/jackson/core/jackson-core/2.16.1/jackson-core-2.16.1.jar",
"name": "Jackson core 2.16.1",
"src": "https://repo1.maven.org/maven2/com/fasterxml/jackson/core/jackson-core/2.16.1/jackson-core-2.16.1-sources.jar"
},
{
"jar": "https://repo1.maven.org/maven2/com/fasterxml/jackson/core/jackson-databind/2.16.1/jackson-databind-2.16.1.jar",
"name": "Jackson databind 2.16.1",
"src": "https://repo1.maven.org/maven2/com/fasterxml/jackson/core/jackson-databind/2.16.1/jackson-databind-2.16.1-sources.jar"
},
{
"jar": "https://repo1.maven.org/maven2/com/fasterxml/jackson/core/jackson-annotations/2.16.1/jackson-annotations-2.16.1.jar",
"name": "Jackson annotation 2.16.1",
"src": "https://repo1.maven.org/maven2/com/fasterxml/jackson/core/jackson-annotations/2.16.1/jackson-annotations-2.16.1-sources.jar"
},
{
"jar": "https://repo1.maven.org/maven2/ch/qos/logback/logback-classic/1.4.14/logback-classic-1.4.14.jar",
"name": "Logback classic 1.4.14",
"src": "https://repo1.maven.org/maven2/ch/qos/logback/logback-classic/1.4.14/logback-classic-1.4.14-sources.jar"
},
{
"jar": "https://repo1.maven.org/maven2/ch/qos/logback/logback-core/1.4.14/logback-core-1.4.14.jar",
"name": "Logback core 1.4.14",
"src": "https://repo1.maven.org/maven2/ch/qos/logback/logback-core/1.4.14/logback-core-1.4.14-sources.jar"
},
{
"jar": "https://repo1.maven.org/maven2/org/slf4j/slf4j-api/2.0.7/slf4j-api-2.0.7.jar",
"name": "SLF4j API 2.0.7",
"src": "https://repo1.maven.org/maven2/org/slf4j/slf4j-api/2.0.7/slf4j-api-2.0.7-sources.jar"
},
{
"jar": "https://repo1.maven.org/maven2/org/apache/tomcat/tomcat-annotations-api/10.1.18/tomcat-annotations-api-10.1.18.jar",
"name": "Tomcat annotations API 10.1.18",
"src": "https://repo1.maven.org/maven2/org/apache/tomcat/tomcat-annotations-api/10.1.18/tomcat-annotations-api-10.1.18-sources.jar"
},
{
"jar": "https://repo1.maven.org/maven2/org/apache/tomcat/embed/tomcat-embed-core/10.1.18/tomcat-embed-core-10.1.18.jar",
"name": "Tomcat embed core API 10.1.1",
"src": "https://repo1.maven.org/maven2/org/apache/tomcat/embed/tomcat-embed-core/10.1.18/tomcat-embed-core-10.1.18-sources.jar"
},
{
"jar": "https://repo1.maven.org/maven2/com/google/code/findbugs/jsr305/3.0.2/jsr305-3.0.2.jar",
"name": "JSR305 3.0.2",
"src": "https://repo1.maven.org/maven2/com/google/code/findbugs/jsr305/3.0.2/jsr305-3.0.2-sources.jar"
},
{
"jar": "https://repo1.maven.org/maven2/org/jetbrains/annotations/13.0/annotations-13.0.jar",
"name": "JSR305 3.0.2",
"src": "https://repo1.maven.org/maven2/org/jetbrains/annotations/13.0/annotations-13.0-sources.jar"
},
{
"jar": "https://repo1.maven.org/maven2/org/xerial/sqlite-jdbc/3.45.1.0/sqlite-jdbc-3.45.1.0.jar",
"name": "SQLite JDBC Driver 3.45.1.0",
"src": "https://repo1.maven.org/maven2/org/xerial/sqlite-jdbc/3.45.1.0/sqlite-jdbc-3.45.1.0-sources.jar"
},
{
"jar": "https://repo1.maven.org/maven2/org/mindrot/jbcrypt/0.4/jbcrypt-0.4.jar",
"name": "JBCrypt 0.4",
"src": "https://repo1.maven.org/maven2/org/mindrot/jbcrypt/0.4/jbcrypt-0.4-sources.jar"
},
{
"jar": "https://repo1.maven.org/maven2/com/auth0/java-jwt/4.4.0/java-jwt-4.4.0.jar",
"name": "Auth0 Java JWT",
"src": "https://repo1.maven.org/maven2/com/auth0/java-jwt/4.4.0/java-jwt-4.4.0-sources.jar"
},
{
"jar": "https://repo1.maven.org/maven2/de/mkammerer/argon2-jvm/2.11/argon2-jvm-2.11.jar",
"name": "Argon2-jvm 2.11",
"src": "https://repo1.maven.org/maven2/de/mkammerer/argon2-jvm/2.11/argon2-jvm-2.11-sources.jar"
},
{
"jar": "https://repo1.maven.org/maven2/de/mkammerer/argon2-jvm-nolibs/2.11/argon2-jvm-nolibs-2.11.jar",
"name": "Argon2-jvm no libs 2.11",
"src": "https://repo1.maven.org/maven2/de/mkammerer/argon2-jvm-nolibs/2.11/argon2-jvm-nolibs-2.11-sources.jar"
},
{
"jar": "https://repo1.maven.org/maven2/net/java/dev/jna/jna/5.8.0/jna-5.8.0.jar",
"name": "JNA 5.8.0",
"src": "https://repo1.maven.org/maven2/net/java/dev/jna/jna/5.8.0/jna-5.8.0-sources.jar"
},
{
"jar": "https://repo1.maven.org/maven2/com/lambdaworks/scrypt/1.4.0/scrypt-1.4.0.jar",
"name": "Scrypt 1.4.0",
"src": "https://repo1.maven.org/maven2/com/lambdaworks/scrypt/1.4.0/scrypt-1.4.0-sources.jar"
},
{
"jar": "https://repo1.maven.org/maven2/com/eatthepath/java-otp/0.4.0/java-otp-0.4.0.jar",
"name": "Java OTP 0.4.0",
"src": "https://repo1.maven.org/maven2/com/eatthepath/java-otp/0.4.0/java-otp-0.4.0-sources.jar"
},
{
"jar": "https://repo1.maven.org/maven2/commons-codec/commons-codec/1.15/commons-codec-1.15.jar",
"name": "Commons Codec 1.15",
"src": "https://repo1.maven.org/maven2/commons-codec/commons-codec/1.15/commons-codec-1.15-sources.jar"
},
{
"jar": "https://repo1.maven.org/maven2/com/googlecode/libphonenumber/libphonenumber/8.13.25/libphonenumber-8.13.25.jar",
"name": "Libphonenumber 8.13.25",
"src": "https://repo1.maven.org/maven2/com/googlecode/libphonenumber/libphonenumber/8.13.25/libphonenumber-8.13.25-sources.jar"
}
]
"_comment": "Contains list of implementation dependencies URL for this project. This is a generated file, don't modify the contents by hand.",
"list": [
{
"jar":"https://repo.maven.apache.org/maven2/com/google/code/gson/gson/2.3.1/gson-2.3.1.jar",
"name":"gson 2.3.1",
"src":"https://repo.maven.apache.org/maven2/com/google/code/gson/gson/2.3.1/gson-2.3.1-sources.jar"
},
{
"jar":"https://repo.maven.apache.org/maven2/com/fasterxml/jackson/dataformat/jackson-dataformat-yaml/2.16.1/jackson-dataformat-yaml-2.16.1.jar",
"name":"jackson-dataformat-yaml 2.16.1",
"src":"https://repo.maven.apache.org/maven2/com/fasterxml/jackson/dataformat/jackson-dataformat-yaml/2.16.1/jackson-dataformat-yaml-2.16.1-sources.jar"
},
{
"jar":"https://repo.maven.apache.org/maven2/org/yaml/snakeyaml/2.2/snakeyaml-2.2.jar",
"name":"snakeyaml 2.2",
"src":"https://repo.maven.apache.org/maven2/org/yaml/snakeyaml/2.2/snakeyaml-2.2-sources.jar"
},
{
"jar":"https://repo.maven.apache.org/maven2/com/fasterxml/jackson/core/jackson-databind/2.16.1/jackson-databind-2.16.1.jar",
"name":"jackson-databind 2.16.1",
"src":"https://repo.maven.apache.org/maven2/com/fasterxml/jackson/core/jackson-databind/2.16.1/jackson-databind-2.16.1-sources.jar"
},
{
"jar":"https://repo.maven.apache.org/maven2/org/apache/tomcat/embed/tomcat-embed-core/10.1.18/tomcat-embed-core-10.1.18.jar",
"name":"tomcat-embed-core 10.1.18",
"src":"https://repo.maven.apache.org/maven2/org/apache/tomcat/embed/tomcat-embed-core/10.1.18/tomcat-embed-core-10.1.18-sources.jar"
},
{
"jar":"https://repo.maven.apache.org/maven2/org/apache/tomcat/tomcat-annotations-api/10.1.18/tomcat-annotations-api-10.1.18.jar",
"name":"tomcat-annotations-api 10.1.18",
"src":"https://repo.maven.apache.org/maven2/org/apache/tomcat/tomcat-annotations-api/10.1.18/tomcat-annotations-api-10.1.18-sources.jar"
},
{
"jar":"https://repo.maven.apache.org/maven2/com/google/code/findbugs/jsr305/3.0.2/jsr305-3.0.2.jar",
"name":"jsr305 3.0.2",
"src":"https://repo.maven.apache.org/maven2/com/google/code/findbugs/jsr305/3.0.2/jsr305-3.0.2-sources.jar"
},
{
"jar":"https://repo.maven.apache.org/maven2/org/xerial/sqlite-jdbc/3.45.1.0/sqlite-jdbc-3.45.1.0.jar",
"name":"sqlite-jdbc 3.45.1.0",
"src":"https://repo.maven.apache.org/maven2/org/xerial/sqlite-jdbc/3.45.1.0/sqlite-jdbc-3.45.1.0-sources.jar"
},
{
"jar":"https://repo.maven.apache.org/maven2/org/slf4j/slf4j-api/2.0.17/slf4j-api-2.0.17.jar",
"name":"slf4j-api 2.0.17",
"src":"https://repo.maven.apache.org/maven2/org/slf4j/slf4j-api/2.0.17/slf4j-api-2.0.17-sources.jar"
},
{
"jar":"https://repo.maven.apache.org/maven2/org/mindrot/jbcrypt/0.4/jbcrypt-0.4.jar",
"name":"jbcrypt 0.4",
"src":"https://repo.maven.apache.org/maven2/org/mindrot/jbcrypt/0.4/jbcrypt-0.4-sources.jar"
},
{
"jar":"https://repo.maven.apache.org/maven2/org/jetbrains/annotations/13.0/annotations-13.0.jar",
"name":"annotations 13.0",
"src":"https://repo.maven.apache.org/maven2/org/jetbrains/annotations/13.0/annotations-13.0-sources.jar"
},
{
"jar":"https://repo.maven.apache.org/maven2/de/mkammerer/argon2-jvm/2.11/argon2-jvm-2.11.jar",
"name":"argon2-jvm 2.11",
"src":"https://repo.maven.apache.org/maven2/de/mkammerer/argon2-jvm/2.11/argon2-jvm-2.11-sources.jar"
},
{
"jar":"https://repo.maven.apache.org/maven2/com/auth0/java-jwt/4.4.0/java-jwt-4.4.0.jar",
"name":"java-jwt 4.4.0",
"src":"https://repo.maven.apache.org/maven2/com/auth0/java-jwt/4.4.0/java-jwt-4.4.0-sources.jar"
},
{
"jar":"https://repo.maven.apache.org/maven2/com/lambdaworks/scrypt/1.4.0/scrypt-1.4.0.jar",
"name":"scrypt 1.4.0",
"src":"https://repo.maven.apache.org/maven2/com/lambdaworks/scrypt/1.4.0/scrypt-1.4.0-sources.jar"
},
{
"jar":"https://repo.maven.apache.org/maven2/com/eatthepath/java-otp/0.4.0/java-otp-0.4.0.jar",
"name":"java-otp 0.4.0",
"src":"https://repo.maven.apache.org/maven2/com/eatthepath/java-otp/0.4.0/java-otp-0.4.0-sources.jar"
},
{
"jar":"https://repo.maven.apache.org/maven2/commons-codec/commons-codec/1.15/commons-codec-1.15.jar",
"name":"commons-codec 1.15",
"src":"https://repo.maven.apache.org/maven2/commons-codec/commons-codec/1.15/commons-codec-1.15-sources.jar"
},
{
"jar":"https://repo.maven.apache.org/maven2/com/googlecode/libphonenumber/libphonenumber/8.13.25/libphonenumber-8.13.25.jar",
"name":"libphonenumber 8.13.25",
"src":"https://repo.maven.apache.org/maven2/com/googlecode/libphonenumber/libphonenumber/8.13.25/libphonenumber-8.13.25-sources.jar"
},
{
"jar":"https://repo.maven.apache.org/maven2/ch/qos/logback/logback-core/1.5.18/logback-core-1.5.18.jar",
"name":"logback-core 1.5.18",
"src":"https://repo.maven.apache.org/maven2/ch/qos/logback/logback-core/1.5.18/logback-core-1.5.18-sources.jar"
},
{
"jar":"https://repo.maven.apache.org/maven2/ch/qos/logback/logback-classic/1.5.18/logback-classic-1.5.18.jar",
"name":"logback-classic 1.5.18",
"src":"https://repo.maven.apache.org/maven2/ch/qos/logback/logback-classic/1.5.18/logback-classic-1.5.18-sources.jar"
},
{
"jar":"https://repo.maven.apache.org/maven2/io/opentelemetry/opentelemetry-api/1.51.0/opentelemetry-api-1.51.0.jar",
"name":"opentelemetry-api 1.51.0",
"src":"https://repo.maven.apache.org/maven2/io/opentelemetry/opentelemetry-api/1.51.0/opentelemetry-api-1.51.0-sources.jar"
},
{
"jar":"https://repo.maven.apache.org/maven2/io/opentelemetry/opentelemetry-exporter-logging/1.51.0/opentelemetry-exporter-logging-1.51.0.jar",
"name":"opentelemetry-exporter-logging 1.51.0",
"src":"https://repo.maven.apache.org/maven2/io/opentelemetry/opentelemetry-exporter-logging/1.51.0/opentelemetry-exporter-logging-1.51.0-sources.jar"
},
{
"jar":"https://repo.maven.apache.org/maven2/io/opentelemetry/opentelemetry-sdk/1.51.0/opentelemetry-sdk-1.51.0.jar",
"name":"opentelemetry-sdk 1.51.0",
"src":"https://repo.maven.apache.org/maven2/io/opentelemetry/opentelemetry-sdk/1.51.0/opentelemetry-sdk-1.51.0-sources.jar"
},
{
"jar":"https://repo.maven.apache.org/maven2/io/opentelemetry/opentelemetry-exporter-otlp/1.51.0/opentelemetry-exporter-otlp-1.51.0.jar",
"name":"opentelemetry-exporter-otlp 1.51.0",
"src":"https://repo.maven.apache.org/maven2/io/opentelemetry/opentelemetry-exporter-otlp/1.51.0/opentelemetry-exporter-otlp-1.51.0-sources.jar"
},
{
"jar":"https://repo.maven.apache.org/maven2/io/opentelemetry/semconv/opentelemetry-semconv/1.34.0/opentelemetry-semconv-1.34.0.jar",
"name":"opentelemetry-semconv 1.34.0",
"src":"https://repo.maven.apache.org/maven2/io/opentelemetry/semconv/opentelemetry-semconv/1.34.0/opentelemetry-semconv-1.34.0-sources.jar"
}
]
}

View File

@ -1,10 +1,12 @@
package io.supertokens;
import io.supertokens.pluginInterface.ActiveUsersStorage;
import io.supertokens.pluginInterface.authRecipe.sqlStorage.AuthRecipeSQLStorage;
import io.supertokens.pluginInterface.exceptions.StorageQueryException;
import io.supertokens.pluginInterface.exceptions.StorageTransactionLogicException;
import io.supertokens.pluginInterface.multitenancy.AppIdentifierWithStorage;
import io.supertokens.pluginInterface.multitenancy.exceptions.TenantOrAppNotFoundException;
import io.supertokens.pluginInterface.sqlStorage.SQLStorage;
import io.supertokens.storageLayer.StorageLayer;
import org.jetbrains.annotations.TestOnly;
@ -33,6 +35,19 @@ public class ActiveUsers {
return appIdentifierWithStorage.getActiveUsersStorage().countUsersActiveSince(appIdentifierWithStorage, time);
}
public static void updateLastActiveAfterLinking(AppIdentifierWithStorage appIdentifierWithStorage,
Main main, String primaryUserId, String recipeUserId)
throws StorageQueryException, TenantOrAppNotFoundException, StorageTransactionLogicException {
ActiveUsersStorage activeUsersStorage = appIdentifierWithStorage.getActiveUsersStorage();
((SQLStorage) activeUsersStorage).startTransaction(con -> {
activeUsersStorage.deleteUserActive_Transaction(con, appIdentifierWithStorage, recipeUserId);
return null;
});
updateLastActive(appIdentifierWithStorage, main, primaryUserId);
}
@TestOnly
public static int countUsersActiveSince(Main main, long time)
throws StorageQueryException, TenantOrAppNotFoundException {

View File

@ -44,6 +44,7 @@ import io.supertokens.signingkeys.AccessTokenSigningKey;
import io.supertokens.signingkeys.JWTSigningKey;
import io.supertokens.signingkeys.SigningKeys;
import io.supertokens.storageLayer.StorageLayer;
import io.supertokens.telemetry.TelemetryProvider;
import io.supertokens.version.Version;
import io.supertokens.webserver.Webserver;
import org.jetbrains.annotations.TestOnly;
@ -159,6 +160,8 @@ public class Main {
Logging.info(this, TenantIdentifier.BASE_TENANT, "Completed config.yaml loading.", true);
TelemetryProvider.initialize(this);
// loading storage layer
try {
StorageLayer.initPrimary(this, CLIOptions.get(this).getInstallationPath() + "plugin/",
@ -420,6 +423,7 @@ public class Main {
StorageLayer.close(this);
removeDotStartedFileForThisProcess();
Logging.stopLogging(this);
TelemetryProvider.closeTelemetry(this);
// uncomment this when you want to confirm that processes are actually shut.
// printRunningThreadNames();

View File

@ -209,6 +209,10 @@ public class CoreConfig {
@IgnoreForAnnotationCheck
private boolean isNormalizedAndValid = false;
@ConfigYamlOnly
@JsonProperty
private String otel_collector_connection_uri = "http://localhost:4317";
public static Set<String> getValidFields() {
CoreConfig coreConfig = new CoreConfig();
JsonObject coreConfigObj = new GsonBuilder().serializeNulls().create().toJsonTree(coreConfig).getAsJsonObject();
@ -398,6 +402,10 @@ public class CoreConfig {
return webserver_https_enabled;
}
public String getOtelCollectorConnectionURI() {
return otel_collector_connection_uri;
}
private String getConfigFileLocation(Main main) {
return new File(CLIOptions.get(main).getConfigFilePath() == null
? CLIOptions.get(main).getInstallationPath() + "config.yaml"

View File

@ -520,11 +520,11 @@ public class Start
@Override
public void updateSessionInfo_Transaction(TenantIdentifier tenantIdentifier, TransactionConnection con,
String sessionHandle, String refreshTokenHash2,
long expiry) throws StorageQueryException {
long expiry, boolean useStaticKey) throws StorageQueryException {
Connection sqlCon = (Connection) con.getConnection();
try {
SessionQueries.updateSessionInfo_Transaction(this, sqlCon, tenantIdentifier, sessionHandle,
refreshTokenHash2, expiry);
refreshTokenHash2, expiry, useStaticKey);
} catch (SQLException e) {
throw new StorageQueryException(e);
}
@ -2193,10 +2193,11 @@ public class Start
}
@Override
public HashMap<String, String> getUserIdMappingForSuperTokensIds(ArrayList<String> userIds)
public HashMap<String, String> getUserIdMappingForSuperTokensIds(AppIdentifier appIdentifier,
ArrayList<String> userIds)
throws StorageQueryException {
try {
return UserIdMappingQueries.getUserIdMappingWithUserIds(this, userIds);
return UserIdMappingQueries.getUserIdMappingWithUserIds(this, appIdentifier, userIds);
} catch (SQLException e) {
throw new StorageQueryException(e);
}

View File

@ -289,7 +289,7 @@ public class EmailVerificationQueries {
// calculating the verified emails
HashMap<String, String> supertokensUserIdToExternalUserIdMap = UserIdMappingQueries.getUserIdMappingWithUserIds_Transaction(start,
sqlCon, supertokensUserIds);
sqlCon, appIdentifier, supertokensUserIds);
HashMap<String, String> externalUserIdToSupertokensUserIdMap = new HashMap<>();
List<String> supertokensOrExternalUserIdsToQuery = new ArrayList<>();
@ -357,7 +357,7 @@ public class EmailVerificationQueries {
// We have external user id stored in the email verification table, so we need to fetch the mapped userids for
// calculating the verified emails
HashMap<String, String> supertokensUserIdToExternalUserIdMap = UserIdMappingQueries.getUserIdMappingWithUserIds(start,
supertokensUserIds);
appIdentifier, supertokensUserIds);
HashMap<String, String> externalUserIdToSupertokensUserIdMap = new HashMap<>();
List<String> supertokensOrExternalUserIdsToQuery = new ArrayList<>();
for (String userId : supertokensUserIds) {

View File

@ -147,18 +147,19 @@ public class SessionQueries {
public static void updateSessionInfo_Transaction(Start start, Connection con, TenantIdentifier tenantIdentifier,
String sessionHandle,
String refreshTokenHash2, long expiry)
String refreshTokenHash2, long expiry, boolean useStaticKey)
throws SQLException, StorageQueryException {
String QUERY = "UPDATE " + getConfig(start).getSessionInfoTable()
+ " SET refresh_token_hash_2 = ?, expires_at = ?"
+ " SET refresh_token_hash_2 = ?, expires_at = ?, use_static_key = ?"
+ " WHERE app_id = ? AND tenant_id = ? AND session_handle = ?";
update(con, QUERY, pst -> {
pst.setString(1, refreshTokenHash2);
pst.setLong(2, expiry);
pst.setString(3, tenantIdentifier.getAppId());
pst.setString(4, tenantIdentifier.getTenantId());
pst.setString(5, sessionHandle);
pst.setBoolean(3, useStaticKey);
pst.setString(4, tenantIdentifier.getAppId());
pst.setString(5, tenantIdentifier.getTenantId());
pst.setString(6, sessionHandle);
});
}

View File

@ -136,7 +136,9 @@ public class UserIdMappingQueries {
}
public static HashMap<String, String> getUserIdMappingWithUserIds(Start start, List<String> userIds)
public static HashMap<String, String> getUserIdMappingWithUserIds(Start start,
AppIdentifier appIdentifier,
List<String> userIds)
throws SQLException, StorageQueryException {
if (userIds.size() == 0) {
@ -145,7 +147,8 @@ public class UserIdMappingQueries {
// No need to filter based on tenantId because the id list is already filtered for a tenant
StringBuilder QUERY = new StringBuilder(
"SELECT * FROM " + Config.getConfig(start).getUserIdMappingTable() + " WHERE supertokens_user_id IN (");
"SELECT * FROM " + Config.getConfig(start).getUserIdMappingTable() + " WHERE app_id = ? AND " +
"supertokens_user_id IN (");
for (int i = 0; i < userIds.size(); i++) {
QUERY.append("?");
if (i != userIds.size() - 1) {
@ -155,9 +158,10 @@ public class UserIdMappingQueries {
}
QUERY.append(")");
return execute(start, QUERY.toString(), pst -> {
pst.setString(1, appIdentifier.getAppId());
for (int i = 0; i < userIds.size(); i++) {
// i+1 cause this starts with 1 and not 0
pst.setString(i + 1, userIds.get(i));
// i+2 cause this starts with 1 and not 0, 1 is appId
pst.setString(i + 2, userIds.get(i));
}
}, result -> {
HashMap<String, String> userIdMappings = new HashMap<>();
@ -169,7 +173,9 @@ public class UserIdMappingQueries {
});
}
public static HashMap<String, String> getUserIdMappingWithUserIds_Transaction(Start start, Connection sqlCon, List<String> userIds)
public static HashMap<String, String> getUserIdMappingWithUserIds_Transaction(Start start, Connection sqlCon,
AppIdentifier appIdentifier,
List<String> userIds)
throws SQLException, StorageQueryException {
if (userIds.size() == 0) {
@ -178,7 +184,8 @@ public class UserIdMappingQueries {
// No need to filter based on tenantId because the id list is already filtered for a tenant
StringBuilder QUERY = new StringBuilder(
"SELECT * FROM " + Config.getConfig(start).getUserIdMappingTable() + " WHERE supertokens_user_id IN (");
"SELECT * FROM " + Config.getConfig(start).getUserIdMappingTable() + " WHERE app_id = ? AND " +
"supertokens_user_id IN (");
for (int i = 0; i < userIds.size(); i++) {
QUERY.append("?");
if (i != userIds.size() - 1) {
@ -188,9 +195,10 @@ public class UserIdMappingQueries {
}
QUERY.append(")");
return execute(sqlCon, QUERY.toString(), pst -> {
pst.setString(1, appIdentifier.getAppId());
for (int i = 0; i < userIds.size(); i++) {
// i+1 cause this starts with 1 and not 0
pst.setString(i + 1, userIds.get(i));
// i+2 cause this starts with 1 and not 0, 1 is appId
pst.setString(i + 2, userIds.get(i));
}
}, result -> {
HashMap<String, String> userIdMappings = new HashMap<>();

View File

@ -29,6 +29,7 @@ import io.supertokens.pluginInterface.Storage;
import io.supertokens.pluginInterface.multitenancy.TenantIdentifier;
import io.supertokens.pluginInterface.multitenancy.exceptions.TenantOrAppNotFoundException;
import io.supertokens.storageLayer.StorageLayer;
import io.supertokens.telemetry.TelemetryProvider;
import io.supertokens.utils.Utils;
import io.supertokens.webserver.Webserver;
import org.slf4j.LoggerFactory;
@ -109,6 +110,7 @@ public class Logging extends ResourceDistributor.SingletonResource {
msg = prependTenantIdentifierToMessage(tenantIdentifier, msg);
if (getInstance(main) != null) {
getInstance(main).infoLogger.debug(msg);
TelemetryProvider.createLogEvent(main, tenantIdentifier, msg, "debug");
}
} catch (NullPointerException e) {
// sometimes logger.debug throws a null pointer exception...
@ -132,6 +134,8 @@ public class Logging extends ResourceDistributor.SingletonResource {
if (getInstance(main) != null) {
getInstance(main).infoLogger.info(msg);
}
TelemetryProvider.createLogEvent(main, tenantIdentifier, msg, "info");
} catch (NullPointerException ignored) {
}
}
@ -145,6 +149,8 @@ public class Logging extends ResourceDistributor.SingletonResource {
msg = prependTenantIdentifierToMessage(tenantIdentifier, msg);
if (getInstance(main) != null) {
getInstance(main).errorLogger.warn(msg);
TelemetryProvider.createLogEvent(main, tenantIdentifier, msg, "warn");
}
} catch (NullPointerException ignored) {
}
@ -166,6 +172,7 @@ public class Logging extends ResourceDistributor.SingletonResource {
err = prependTenantIdentifierToMessage(tenantIdentifier, err);
if (getInstance(main) != null) {
getInstance(main).errorLogger.error(err);
TelemetryProvider.createLogEvent(main, tenantIdentifier, err, "error");
}
if (toConsoleAsWell || getInstance(main) == null) {
systemErr(err);
@ -199,6 +206,9 @@ public class Logging extends ResourceDistributor.SingletonResource {
message = prependTenantIdentifierToMessage(tenantIdentifier, message);
if (getInstance(main) != null) {
getInstance(main).errorLogger.error(message);
TelemetryProvider
.createLogEvent(main, tenantIdentifier, message,
"error");
}
if (toConsoleAsWell || getInstance(main) == null) {
systemErr(message);

View File

@ -46,6 +46,8 @@ import io.supertokens.session.info.TokenInfo;
import io.supertokens.session.jwt.JWT;
import io.supertokens.session.refreshToken.RefreshToken;
import io.supertokens.storageLayer.StorageLayer;
import io.supertokens.useridmapping.UserIdMapping;
import io.supertokens.useridmapping.UserIdType;
import io.supertokens.utils.Utils;
import org.jetbrains.annotations.TestOnly;
@ -137,12 +139,29 @@ public class Session {
}
String primaryUserId = recipeUserId;
if (tenantIdentifierWithStorage.getStorage().getType().equals(STORAGE_TYPE.SQL)) {
if (tenantIdentifierWithStorage.getStorage().getType() == STORAGE_TYPE.SQL) {
io.supertokens.pluginInterface.useridmapping.UserIdMapping userIdMapping = UserIdMapping.getUserIdMapping(
tenantIdentifierWithStorage.toAppIdentifierWithStorage(), recipeUserId, UserIdType.EXTERNAL);
if (userIdMapping != null) {
recipeUserId = userIdMapping.superTokensUserId;
}
primaryUserId = tenantIdentifierWithStorage.getAuthRecipeStorage()
.getPrimaryUserIdStrForUserId(tenantIdentifierWithStorage.toAppIdentifier(), recipeUserId);
if (primaryUserId == null) {
primaryUserId = recipeUserId;
}
HashMap<String, String> userIdMappings = UserIdMapping.getUserIdMappingForSuperTokensUserIds(
tenantIdentifierWithStorage.toAppIdentifierWithStorage(),
new ArrayList<>(Arrays.asList(primaryUserId, recipeUserId)));
if (userIdMappings.containsKey(primaryUserId)) {
primaryUserId = userIdMappings.get(primaryUserId);
}
if (userIdMappings.containsKey(recipeUserId)) {
recipeUserId = userIdMappings.get(recipeUserId);
}
}
String antiCsrfToken = enableAntiCsrf ? UUID.randomUUID().toString() : null;
@ -377,7 +396,7 @@ public class Session {
accessToken.sessionHandle,
Utils.hashSHA256(accessToken.refreshTokenHash1),
System.currentTimeMillis() +
config.getRefreshTokenValidity());
config.getRefreshTokenValidity(), sessionInfo.useStaticKey);
}
storage.commitTransaction(con);
@ -454,7 +473,7 @@ public class Session {
Utils.hashSHA256(accessToken.refreshTokenHash1),
System.currentTimeMillis() + Config.getConfig(tenantIdentifierWithStorage, main)
.getRefreshTokenValidity(),
sessionInfo.lastUpdatedSign);
sessionInfo.lastUpdatedSign, sessionInfo.useStaticKey);
if (!success) {
continue;
}
@ -509,7 +528,7 @@ public class Session {
UnsupportedJWTSigningAlgorithmException, AccessTokenPayloadError {
try {
return refreshSession(new AppIdentifier(null, null), main, refreshToken, antiCsrfToken,
enableAntiCsrf, accessTokenVersion);
enableAntiCsrf, accessTokenVersion, null);
} catch (TenantOrAppNotFoundException e) {
throw new IllegalStateException(e);
}
@ -518,7 +537,8 @@ public class Session {
public static SessionInformationHolder refreshSession(AppIdentifier appIdentifier, Main main,
@Nonnull String refreshToken,
@Nullable String antiCsrfToken, boolean enableAntiCsrf,
AccessToken.VERSION accessTokenVersion)
AccessToken.VERSION accessTokenVersion,
Boolean shouldUseStaticKey)
throws StorageTransactionLogicException,
UnauthorisedException, StorageQueryException, TokenTheftDetectedException,
UnsupportedJWTSigningAlgorithmException, AccessTokenPayloadError, TenantOrAppNotFoundException {
@ -534,14 +554,15 @@ public class Session {
return refreshSessionHelper(refreshTokenInfo.tenantIdentifier.withStorage(
StorageLayer.getStorage(refreshTokenInfo.tenantIdentifier, main)),
main, refreshToken, refreshTokenInfo, enableAntiCsrf, accessTokenVersion);
main, refreshToken, refreshTokenInfo, enableAntiCsrf, accessTokenVersion, shouldUseStaticKey);
}
private static SessionInformationHolder refreshSessionHelper(
TenantIdentifierWithStorage tenantIdentifierWithStorage, Main main, String refreshToken,
RefreshToken.RefreshTokenInfo refreshTokenInfo,
boolean enableAntiCsrf,
AccessToken.VERSION accessTokenVersion)
AccessToken.VERSION accessTokenVersion,
Boolean shouldUseStaticKey)
throws StorageTransactionLogicException, UnauthorisedException, StorageQueryException,
TokenTheftDetectedException, UnsupportedJWTSigningAlgorithmException, AccessTokenPayloadError,
TenantOrAppNotFoundException {
@ -566,7 +587,16 @@ public class Session {
throw new UnauthorisedException("Session missing in db or has expired");
}
boolean useStaticKey = shouldUseStaticKey != null ? shouldUseStaticKey : sessionInfo.useStaticKey;
if (sessionInfo.refreshTokenHash2.equals(Utils.hashSHA256(Utils.hashSHA256(refreshToken)))) {
if (useStaticKey != sessionInfo.useStaticKey) {
// We do not update anything except the static key status
storage.updateSessionInfo_Transaction(tenantIdentifierWithStorage, con, sessionHandle,
sessionInfo.refreshTokenHash2, sessionInfo.expiry,
useStaticKey);
}
// at this point, the input refresh token is the parent one.
storage.commitTransaction(con);
@ -580,7 +610,7 @@ public class Session {
sessionInfo.recipeUserId, sessionInfo.userId,
Utils.hashSHA256(newRefreshToken.token),
Utils.hashSHA256(refreshToken), sessionInfo.userDataInJWT, antiCsrfToken,
null, accessTokenVersion, sessionInfo.useStaticKey);
null, accessTokenVersion, useStaticKey);
TokenInfo idRefreshToken = new TokenInfo(UUID.randomUUID().toString(),
newRefreshToken.expiry, newRefreshToken.createdTime);
@ -600,13 +630,13 @@ public class Session {
.equals(sessionInfo.refreshTokenHash2))) {
storage.updateSessionInfo_Transaction(tenantIdentifierWithStorage, con, sessionHandle,
Utils.hashSHA256(Utils.hashSHA256(refreshToken)),
System.currentTimeMillis() + config.getRefreshTokenValidity());
System.currentTimeMillis() + config.getRefreshTokenValidity(), useStaticKey);
storage.commitTransaction(con);
return refreshSessionHelper(tenantIdentifierWithStorage, main, refreshToken,
refreshTokenInfo, enableAntiCsrf,
accessTokenVersion);
accessTokenVersion, useStaticKey);
}
storage.commitTransaction(con);
@ -655,7 +685,18 @@ public class Session {
throw new UnauthorisedException("Session missing in db or has expired");
}
boolean useStaticKey = shouldUseStaticKey != null ? shouldUseStaticKey : sessionInfo.useStaticKey;
if (sessionInfo.refreshTokenHash2.equals(Utils.hashSHA256(Utils.hashSHA256(refreshToken)))) {
if (sessionInfo.useStaticKey != useStaticKey) {
// We do not update anything except the static key status
boolean success = storage.updateSessionInfo_Transaction(sessionHandle,
sessionInfo.refreshTokenHash2, sessionInfo.expiry,
sessionInfo.lastUpdatedSign, useStaticKey);
if (!success) {
continue;
}
}
// at this point, the input refresh token is the parent one.
String antiCsrfToken = enableAntiCsrf ? UUID.randomUUID().toString() : null;
@ -666,7 +707,7 @@ public class Session {
sessionHandle,
sessionInfo.recipeUserId, sessionInfo.userId, Utils.hashSHA256(newRefreshToken.token),
Utils.hashSHA256(refreshToken), sessionInfo.userDataInJWT, antiCsrfToken,
null, accessTokenVersion, sessionInfo.useStaticKey);
null, accessTokenVersion, useStaticKey);
TokenInfo idRefreshToken = new TokenInfo(UUID.randomUUID().toString(), newRefreshToken.expiry,
newRefreshToken.createdTime);
@ -688,13 +729,13 @@ public class Session {
Utils.hashSHA256(Utils.hashSHA256(refreshToken)),
System.currentTimeMillis() +
Config.getConfig(tenantIdentifierWithStorage, main).getRefreshTokenValidity(),
sessionInfo.lastUpdatedSign);
sessionInfo.lastUpdatedSign, useStaticKey);
if (!success) {
continue;
}
return refreshSessionHelper(tenantIdentifierWithStorage, main, refreshToken, refreshTokenInfo,
enableAntiCsrf,
accessTokenVersion);
accessTokenVersion, shouldUseStaticKey);
}
throw new TokenTheftDetectedException(sessionHandle, sessionInfo.recipeUserId, sessionInfo.userId);

View File

@ -0,0 +1,167 @@
/*
* Copyright (c) 2025, VRAI Labs and/or its affiliates. All rights reserved.
*
* This software is licensed under the Apache License, Version 2.0 (the
* "License") as published by the Apache Software Foundation.
*
* You may not use this file except in compliance with the License. You may
* obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package io.supertokens.telemetry;
import io.opentelemetry.api.GlobalOpenTelemetry;
import io.opentelemetry.api.OpenTelemetry;
import io.opentelemetry.api.common.Attributes;
import io.opentelemetry.api.trace.Span;
import io.opentelemetry.api.trace.propagation.W3CTraceContextPropagator;
import io.opentelemetry.context.Context;
import io.opentelemetry.context.propagation.ContextPropagators;
import io.opentelemetry.exporter.otlp.logs.OtlpGrpcLogRecordExporter;
import io.opentelemetry.exporter.otlp.trace.OtlpGrpcSpanExporter;
import io.opentelemetry.sdk.OpenTelemetrySdk;
import io.opentelemetry.sdk.logs.SdkLoggerProvider;
import io.opentelemetry.sdk.logs.export.BatchLogRecordProcessor;
import io.opentelemetry.sdk.resources.Resource;
import io.opentelemetry.sdk.trace.SdkTracerProvider;
import io.opentelemetry.sdk.trace.export.SimpleSpanProcessor;
import io.supertokens.Main;
import io.supertokens.ResourceDistributor;
import io.supertokens.config.Config;
import io.supertokens.pluginInterface.multitenancy.TenantIdentifier;
import io.supertokens.pluginInterface.multitenancy.exceptions.TenantOrAppNotFoundException;
import org.jetbrains.annotations.TestOnly;
import java.util.concurrent.TimeUnit;
import static io.opentelemetry.semconv.ServiceAttributes.SERVICE_NAME;
public class TelemetryProvider extends ResourceDistributor.SingletonResource {
private static final String RESOURCE_ID = "io.supertokens.telemetry.TelemetryProvider";
private final OpenTelemetry openTelemetry;
private static synchronized TelemetryProvider getInstance(Main main) {
TelemetryProvider instance = null;
try {
instance = (TelemetryProvider) main.getResourceDistributor()
.getResource(TenantIdentifier.BASE_TENANT, RESOURCE_ID);
} catch (TenantOrAppNotFoundException ignored) {
}
return instance;
}
public static void initialize(Main main) {
main.getResourceDistributor()
.setResource(TenantIdentifier.BASE_TENANT, RESOURCE_ID, new TelemetryProvider(main));
}
public static void createLogEvent(Main main, TenantIdentifier tenantIdentifier, String logMessage,
String logLevel) {
getInstance(main).openTelemetry.getTracer("core-tracer")
.spanBuilder(logLevel)
.setParent(Context.current())
.setAttribute("tenant.connectionUriDomain", tenantIdentifier.getConnectionUriDomain())
.setAttribute("tenant.appId", tenantIdentifier.getAppId())
.setAttribute("tenant.tenantId", tenantIdentifier.getTenantId())
.startSpan()
.addEvent("log",
Attributes.builder()
.put("message", logMessage)
.build(),
System.currentTimeMillis(), TimeUnit.MILLISECONDS)
.end();
}
public static Span startSpan(Main main, TenantIdentifier tenantIdentifier, String spanName) {
Span span = getInstance(main).openTelemetry.getTracer("core-tracer")
.spanBuilder(spanName)
.setParent(Context.current())
.setAttribute("tenant.connectionUriDomain", tenantIdentifier.getConnectionUriDomain())
.setAttribute("tenant.appId", tenantIdentifier.getAppId())
.setAttribute("tenant.tenantId", tenantIdentifier.getTenantId())
.startSpan();
span.makeCurrent(); // Set the span as the current context
return span;
}
public static Span endSpan(Span span) {
if (span != null) {
span.end();
}
return span;
}
public static Span addEventToSpan(Span span, String eventName, Attributes attributes) {
if (span != null) {
span.addEvent(eventName, attributes, System.currentTimeMillis(), TimeUnit.MILLISECONDS);
}
return span;
}
private static OpenTelemetry initializeOpenTelemetry(Main main) {
if (getInstance(main) != null && getInstance(main).openTelemetry != null) {
return getInstance(main).openTelemetry; // already initialized
}
Resource resource = Resource.getDefault().toBuilder()
.put(SERVICE_NAME, "supertokens-core")
.build();
String collectorUri = Config.getBaseConfig(main).getOtelCollectorConnectionURI();
SdkTracerProvider sdkTracerProvider =
SdkTracerProvider.builder()
.setResource(resource)
.addSpanProcessor(SimpleSpanProcessor.create(OtlpGrpcSpanExporter.builder()
.setEndpoint(collectorUri) // otel collector
.build()))
.build();
OpenTelemetrySdk sdk =
OpenTelemetrySdk.builder()
.setTracerProvider(sdkTracerProvider)
.setPropagators(ContextPropagators.create(W3CTraceContextPropagator.getInstance()))
.setLoggerProvider(
SdkLoggerProvider.builder()
.setResource(resource)
.addLogRecordProcessor(
BatchLogRecordProcessor.builder(
OtlpGrpcLogRecordExporter.builder()
.setEndpoint(collectorUri)
.build())
.build())
.build())
.build();
// Add hook to close SDK, which flushes logs
Runtime.getRuntime().addShutdownHook(new Thread(sdk::close));
return sdk;
}
@TestOnly
public static void resetForTest() {
GlobalOpenTelemetry.resetForTest();
}
public static void closeTelemetry(Main main) {
OpenTelemetry telemetry = getInstance(main).openTelemetry;
if (telemetry instanceof OpenTelemetrySdk) {
((OpenTelemetrySdk) telemetry).close();
}
}
private TelemetryProvider(Main main) {
openTelemetry = initializeOpenTelemetry(main);
}
}

View File

@ -323,7 +323,8 @@ public class UserIdMapping {
ArrayList<String> userIds)
throws StorageQueryException {
// userIds are already filtered for a tenant, so this becomes a tenant specific operation.
return tenantIdentifierWithStorage.getUserIdMappingStorage().getUserIdMappingForSuperTokensIds(userIds);
return tenantIdentifierWithStorage.getUserIdMappingStorage().getUserIdMappingForSuperTokensIds(
tenantIdentifierWithStorage.toAppIdentifier(), userIds);
}
public static HashMap<String, String> getUserIdMappingForSuperTokensUserIds(
@ -331,7 +332,8 @@ public class UserIdMapping {
ArrayList<String> userIds)
throws StorageQueryException {
// userIds are already filtered for a tenant, so this becomes a tenant specific operation.
return appIdentifierWithStorage.getUserIdMappingStorage().getUserIdMappingForSuperTokensIds(userIds);
return appIdentifierWithStorage.getUserIdMappingStorage().getUserIdMappingForSuperTokensIds(
appIdentifierWithStorage, userIds);
}
@TestOnly

View File

@ -106,6 +106,16 @@ public class LinkAccountsAPI extends WebserverAPI {
response.addProperty("status", "OK");
response.addProperty("accountsAlreadyLinked", linkAccountsResult.wasAlreadyLinked);
response.add("user", linkAccountsResult.user.toJson());
if (!linkAccountsResult.wasAlreadyLinked) {
try {
ActiveUsers.updateLastActiveAfterLinking(
getPublicTenantStorage(req), main, primaryUserId, recipeUserId);
} catch (Exception e) {
// ignore
}
}
super.sendJsonResponse(200, response, resp);
} catch (StorageQueryException | TenantOrAppNotFoundException | FeatureNotEnabledException e) {
throw new ServletException(e);

View File

@ -61,10 +61,14 @@ public class RefreshSessionAPI extends WebserverAPI {
@Override
protected void doPost(HttpServletRequest req, HttpServletResponse resp) throws IOException, ServletException {
// API is app specific, but session is updated based on tenantId obtained from the refreshToken
SemVer version = super.getVersionFromRequest(req);
JsonObject input = InputParser.parseJsonObjectOrThrowError(req);
String refreshToken = InputParser.parseStringOrThrowError(input, "refreshToken", false);
String antiCsrfToken = InputParser.parseStringOrThrowError(input, "antiCsrfToken", true);
Boolean enableAntiCsrf = InputParser.parseBooleanOrThrowError(input, "enableAntiCsrf", false);
Boolean useDynamicSigningKey = version.greaterThanOrEqualTo(SemVer.v3_0) ?
InputParser.parseBooleanOrThrowError(input, "useDynamicSigningKey", true) : null;
assert enableAntiCsrf != null;
assert refreshToken != null;
@ -75,13 +79,13 @@ public class RefreshSessionAPI extends WebserverAPI {
throw new ServletException(e);
}
SemVer version = super.getVersionFromRequest(req);
try {
AccessToken.VERSION accessTokenVersion = AccessToken.getAccessTokenVersionForCDI(version);
SessionInformationHolder sessionInfo = Session.refreshSession(appIdentifierWithStorage, main,
refreshToken, antiCsrfToken,
enableAntiCsrf, accessTokenVersion);
enableAntiCsrf, accessTokenVersion,
useDynamicSigningKey == null ? null : Boolean.FALSE.equals(useDynamicSigningKey));
if (StorageLayer.getStorage(this.getTenantIdentifierWithStorageFromRequest(req), main).getType() ==
STORAGE_TYPE.SQL) {

View File

@ -32,6 +32,7 @@ import io.supertokens.session.Session;
import io.supertokens.storageLayer.StorageLayer;
import io.supertokens.thirdparty.ThirdParty;
import io.supertokens.usermetadata.UserMetadata;
import io.supertokens.version.Version;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.Rule;
@ -487,6 +488,8 @@ public class AuthRecipeTest {
fail();
}
boolean isMySQL = Version.getVersion(process.getProcess()).getPluginName().equals("mysql");
for (int limit : limits) {
// now we paginate in asc order
@ -496,6 +499,9 @@ public class AuthRecipeTest {
if (o1.timeJoined != o2.timeJoined) {
return (int) (o1.timeJoined - o2.timeJoined);
}
if (isMySQL) {
return o1.getSupertokensUserId().compareTo(o2.getSupertokensUserId());
}
return o2.getSupertokensUserId().compareTo(o1.getSupertokensUserId());
});

View File

@ -22,6 +22,7 @@ import io.supertokens.Main;
import io.supertokens.pluginInterface.PluginInterfaceTesting;
import io.supertokens.pluginInterface.useridmapping.UserIdMapping;
import io.supertokens.storageLayer.StorageLayer;
import io.supertokens.telemetry.TelemetryProvider;
import io.supertokens.test.httpRequest.HttpRequestForTesting;
import io.supertokens.test.httpRequest.HttpResponseException;
import io.supertokens.useridmapping.UserIdType;
@ -69,6 +70,8 @@ public abstract class Utils extends Mockito {
} catch (Exception ignored) {
}
TelemetryProvider.resetForTest();
} catch (Exception e) {
e.printStackTrace();
}

View File

@ -598,4 +598,44 @@ public class SessionTests {
process.kill();
assertNotNull(process.checkOrWaitForEvent(ProcessState.PROCESS_STATE.STOPPED));
}
@Test
public void testCreateSessionWithUserIdMappedForRecipeUser() throws Exception {
String[] args = {"../"};
TestingProcessManager.TestingProcess process = TestingProcessManager.start(args, false);
FeatureFlagTestContent.getInstance(process.getProcess())
.setKeyValue(FeatureFlagTestContent.ENABLED_FEATURES, new EE_FEATURES[]{
EE_FEATURES.ACCOUNT_LINKING, EE_FEATURES.MULTI_TENANCY});
process.startProcess();
assertNotNull(process.checkOrWaitForEvent(ProcessState.PROCESS_STATE.STARTED));
if (StorageLayer.getStorage(process.getProcess()).getType() != STORAGE_TYPE.SQL) {
return;
}
AuthRecipeUserInfo user1 = EmailPassword.signUp(process.getProcess(), "test1@example.com", "password");
AuthRecipeUserInfo user2 = EmailPassword.signUp(process.getProcess(), "test2@example.com", "password");
AuthRecipe.createPrimaryUser(process.getProcess(), user1.getSupertokensUserId());
AuthRecipe.linkAccounts(process.getProcess(), user2.getSupertokensUserId(), user1.getSupertokensUserId());
UserIdMapping.createUserIdMapping(process.getProcess(), user1.getSupertokensUserId(), "extid1", null, false);
UserIdMapping.createUserIdMapping(process.getProcess(), user2.getSupertokensUserId(), "extid2", null, false);
SessionInformationHolder session1 = Session.createNewSession(process.getProcess(), user1.getSupertokensUserId(), new JsonObject(), new JsonObject());
SessionInformationHolder session2 = Session.createNewSession(process.getProcess(), user2.getSupertokensUserId(), new JsonObject(), new JsonObject());
SessionInformationHolder session3 = Session.createNewSession(process.getProcess(), "extid1", new JsonObject(), new JsonObject());
SessionInformationHolder session4 = Session.createNewSession(process.getProcess(), "extid2", new JsonObject(), new JsonObject());
assertEquals("extid1", session1.session.userId);
assertEquals("extid1", session1.session.recipeUserId);
assertEquals("extid1", session2.session.userId);
assertEquals("extid2", session2.session.recipeUserId);
assertEquals("extid1", session3.session.userId);
assertEquals("extid1", session3.session.recipeUserId);
assertEquals("extid1", session4.session.userId);
assertEquals("extid2", session4.session.recipeUserId);
process.kill();
assertNotNull(process.checkOrWaitForEvent(ProcessState.PROCESS_STATE.STOPPED));
}
}

View File

@ -156,9 +156,9 @@ public class ActiveUserTest {
WebserverAPI.getLatestCDIVersion().get(), "");
}
// we don't remove the active user for the recipe user, so it should still be 2
// we remove the active user for the recipe user, so it should be 1
userCount = ActiveUsers.countUsersActiveSince(process.getProcess(), System.currentTimeMillis() - 10000);
assertEquals(2, userCount);
assertEquals(1, userCount);
// Sign in to the accounts once again
{
@ -188,7 +188,7 @@ public class ActiveUserTest {
// there should still be only one active user
userCount = ActiveUsers.countUsersActiveSince(process.getProcess(), System.currentTimeMillis() - 10000);
assertEquals(2, userCount);
assertEquals(1, userCount);
process.kill();
assertNotNull(process.checkOrWaitForEvent(ProcessState.PROCESS_STATE.STOPPED));

View File

@ -20,6 +20,8 @@ import com.google.gson.JsonArray;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import io.supertokens.ProcessState;
import io.supertokens.authRecipe.AuthRecipe;
import io.supertokens.authRecipe.UserPaginationContainer;
import io.supertokens.emailpassword.EmailPassword;
import io.supertokens.emailpassword.exceptions.EmailChangeNotAllowedException;
import io.supertokens.featureflag.EE_FEATURES;
@ -32,6 +34,7 @@ import io.supertokens.passwordless.Passwordless;
import io.supertokens.passwordless.exceptions.*;
import io.supertokens.pluginInterface.STORAGE_TYPE;
import io.supertokens.pluginInterface.authRecipe.AuthRecipeUserInfo;
import io.supertokens.pluginInterface.authRecipe.LoginMethod;
import io.supertokens.pluginInterface.emailpassword.exceptions.DuplicateEmailException;
import io.supertokens.pluginInterface.exceptions.InvalidConfigException;
import io.supertokens.pluginInterface.exceptions.StorageQueryException;
@ -39,6 +42,7 @@ import io.supertokens.pluginInterface.exceptions.StorageTransactionLogicExceptio
import io.supertokens.pluginInterface.multitenancy.*;
import io.supertokens.pluginInterface.multitenancy.exceptions.TenantOrAppNotFoundException;
import io.supertokens.pluginInterface.passwordless.exception.DuplicateLinkCodeHashException;
import io.supertokens.pluginInterface.thirdparty.sqlStorage.ThirdPartySQLStorage;
import io.supertokens.storageLayer.StorageLayer;
import io.supertokens.test.TestingProcessManager;
import io.supertokens.test.Utils;
@ -339,4 +343,66 @@ public class UserPaginationTest {
}
}
}
@Test
public void testUserPaginationWithSameTimeJoined() throws Exception {
if (StorageLayer.getBaseStorage(process.main).getType() != STORAGE_TYPE.SQL) {
return;
}
ThirdPartySQLStorage storage = (ThirdPartySQLStorage) StorageLayer.getBaseStorage(process.getProcess());
Set<String> userIds = new HashSet<>();
long timeJoined = System.currentTimeMillis();
for (int i = 0; i < 100; i++) {
String userId = io.supertokens.utils.Utils.getUUID();
storage.signUp(TenantIdentifier.BASE_TENANT, userId, "test"+i+"@example.com", new LoginMethod.ThirdParty("google", userId), timeJoined);
userIds.add(userId);
}
// Test ascending
{
Set<String> paginationUserIds = new HashSet<>();
UserPaginationContainer usersRes = AuthRecipe.getUsers(process.getProcess(), 10,
"ASC", null, null, null);
while (true) {
for (AuthRecipeUserInfo user : usersRes.users) {
paginationUserIds.add(user.getSupertokensUserId());
}
if (usersRes.nextPaginationToken == null) {
break;
}
usersRes = AuthRecipe.getUsers(process.getProcess(), 10,
"ASC", usersRes.nextPaginationToken, null, null);
}
assertEquals(userIds.size(), paginationUserIds.size());
assertEquals(userIds, paginationUserIds);
}
// Test descending
{
Set<String> paginationUserIds = new HashSet<>();
UserPaginationContainer usersRes = AuthRecipe.getUsers(process.getProcess(), 10,
"DESC", null, null, null);
while (true) {
for (AuthRecipeUserInfo user : usersRes.users) {
paginationUserIds.add(user.getSupertokensUserId());
}
if (usersRes.nextPaginationToken == null) {
break;
}
usersRes = AuthRecipe.getUsers(process.getProcess(), 10,
"DESC", usersRes.nextPaginationToken, null, null);
}
assertEquals(userIds.size(), paginationUserIds.size());
assertEquals(userIds, paginationUserIds);
}
}
}
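
The test above seeds 100 third-party users with an identical timeJoined and checks that ASC and DESC pagination still return each user exactly once, which only holds if the ordering falls back to a deterministic tiebreaker when timestamps collide. Below is a minimal TypeScript sketch of such a stable comparator, for illustration only; the tiebreak column assumed here (the user ID) is not spelled out in this diff.

// Illustrative comparator: primary sort on timeJoined, deterministic tiebreak
// on a unique column so pages never skip or repeat users with equal timestamps.
interface PageUser {
  userId: string;
  timeJoined: number;
}

function paginationComparator(order: 'ASC' | 'DESC') {
  return (a: PageUser, b: PageUser): number => {
    const dir = order === 'ASC' ? 1 : -1;
    if (a.timeJoined !== b.timeJoined) {
      return (a.timeJoined - b.timeJoined) * dir;
    }
    return a.userId.localeCompare(b.userId) * dir; // assumed tiebreak, for demonstration
  };
}

const sameTime: PageUser[] = [
  { userId: 'u-b', timeJoined: 1000 },
  { userId: 'u-a', timeJoined: 1000 },
  { userId: 'u-c', timeJoined: 1000 },
];
console.log(sameTime.sort(paginationComparator('ASC')).map((u) => u.userId)); // [ 'u-a', 'u-b', 'u-c' ]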

View File

@ -108,6 +108,7 @@ public class RefreshTokenTest {
TestingProcess process = TestingProcessManager.start(args);
assertNotNull(process.checkOrWaitForEvent(PROCESS_STATE.STARTED));
long createdTime = System.currentTimeMillis();
TokenInfo tokenInfo = RefreshToken.createNewRefreshToken(process.getProcess(), "sessionHandle", "userId",
"parentRefreshTokenHash1", "antiCsrfToken");
@ -129,9 +130,8 @@ public class RefreshTokenTest {
assertEquals("antiCsrfToken", infoFromToken.antiCsrfToken);
assertNull(infoFromToken.parentRefreshTokenHash2);
assertSame(infoFromToken.type, TYPE.FREE_OPTIMISED);
// -5000 for some grace period for creation and checking above
assertTrue(tokenInfo.expiry > System.currentTimeMillis()
+ Config.getConfig(process.getProcess()).getRefreshTokenValidity() - 5000);
assertTrue(tokenInfo.expiry >= createdTime
+ Config.getConfig(process.getProcess()).getRefreshTokenValidity());
process.kill();
assertNotNull(process.checkOrWaitForEvent(PROCESS_STATE.STOPPED));

View File

@ -0,0 +1,202 @@
/*
* Copyright (c) 2021, VRAI Labs and/or its affiliates. All rights reserved.
*
* This software is licensed under the Apache License, Version 2.0 (the
* "License") as published by the Apache Software Foundation.
*
* You may not use this file except in compliance with the License. You may
* obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package io.supertokens.test.session;
import com.google.gson.JsonObject;
import io.supertokens.ProcessState;
import io.supertokens.exceptions.TryRefreshTokenException;
import io.supertokens.exceptions.UnauthorisedException;
import io.supertokens.pluginInterface.multitenancy.AppIdentifier;
import io.supertokens.pluginInterface.multitenancy.TenantIdentifier;
import io.supertokens.pluginInterface.session.SessionStorage;
import io.supertokens.session.Session;
import io.supertokens.session.accessToken.AccessToken;
import io.supertokens.session.info.SessionInformationHolder;
import io.supertokens.session.jwt.JWT;
import io.supertokens.storageLayer.StorageLayer;
import io.supertokens.test.TestingProcessManager;
import io.supertokens.test.Utils;
import org.junit.*;
import org.junit.rules.TestRule;
import static junit.framework.TestCase.*;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.fail;
public class SessionTest6 {
@Rule
public TestRule watchman = Utils.getOnFailure();
@AfterClass
public static void afterTesting() {
Utils.afterTesting();
}
@Before
public void beforeEach() {
Utils.reset();
}
@Test
public void createRefreshSwitchVerify() throws Exception {
String[] args = {"../"};
TestingProcessManager.TestingProcess process = TestingProcessManager.start(args);
assertNotNull(process.checkOrWaitForEvent(ProcessState.PROCESS_STATE.STARTED));
String userId = "userId";
JsonObject userDataInJWT = new JsonObject();
userDataInJWT.addProperty("key", "value");
JsonObject userDataInDatabase = new JsonObject();
userDataInDatabase.addProperty("key", "value");
SessionInformationHolder sessionInfo = Session.createNewSession(process.getProcess(), userId, userDataInJWT,
userDataInDatabase, false, AccessToken.getLatestVersion(), false);
checkIfUsingStaticKey(sessionInfo, false);
sessionInfo = Session.refreshSession(new AppIdentifier(null, null), process.getProcess(), sessionInfo.refreshToken.token,
sessionInfo.antiCsrfToken, false, AccessToken.getLatestVersion(), true);
assert sessionInfo.refreshToken != null;
assert sessionInfo.accessToken != null;
checkIfUsingStaticKey(sessionInfo, true);
SessionInformationHolder verifiedSession = Session.getSession(process.getProcess(), sessionInfo.accessToken.token,
sessionInfo.antiCsrfToken, false, true, false);
checkIfUsingStaticKey(verifiedSession, true);
process.kill();
assertNotNull(process.checkOrWaitForEvent(ProcessState.PROCESS_STATE.STOPPED));
}
@Test
public void createRefreshSwitchRegen() throws Exception {
String[] args = {"../"};
TestingProcessManager.TestingProcess process = TestingProcessManager.start(args);
assertNotNull(process.checkOrWaitForEvent(ProcessState.PROCESS_STATE.STARTED));
String userId = "userId";
JsonObject userDataInJWT = new JsonObject();
userDataInJWT.addProperty("key", "value");
JsonObject userDataInDatabase = new JsonObject();
userDataInDatabase.addProperty("key", "value");
SessionInformationHolder sessionInfo = Session.createNewSession(process.getProcess(), userId, userDataInJWT,
userDataInDatabase, false, AccessToken.getLatestVersion(), false);
checkIfUsingStaticKey(sessionInfo, false);
sessionInfo = Session.refreshSession(new AppIdentifier(null, null), process.getProcess(), sessionInfo.refreshToken.token,
sessionInfo.antiCsrfToken, false, AccessToken.getLatestVersion(), true);
assert sessionInfo.refreshToken != null;
assert sessionInfo.accessToken != null;
checkIfUsingStaticKey(sessionInfo, true);
SessionInformationHolder newSessionInfo = Session.regenerateToken(process.getProcess(),
sessionInfo.accessToken.token, userDataInJWT);
checkIfUsingStaticKey(newSessionInfo, true);
SessionInformationHolder getSessionResponse = Session.getSession(process.getProcess(),
newSessionInfo.accessToken.token, sessionInfo.antiCsrfToken, false, true, false);
checkIfUsingStaticKey(getSessionResponse, true);
process.kill();
assertNotNull(process.checkOrWaitForEvent(ProcessState.PROCESS_STATE.STOPPED));
}
@Test
public void createRefreshRefreshSwitchVerify() throws Exception {
String[] args = {"../"};
TestingProcessManager.TestingProcess process = TestingProcessManager.start(args);
assertNotNull(process.checkOrWaitForEvent(ProcessState.PROCESS_STATE.STARTED));
String userId = "userId";
JsonObject userDataInJWT = new JsonObject();
userDataInJWT.addProperty("key", "value");
JsonObject userDataInDatabase = new JsonObject();
userDataInDatabase.addProperty("key", "value");
SessionInformationHolder sessionInfo = Session.createNewSession(process.getProcess(), userId, userDataInJWT,
userDataInDatabase, false, AccessToken.getLatestVersion(), false);
checkIfUsingStaticKey(sessionInfo, false);
sessionInfo = Session.refreshSession(new AppIdentifier(null, null), process.getProcess(), sessionInfo.refreshToken.token,
sessionInfo.antiCsrfToken, false, AccessToken.getLatestVersion(), false);
sessionInfo = Session.refreshSession(new AppIdentifier(null, null), process.getProcess(), sessionInfo.refreshToken.token,
sessionInfo.antiCsrfToken, false, AccessToken.getLatestVersion(), true);
assert sessionInfo.refreshToken != null;
assert sessionInfo.accessToken != null;
checkIfUsingStaticKey(sessionInfo, true);
SessionInformationHolder verifiedSession = Session.getSession(process.getProcess(), sessionInfo.accessToken.token,
sessionInfo.antiCsrfToken, false, true, false);
checkIfUsingStaticKey(verifiedSession, true);
process.kill();
assertNotNull(process.checkOrWaitForEvent(ProcessState.PROCESS_STATE.STOPPED));
}
@Test
public void createRefreshRefreshSwitchRegen() throws Exception {
String[] args = {"../"};
TestingProcessManager.TestingProcess process = TestingProcessManager.start(args);
assertNotNull(process.checkOrWaitForEvent(ProcessState.PROCESS_STATE.STARTED));
String userId = "userId";
JsonObject userDataInJWT = new JsonObject();
userDataInJWT.addProperty("key", "value");
JsonObject userDataInDatabase = new JsonObject();
userDataInDatabase.addProperty("key", "value");
SessionInformationHolder sessionInfo = Session.createNewSession(process.getProcess(), userId, userDataInJWT,
userDataInDatabase, false, AccessToken.getLatestVersion(), false);
checkIfUsingStaticKey(sessionInfo, false);
sessionInfo = Session.refreshSession(new AppIdentifier(null, null), process.getProcess(), sessionInfo.refreshToken.token,
sessionInfo.antiCsrfToken, false, AccessToken.getLatestVersion(), false);
sessionInfo = Session.refreshSession(new AppIdentifier(null, null), process.getProcess(), sessionInfo.refreshToken.token,
sessionInfo.antiCsrfToken, false, AccessToken.getLatestVersion(), true);
assert sessionInfo.refreshToken != null;
assert sessionInfo.accessToken != null;
checkIfUsingStaticKey(sessionInfo, true);
SessionInformationHolder newSessionInfo = Session.regenerateToken(process.getProcess(),
sessionInfo.accessToken.token, userDataInJWT);
checkIfUsingStaticKey(newSessionInfo, true);
SessionInformationHolder getSessionResponse = Session.getSession(process.getProcess(),
newSessionInfo.accessToken.token, sessionInfo.antiCsrfToken, false, true, false);
checkIfUsingStaticKey(getSessionResponse, true);
process.kill();
assertNotNull(process.checkOrWaitForEvent(ProcessState.PROCESS_STATE.STOPPED));
}
private static void checkIfUsingStaticKey(SessionInformationHolder info, boolean shouldBeStatic) throws JWT.JWTException {
assert info.accessToken != null;
JWT.JWTPreParseInfo tokenInfo = JWT.preParseJWTInfo(info.accessToken.token);
assert tokenInfo.kid != null;
if (shouldBeStatic) {
assert tokenInfo.kid.startsWith("s-");
} else {
assert tokenInfo.kid.startsWith("d-");
}
}
}
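
checkIfUsingStaticKey above tells statically and dynamically signed access tokens apart purely by the kid prefix in the JWT header ("s-" vs "d-"). The TypeScript sketch below does the same check outside the core; it assumes nothing beyond the access token being a standard JWT whose header carries a kid, as the test exercises, and is not part of any SDK API.

// Sketch: decode the JWT header and inspect the kid prefix ('s-' = static key,
// 'd-' = dynamic key), mirroring the assertion in checkIfUsingStaticKey.
function usesStaticSigningKey(accessToken: string): boolean {
  const headerB64 = accessToken.split('.')[0];
  const header = JSON.parse(Buffer.from(headerB64, 'base64url').toString('utf8'));
  if (typeof header.kid !== 'string') {
    throw new Error('access token header has no kid');
  }
  return header.kid.startsWith('s-');
}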

View File

@ -76,7 +76,7 @@ public class RefreshSessionAPITest2_21 {
JsonObject response = HttpRequestForTesting.sendJsonPOSTRequest(process.getProcess(), "",
"http://localhost:3567/recipe/session/refresh", sessionRefreshBody, 1000, 1000, null,
Utils.getCdiVersionStringLatestForTests(), "session");
SemVer.v2_21.get(), "session");
assertEquals(response.entrySet().size(), 2);
assertEquals(response.get("status").getAsString(), "UNAUTHORISED");

View File

@ -0,0 +1,207 @@
/*
* Copyright (c) 2021, VRAI Labs and/or its affiliates. All rights reserved.
*
* This software is licensed under the Apache License, Version 2.0 (the
* "License") as published by the Apache Software Foundation.
*
* You may not use this file except in compliance with the License. You may
* obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package io.supertokens.test.session.api;
import com.google.gson.JsonNull;
import com.google.gson.JsonObject;
import io.supertokens.ProcessState;
import io.supertokens.session.jwt.JWT;
import io.supertokens.test.TestingProcessManager;
import io.supertokens.test.Utils;
import io.supertokens.test.httpRequest.HttpRequestForTesting;
import io.supertokens.utils.SemVer;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TestRule;
import static junit.framework.TestCase.assertEquals;
import static junit.framework.TestCase.assertTrue;
import static org.junit.Assert.assertNotNull;
public class RefreshSessionAPITest3_0 {
@Rule
public TestRule watchman = Utils.getOnFailure();
@AfterClass
public static void afterTesting() {
Utils.afterTesting();
}
@Before
public void beforeEach() {
Utils.reset();
}
@Test
public void successOutputWithValidRefreshTokenTest() throws Exception {
String[] args = { "../" };
TestingProcessManager.TestingProcess process = TestingProcessManager.start(args);
assertNotNull(process.checkOrWaitForEvent(ProcessState.PROCESS_STATE.STARTED));
String userId = "userId";
JsonObject userDataInJWT = new JsonObject();
userDataInJWT.add("nullProp", JsonNull.INSTANCE);
userDataInJWT.addProperty("key", "value");
JsonObject userDataInDatabase = new JsonObject();
userDataInDatabase.addProperty("key", "value");
JsonObject request = new JsonObject();
request.addProperty("userId", userId);
request.add("userDataInJWT", userDataInJWT);
request.add("userDataInDatabase", userDataInDatabase);
request.addProperty("enableAntiCsrf", false);
JsonObject sessionInfo = HttpRequestForTesting.sendJsonPOSTRequest(process.getProcess(), "",
"http://localhost:3567/recipe/session", request, 1000, 1000, null, SemVer.v2_7.get(),
"session");
assertEquals(sessionInfo.get("status").getAsString(), "OK");
JsonObject sessionRefreshBody = new JsonObject();
sessionRefreshBody.addProperty("refreshToken",
sessionInfo.get("refreshToken").getAsJsonObject().get("token").getAsString());
sessionRefreshBody.addProperty("enableAntiCsrf", false);
JsonObject sessionRefreshResponse = HttpRequestForTesting.sendJsonPOSTRequest(process.getProcess(), "",
"http://localhost:3567/recipe/session/refresh", sessionRefreshBody, 1000, 1000, null,
SemVer.v3_0.get(), "session");
checkRefreshSessionResponse(sessionRefreshResponse, process, userId, userDataInJWT, false, false);
process.kill();
assertNotNull(process.checkOrWaitForEvent(ProcessState.PROCESS_STATE.STOPPED));
}
@Test
public void successOutputUpgradeWithNonStaticKeySessionTest() throws Exception {
String[] args = { "../" };
TestingProcessManager.TestingProcess process = TestingProcessManager.start(args);
assertNotNull(process.checkOrWaitForEvent(ProcessState.PROCESS_STATE.STARTED));
String userId = "userId";
JsonObject userDataInJWT = new JsonObject();
userDataInJWT.add("nullProp", JsonNull.INSTANCE);
userDataInJWT.addProperty("key", "value");
JsonObject userDataInDatabase = new JsonObject();
userDataInDatabase.addProperty("key", "value");
JsonObject request = new JsonObject();
request.addProperty("userId", userId);
request.add("userDataInJWT", userDataInJWT);
request.add("userDataInDatabase", userDataInDatabase);
request.addProperty("enableAntiCsrf", false);
JsonObject sessionInfo = HttpRequestForTesting.sendJsonPOSTRequest(process.getProcess(), "",
"http://localhost:3567/recipe/session", request, 1000, 1000, null, SemVer.v2_7.get(),
"session");
assertEquals(sessionInfo.get("status").getAsString(), "OK");
JsonObject sessionRefreshBody = new JsonObject();
sessionRefreshBody.addProperty("refreshToken",
sessionInfo.get("refreshToken").getAsJsonObject().get("token").getAsString());
sessionRefreshBody.addProperty("enableAntiCsrf", false);
sessionRefreshBody.addProperty("useDynamicSigningKey", true);
JsonObject sessionRefreshResponse = HttpRequestForTesting.sendJsonPOSTRequest(process.getProcess(), "",
"http://localhost:3567/recipe/session/refresh", sessionRefreshBody, 1000, 1000, null,
SemVer.v3_0.get(), "session");
checkRefreshSessionResponse(sessionRefreshResponse, process, userId, userDataInJWT, false, false);
process.kill();
assertNotNull(process.checkOrWaitForEvent(ProcessState.PROCESS_STATE.STOPPED));
}
@Test
public void successOutputUpgradeWithStaticKeySessionTest() throws Exception {
String[] args = { "../" };
TestingProcessManager.TestingProcess process = TestingProcessManager.start(args);
assertNotNull(process.checkOrWaitForEvent(ProcessState.PROCESS_STATE.STARTED));
String userId = "userId";
JsonObject userDataInJWT = new JsonObject();
userDataInJWT.add("nullProp", JsonNull.INSTANCE);
userDataInJWT.addProperty("key", "value");
JsonObject userDataInDatabase = new JsonObject();
userDataInDatabase.addProperty("key", "value");
JsonObject request = new JsonObject();
request.addProperty("userId", userId);
request.add("userDataInJWT", userDataInJWT);
request.add("userDataInDatabase", userDataInDatabase);
request.addProperty("enableAntiCsrf", false);
JsonObject sessionInfo = HttpRequestForTesting.sendJsonPOSTRequest(process.getProcess(), "",
"http://localhost:3567/recipe/session", request, 1000, 1000, null, SemVer.v2_7.get(),
"session");
assertEquals(sessionInfo.get("status").getAsString(), "OK");
JsonObject sessionRefreshBody = new JsonObject();
sessionRefreshBody.addProperty("refreshToken",
sessionInfo.get("refreshToken").getAsJsonObject().get("token").getAsString());
sessionRefreshBody.addProperty("enableAntiCsrf", false);
sessionRefreshBody.addProperty("useDynamicSigningKey", false);
JsonObject sessionRefreshResponse = HttpRequestForTesting.sendJsonPOSTRequest(process.getProcess(), "",
"http://localhost:3567/recipe/session/refresh", sessionRefreshBody, 1000, 1000, null,
SemVer.v3_0.get(), "session");
checkRefreshSessionResponse(sessionRefreshResponse, process, userId, userDataInJWT, false, true);
process.kill();
assertNotNull(process.checkOrWaitForEvent(ProcessState.PROCESS_STATE.STOPPED));
}
private static void checkRefreshSessionResponse(JsonObject response, TestingProcessManager.TestingProcess process,
String userId, JsonObject userDataInJWT, boolean hasAntiCsrf, boolean useStaticKey) throws
JWT.JWTException {
assertNotNull(response.get("session").getAsJsonObject().get("handle").getAsString());
assertEquals(response.get("session").getAsJsonObject().get("userId").getAsString(), userId);
assertEquals(response.get("session").getAsJsonObject().get("tenantId").getAsString(), "public");
assertEquals(response.get("session").getAsJsonObject().get("userDataInJWT").getAsJsonObject().toString(),
userDataInJWT.toString());
assertEquals(response.get("session").getAsJsonObject().entrySet().size(), 4);
assertTrue(response.get("accessToken").getAsJsonObject().has("token"));
assertTrue(response.get("accessToken").getAsJsonObject().has("expiry"));
assertTrue(response.get("accessToken").getAsJsonObject().has("createdTime"));
assertEquals(response.get("accessToken").getAsJsonObject().entrySet().size(), 3);
JWT.JWTPreParseInfo tokenInfo = JWT.preParseJWTInfo(response.get("accessToken").getAsJsonObject().get("token").getAsString());
if (useStaticKey) {
assert(tokenInfo.kid.startsWith("s-"));
} else {
assert(tokenInfo.kid.startsWith("d-"));
}
assertTrue(response.get("refreshToken").getAsJsonObject().has("token"));
assertTrue(response.get("refreshToken").getAsJsonObject().has("expiry"));
assertTrue(response.get("refreshToken").getAsJsonObject().has("createdTime"));
assertEquals(response.get("refreshToken").getAsJsonObject().entrySet().size(), 3);
assertEquals(response.has("antiCsrfToken"), hasAntiCsrf);
assertEquals(response.entrySet().size(), hasAntiCsrf ? 5 : 4);
}
}
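
RefreshSessionAPITest3_0 exercises POST /recipe/session/refresh under CDI 3.0, where the body may carry useDynamicSigningKey and the kid prefix of the returned access token ("s-"/"d-") must match that flag. The TypeScript sketch below makes the same call outside the test harness; the header names used to pass the CDI version and recipe id are assumptions based on how HttpRequestForTesting forwards them, and the refresh token is a placeholder.

// Illustrative request against a locally running core. Header names
// ('cdi-version', 'rid') are assumptions, not taken from this diff.
async function refreshSession(refreshToken: string, useDynamicSigningKey: boolean) {
  const res = await fetch('http://localhost:3567/recipe/session/refresh', {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
      'cdi-version': '3.0', // assumed header name for the CDI version
      rid: 'session', // assumed header name for the recipe id
    },
    body: JSON.stringify({
      refreshToken, // placeholder: use a token returned by session creation
      enableAntiCsrf: false,
      useDynamicSigningKey,
    }),
  });
  return res.json(); // expected: { status, session, accessToken, refreshToken, ... }
}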

View File

@ -107,6 +107,7 @@ public class SessionRegenerateAPITest2_21 {
sessionRefreshBody.addProperty("refreshToken",
sessionInfo.get("refreshToken").getAsJsonObject().get("token").getAsString());
sessionRefreshBody.addProperty("enableAntiCsrf", false);
sessionRefreshBody.addProperty("useDynamicSigningKey", true);
JsonObject sessionRefreshResponse = HttpRequestForTesting.sendJsonPOSTRequest(process.getProcess(), "",
"http://localhost:3567/recipe/session/refresh", sessionRefreshBody, 1000, 1000, null,

View File

@ -583,7 +583,8 @@ public class UserIdMappingStorageTest {
storage.createUserIdMapping(new AppIdentifier(null, null), superTokensUserId, externalUserId,
null);
}
HashMap<String, String> response = storage.getUserIdMappingForSuperTokensIds(superTokensUserIdList);
HashMap<String, String> response =
storage.getUserIdMappingForSuperTokensIds(new AppIdentifier(null, null), superTokensUserIdList);
assertEquals(AuthRecipe.USER_PAGINATION_LIMIT, response.size());
for (int i = 0; i < response.size(); i++) {
assertEquals(externalUserIdList.get(i), response.get(superTokensUserIdList.get(i)));
@ -606,7 +607,8 @@ public class UserIdMappingStorageTest {
UserIdMappingStorage storage = (UserIdMappingStorage) StorageLayer.getStorage(process.main);
ArrayList<String> emptyList = new ArrayList<>();
HashMap<String, String> response = storage.getUserIdMappingForSuperTokensIds(emptyList);
HashMap<String, String> response =
storage.getUserIdMappingForSuperTokensIds(new AppIdentifier(null, null), emptyList);
assertEquals(0, response.size());
process.kill();
@ -631,7 +633,8 @@ public class UserIdMappingStorageTest {
superTokensUserIdList.add(userInfo.getSupertokensUserId());
}
HashMap<String, String> userIdMapping = storage.getUserIdMappingForSuperTokensIds(superTokensUserIdList);
HashMap<String, String> userIdMapping =
storage.getUserIdMappingForSuperTokensIds(new AppIdentifier(null, null), superTokensUserIdList);
assertEquals(0, userIdMapping.size());
process.kill();
@ -668,7 +671,8 @@ public class UserIdMappingStorageTest {
}
// retrieve UserIDMapping
HashMap<String, String> response = storage.getUserIdMappingForSuperTokensIds(superTokensUserIdList);
HashMap<String, String> response =
storage.getUserIdMappingForSuperTokensIds(new AppIdentifier(null, null), superTokensUserIdList);
assertEquals(5, response.size());
// check that the last 5 users have their ids mapped

stress-tests/.gitignore vendored Normal file
View File

@ -0,0 +1,55 @@
# Dependencies
node_modules/
npm-debug.log*
yarn-debug.log*
yarn-error.log*
package-lock.json
yarn.lock
# Environment variables
.env
.env.local
.env.*.local
# Build output
dist/
build/
out/
# Logs
logs/
*.log
# IDE and editor files
.idea/
.vscode/
*.swp
*.swo
.DS_Store
Thumbs.db
# Testing
coverage/
.nyc_output/
# Temporary files
*.tmp
*.temp
.cache/
# Optional npm cache directory
.npm
# Optional eslint cache
.eslintcache
# Optional REPL history
.node_repl_history
# Output of 'npm pack'
*.tgz
# Yarn Integrity file
.yarn-integrity
users/

stress-tests/.prettierrc Normal file
View File

@ -0,0 +1,8 @@
{
"semi": true,
"trailingComma": "es5",
"singleQuote": true,
"printWidth": 100,
"tabWidth": 2,
"useTabs": false
}

View File

@ -0,0 +1,66 @@
version: '3'
services:
# Note: If you are assigning a custom name to your db service on the line below, make sure it does not contain underscores
db:
image: 'postgres:latest'
environment:
POSTGRES_USER: supertokens
POSTGRES_PASSWORD: supertokens
POSTGRES_DB: supertokens
command: postgres -c shared_preload_libraries='pg_stat_statements' -c pg_stat_statements.track=all -c max_connections=1000 -c shared_buffers=1GB -c synchronous_commit=off -c wal_buffers=16MB -c checkpoint_timeout=30min -c max_wal_size=4GB
ports:
- 5432:5432
networks:
- app_network
restart: unless-stopped
healthcheck:
test: ['CMD', 'pg_isready', '-U', 'supertokens', '-d', 'supertokens']
interval: 5s
timeout: 5s
retries: 5
supertokens:
image: supertokens/supertokens-postgresql
# platform: linux/amd64
depends_on:
db:
condition: service_healthy
ports:
- 3567:3567
environment:
POSTGRESQL_CONNECTION_URI: "postgresql://supertokens:supertokens@db:5432/supertokens"
PASSWORD_HASHING_ALG: "ARGON2"
ARGON2_ITERATIONS: 1
ARGON2_MEMORY_KB: 8
ARGON2_PARALLELISM: 1
ARGON2_HASHING_POOL_SIZE: 8
API_KEYS: "qwertyuiopasdfghjklzxcvbnm"
BULK_MIGRATION_PARALLELISM: "4"
BULK_MIGRATION_BATCH_SIZE: "500"
networks:
- app_network
restart: unless-stopped
healthcheck:
test: >
bash -c 'exec 3<>/dev/tcp/127.0.0.1/3567 && echo -e "GET /hello HTTP/1.1\r\nhost: 127.0.0.1:3567\r\nConnection: close\r\n\r\n" >&3 && cat <&3 | grep "Hello"'
interval: 10s
timeout: 5s
retries: 5
pghero:
image: ankane/pghero
environment:
DATABASE_URL: "postgres://supertokens:supertokens@db:5432/supertokens"
ports:
- 8080:8080
networks:
- app_network
depends_on:
- db
restart: unless-stopped
networks:
app_network:
driver: bridge
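
The supertokens service's healthcheck above waits until GET /hello on port 3567 answers before the stress tests can run. The TypeScript sketch below performs the same readiness poll from the test side; the core URL comes from the port mapping above, while the waitForCore helper itself is hypothetical and not part of this repo.

// Hypothetical readiness helper mirroring the docker-compose healthcheck:
// poll GET /hello until the core responds, or give up after a timeout.
const CORE_URL = 'http://localhost:3567'; // port mapped in docker-compose.yml

async function waitForCore(timeoutMs = 60000): Promise<void> {
  const deadline = Date.now() + timeoutMs;
  while (Date.now() < deadline) {
    try {
      const res = await fetch(`${CORE_URL}/hello`);
      if (res.ok && (await res.text()).includes('Hello')) {
        return; // same condition the compose healthcheck greps for
      }
    } catch {
      // core not reachable yet; keep polling
    }
    await new Promise((resolve) => setTimeout(resolve, 2000));
  }
  throw new Error('SuperTokens core did not become healthy in time');
}

waitForCore().then(() => console.log('core is ready'));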

stress-tests/package.json Normal file
View File

@ -0,0 +1,27 @@
{
"name": "stress-tests",
"version": "1.0.0",
"description": "Stress tests for SuperTokens",
"main": "dist/index.js",
"scripts": {
"build": "tsc",
"start": "node dist/index.js",
"generate-users": "rm -rf users && mkdir -p users && ts-node src/oneMillionUsers/generateUsers.ts",
"one-million-users": "ts-node src/oneMillionUsers/index.ts",
"format": "prettier --write \"**/*.{ts,js,json}\""
},
"keywords": [],
"author": "",
"license": "ISC",
"devDependencies": {
"@types/node": "^20.11.24",
"prettier": "^3.5.3",
"ts-node": "^10.9.2",
"typescript": "^5.3.3"
},
"dependencies": {
"@types/uuid": "^10.0.0",
"supertokens-node": "16.7.6",
"uuid": "^11.1.0"
}
}

View File

@ -0,0 +1,143 @@
import * as fs from 'fs';
export const LICENSE_FOR_TEST =
'E1yITHflaFS4BPm7n0bnfFCjP4sJoTERmP0J=kXQ5YONtALeGnfOOe2rf2QZ0mfOh0aO3pBqfF-S0jb0ABpat6pySluTpJO6jieD6tzUOR1HrGjJO=50Ob3mHi21tQH1';
export const createStInstanceForTest = async () => {
return {
deployment_id: '1234567890',
core_url: 'http://localhost:3567',
api_key: 'qwertyuiopasdfghjklzxcvbnm',
};
};
export const deleteStInstance = async (deploymentId: string) => {
// noop
};
export const formatTime = (ms: number): string => {
const seconds = Math.floor(ms / 1000);
if (seconds < 60) {
return `${seconds}s`;
}
const minutes = Math.floor(seconds / 60);
const remainingSeconds = seconds % 60;
return `${minutes}m ${remainingSeconds}s`;
};
export const workInBatches = async <T>(
count: number,
numberOfBatches: number,
work: (idx: number) => Promise<T>
): Promise<T[]> => {
const batchSize = Math.ceil(count / numberOfBatches);
const batches = [];
let workCount = 0;
const st = Date.now();
let done = numberOfBatches;
for (let b = 0; b < numberOfBatches; b++) {
batches.push(
(async () => {
const startIndex = b * batchSize;
const endIndex = Math.min(startIndex + batchSize, count);
const batchResults: T[] = [];
for (let i = startIndex; i < endIndex; i++) {
batchResults.push(await work(i));
workCount++;
}
done--;
return batchResults;
})()
);
}
batches.push(
(async () => {
while (done > 0) {
await new Promise((resolve) => setTimeout(resolve, 5000));
const en = Date.now();
console.log(
` Progress: Time=${formatTime(en - st)}, Completed=${workCount}, Throughput=${Math.round((workCount / (en - st)) * 10000) / 10}/s`
);
}
return [];
})()
);
const results = await Promise.all(batches);
return results.flat();
};
export const setupLicense = async (coreUrl: string, apiKey: string) => {
try {
const response = await fetch(`${coreUrl}/ee/license`, {
method: 'PUT',
headers: {
'Content-Type': 'application/json',
'api-key': apiKey,
},
body: JSON.stringify({
licenseKey: LICENSE_FOR_TEST,
}),
});
if (!response.ok) {
throw new Error(`Failed with status: ${response.status}`);
}
const responseText = await response.text();
console.log('License response:', responseText);
console.log('License key set successfully');
} catch (error) {
console.error('Failed to set license key:', error);
throw error;
}
};
export class StatsCollector {
private static instance: StatsCollector;
private measurements: { title: string; timeMs: number }[] = [];
private constructor() {}
public static getInstance(): StatsCollector {
if (!StatsCollector.instance) {
StatsCollector.instance = new StatsCollector();
}
return StatsCollector.instance;
}
public addMeasurement(title: string, timeMs: number) {
this.measurements.push({ title, timeMs });
}
public getStats() {
return this.measurements;
}
public writeToFile() {
const formattedMeasurements = this.measurements.map((measurement) => ({
title: measurement.title,
ms: measurement.timeMs,
formatted: formatTime(measurement.timeMs),
}));
const stats = {
measurements: formattedMeasurements,
timestamp: new Date().toISOString(),
};
fs.writeFileSync('stats.json', JSON.stringify(stats, null, 2));
}
}
export const measureTime = async <T>(title: string, fn: () => Promise<T>): Promise<T> => {
const st = Date.now();
const result = await fn();
const et = Date.now();
const timeMs = et - st;
console.log(` ${title} took ${formatTime(timeMs)}`);
StatsCollector.getInstance().addMeasurement(title, timeMs);
return result;
};
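
measureTime and workInBatches are the two building blocks every stress-test step uses: workInBatches spreads count units of work across numberOfBatches concurrent workers and logs throughput every five seconds, while measureTime times the whole step and records it in StatsCollector. Below is a minimal usage sketch, assuming it lives in a sibling of src/common like the existing steps; the per-index work is a stand-in, not something the stress tests actually do.

// Usage sketch for the helpers above; the simulated work is illustrative only.
import { measureTime, workInBatches, StatsCollector } from '../common/utils';

async function demo() {
  const results = await measureTime('Demo step', () =>
    workInBatches(1000, 8, async (idx) => {
      await new Promise((resolve) => setTimeout(resolve, 5)); // pretend ~5ms of work
      return idx * 2;
    })
  );
  console.log(`Processed ${results.length} items`);
  StatsCollector.getInstance().writeToFile(); // persists timings to stats.json
}

demo().catch(console.error);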

View File

@ -0,0 +1,24 @@
import SuperTokens from 'supertokens-node';
import AccountLinking from 'supertokens-node/recipe/accountlinking';
import { measureTime, workInBatches } from '../common/utils';
export const doAccountLinking = async (
users: { recipeUserId: string; email?: string; phoneNumber?: string }[][]
) => {
console.log('\n\n2. Linking accounts');
await measureTime('Linking accounts', async () => {
await workInBatches(users.length, 8, async (idx) => {
const userSet = users[idx]!;
await AccountLinking.createPrimaryUser(
SuperTokens.convertToRecipeUserId(userSet[0].recipeUserId)
);
for (const user of userSet.slice(1)) {
await AccountLinking.linkAccounts(
SuperTokens.convertToRecipeUserId(user.recipeUserId),
userSet[0].recipeUserId
);
}
});
});
};

View File

@ -0,0 +1,18 @@
import SuperTokens from 'supertokens-node';
import UserRoles from 'supertokens-node/recipe/userroles';
import { measureTime, workInBatches } from '../common/utils';
export const addRoles = async (
users: { recipeUserId: string; email?: string; phoneNumber?: string }[]
) => {
console.log('\n\n4. Adding roles');
await measureTime('Adding roles', async () => {
await UserRoles.createNewRoleOrAddPermissions('admin', ['p1', 'p2']);
await workInBatches(users.length, 8, async (idx) => {
const user = users[idx]!;
await UserRoles.addRoleToUser('public', user.recipeUserId, 'admin');
});
});
};

View File

@ -0,0 +1,19 @@
import SuperTokens from 'supertokens-node';
import Session from 'supertokens-node/recipe/session';
import { measureTime, workInBatches } from '../common/utils';
export const createSessions = async (
users: { recipeUserId: string; email?: string; phoneNumber?: string }[]
) => {
console.log('\n\n5. Creating sessions');
await measureTime('Creating sessions', async () => {
await workInBatches(users.length, 8, async (idx) => {
const user = users[idx]!;
await Session.createNewSessionWithoutRequestResponse(
'public',
SuperTokens.convertToRecipeUserId(user.recipeUserId),
);
});
});
};

View File

@ -0,0 +1,25 @@
import { measureTime, workInBatches } from '../common/utils';
import SuperTokens from 'supertokens-node';
export const createUserIdMappings = async (
users: { recipeUserId: string; email?: string; phoneNumber?: string }[]
) => {
console.log('\n\n3. Create user id mappings');
await measureTime('Create user id mappings', async () => {
await workInBatches(users.length, 8, async (idx) => {
const user = users[idx]!;
if (Math.random() < 0.5) {
const newUserId = Array(64)
.fill(0)
.map(() => String.fromCharCode(97 + Math.floor(Math.random() * 26)))
.join('');
await SuperTokens.createUserIdMapping({
superTokensUserId: user.recipeUserId,
externalUserId: newUserId,
});
user.recipeUserId = newUserId;
}
});
});
};

View File

@ -0,0 +1,128 @@
import EmailPassword from 'supertokens-node/recipe/emailpassword';
import Passwordless from 'supertokens-node/recipe/passwordless';
import ThirdParty from 'supertokens-node/recipe/thirdparty';
import { workInBatches, measureTime } from '../common/utils';
const TOTAL_USERS = 10000;
const createEmailPasswordUsers = async () => {
console.log(` Creating EmailPassword users...`);
return await workInBatches(Math.floor(TOTAL_USERS / 5), 4, async (idx) => {
const email =
Array(64)
.fill(0)
.map(() => String.fromCharCode(97 + Math.floor(Math.random() * 26)))
.join('') + '@example.com';
const createdUser = await EmailPassword.signUp('public', email, 'password');
// expect(createdUser.status).toBe("OK");
if (createdUser.status === 'OK') {
return {
recipeUserId: createdUser.user.id,
email: email,
};
}
});
};
const createPasswordlessUsersWithEmail = async () => {
console.log(` Creating Passwordless users (with email)...`);
return await workInBatches(Math.floor(TOTAL_USERS / 5), 4, async (idx) => {
const email =
Array(64)
.fill(0)
.map(() => String.fromCharCode(97 + Math.floor(Math.random() * 26)))
.join('') + '@example.com';
const createdUser = await Passwordless.signInUp({
tenantId: 'public',
email,
});
// expect(createdUser.status).toBe("OK");
if (createdUser.status === 'OK') {
return {
recipeUserId: createdUser.user.id,
email,
};
}
});
};
const createPasswordlessUsersWithPhone = async () => {
console.log(` Creating Passwordless users (with phone)...`);
return await workInBatches(Math.floor(TOTAL_USERS / 5), 4, async (idx) => {
const phoneNumber = `+1${Math.floor(Math.random() * 10000000000)}`;
const createdUser = await Passwordless.signInUp({
tenantId: 'public',
phoneNumber,
});
// expect(createdUser.status).toBe("OK");
if (createdUser.status === 'OK') {
return {
recipeUserId: createdUser.user.id,
phoneNumber,
};
}
});
};
const createThirdPartyUsers = async (thirdPartyId: string) => {
console.log(` Creating ThirdParty (${thirdPartyId}) users...`);
return await workInBatches(Math.floor(TOTAL_USERS / 5), 4, async (idx) => {
const email =
Array(64)
.fill(0)
.map(() => String.fromCharCode(97 + Math.floor(Math.random() * 26)))
.join('') + '@example.com';
const tpUserId = Array(64)
.fill(0)
.map(() => String.fromCharCode(97 + Math.floor(Math.random() * 26)))
.join('');
const createdUser = await ThirdParty.manuallyCreateOrUpdateUser(
'public',
thirdPartyId,
tpUserId,
email,
true
);
// expect(createdUser.status).toBe("OK");
if (createdUser.status === 'OK') {
return {
recipeUserId: createdUser.user.id,
email,
};
}
});
};
export const createUsers = async () => {
console.log('\n\n1. Create one million users');
const epUsers = await measureTime('Emailpassword users creation', createEmailPasswordUsers);
const plessEmailUsers = await measureTime(
'Passwordless users (with email) creation',
createPasswordlessUsersWithEmail
);
const plessPhoneUsers = await measureTime(
'Passwordless users (with phone) creation',
createPasswordlessUsersWithPhone
);
const tpUsers1 = await measureTime('ThirdParty users (google) creation', () =>
createThirdPartyUsers('google')
);
const tpUsers2 = await measureTime('ThirdParty users (facebook) creation', () =>
createThirdPartyUsers('facebook')
);
return {
epUsers,
plessEmailUsers,
plessPhoneUsers,
tpUsers1,
tpUsers2,
};
};

View File

@ -0,0 +1,193 @@
import * as fs from 'fs';
import { v4 as uuidv4 } from 'uuid';
const USERS_TO_GENERATE = 1000000;
const USERS_PER_JSON = 10000;
const n = Math.floor(USERS_TO_GENERATE / USERS_PER_JSON);
const generatedEmails = new Set<string>();
const generatedPhoneNumbers = new Set<string>();
const generatedUserIds = new Set<string>();
interface LoginMethod {
tenantIds: string[];
email: string;
recipeId: string;
passwordHash?: string;
hashingAlgorithm?: string;
thirdPartyId?: string;
thirdPartyUserId?: string;
phoneNumber?: string;
isVerified: boolean;
isPrimary: boolean;
timeJoinedInMSSinceEpoch: number;
}
interface User {
externalUserId: string;
userRoles: Array<{
role: string;
tenantIds: string[];
}>;
loginMethods: LoginMethod[];
}
function createEmailLoginMethod(email: string, tenantIds: string[]): LoginMethod {
return {
tenantIds,
email,
recipeId: 'emailpassword',
passwordHash: '$argon2d$v=19$m=12,t=3,p=1$aGI4enNvMmd0Zm0wMDAwMA$r6p7qbr6HD+8CD7sBi4HVw',
hashingAlgorithm: 'argon2',
isVerified: true,
isPrimary: false,
timeJoinedInMSSinceEpoch:
Math.floor(Math.random() * (Date.now() - 3 * 365 * 24 * 60 * 60 * 1000)) +
3 * 365 * 24 * 60 * 60 * 1000,
};
}
function createThirdPartyLoginMethod(email: string, tenantIds: string[]): LoginMethod {
return {
tenantIds,
recipeId: 'thirdparty',
email,
thirdPartyId: 'google',
thirdPartyUserId: String(hashCode(email)),
isVerified: true,
isPrimary: false,
timeJoinedInMSSinceEpoch:
Math.floor(Math.random() * (Date.now() - 3 * 365 * 24 * 60 * 60 * 1000)) +
3 * 365 * 24 * 60 * 60 * 1000,
};
}
function createPasswordlessLoginMethod(
email: string,
tenantIds: string[],
phoneNumber: string
): LoginMethod {
return {
tenantIds,
email,
recipeId: 'passwordless',
phoneNumber,
isVerified: true,
isPrimary: false,
timeJoinedInMSSinceEpoch:
Math.floor(Math.random() * (Date.now() - 3 * 365 * 24 * 60 * 60 * 1000)) +
3 * 365 * 24 * 60 * 60 * 1000,
};
}
function hashCode(str: string): number {
let hash = 0;
for (let i = 0; i < str.length; i++) {
const char = str.charCodeAt(i);
hash = (hash << 5) - hash + char;
hash = hash & hash;
}
return hash;
}
function generateRandomString(length: number, chars: string): string {
let result = '';
for (let i = 0; i < length; i++) {
result += chars.charAt(Math.floor(Math.random() * chars.length));
}
return result;
}
function generateRandomEmail(): string {
return `${generateRandomString(24, 'abcdefghijklmnopqrstuvwxyz')}@example.com`;
}
function generateRandomPhoneNumber(): string {
return `+91${generateRandomString(10, '0123456789')}`;
}
function genUser(): User {
const user: User = {
externalUserId: '',
userRoles: [
{ role: 'role1', tenantIds: ['public'] },
{ role: 'role2', tenantIds: ['public'] },
],
loginMethods: [],
};
let userId = `e-${uuidv4()}`;
while (generatedUserIds.has(userId)) {
userId = `e-${uuidv4()}`;
}
generatedUserIds.add(userId);
user.externalUserId = userId;
const tenantIds = ['public'];
let email = generateRandomEmail();
while (generatedEmails.has(email)) {
email = generateRandomEmail();
}
generatedEmails.add(email);
const loginMethods: LoginMethod[] = [];
// Always add email login method
loginMethods.push(createEmailLoginMethod(email, tenantIds));
// 50% chance to add third party login
if (Math.random() < 0.5) {
loginMethods.push(createThirdPartyLoginMethod(email, tenantIds));
}
// 50% chance to add passwordless login
if (Math.random() < 0.5) {
let phoneNumber = generateRandomPhoneNumber();
while (generatedPhoneNumbers.has(phoneNumber)) {
phoneNumber = generateRandomPhoneNumber();
}
generatedPhoneNumbers.add(phoneNumber);
loginMethods.push(createPasswordlessLoginMethod(email, tenantIds, phoneNumber));
}
// If no methods were added, randomly add one
if (loginMethods.length === 0) {
const methodNumber = Math.floor(Math.random() * 3);
if (methodNumber === 0) {
loginMethods.push(createEmailLoginMethod(email, tenantIds));
} else if (methodNumber === 1) {
loginMethods.push(createThirdPartyLoginMethod(email, tenantIds));
} else {
let phoneNumber = generateRandomPhoneNumber();
while (generatedPhoneNumbers.has(phoneNumber)) {
phoneNumber = generateRandomPhoneNumber();
}
generatedPhoneNumbers.add(phoneNumber);
loginMethods.push(createPasswordlessLoginMethod(email, tenantIds, phoneNumber));
}
}
loginMethods[Math.floor(Math.random() * loginMethods.length)].isPrimary = true;
user.loginMethods = loginMethods;
return user;
}
// Create users directory if it doesn't exist
if (!fs.existsSync('users')) {
fs.mkdirSync('users');
}
for (let i = 0; i < n; i++) {
console.log(`Generating ${USERS_PER_JSON} users for ${i}`);
const users: User[] = [];
for (let j = 0; j < USERS_PER_JSON; j++) {
users.push(genUser());
}
fs.writeFileSync(
`users/users-${i.toString().padStart(4, '0')}.json`,
JSON.stringify({ users }, null, 2)
);
}
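
generateUsers.ts writes n JSON files of USERS_PER_JSON users each into users/, shaped like the User interface above: every user gets an emailpassword login method, optionally a thirdparty and/or passwordless one, and exactly one method marked isPrimary. The short sketch below reads one generated file back and sanity-checks that invariant; the checker itself is hypothetical and not part of this repo.

// Hypothetical sanity check for the generated files; relies only on the
// User/LoginMethod shape defined in generateUsers.ts above.
import * as fs from 'fs';

const { users } = JSON.parse(fs.readFileSync('users/users-0000.json', 'utf8')) as {
  users: { externalUserId: string; loginMethods: { recipeId: string; isPrimary: boolean }[] }[];
};

for (const user of users) {
  const primaries = user.loginMethods.filter((lm) => lm.isPrimary).length;
  if (primaries !== 1) {
    throw new Error(`${user.externalUserId} has ${primaries} primary login methods`);
  }
}
console.log(`users-0000.json OK: ${users.length} users, one primary login method each`);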

View File

@ -0,0 +1,149 @@
import {
createStInstanceForTest,
deleteStInstance,
setupLicense,
StatsCollector,
} from '../common/utils';
import SuperTokens from 'supertokens-node';
import EmailPassword from 'supertokens-node/recipe/emailpassword';
import Passwordless from 'supertokens-node/recipe/passwordless';
import ThirdParty from 'supertokens-node/recipe/thirdparty';
import UserRoles from 'supertokens-node/recipe/userroles';
import Session from 'supertokens-node/recipe/session';
import { createUsers } from './createUsers';
import { doAccountLinking } from './accountLinking';
import { createUserIdMappings } from './createUserIdMappings';
import { addRoles } from './addRoles';
import { createSessions } from './createSessions';
function stInit(connectionURI: string, apiKey: string) {
SuperTokens.init({
appInfo: {
appName: 'SuperTokens',
apiDomain: 'http://localhost:3001',
websiteDomain: 'http://localhost:3000',
apiBasePath: '/auth',
websiteBasePath: '/auth',
},
supertokens: {
connectionURI: connectionURI,
apiKey: apiKey,
},
recipeList: [
EmailPassword.init(),
Passwordless.init({
contactMethod: 'EMAIL_OR_PHONE',
flowType: 'USER_INPUT_CODE',
}),
ThirdParty.init({
signInAndUpFeature: {
providers: [
{
config: { thirdPartyId: 'google' },
},
{
config: { thirdPartyId: 'facebook' },
},
],
},
}),
UserRoles.init(),
Session.init(),
],
});
}
async function main() {
const deployment = await createStInstanceForTest();
console.log(`Deployment created: ${deployment.core_url}`);
try {
stInit(deployment.core_url, deployment.api_key);
await setupLicense(deployment.core_url, deployment.api_key);
// 1. Create one million users
const users = await createUsers();
// Randomly create groups of users for linking
const allUsers: ({ recipeUserId: string; email?: string; phoneNumber?: string } | undefined)[] =
[
...users.epUsers,
...users.plessEmailUsers,
...users.plessPhoneUsers,
...users.tpUsers1,
...users.tpUsers2,
];
const usersToLink: { recipeUserId: string; email?: string; phoneNumber?: string }[][] = [];
while (allUsers.length > 0) {
const userSet: { recipeUserId: string; email?: string; phoneNumber?: string }[] = [];
const numAccounts = Math.min(Math.floor(Math.random() * 5 + 1), allUsers.length);
for (let i = 0; i < numAccounts; i++) {
const randomIndex = Math.floor(Math.random() * allUsers.length);
userSet.push(allUsers[randomIndex]!);
allUsers.splice(randomIndex, 1);
}
usersToLink.push(userSet);
}
// 2. Link accounts
await doAccountLinking(usersToLink);
// 3. Create user id mappings
const allUsersForMapping = [
...users.epUsers,
...users.plessEmailUsers,
...users.plessPhoneUsers,
...users.tpUsers1,
...users.tpUsers2,
].filter((user) => user !== undefined) as {
recipeUserId: string;
email?: string;
phoneNumber?: string;
}[];
await createUserIdMappings(allUsersForMapping);
// 4. Add roles
await addRoles(allUsersForMapping);
// 5. Create sessions
await createSessions(allUsersForMapping);
// 6. List all users
console.log('\n\n6. Listing all users');
let userCount = 0;
let paginationToken: string | undefined;
while (true) {
const result = await SuperTokens.getUsersNewestFirst({
tenantId: 'public',
paginationToken,
});
for (const user of result.users) {
userCount++;
}
paginationToken = result.nextPaginationToken;
if (result.nextPaginationToken === undefined) break;
}
console.log(`Users count: ${userCount}`);
// 7. Count users
console.log('\n\n7. Count users');
const total = await SuperTokens.getUserCount();
console.log(`Users count: ${total}`);
// Write stats to file
StatsCollector.getInstance().writeToFile();
console.log('\nStats written to stats.json');
} catch (error) {
console.error('An error occurred during execution:', error);
throw error;
} finally {
await deleteStInstance(deployment.deployment_id);
}
}
main();

View File

@ -0,0 +1,16 @@
{
"compilerOptions": {
"target": "es2016",
"module": "commonjs",
"lib": ["ES2020"],
"strict": true,
"esModuleInterop": true,
"skipLibCheck": true,
"forceConsistentCasingInFileNames": true,
"outDir": "./dist",
"rootDir": "./src",
"types": ["node"]
},
"include": ["src/**/*"],
"exclude": ["node_modules"]
}