Compare commits

..

24 Commits
master ... 6.0

Author SHA1 Message Date
Tamas Soltesz fe894ef8a0
fix: version bump for backport release (#1187)
* fix: version bump for backport release

* Update CHANGELOG.md

Co-authored-by: graphite-app[bot] <96075541+graphite-app[bot]@users.noreply.github.com>

---------

Co-authored-by: Sattvik Chakravarthy <sattvik@supertokens.com>
Co-authored-by: graphite-app[bot] <96075541+graphite-app[bot]@users.noreply.github.com>
2025-09-03 11:51:53 +05:30
Sattvik Chakravarthy c0fb714688
fix: Create do-release.yml 2025-08-11 11:11:11 +05:30
Sattvik Chakravarthy 42d4ac0027
fix: add-dev-tag.yml 2025-08-09 12:16:38 +05:30
Sattvik Chakravarthy 68325eb117
fix: Create add-dev-tag.yml 2025-08-09 12:06:22 +05:30
Sattvik Chakravarthy 7bcd27cd25
fix: Create wait-for-docker.py 2025-08-09 11:56:22 +05:30
Sattvik Chakravarthy 2447f87ca9
fix: for backport release 6.0 (#1176)
* fix: remove circle ci, add github action tests

* fix: update supertokens-root branch

* fix: dev docker image

* fix: supertokens-root branch

* experiment: parallel running tests

* experiment: parallel running tests

* experiment: parallel running tests

* fix: dependencies

* fix: test command

* fix: test gen

* fix: remove unnecessary checkout

* fix: test command

* fix: glob

* fix: test names

* fix: test naming

* fix: failing test

* fix: devtag and stress test flows

---------

Co-authored-by: tamassoltesz <tamas@supertokens.com>
2025-08-09 11:53:52 +05:30
Mihály Lengyel f13f2ba4e4
Merge pull request #1169 from supertokens/backport/logs_to_otel_60
backport: logs to otel 60
2025-08-04 12:57:13 +02:00
tamassoltesz e774df6cf6 backport: logs to otel
fix: add implementationDependencies.json dependencies

chore: build version and changelog

fix: add missing config and devConfig entries

fix: remove accidentally merged lines
2025-07-25 15:06:54 +02:00
rishabhpoddar 5069bf35ed adding dev-v6.0.19 tag to this commit to ensure building 2024-03-29 18:27:39 +05:30
Sattvik Chakravarthy 08079bc5d5
fix: backport to core 6.0 (#972) 2024-03-29 17:54:03 +05:30
rishabhpoddar e1b37b0be3 adding dev-v6.0.18 tag to this commit to ensure building 2024-02-27 18:13:32 +05:30
Sattvik Chakravarthy 93df2080fc
fix: vulnerability fix (backport to 6.0) (#930)
* fix: vulnerability fix

* fix: backport

* fix: test
2024-02-27 18:09:21 +05:30
rishabhpoddar 1d18e368cb adding dev-v6.0.17 tag to this commit to ensure building 2024-02-10 14:29:15 +05:30
Sattvik Chakravarthy 48754e2b7c
fix: load only cud config in core (#920)
* fix: load only cud

* fix: connection pool handling

* fix: tests

* fix: version update

* fix: tests
2024-02-10 14:24:53 +05:30
rishabhpoddar 01cef02d41 adding dev-v6.0.16 tag to this commit to ensure building 2023-11-03 16:22:49 +05:30
Sattvik Chakravarthy b3585da402
fix: requests stats (#876) 2023-11-03 16:21:35 +05:30
rishabhpoddar 21ea25a722 adding dev-v6.0.15 tag to this commit to ensure building 2023-10-18 18:59:57 +05:30
Sattvik Chakravarthy 497fe8623e
fix: skip postgres tests (#857)
* fix: skip postgres tests

* fix: typo
2023-10-18 18:58:54 +05:30
rishabhpoddar a97b7e88d6 adding dev-v6.0.15 tag to this commit to ensure building 2023-10-18 13:13:18 +05:30
Sattvik Chakravarthy 3d8564bf5c
fix: test (#855) 2023-10-18 13:12:33 +05:30
rishabhpoddar b4b266f950 adding dev-v6.0.15 tag to this commit to ensure building 2023-10-18 12:50:25 +05:30
Sattvik Chakravarthy 50d860218d
fix: crontask per app issue (#854) 2023-10-18 12:48:54 +05:30
rishabhpoddar 6a6a1d4879 adding dev-v6.0.14 tag to this commit to ensure building 2023-10-12 11:34:45 +05:30
Sattvik Chakravarthy c270d27337
fix: duplicate cron task (#835) 2023-10-12 11:33:35 +05:30
751 changed files with 9461 additions and 390382 deletions

View File

@ -1,64 +0,0 @@
# CI test-runner image: Ubuntu 22.04 with MySQL, PostgreSQL 13 and
# Temurin JDK 21 preinstalled for the supertokens-core test suite.
FROM ubuntu:22.04
RUN apt-get update -y
#&& apt-get upgrade -y
RUN apt-get install build-essential -y --fix-missing
# preseed the MySQL root password so the install is non-interactive
RUN echo "mysql-server mysql-server/root_password password root" | debconf-set-selections
RUN echo "mysql-server mysql-server/root_password_again password root" | debconf-set-selections
RUN apt install mysql-server -y
RUN usermod -d /var/lib/mysql/ mysql
RUN [ -d /var/run/mysqld ] || mkdir -p /var/run/mysqld
# helper that fixes ownership and starts mysqld (see runMySQL.sh)
ADD ./runMySQL.sh /runMySQL.sh
RUN chmod +x /runMySQL.sh
RUN apt-get install -y git-core
RUN apt-get install -y wget
RUN mkdir /usr/java
RUN apt-get install jq -y
RUN apt-get install curl -y
RUN apt-get install unzip -y
# Install OpenJDK 21.0.7
RUN wget https://github.com/adoptium/temurin21-binaries/releases/download/jdk-21.0.7%2B6/OpenJDK21U-jdk_x64_linux_hotspot_21.0.7_6.tar.gz
RUN mv OpenJDK21U-jdk_x64_linux_hotspot_21.0.7_6.tar.gz /usr/java
RUN cd /usr/java && tar -xzvf OpenJDK21U-jdk_x64_linux_hotspot_21.0.7_6.tar.gz -C /usr/java/
# the Temurin archive extracts to jdk-21.0.7+6; rename to match JAVA_HOME below
RUN mv /usr/java/jdk-21.0.7+6 /usr/java/jdk-21.0.7
RUN echo 'JAVA_HOME=/usr/java/jdk-21.0.7' >> /etc/profile
RUN echo 'JRE_HOME=/usr/java/jdk-21.0.7' >> /etc/profile
RUN echo 'PATH=$PATH:$HOME/bin:$JAVA_HOME/bin' >> /etc/profile
RUN echo 'export JAVA_HOME' >> /etc/profile
RUN echo 'export JRE_HOME' >> /etc/profile
RUN echo 'export PATH' >> /etc/profile
RUN update-alternatives --install "/usr/bin/java" "java" "/usr/java/jdk-21.0.7/bin/java" 1
RUN update-alternatives --install "/usr/bin/javac" "javac" "/usr/java/jdk-21.0.7/bin/javac" 1
#install postgres 13
# Import Repository Signing Key
RUN wget --quiet -O - https://www.postgresql.org/media/keys/ACCC4CF8.asc | apt-key add -
# tzdata must be installed non-interactively or the build hangs on a prompt
RUN DEBIAN_FRONTEND=noninteractive TZ=Etc/UTC apt-get -y install tzdata
RUN apt install curl gpg gnupg2 software-properties-common apt-transport-https lsb-release ca-certificates sudo -y
# Add PostgreSQL repository
RUN echo "deb http://apt.postgresql.org/pub/repos/apt/ `lsb_release -cs`-pgdg main" | tee /etc/apt/sources.list.d/pgdg.list
# Update again
RUN apt update
# Install PostgreSQL 13
RUN apt install -y postgresql-13
# Verify PostgreSQL 13 Installation on Ubuntu 22.04|20.04|18.04
RUN psql --version
# Manage PostgreSQL 13 service
#you can manage with `service postgresql start`

View File

@ -1,57 +0,0 @@
# CI test-runner image: Ubuntu 16.04 with MySQL and two JDKs
# (JDK 12 as the system default, JDK 21 available for newer builds).
FROM ubuntu:16.04
RUN apt-get update && apt-get upgrade -y
RUN apt-get install build-essential -y
# preseed the MySQL root password so the install is non-interactive
RUN echo "mysql-server mysql-server/root_password password root" | debconf-set-selections
RUN echo "mysql-server mysql-server/root_password_again password root" | debconf-set-selections
RUN apt install mysql-server -y
RUN usermod -d /var/lib/mysql/ mysql
RUN mkdir /var/run/mysqld
ADD ./runMySQL.sh /runMySQL.sh
RUN chmod +x /runMySQL.sh
RUN apt-get install -y git-core
RUN apt-get install -y wget
# Install OpenJDK 12
RUN wget https://download.java.net/java/GA/jdk12.0.2/e482c34c86bd4bf8b56c0b35558996b9/10/GPL/openjdk-12.0.2_linux-x64_bin.tar.gz
RUN mkdir /usr/java
RUN mv openjdk-12.0.2_linux-x64_bin.tar.gz /usr/java
RUN cd /usr/java && tar -xzvf openjdk-12.0.2_linux-x64_bin.tar.gz
RUN echo 'JAVA_HOME=/usr/java/jdk-12.0.2' >> /etc/profile
RUN echo 'PATH=$PATH:$HOME/bin:$JAVA_HOME/bin' >> /etc/profile
RUN apt-get install jq -y
RUN apt-get install curl -y
RUN apt-get install unzip -y
# Install OpenJDK 21.0.7 (Temurin build). The previous download.java.net
# URL grafted "jdk21.0.7" onto the JDK 15.0.1 hash path
# (51f4f36ad4ef43e39d0dfdbaf6549e32/9) and does not resolve; use the same
# Adoptium release the other CI images use.
RUN wget https://github.com/adoptium/temurin21-binaries/releases/download/jdk-21.0.7%2B6/OpenJDK21U-jdk_x64_linux_hotspot_21.0.7_6.tar.gz
RUN mv OpenJDK21U-jdk_x64_linux_hotspot_21.0.7_6.tar.gz /usr/java
RUN cd /usr/java && tar -xzvf OpenJDK21U-jdk_x64_linux_hotspot_21.0.7_6.tar.gz
# the Temurin archive extracts to jdk-21.0.7+6; rename to match JAVA_HOME below
RUN mv /usr/java/jdk-21.0.7+6 /usr/java/jdk-21.0.7
RUN echo 'JAVA_HOME=/usr/java/jdk-21.0.7' >> /etc/profile
RUN echo 'PATH=$PATH:$HOME/bin:$JAVA_HOME/bin' >> /etc/profile
RUN echo 'export JAVA_HOME' >> /etc/profile
RUN echo 'export JRE_HOME' >> /etc/profile
RUN echo 'export PATH' >> /etc/profile
# JDK 12 stays the system default; jobs that need 21 re-register
# alternatives at a higher priority themselves (see doTests.sh)
RUN update-alternatives --install "/usr/bin/java" "java" "/usr/java/jdk-12.0.2/bin/java" 1
RUN update-alternatives --install "/usr/bin/javac" "javac" "/usr/java/jdk-12.0.2/bin/javac" 1

View File

@ -1,57 +0,0 @@
# CI test-runner image: Ubuntu 18.04 with MySQL and two JDKs
# (JDK 12 as the system default, JDK 15.0.1 also installed).
FROM ubuntu:18.04
RUN apt-get update && apt-get upgrade -y
RUN apt-get install build-essential -y
# preseed the MySQL root password so the install is non-interactive
RUN echo "mysql-server mysql-server/root_password password root" | debconf-set-selections
RUN echo "mysql-server mysql-server/root_password_again password root" | debconf-set-selections
RUN apt install mysql-server -y
RUN usermod -d /var/lib/mysql/ mysql
RUN mkdir /var/run/mysqld
ADD ./runMySQL.sh /runMySQL.sh
RUN chmod +x /runMySQL.sh
RUN apt-get install -y git-core
RUN apt-get install -y wget
# Install OpenJDK 12
RUN wget https://download.java.net/java/GA/jdk12.0.2/e482c34c86bd4bf8b56c0b35558996b9/10/GPL/openjdk-12.0.2_linux-x64_bin.tar.gz
RUN mkdir /usr/java
RUN mv openjdk-12.0.2_linux-x64_bin.tar.gz /usr/java
RUN cd /usr/java && tar -xzvf openjdk-12.0.2_linux-x64_bin.tar.gz
RUN echo 'JAVA_HOME=/usr/java/jdk-12.0.2' >> /etc/profile
RUN echo 'PATH=$PATH:$HOME/bin:$JAVA_HOME/bin' >> /etc/profile
RUN apt-get install jq -y
RUN apt-get install curl -y
RUN apt-get install unzip -y
# Install OpenJDK 15.0.1
RUN wget https://download.java.net/java/GA/jdk15.0.1/51f4f36ad4ef43e39d0dfdbaf6549e32/9/GPL/openjdk-15.0.1_linux-x64_bin.tar.gz
RUN mv openjdk-15.0.1_linux-x64_bin.tar.gz /usr/java
RUN cd /usr/java && tar -xzvf openjdk-15.0.1_linux-x64_bin.tar.gz
# note: JAVA_HOME in /etc/profile now points at 15.0.1 (last write wins)
RUN echo 'JAVA_HOME=/usr/java/jdk-15.0.1' >> /etc/profile
RUN echo 'PATH=$PATH:$HOME/bin:$JAVA_HOME/bin' >> /etc/profile
RUN echo 'export JAVA_HOME' >> /etc/profile
RUN echo 'export JRE_HOME' >> /etc/profile
RUN echo 'export PATH' >> /etc/profile
# NOTE(review): alternatives still register JDK 12 even though 15.0.1 is
# installed above — presumably JDK 12 is the intended default; confirm.
RUN update-alternatives --install "/usr/bin/java" "java" "/usr/java/jdk-12.0.2/bin/java" 1
RUN update-alternatives --install "/usr/bin/javac" "javac" "/usr/java/jdk-12.0.2/bin/javac" 1

View File

@ -1,63 +0,0 @@
# CI test-runner image (variant): Ubuntu 22.04 with MySQL, PostgreSQL 13
# and Temurin JDK 21; extracts the JDK directly into a pre-made directory.
FROM ubuntu:22.04
RUN apt-get update && apt-get upgrade -y
RUN apt-get install build-essential -y
# preseed the MySQL root password so the install is non-interactive
RUN echo "mysql-server mysql-server/root_password password root" | debconf-set-selections
RUN echo "mysql-server mysql-server/root_password_again password root" | debconf-set-selections
RUN apt install mysql-server -y
RUN usermod -d /var/lib/mysql/ mysql
RUN [ -d /var/run/mysqld ] || mkdir -p /var/run/mysqld
ADD ./runMySQL.sh /runMySQL.sh
RUN chmod +x /runMySQL.sh
RUN apt-get install -y git-core
RUN apt-get install -y wget
RUN mkdir /usr/java
RUN apt-get install jq -y
RUN apt-get install curl -y
RUN apt-get install unzip -y
# Install OpenJDK 21.0.7
RUN wget https://github.com/adoptium/temurin21-binaries/releases/download/jdk-21.0.7%2B6/OpenJDK21U-jdk_x64_linux_hotspot_21.0.7_6.tar.gz
RUN mv OpenJDK21U-jdk_x64_linux_hotspot_21.0.7_6.tar.gz /usr/java
RUN mkdir -p /usr/java/jdk-21.0.7
# extract into the target dir (contents end up under jdk-21.0.7/jdk-21.0.7+6)
RUN cd /usr/java && tar -xzvf OpenJDK21U-jdk_x64_linux_hotspot_21.0.7_6.tar.gz -C /usr/java/jdk-21.0.7
RUN echo 'JAVA_HOME=/usr/java/jdk-21.0.7' >> /etc/profile
RUN echo 'PATH=$PATH:$HOME/bin:$JAVA_HOME/bin' >> /etc/profile
RUN echo 'export JAVA_HOME' >> /etc/profile
RUN echo 'export JRE_HOME' >> /etc/profile
RUN echo 'export PATH' >> /etc/profile
RUN update-alternatives --install "/usr/bin/java" "java" "/usr/java/jdk-21.0.7/bin/java" 1
RUN update-alternatives --install "/usr/bin/javac" "javac" "/usr/java/jdk-21.0.7/bin/javac" 1
#install postgres 13
# Import Repository Signing Key
RUN wget --quiet -O - https://www.postgresql.org/media/keys/ACCC4CF8.asc | apt-key add -
# tzdata must be installed non-interactively or the build hangs on a prompt
RUN DEBIAN_FRONTEND=noninteractive TZ=Etc/UTC apt-get -y install tzdata
RUN apt install curl gpg gnupg2 software-properties-common apt-transport-https lsb-release ca-certificates sudo -y
# Add PostgreSQL repository
RUN echo "deb http://apt.postgresql.org/pub/repos/apt/ `lsb_release -cs`-pgdg main" | tee /etc/apt/sources.list.d/pgdg.list
# Update again
RUN apt update
# Install PostgreSQL 13
RUN apt install -y postgresql-13
# Verify PostgreSQL 13 Installation on Ubuntu 22.04|20.04|18.04
RUN psql --version
# Manage PostgreSQL 13 service
#you can manage with `service postgresql start`

View File

@ -1,94 +0,0 @@
# CircleCI pipeline: runs the core test suite against each plugin
# (sqlite, mongodb, postgresql, mysql) on dev-v* tags and test-cicd/*
# branches, then marks the version as passed once all suites succeed.
# (Indentation restored — the previous copy had lost YAML structure.)
version: 2.1
orbs:
  slack: circleci/slack@3.4.2
jobs:
  test:
    docker:
      - image: tamassupertokens/supertokens_core_testing
      - image: rishabhpoddar/oauth-server-cicd
      - image: mongo
        environment:
          MONGO_INITDB_ROOT_USERNAME: root
          MONGO_INITDB_ROOT_PASSWORD: root
    resource_class: large
    parallelism: 4
    parameters:
      plugin:
        type: string
    steps:
      - checkout
      - run: mkdir ~/junit
      # raise DB connection limits so the parallel test runs don't exhaust them
      - run: echo $'\n[mysqld]\ncharacter_set_server=utf8mb4\nmax_connections=10000' >> /etc/mysql/mysql.cnf
      - run: echo "host all all 0.0.0.0/0 md5" >> /etc/postgresql/13/main/pg_hba.conf
      - run: echo "listen_addresses='*'" >> /etc/postgresql/13/main/postgresql.conf
      - run: sed -i 's/^#*\s*max_connections\s*=.*/max_connections = 10000/' /etc/postgresql/13/main/postgresql.conf
      - run: (cd .circleci/ && ./doTests.sh << parameters.plugin >>)
      - store_test_results:
          path: ~/junit
      - slack/status
  mark-passed:
    docker:
      - image: tamassupertokens/supertokens_core_testing
    steps:
      - checkout
      - run: (cd .circleci && ./markPassed.sh)
      - slack/status
workflows:
  version: 2
  tagged-build:
    jobs:
      - test:
          plugin: sqlite
          name: test-sqlite
          context:
            - slack-notification
          filters:
            tags:
              only: /dev-v[0-9]+(\.[0-9]+)*/
            branches:
              only: /test-cicd\/.*/
      - test:
          plugin: mongodb
          name: test-mongodb
          context:
            - slack-notification
          filters:
            tags:
              only: /dev-v[0-9]+(\.[0-9]+)*/
            branches:
              only: /test-cicd\/.*/
      - test:
          plugin: postgresql
          name: test-postgresql
          context:
            - slack-notification
          filters:
            tags:
              only: /dev-v[0-9]+(\.[0-9]+)*/
            branches:
              only: /test-cicd\/.*/
      - test:
          plugin: mysql
          name: test-mysql
          context:
            - slack-notification
          filters:
            tags:
              only: /dev-v[0-9]+(\.[0-9]+)*/
            branches:
              only: /test-cicd\/.*/
      # runs only on tags, after every plugin's suite has passed
      - mark-passed:
          context:
            - slack-notification
          filters:
            tags:
              only: /dev-v[0-9]+(\.[0-9]+)*/
            branches:
              ignore: /.*/
          requires:
            - test-sqlite
            - test-mongodb
            - test-postgresql
            - test-mysql

View File

@ -1,260 +0,0 @@
# doTests.sh <pluginName>
# CI driver: for every supported plugin-interface version, clones
# supertokens-root, assembles core + plugin-interface (+ plugin) at the
# matching versions, runs the test suite, then smoke-tests the packaged
# CLI build. Requires SUPERTOKENS_API_KEY in the environment.
function cleanup {
if test -f "pluginInterfaceExactVersionsOutput"; then
rm pluginInterfaceExactVersionsOutput
fi
}
trap cleanup EXIT
# remove leftovers from a previous run before starting
cleanup
pluginToTest=$1
# pinned DBs: the list of plugins this CI run may need to test against
pinnedDBJson=$(curl -s -X GET \
'https://api.supertokens.io/0/plugin/pinned?planType=FREE' \
-H 'api-version: 0')
pinnedDBLength=$(echo "$pinnedDBJson" | jq ".plugins | length")
pinnedDBArray=$(echo "$pinnedDBJson" | jq ".plugins")
echo "got pinned dbs..."
pluginInterfaceJson=$(cat ../pluginInterfaceSupported.json)
pluginInterfaceLength=$(echo "$pluginInterfaceJson" | jq ".versions | length")
pluginInterfaceArray=$(echo "$pluginInterfaceJson" | jq ".versions")
echo "got plugin interface relations"
coreDriverJson=$(cat ../coreDriverInterfaceSupported.json)
coreDriverArray=$(echo "$coreDriverJson" | jq ".versions")
echo "got core driver relations"
# resolves each X.Y plugin-interface version to an exact tag/version and
# writes one JSON object per line to pluginInterfaceExactVersionsOutput
./getPluginInterfaceExactVersions.sh "$pluginInterfaceLength" "$pluginInterfaceArray"
if [[ $? -ne 0 ]]
then
echo "all plugin interfaces found... failed. exiting!"
exit 1
else
echo "all plugin interfaces found..."
fi
# get core version
coreVersion=$(cat ../build.gradle | grep -e "version =" -e "version=")
# extract the value between the first pair of double quotes on the matched line
while IFS='"' read -ra ADDR; do
counter=0
for i in "${ADDR[@]}"; do
if [ $counter == 1 ]
then
coreVersion=$i
fi
counter=$(($counter+1))
done
done <<< "$coreVersion"
# register this core version (with its supported interfaces) via the API
responseStatus=$(curl -s -o /dev/null -w "%{http_code}" -X PUT \
https://api.supertokens.io/0/core \
-H 'Content-Type: application/json' \
-H 'api-version: 0' \
-d "{
\"password\": \"$SUPERTOKENS_API_KEY\",
\"planType\":\"FREE\",
\"version\":\"$coreVersion\",
\"pluginInterfaces\": $pluginInterfaceArray,
\"coreDriverInterfaces\": $coreDriverArray
}")
if [ "$responseStatus" -ne "200" ]
then
echo "failed core PUT API status code: $responseStatus. Exiting!"
exit 1
fi
mkdir -p ~/junit
someTestsRan=false
# fd 10 feeds one resolved plugin-interface version (JSON) per iteration
while read -u 10 line
do
if [[ $line = "" ]]; then
continue
fi
i=0
currTag=$(echo "$line" | jq .tag)
currTag=$(echo "$currTag" | tr -d '"')
currVersion=$(echo "$line" | jq .version)
currVersion=$(echo "$currVersion" | tr -d '"')
# piVersion is the X.Y prefix of the exact plugin-interface version
piX=$(cut -d'.' -f1 <<<"$currVersion")
piY=$(cut -d'.' -f2 <<<"$currVersion")
piVersion="$piX.$piY"
while [ $i -lt "$pinnedDBLength" ]; do
someTestsRan=true
currPinnedDb=$(echo "$pinnedDBArray" | jq ".[$i]")
currPinnedDb=$(echo "$currPinnedDb" | tr -d '"')
i=$((i+1))
# this parallel job only tests the plugin it was asked for
if [[ $currPinnedDb == $pluginToTest ]]
then
echo ""
echo ""
echo ""
echo ""
echo ""
echo "===== testing $currPinnedDb with plugin-interface $currVersion ====="
echo ""
echo ""
echo ""
echo ""
echo ""
if [[ $currPinnedDb == "sqlite" ]]
then
# sqlite needs no separate plugin repo or DB server
# shellcheck disable=SC2034
continue=1
else
# resolve the plugin X.Y version compatible with this plugin-interface
response=$(curl -s -X GET \
"https://api.supertokens.io/0/plugin-interface/dependency/plugin/latest?password=$SUPERTOKENS_API_KEY&planType=FREE&mode=DEV&version=$piVersion&pluginName=$currPinnedDb" \
-H 'api-version: 0')
if [[ $(echo "$response" | jq .plugin) == "null" ]]
then
echo "fetching latest X.Y version for $currPinnedDb given plugin-interface X.Y version: $piVersion gave response: $response"
exit 1
fi
pinnedDbVersionX2=$(echo $response | jq .plugin | tr -d '"')
# then resolve that X.Y to an exact X.Y.Z release and its git tag
response=$(curl -s -X GET \
"https://api.supertokens.io/0/plugin/latest?password=$SUPERTOKENS_API_KEY&planType=FREE&mode=DEV&version=$pinnedDbVersionX2&name=$currPinnedDb" \
-H 'api-version: 0')
if [[ $(echo "$response" | jq .tag) == "null" ]]
then
echo "fetching latest X.Y.Z version for $currPinnedDb, X.Y version: $pinnedDbVersionX2 gave response: $response"
exit 1
fi
pinnedDbVersionTag=$(echo "$response" | jq .tag | tr -d '"')
pinnedDbVersion=$(echo "$response" | jq .version | tr -d '"')
./startDb.sh "$currPinnedDb"
fi
cd ../../
git clone git@github.com:supertokens/supertokens-root.git
cd supertokens-root
rm gradle.properties
# tests need JDK 21; priority 2 outranks the image's default alternatives
update-alternatives --install "/usr/bin/java" "java" "/usr/java/jdk-21.0.7/bin/java" 2
update-alternatives --install "/usr/bin/javac" "javac" "/usr/java/jdk-21.0.7/bin/javac" 2
coreX=$(cut -d'.' -f1 <<<"$coreVersion")
coreY=$(cut -d'.' -f2 <<<"$coreVersion")
# modules.txt tells loadModules which repos/branches to assemble
if [[ $currPinnedDb == "sqlite" ]]
then
echo -e "core,$coreX.$coreY\nplugin-interface,$piVersion" > modules.txt
else
echo -e "core,$coreX.$coreY\nplugin-interface,$piVersion\n$currPinnedDb-plugin,$pinnedDbVersionX2" > modules.txt
fi
./loadModules
cd supertokens-core
git checkout dev-v$coreVersion
cd ../supertokens-plugin-interface
git checkout $currTag
if [[ $currPinnedDb == "sqlite" ]]
then
# shellcheck disable=SC2034
continue=1
else
cd ../supertokens-$currPinnedDb-plugin
git checkout $pinnedDbVersionTag
fi
cd ../
echo $SUPERTOKENS_API_KEY > apiPassword
./startTestingEnv --cicd
TEST_EXIT_CODE=$?
# copy junit XML out for CircleCI's store_test_results step
if [ -d ~/junit ]
then
echo "Copying output from core"
cp ~/supertokens-root/supertokens-core/build/test-results/test/*.xml ~/junit/
if [[ $pluginToTest != "sqlite" ]]
then
echo "Copying output from plugin"
cp ~/supertokens-root/supertokens-$pluginToTest-plugin/build/test-results/test/*.xml ~/junit/
fi
fi
if [[ $TEST_EXIT_CODE -ne 0 ]]
then
echo ""
echo ""
echo ""
echo ""
echo ""
echo "===== testing $currPinnedDb with plugin-interface $currVersion FAILED ====="
echo ""
echo ""
echo ""
echo ""
echo ""
cat logs/*
cd ../project/
echo "test failed... exiting!"
exit 1
fi
echo ""
echo ""
echo ""
echo ""
echo ""
echo "===== testing $currPinnedDb with plugin-interface $currVersion SUCCEEDED ====="
echo ""
echo ""
echo ""
echo ""
echo ""
cd ..
rm -rf supertokens-root
if [[ $currPinnedDb == "sqlite" ]]
then
# shellcheck disable=SC2034
continue=1
else
# download the packaged release and smoke-test the CLI against it
curl -o supertokens.zip -s -X GET \
"https://api.supertokens.io/0/app/download?pluginName=$currPinnedDb&os=linux&mode=DEV&binary=FREE&targetCore=$coreVersion&targetPlugin=$pinnedDbVersion" \
-H 'api-version: 0'
unzip supertokens.zip -d .
rm supertokens.zip
cd supertokens
../project/.circleci/testCli.sh
if [[ $? -ne 0 ]]
then
echo "cli testing failed... exiting!"
exit 1
fi
cd ../
fi
rm -rf supertokens
cd project/.circleci
if [[ $currPinnedDb == "sqlite" ]]
then
# shellcheck disable=SC2034
continue=1
else
./stopDb.sh $currPinnedDb
fi
fi
done
done 10<pluginInterfaceExactVersionsOutput
# fail the job if no plugin-interface/plugin combination matched at all
if [[ $someTestsRan = "true" ]]
then
echo "tests ran successfully"
else
echo "no test ran"
exit 1
fi

View File

@ -1,19 +0,0 @@
# args: <length of array> <array like ["0.0", "0.1"]>
# For each X.Y plugin-interface version in the array, ask the SuperTokens
# API for the exact released version (tag matching dev-vX.Y.* or vX.Y.*)
# and append one JSON response per line to pluginInterfaceExactVersionsOutput.
# Exits 1 as soon as any version cannot be resolved.
# (Positional params and variables are now quoted — the array argument
# contains whitespace and previously underwent word splitting; backticks
# replaced with $().)
touch pluginInterfaceExactVersionsOutput
i=0
while [ "$i" -lt "$1" ]; do
currVersion=$(echo "$2" | jq ".[$i]")
currVersion=$(echo "$currVersion" | tr -d '"')
i=$((i+1))
# now we have the current version like 0.0.
# We now have to find something that matches dev-v0.0.* or v0.0.*
response=$(curl -s -X GET \
"https://api.supertokens.io/0/plugin-interface/latest?password=$SUPERTOKENS_API_KEY&planType=FREE&mode=DEV&version=$currVersion" \
-H 'api-version: 0')
# a null tag means the API could not resolve this version -> abort
if [[ $(echo "$response" | jq .tag) == "null" ]]
then
echo "$response"
exit 1
fi
# deliberately unquoted: collapses the JSON to a single line so the
# consumer (doTests.sh) can read one object per line
echo $response >> pluginInterfaceExactVersionsOutput
done

View File

@ -1,29 +0,0 @@
# Extract the core version from ../build.gradle and call the SuperTokens
# /core PATCH API to mark this version's tests as passed.
# Requires SUPERTOKENS_API_KEY in the environment.
coreVersion=$(cat ../build.gradle | grep -e "version =" -e "version=")
# pull out the value between the first pair of double quotes on the
# matched line (e.g. version = "6.0.20" -> 6.0.20)
while IFS='"' read -ra ADDR; do
counter=0
for i in "${ADDR[@]}"; do
if [ $counter == 1 ]
then
coreVersion=$i
fi
counter=$(($counter+1))
done
done <<< "$coreVersion"
echo "calling /core PATCH to make testing passed"
responseStatus=$(curl -s -o /dev/null -w "%{http_code}" -X PATCH \
https://api.supertokens.io/0/core \
-H 'Content-Type: application/json' \
-H 'api-version: 0' \
-d "{
\"password\": \"$SUPERTOKENS_API_KEY\",
\"planType\":\"FREE\",
\"version\":\"$coreVersion\",
\"testPassed\": true
}")
# anything other than HTTP 200 fails the CI step
if [ "$responseStatus" -ne "200" ]
then
echo "patch api failed"
exit 1
fi

View File

@ -1 +0,0 @@
# Give the mysql user ownership of its data and runtime directories (the
# image changes the mysql user's home dir), then start the server.
chown -R mysql:mysql /var/lib/mysql /var/run/mysqld && service mysql start

View File

@ -1,113 +0,0 @@
# startDb.sh <mysql|postgresql>
# Start the requested database server and create the main "supertokens"
# database plus st0..st50 — one throwaway database per parallel test run.
# (The 51 copy-pasted CREATE DATABASE lines per engine are now loops;
# the same statements run in the same order.)
case $1 in
mysql)
(cd / && ./runMySQL.sh)
mysql -u root --password=root -e "CREATE DATABASE supertokens;"
for n in $(seq 0 50); do
mysql -u root --password=root -e "CREATE DATABASE st$n;"
done
;;
postgresql)
service postgresql start
# root superuser + default database so plain `psql` works below
sudo -u postgres psql --command "CREATE USER root WITH SUPERUSER PASSWORD 'root';"
createdb
psql -c "create database supertokens;"
for n in $(seq 0 50); do
psql -c "create database st$n;"
done
esac

View File

@ -1,8 +0,0 @@
# stopDb.sh <mysql|postgresql>
# Stop the database service that startDb.sh started for this plugin;
# any other argument (e.g. sqlite) is a no-op.
db=$1
if [ "$db" = "mysql" ]; then
service mysql stop
elif [ "$db" = "postgresql" ]; then
service postgresql stop
fi

View File

@ -1,71 +0,0 @@
# inside supertokens downloaded zip
# Smoke-tests the CLI of a freshly downloaded supertokens release:
# install, start/list two instances, hit /hello on both, stop, uninstall.
# (The nine identical `if [[ $? -ne 0 ]] ... exit 1` stanzas are folded
# into one helper; the same message and exit code are preserved.)

# abort with the historical error message when a checked command fails
fail() {
echo "cli testing failed... exiting!"
exit 1
}
./install || fail
supertokens start --port=8888 || fail
supertokens list || fail
# point the default config at the DBs running in this container and
# disable telemetry so CI runs do not phone home
sed -i 's/# mysql_connection_uri:/mysql_connection_uri: "mysql:\/\/root:root@localhost:3306?rewriteBatchedStatements=true"/g' /usr/lib/supertokens/config.yaml
sed -i 's/# mongodb_connection_uri:/mongodb_connection_uri: mongodb:\/\/root:root@localhost:27017/g' /usr/lib/supertokens/config.yaml
sed -i 's/# disable_telemetry:/disable_telemetry: true/g' /usr/lib/supertokens/config.yaml
# second instance on another port; its exit code was never checked here
supertokens start --port=8889
supertokens list || fail
curl http://localhost:8889/hello || fail
curl http://localhost:8888/hello || fail
supertokens stop || fail
supertokens uninstall || fail

View File

@ -8,6 +8,7 @@ labels: 'bug'
(A clear and concise description of what the bug is)
## Useful information
(Write what happened. Add screenshots, stacktraces, videos, anything that can help)

View File

@ -10,5 +10,4 @@ labels: 'enhancement'
## Implementation details
(Please outline any details about how this feature would be implemented. If you don't know, you can just skip this
section.)
(Please outline any details about how this feature would be implemented. If you don't know, you can just skip this section.)

View File

@ -6,39 +6,33 @@ labels:
# 📅 Checklist
## 🔶 Staging
## 🔶 Staging
### Dev Tag
- [supertokens-core:X.Y](https://github.com/supertokens/supertokens-core/tree/X.Y)
- [ ] core
- [supertokens-core:X.Y](https://github.com/supertokens/supertokens-core/tree/X.Y)
- [ ] core
- [ ] check CDI, plugin interface list
- [ ] Add migration script for psql / mysql
- [ ] Make sure no memory leak
- [ ] plugin-interface
- [ ] plugin-interface
- [ ] check plugin interface list
- [ ] mysql-plugin
- [ ] check plugin interface list
- [ ] Add migration script for mysql
- [ ] postgresql-plugin
- [ ] check plugin interface list
- [ ] Add migration script for psql
- [ ] mongodb-plugin
- [ ] check plugin interface list
- [ ] [supertokens-node:X.Y](https://github.com/supertokens/supertokens-node/tree/X.Y)
- [ ] check CDI, FDI list
- [ ] Make sure all PR checks are passing - specifically example apps checks should all be passing
- [ ] [supertokens-golang:X.Y](https://github.com/supertokens/supertokens-golang/tree/X.Y)
- [ ] check CDI, FDI list
- [ ] [supertokens-python:X.Y](https://github.com/supertokens/supertokens-python/tree/X.Y)
- [ ] check CDI, FDI list
- [ ] [supertokens-website:X.Y](https://github.com/supertokens/supertokens-website/X.Y)
- [ ] check FDI list
- [ ] [supertokens-web-js:X.Y](https://github.com/supertokens/supertokens-web-js/X.Y)
- [ ] check FDI list
- [ ] check web-js interface version
- [ ] Update dependency version of supertokens-website in package.json from npm registry
- [ ] [supertokens-auth-react:X.Y](https://github.com/supertokens/supertokens-auth-react/tree/X.Y)
- [ ] mysql-plugin
- [ ] check plugin interface list
- [ ] postgresql-plugin
- [ ] check plugin interface list
- [ ] mongodb-plugin
- [ ] check plugin interface list
- [ ] [supertokens-node:X.Y](https://github.com/supertokens/supertokens-node/tree/X.Y)
- [ ] check CDI, FDI list
- [ ] [supertokens-golang:X.Y](https://github.com/supertokens/supertokens-golang/tree/X.Y)
- [ ] check CDI, FDI list
- [ ] [supertokens-python:X.Y](https://github.com/supertokens/supertokens-python/tree/X.Y)
- [ ] check CDI, FDI list
- [ ] [supertokens-website:X.Y](https://github.com/supertokens/supertokens-website/X.Y)
- [ ] check FDI list
- [ ] [supertokens-web-js:X.Y](https://github.com/supertokens/supertokens-web-js/X.Y)
- [ ] check FDI list
- [ ] check web-js interface version
- [ ] Update dependency version of supertokens-website in package.json from npm registry
- [ ] [supertokens-auth-react:X.Y](https://github.com/supertokens/supertokens-auth-react/tree/X.Y)
- [ ] check FDI list
- [ ] check web-js interface version
- [ ] Updated dependencies to use supertokens-web-js in package.json from npm registry
@ -46,114 +40,96 @@ labels:
- [ ] Mobile responsiveness
- [ ] Make sure using with-typescript example that types are correct for every new configs exposed to users
- [ ] Make sure frontend login UI shows even if backend is not working.
- [ ] Make sure all PR checks are passing - specifically example apps checks should all be passing
- [ ] [prebuiltui:X.Y](https://github.com/supertokens/prebuiltui) (This is based on supertokens-auth-react release)
- [ ] If new recipe, then make sure to expose it as a window variable, and also change implementation of `checkFrontendSDKRelatedDocs` in the docs repo (global search it) - modify the `ALLOWED_LINES` variable to add about the new recipe.
- [ ] [supertokens-react-native:X.Y](https://github.com/supertokens/supertokens-react-native/tree/X.Y)
- [ ] [supertokens-react-native:X.Y](https://github.com/supertokens/supertokens-react-native/tree/X.Y)
- [ ] check FDI list
- [ ] [supertokens-android:X.Y](https://github.com/supertokens/supertokens-android/tree/X.Y)
- [ ] check FDI list
- [ ] [supertokens-ios:X.Y](https://github.com/supertokens/supertokens-ios/tree/X.Y)
- [ ] check FDI list
- [ ] [supertokens-flutter:X.Y](https://github.com/supertokens/supertokens-flutter/tree/X.Y)
- [ ] check FDI list
- [ ] [supertokens-dashboard](https://github.com/supertokens/dashboard)
- [ ] Tested all items mentioned in this? https://github.com/supertokens/dashboard/blob/master/.github/PULL_REQUEST_TEMPLATE.md
- [ ] Make sure no loop to the core on the frontend or in the backend dashboard apis.
- [ ] Test day with team. Get people in the team to read the docs and implement something with the new feature.
- [ ] [supertokens-android:X.Y](https://github.com/supertokens/supertokens-android/tree/X.Y)
- [ ] check FDI list
- [ ] [supertokens-ios:X.Y](https://github.com/supertokens/supertokens-ios/tree/X.Y)
- [ ] check FDI list
- [ ] [supertokens-flutter:X.Y](https://github.com/supertokens/supertokens-flutter/tree/X.Y)
- [ ] check FDI list
- [ ] [supertokens-dashboard](https://github.com/supertokens/dashboard)
### Others
- [ ] Example apps in create-supertokens-app CLI
- [ ] Create new example app in create-supertokens-app CLI?
- [ ] Examples apps in supertokens-auth-react. Update try.supertokens and rerun the pr checklist
- [ ] Examples apps in supertokens-web-js
- [ ] Examples apps in supertokens-react-native
- [ ] Examples apps in supertokens-golang
- [ ] Examples apps in supertokens-python
- [ ] Examples apps in supertokens-node. Update try.supertokens and rerun the pr checklist
- [ ] Examples apps in android
- [ ] Example apps in ios
- [ ] Example apps in flutter
- [ ] [next.js:canary](https://github.com/supertokens/next.js/tree/canary/examples/with-supertokens)
- [ ] RedwoodJS and playground-auth
- [ ] Run on netlify (and hence AWS lambda) to check if it works fine there
- [ ] Test on vercel (with-emailpassword-vercel app)
- [ ] SuperTokens Jackson SAML example update
- [ ] Supabase docs
- [ ] Capacitor template app: https://github.com/RobSchilderr/capacitor-supertokens-nextjs-turborepo
- [ ] T4 App: https://github.com/timothymiller/t4-app
- [ ] Examples apps in supertokens-auth-react
- [ ] Examples apps in supertokens-web-js
- [ ] Examples apps in supertokens-react-native
- [ ] Examples apps in supertokens-golang
- [ ] Examples apps in supertokens-python
- [ ] Examples apps in supertokens-node
- [ ] Examples apps in android
- [ ] Example apps in ios
- [ ] Example apps in flutter
- [ ] [next.js:canary](https://github.com/supertokens/next.js/tree/canary/examples/with-supertokens)
- [ ] RedwoodJS and playground-auth
- [ ] Run on netlify (and hence AWS lambda) to check if it works fine there
- [ ] Test on vercel (with-emailpassword-vercel app)
- [ ] SuperTokens Jackson SAML example update
- [ ] Supabase docs
- [ ] Capacitor template app: https://github.com/RobSchilderr/capacitor-supertokens-nextjs-turborepo
### 📚 Documentation (test site)
- [ ] All recipe main documentation update
- [ ] Code type checking versions are pointing to X.Y
- [ ] jsEnv
- [ ] goEnv
- [ ] pythonEnv
- [ ] jsEnv
- [ ] goEnv
- [ ] pythonEnv
- [ ] Update table schema in mysql / postgresql section for self hosted
- [ ] community documentation update
- [ ] website changes (test.supertokens.io)
- [ ] homepage
- [ ] pricing page feature list
- [ ] comparison chart in the pricing page
- [ ] product roadmap page
- [ ] Update API key code snippet in SaaS dashboard
- [ ] Update recipe list and links to the docs for supertokens.com dashboard
- [ ] homepage
- [ ] pricing page feature list
- [ ] comparison chart in the pricing page
- [ ] product roadmap page
- [ ] Update API key code snippet in SaaS dashboard
- [ ] Update recipe list and links to the docs for supertokens.com dashboard
## 🔥 Production
## 🔥 Production
### 💻 NPM and core release
- core
- core
- [ ] [supertokens-core:X.Y](https://github.com/supertokens/supertokens-core/tree/X.Y)
- [ ] plugin-interface
- [ ] mysql-plugin
- [ ] postgresql-plugin
- [ ] mongodb-plugin
- Docker update
- [ ] MySQL
- [ ] check if new env configs need to be added
- [ ] Postgres
- [ ] check if new env configs need to be added
- [ ] MongoDB
- [ ] check if new env configs need to be added
- [ ] MySQL
- [ ] check if new env configs need to be added
- [ ] Postgres
- [ ] check if new env configs need to be added
- [ ] MongoDB
- [ ] check if new env configs need to be added
- [ ] try.supertokens.io
```
sudo docker rm try-supertokens -f
sudo docker rmi supertokens/supertokens-postgresql:<VERSION>
nano ./start_container.sh (update version tag)
sudo ./start_container.sh
docker rm try-supertokens -f
docker rmi supertokens/supertokens-postgresql:<VERSION>
nano ~/try-supertokens/start_container.sh (update version tag)
~/try-supertokens/start_container.sh
```
- [ ] Run tests against node sdk (all compatible versions)
- [ ] Run tests against python sdk (all compatible versions)
- [ ] Run tests against golang sdk (all compatible versions)
- [ ] Update SaaS config
- [ ] Update to tables checked for user count / or to know if a deployment is being used or not
- [ ] Update logic for deleting all data in dev env if a new table was added and if the data should be removed from
it too
- [ ] Update logic for deleting all data in dev env if a new table was added and if the data should be removed from it too
- [ ] Update logic for exporting csv file for registered users
- [ ] Update SaaS instances to use the latest docker images.
- [ ]
Change [checklist in contributing guide for which tables to pick when migrating data from dev to prod instance](https://test.supertokens.com/docs/contribute/checklists/saas/tables-to-consider-for-data-migration-dev-to-prod).
- [ ] Change [checklist in contributing guide for which tables to pick when migrating data from dev to prod instance](https://test.supertokens.com/docs/contribute/checklists/saas/tables-to-consider-for-data-migration-dev-to-prod).
- [ ] Update license key used for cores to include new feature.
- [ ] Update table schema in mysql / postgresql section for self hosted in docs.
- [ ] Update paid feature to min version mapping in /st/features GET.
- [ ] Update API that returns the list of paid features in saas dashboard
- [ ] Update logic for core to core migration for new saas architecture:
- [ ] transfer of master database information
- [ ] deletion of master database information related to the CUD being transferred
- [ ] [supertokens-node:X.Y](https://github.com/supertokens/supertokens-node/tree/X.Y)
- [ ] [supertokens-golang:X.Y](https://github.com/supertokens/supertokens-golang/tree/X.Y)
- [ ] [supertokens-website:X.Y](https://github.com/supertokens/supertokens-website/tree/X.Y)
- [ ] [supertokens-web-js:X.Y](https://github.com/supertokens/supertokens-web-js/tree/X.Y)
- [ ] [supertokens-auth-react:X.Y](https://github.com/supertokens/supertokens-auth-react/tree/X.Y)
- [ ] [prebuiltui:X.Y](https://github.com/supertokens/prebuiltui)
- [ ] [supertokens-python:X.Y](https://github.com/supertokens/supertokens-python/tree/X.Y)
- [ ] [supertokens-react-native:X.Y](https://github.com/supertokens/supertokens-react-native/tree/X.Y)
- [ ] [supertokens-android:X.Y](https://github.com/supertokens/supertokens-android/tree/X.Y)
- [ ] [supertokens-ios:X.Y](https://github.com/supertokens/supertokens-ios/tree/X.Y)
- [ ] [supertokens-flutter:X.Y](https://github.com/supertokens/supertokens-flutter/tree/X.Y)
- [ ] Update table schema in mysql / postgresql section for self hosted in docs
- [ ] Update API that returns the list of paid features in saas dashboard
- [ ] [supertokens-node:X.Y](https://github.com/supertokens/supertokens-node/tree/X.Y)
- [ ] [supertokens-golang:X.Y](https://github.com/supertokens/supertokens-golang/tree/X.Y)
- [ ] [supertokens-website:X.Y](https://github.com/supertokens/supertokens-website/tree/X.Y)
- [ ] [supertokens-web-js:X.Y](https://github.com/supertokens/supertokens-web-js/tree/X.Y)
- [ ] [supertokens-auth-react:X.Y](https://github.com/supertokens/supertokens-auth-react/tree/X.Y)
- [ ] [supertokens-python:X.Y](https://github.com/supertokens/supertokens-python/tree/X.Y)
- [ ] [supertokens-react-native:X.Y](https://github.com/supertokens/supertokens-react-native/tree/X.Y)
- [ ] [supertokens-android:X.Y](https://github.com/supertokens/supertokens-android/tree/X.Y)
- [ ] [supertokens-ios:X.Y](https://github.com/supertokens/supertokens-ios/tree/X.Y)
- [ ] [supertokens-flutter:X.Y](https://github.com/supertokens/supertokens-flutter/tree/X.Y)
- [ ] [supertokens-dashboard](https://github.com/supertokens/dashboard)
### 📚 Documentation
@ -170,28 +146,109 @@ labels:
- [ ] Algolia search update for docs
- [ ] robots.txt, sitemap.xml, noindex page update
- Auto generate release note on github:
- [ ] supertokens-core
- [ ] supertokens-plugin-interface
- [ ] supertokens-mysql-plugin
- [ ] supertokens-postgresql-plugin
- [ ] supertokens-mongodb-plugin
- [ ] supertokens-node
- [ ] supertokens-golang
- [ ] supertokens-python
- [ ] supertokens-website
- [ ] supertokens-web-js
- [ ] supertokens-auth-react
- [ ] prebuiltui repo
- [ ] supertokens-react-native
- [ ] supertokens-android
- [ ] supertokens-ios
- [ ] supertokens-flutter
- [ ] supertokens-dashboard
- [ ] supertokens-core
- [ ] supertokens-plugin-interface
- [ ] supertokens-mysql-plugin
- [ ] supertokens-postgresql-plugin
- [ ] supertokens-mongodb-plugin
- [ ] supertokens-node
- [ ] supertokens-golang
- [ ] supertokens-python
- [ ] supertokens-website
- [ ] supertokens-web-js
- [ ] supertokens-auth-react
- [ ] supertokens-react-native
- [ ] supertokens-android
- [ ] supertokens-ios
- [ ] supertokens-flutter
- [ ] supertokens-dashboard
### Contents of running try.supertokens.com script:
```bash
git clone https://github.com/supertokens/backend
cd backend/scripts/demo-dashboard
./addData.sh
docker run -d \
--restart=always \
--name try-supertokens \
--label name=try-supertokens \
--label type=session-service \
--label mode=production \
--log-driver=awslogs --log-opt awslogs-region=ap-south-1 --log-opt awslogs-group=try-supertokens --log-opt awslogs-stream=try-supertokens \
-e DISABLE_TELEMETRY=true \
--publish 9999:3567 \
supertokens/supertokens-postgresql:6.0
sleep 7
curl --location --request POST 'https://try.supertokens.com/recipe/dashboard/user' \
--header 'rid: dashboard' \
--header 'api-key: <YOUR-API-KEY>' \
--header 'Content-Type: application/json' \
--data-raw '{"email": "rishabh@supertokens.com","password": "abcd1234"}'
curl --location --request POST 'https://try.supertokens.com/recipe/dashboard/user' \
--header 'rid: dashboard' \
--header 'api-key: <YOUR-API-KEY>' \
--header 'Content-Type: application/json' \
--data-raw '{"email": "demo@supertokens.com","password": "abcd1234"}'
curl --location --request PUT 'https://try.supertokens.com/recipe/multitenancy/tenant' \
--header 'Content-Type: application/json' \
--data-raw '{
"tenantId": "tenant1",
"emailPasswordEnabled": true,
"thirdPartyEnabled": true,
"passwordlessEnabled": false
}'
curl --location --request PUT 'https://try.supertokens.com/tenant1/recipe/multitenancy/config/thirdparty' \
--header 'Content-Type: application/json' \
--data-raw '{
"config": {
"thirdPartyId": "google-workspaces",
"name": "Google Workspaces",
"clients": [
{
"clientId": "1060725074195-kmeum4crr01uirfl2op9kd5acmi9jutn.apps.googleusercontent.com",
"clientSecret": "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",
"additionalConfig": {
"hd": "*"
}
}
]
}
}'
curl --location --request PUT 'https://try.supertokens.com/recipe/multitenancy/tenant' \
--header 'Content-Type: application/json' \
--data-raw '{
"tenantId": "tenant2",
"emailPasswordEnabled": true,
"thirdPartyEnabled": false,
"passwordlessEnabled": false
}'
curl --location --request PUT 'https://try.supertokens.com/recipe/multitenancy/tenant' \
--header 'Content-Type: application/json' \
--data-raw '{
"tenantId": "tenant3",
"emailPasswordEnabled": false,
"thirdPartyEnabled": true,
"passwordlessEnabled": true
}'
curl --location --request PUT 'https://try.supertokens.com/tenant3/recipe/multitenancy/config/thirdparty' \
--header 'Content-Type: application/json' \
--data-raw '{
"config": {
"thirdPartyId": "github",
"name": "GitHub",
"clients": [
{
"clientId": "467101b197249757c71f",
"clientSecret": "e97051221f4b6426e8fe8d51486396703012f5bd"
}
]
}
}'
```

View File

@ -29,15 +29,11 @@ highlighting the necessary changes)
- [ ] Had installed and ran the pre-commit hook
- [ ] If there are new dependencies that have been added in `build.gradle`, please make sure to add them
in `implementationDependencies.json`.
- [ ] Update function `getValidFields` in `io/supertokens/config/CoreConfig.java` if new aliases were added for any core
config (similar to the `access_token_signing_key_update_interval` config alias).
- [ ] Update function `getValidFields` in `io/supertokens/config/CoreConfig.java` if new aliases were added for any core config (similar to the `access_token_signing_key_update_interval` config alias).
- [ ] Issue this PR against the latest non released version branch.
- To know which one it is, find the latest released tag (`git tag`) in the format `vX.Y.Z`, and then find the
latest branch (`git branch --all`) whose `X.Y` is greater than the latest released tag.
- If no such branch exists, then create one from the latest released branch.
- [ ] If added a foreign key constraint on `app_id_to_user_id` table, make sure to delete from this table when deleting
the user as well if `deleteUserIdMappingToo` is false.
- [ ] If added a new recipe, then make sure to update the bulk import API to include the new recipe.
## Remaining TODOs for this PR

BIN
.github/auth_strategies.png vendored Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 219 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 3.6 KiB

View File

@ -1,76 +0,0 @@
FROM ubuntu:22.04 AS tmp
RUN apt-get update && apt-get upgrade -y
RUN apt-get install build-essential -y
RUN apt-get install -y git-core wget unzip jq curl
# Install OpenJDK 21.0.7
RUN wget https://github.com/adoptium/temurin21-binaries/releases/download/jdk-21.0.7%2B6/OpenJDK21U-jdk_x64_linux_hotspot_21.0.7_6.tar.gz
RUN mv OpenJDK21U-jdk_x64_linux_hotspot_21.0.7_6.tar.gz /usr/java
RUN mkdir -p /usr/java/
RUN cd /usr/java && tar -xzvf OpenJDK21U-jdk_x64_linux_hotspot_21.0.7_6.tar.gz
RUN mv /usr/java/jdk-21.0.7+6 /usr/java/jdk-21.0.7
RUN echo 'JAVA_HOME=/usr/java/jdk-21.0.7' >> /etc/profile
RUN echo 'PATH=$PATH:$HOME/bin:$JAVA_HOME/bin' >> /etc/profile
RUN echo 'export JAVA_HOME' >> /etc/profile
RUN echo 'export JRE_HOME' >> /etc/profile
RUN echo 'export PATH' >> /etc/profile
RUN update-alternatives --install "/usr/bin/java" "java" "/usr/java/jdk-21.0.7/bin/java" 1
RUN update-alternatives --install "/usr/bin/javac" "javac" "/usr/java/jdk-21.0.7/bin/javac" 1
RUN wget -O docker-entrypoint.sh https://raw.githubusercontent.com/supertokens/supertokens-docker-postgresql/master/docker-entrypoint.sh
# RUN wget https://services.gradle.org/distributions/gradle-7.0-all.zip
# RUN unzip gradle-7.0-all.zip
# ENV GRADLE_HOME=/gradle-7.0
# ENV PATH=$PATH:$GRADLE_HOME/bin
RUN git clone https://github.com/supertokens/supertokens-root.git
WORKDIR /supertokens-root
COPY ./pluginInterfaceSupported.json pluginInterfaceSupported.json
RUN git clone --single-branch --branch "$(cat pluginInterfaceSupported.json | jq '.versions[-1]' | tr -d '"')" "https://github.com/supertokens/supertokens-plugin-interface.git"
RUN mkdir -p supertokens-core
COPY ./ supertokens-core
RUN echo "org.gradle.vfs.watch=false" >> ./gradle.properties
RUN ./loadModules
RUN ./utils/setupTestEnv --local
FROM debian:bookworm-slim
RUN groupadd supertokens && useradd -m -s /bin/bash -g supertokens supertokens
RUN apt-get update && apt-get install -y --no-install-recommends gnupg dirmngr curl unzip && rm -rf /var/lib/apt/lists/*
ENV GOSU_VERSION=1.7
RUN set -x \
&& apt-get update && apt-get install -y --no-install-recommends ca-certificates wget && rm -rf /var/lib/apt/lists/* \
&& wget -O /usr/local/bin/gosu "https://github.com/tianon/gosu/releases/download/$GOSU_VERSION/gosu-$(dpkg --print-architecture)" \
&& wget -O /usr/local/bin/gosu.asc "https://github.com/tianon/gosu/releases/download/$GOSU_VERSION/gosu-$(dpkg --print-architecture).asc" \
&& export GNUPGHOME="$(mktemp -d)" \
&& gpg --batch --keyserver hkps://keys.openpgp.org --recv-keys B42F6819007F00F88E364FD4036A9C25BF357DD4 \
&& gpg --batch --verify /usr/local/bin/gosu.asc /usr/local/bin/gosu \
&& gpgconf --kill all \
&& rm -rf "$GNUPGHOME" /usr/local/bin/gosu.asc \
&& chmod +x /usr/local/bin/gosu \
&& wget -O jre.zip "https://raw.githubusercontent.com/supertokens/jre/master/jre-21.0.7-linux.zip" \
&& mkdir -p /usr/lib/supertokens/jre \
&& unzip jre.zip \
&& mv jre-*/* /usr/lib/supertokens/jre \
&& apt-get purge -y --auto-remove ca-certificates wget unzip \
&& rm -rf jre.zip
COPY --from=tmp --chown=supertokens /supertokens-root/core /usr/lib/supertokens/core
COPY --from=tmp --chown=supertokens /supertokens-root/plugin-interface /usr/lib/supertokens/plugin-interface
COPY --from=tmp --chown=supertokens /supertokens-root/ee /usr/lib/supertokens/ee
COPY --from=tmp --chown=supertokens /supertokens-root/temp/config.yaml /usr/lib/supertokens/config.yaml
COPY --from=tmp --chown=supertokens /supertokens-root/version.yaml /usr/lib/supertokens/version.yaml
COPY --from=tmp --chown=supertokens /docker-entrypoint.sh /usr/local/bin/
RUN mkdir -p /lib/supertokens
RUN chown -R supertokens:supertokens /lib/supertokens
RUN chmod +x /usr/local/bin/docker-entrypoint.sh
RUN echo "$(md5sum /usr/lib/supertokens/config.yaml | awk '{ print $1 }')" >> /CONFIG_HASH
RUN ln -s /usr/local/bin/docker-entrypoint.sh /entrypoint.sh # backwards compat
EXPOSE 3567
USER "supertokens"
CMD ["/usr/lib/supertokens/jre/bin/java", "-classpath", "/usr/lib/supertokens/core/*:/usr/lib/supertokens/plugin-interface/*:/usr/lib/supertokens/ee/*", "io.supertokens.Main", "/usr/lib/supertokens", "DEV", "host=0.0.0.0", "test_mode", "tempDirLocation=/usr/lib/supertokens/temp", "configFile=/usr/lib/supertokens/temp/config.yaml"]
ENTRYPOINT ["/usr/local/bin/docker-entrypoint.sh"]

View File

@ -65,4 +65,4 @@ register_plugin_version(
plugin_version=plugin_version,
plugin_interface_array=plugin_interface_array,
plugin_name=os.environ.get("PLUGIN_NAME")
)
)

BIN
.github/list-user.png vendored

Binary file not shown.

Before

Width:  |  Height:  |  Size: 79 KiB

BIN
.github/user-info.png vendored

Binary file not shown.

Before

Width:  |  Height:  |  Size: 263 KiB

View File

@ -45,16 +45,16 @@ jobs:
runs-on: ubuntu-latest
needs: dependency-branches
steps:
- name: Set up JDK 21.0.7
- name: Set up JDK 15.0.1
uses: actions/setup-java@v2
with:
java-version: 21.0.7
java-version: 15.0.1
distribution: zulu
- uses: actions/checkout@v2
with:
repository: supertokens/supertokens-root
path: ./supertokens-root
ref: master
ref: for_jdk_15_releases
- name: Checkout supertokens-core
run: |
cd supertokens-root

View File

@ -1,134 +0,0 @@
name: Container Security Scan
on:
# Allow manual triggering
workflow_dispatch:
# Run automatically once a day at 2 AM UTC
schedule:
- cron: '0 2 * * *'
jobs:
container-scan:
name: Scan SuperTokens PostgreSQL Container
runs-on: ubuntu-latest
steps:
- name: Run Azure Container Scan
id: container-scan
uses: Azure/container-scan@v0
continue-on-error: true
with:
image-name: supertokens/supertokens-postgresql:latest
severity-threshold: LOW
run-quality-checks: false
env:
DOCKER_CONTENT_TRUST: 1
- name: Upload scan results
id: upload-scan-results
uses: actions/upload-artifact@v4
with:
name: container-scan-results
path: |
${{ steps.container-scan.outputs.scan-report-path }}
retention-days: 30
- name: Generate Security Summary
id: security-summary
run: |
echo "summary<<EOF" >> $GITHUB_OUTPUT
echo "**Image:** \`supertokens/supertokens-postgresql:latest\`\n" >> $GITHUB_OUTPUT
echo "**Scan Date:** \`$(date -u)\`\n" >> $GITHUB_OUTPUT
echo "\n" >> $GITHUB_OUTPUT
# Get the scan report path from the container scan output
SCAN_REPORT_PATH="${{ steps.container-scan.outputs.scan-report-path }}"
if [ -f "$SCAN_REPORT_PATH" ]; then
# Count vulnerabilities by severity using the correct JSON structure
critical=$(jq '[.vulnerabilities[]? | select(.severity == "CRITICAL")] | length' "$SCAN_REPORT_PATH" 2>/dev/null || echo "0")
high=$(jq '[.vulnerabilities[]? | select(.severity == "HIGH")] | length' "$SCAN_REPORT_PATH" 2>/dev/null || echo "0")
medium=$(jq '[.vulnerabilities[]? | select(.severity == "MEDIUM")] | length' "$SCAN_REPORT_PATH" 2>/dev/null || echo "0")
low=$(jq '[.vulnerabilities[]? | select(.severity == "LOW")] | length' "$SCAN_REPORT_PATH" 2>/dev/null || echo "0")
total_vulns=$(jq '[.vulnerabilities[]?] | length' "$SCAN_REPORT_PATH" 2>/dev/null || echo "0")
echo "**Total Vulnerabilities:** $total_vulns\n" >> $GITHUB_OUTPUT
echo "\n" >> $GITHUB_OUTPUT
echo "- 🔴 **Critical**: $critical\n" >> $GITHUB_OUTPUT
echo "- 🟠 **High**: $high\n" >> $GITHUB_OUTPUT
echo "- 🟡 **Medium**: $medium\n" >> $GITHUB_OUTPUT
echo "- 🟢 **Low**: $low\n" >> $GITHUB_OUTPUT
echo "\n" >> $GITHUB_OUTPUT
else
echo "❌ **Scan results not found or scan failed**" >> $GITHUB_OUTPUT
fi
echo "\n" >> $GITHUB_OUTPUT
echo "[📃 Download the full report](${{ steps.upload-scan-results.outputs.artifact-url }})\n" >> $GITHUB_OUTPUT
echo "EOF" >> $GITHUB_OUTPUT
- name: Add to Action Summary
run: |
echo "**Image:** \`supertokens/supertokens-postgresql:latest\`" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
echo "**Scan Date:** \`$(date -u)\`" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
# Get the scan report path from the container scan output
SCAN_REPORT_PATH="${{ steps.container-scan.outputs.scan-report-path }}"
if [ -f "$SCAN_REPORT_PATH" ]; then
# Count vulnerabilities by severity using the correct JSON structure
critical=$(jq '[.vulnerabilities[]? | select(.severity == "CRITICAL")] | length' "$SCAN_REPORT_PATH" 2>/dev/null || echo "0")
high=$(jq '[.vulnerabilities[]? | select(.severity == "HIGH")] | length' "$SCAN_REPORT_PATH" 2>/dev/null || echo "0")
medium=$(jq '[.vulnerabilities[]? | select(.severity == "MEDIUM")] | length' "$SCAN_REPORT_PATH" 2>/dev/null || echo "0")
low=$(jq '[.vulnerabilities[]? | select(.severity == "LOW")] | length' "$SCAN_REPORT_PATH" 2>/dev/null || echo "0")
total_vulns=$(jq '[.vulnerabilities[]?] | length' "$SCAN_REPORT_PATH" 2>/dev/null || echo "0")
echo "**Total Vulnerabilities:** $total_vulns" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
echo "- 🔴 **Critical**: $critical" >> $GITHUB_STEP_SUMMARY
echo "- 🟠 **High**: $high" >> $GITHUB_STEP_SUMMARY
echo "- 🟡 **Medium**: $medium" >> $GITHUB_STEP_SUMMARY
echo "- 🟢 **Low**: $low" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
echo "**Vulnerabilities:**" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
echo "| ID | Package | Severity | | Description |" >> $GITHUB_STEP_SUMMARY
echo "|----|---------|----------|-|-------------|" >> $GITHUB_STEP_SUMMARY
# Extract and format vulnerabilities into a table with colored severity indicators, excluding LOW severity
jq -r '.vulnerabilities[]? | select(.severity != "LOW") | "| \(.vulnerabilityId // "N/A") | \(.packageName // "N/A") | \(.severity // "UNKNOWN") | \(if .severity == "CRITICAL" then "🔴" elif .severity == "HIGH" then "🟠" elif .severity == "MEDIUM" then "🟡" else "🟢" end) | \((.description // "No description available") | gsub("\n"; " ")) |"' "$SCAN_REPORT_PATH" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
else
echo "❌ **Scan results not found or scan failed**" >> $GITHUB_STEP_SUMMARY
fi
echo "" >> $GITHUB_STEP_SUMMARY
echo "[📃 Download the full report](${{ steps.upload-scan-results.outputs.artifact-url }})" >> $GITHUB_STEP_SUMMARY
- name: Post notification on Slack channel
id: deployment_message
uses: slackapi/slack-github-action@v2.1.0
with:
method: chat.postMessage
token: ${{ secrets.SLACK_BOT_TOKEN }}
payload: |
channel: ${{ secrets.SLACK_CHANNEL_ID }}
text: ""
blocks:
- type: "header"
text:
type: "plain_text"
text: "${{ steps.container-scan.outcome == 'success' && '✅' || '❌' }} Vulnerability Report: ${{ steps.container-scan.outcome == 'success' && 'All okay' || 'Needs attention' }}"
- type: "markdown"
text: "${{ steps.security-summary.outputs.summary }}"

View File

@ -53,10 +53,10 @@ jobs:
steps:
- name: Checkout
uses: actions/checkout@v2
- name: Set up JDK 21.0.7
- name: Set up JDK 15.0.1
uses: actions/setup-java@v2
with:
java-version: 21.0.7
java-version: 15.0.1
distribution: zulu
- name: Login to Docker Hub
uses: docker/login-action@v3
@ -81,16 +81,16 @@ jobs:
runs-on: ubuntu-latest
needs: [dependency-branches, release-docker]
steps:
- name: Set up JDK 21.0.7
- name: Set up JDK 15.0.1
uses: actions/setup-java@v2
with:
java-version: 21.0.7
java-version: 15.0.1
distribution: zulu
- uses: actions/checkout@v2
with:
repository: supertokens/supertokens-root
path: ./supertokens-root
ref: master
ref: for_jdk_15_releases
- name: Checkout supertokens-core
run: |
cd supertokens-root

View File

@ -8,20 +8,20 @@ jobs:
name: Lint PR title
runs-on: ubuntu-latest
steps:
- uses: amannn/action-semantic-pull-request@v3
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
validateSingleCommit: true
- uses: amannn/action-semantic-pull-request@v3
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
validateSingleCommit: true
changelog:
name: Enforce Changelog
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: dangoslen/changelog-enforcer@v2
with:
changeLogPath: 'CHANGELOG.md'
skipLabels: 'Skip-Changelog'
- uses: actions/checkout@v2
- uses: dangoslen/changelog-enforcer@v2
with:
changeLogPath: 'CHANGELOG.md'
skipLabels: 'Skip-Changelog'
unit-tests:
name: Run unit tests
uses: ./.github/workflows/unit-test.yml
uses: ./.github/workflows/unit-test.yml

View File

@ -19,8 +19,6 @@ jobs:
id: result
with:
run-for: PR
core-branch: ${{ github.ref_name }}
docker:
name: Docker
runs-on: ubuntu-latest
@ -36,16 +34,16 @@ jobs:
# - mysql
# - mongodb
steps:
- name: Set up JDK 21.0.7
- name: Set up JDK 15.0.1
uses: actions/setup-java@v2
with:
java-version: 21.0.7
java-version: 15.0.1
distribution: zulu
- uses: actions/checkout@v2
with:
repository: supertokens/supertokens-root
path: ./supertokens-root
ref: master
ref: for_jdk_15_releases
- uses: actions/checkout@v2
with:
path: ./supertokens-root/supertokens-core
@ -101,4 +99,4 @@ jobs:
context: ./supertokens-root
tags: supertokens/supertokens-dev-${{ matrix.plugin }}:${{ steps.set_tag.outputs.TAG }}
file: ./supertokens-root/supertokens-${{ matrix.plugin }}-plugin/.github/helpers/docker/Dockerfile
platforms: linux/amd64,linux/arm64
platforms: linux/amd64,linux/arm64

View File

@ -10,7 +10,7 @@ on:
jobs:
stress-tests:
runs-on: ubuntu-22.04
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Set up Node.js
@ -44,4 +44,4 @@ jobs:
echo "## Stress Test Results" >> $GITHUB_STEP_SUMMARY
echo "| Test | Duration |" >> $GITHUB_STEP_SUMMARY
echo "|------|----------|" >> $GITHUB_STEP_SUMMARY
jq -r '.measurements[] | "| \(.title) | \(.formatted) |"' stress-tests/stats.json >> $GITHUB_STEP_SUMMARY
jq -r '.measurements[] | "| \(.title) | \(.formatted) |"' stress-tests/stats.json >> $GITHUB_STEP_SUMMARY

View File

@ -3,10 +3,13 @@ name: Unit Tests
on:
workflow_call:
env:
total-runners: 12
jobs:
dependency-branches:
name: Dependency Branches
runs-on: ubuntu-22.04
runs-on: ubuntu-latest
outputs:
branches: ${{ steps.result.outputs.branches }}
@ -16,33 +19,45 @@ jobs:
id: result
with:
run-for: PR
core-branch: ${{ github.head_ref }}
test:
name: Unit tests
runner-indexes:
runs-on: ubuntu-latest
name: Generate runner indexes
needs: dependency-branches
outputs:
json: ${{ steps.generate-index-list.outputs.json }}
steps:
- id: generate-index-list
run: |
MAX_INDEX=$((${{ env.total-runners }}-1))
INDEX_LIST=$(seq 0 ${MAX_INDEX})
INDEX_JSON=$(jq --null-input --compact-output '. |= [inputs]' <<< ${INDEX_LIST})
echo "::set-output name=json::${INDEX_JSON}"
unit-tests:
runs-on: ubuntu-latest
name: "Unit tests: ${{ matrix.plugin }} plugin, runner #${{ matrix.runner-index }}"
needs:
- dependency-branches
- runner-indexes
strategy:
fail-fast: false
matrix:
runner-index: ${{ fromjson(needs.runner-indexes.outputs.json) }}
plugin:
- sqlite
- postgresql
# no longer supported
# - mysql
# - mongodb
runs-on: ubuntu-22.04
steps:
- name: Set up JDK 21.0.7
- name: Set up JDK 15.0.1
uses: actions/setup-java@v2
with:
java-version: 21.0.7
java-version: 15.0.1
distribution: zulu
- uses: actions/checkout@v2
with:
repository: supertokens/supertokens-root
path: ./supertokens-root
ref: master
ref: for_jdk_15_releases
- uses: actions/checkout@v2
with:
path: ./supertokens-root/supertokens-core
@ -71,12 +86,33 @@ jobs:
- name: Start ${{ matrix.plugin }} server
if: matrix.plugin != 'sqlite'
run: cd supertokens-root/supertokens-${{ matrix.plugin }}-plugin && ./startDb.sh
- uses: chaosaffe/split-tests@v1-alpha.1
id: split-tests
name: Split tests
with:
glob: 'supertokens-root/*/src/test/java/**/*.java'
split-total: ${{ env.total-runners }}
split-index: ${{ matrix.runner-index }}
- run: 'echo "This runner will execute the following tests: ${{ steps.split-tests.outputs.test-suite }}"'
- name: Run tests
env:
ST_PLUGIN_NAME: ${{ matrix.plugin }}
run: |
cd supertokens-root
./gradlew test
echo "./gradlew test \\" > test.sh
chmod +x test.sh
IFS=' ' read -ra TESTS <<< "${{ steps.split-tests.outputs.test-suite }}"
for test in "${TESTS[@]}"; do
test_name="${test%.java}"
test_name="${test_name#supertokens-root/supertokens-core/src/test/java/}"
test_name="${test_name//\//.}"
echo " --tests $test_name \\" >> test.sh
done
echo "" >> test.sh
echo "this is the test command:"
cat test.sh
echo "--------------------------------"
./test.sh
- name: Publish Test Report
uses: mikepenz/action-junit-report@v5
if: always()
@ -84,4 +120,4 @@ jobs:
report_paths: '**/build/test-results/test/TEST-*.xml'
detailed_summary: true
include_passed: false
annotate_notice: true
annotate_notice: true

6
.gitignore vendored
View File

@ -12,7 +12,6 @@ gradle-app.setting
!cli/jar/**/*.jar
!downloader/jar/**/*.jar
!ee/jar/**/*.jar
!src/main/resources/**/*.jar
*target*
*.war
@ -48,7 +47,4 @@ local.properties
*.iml
ee/bin
addDevTag
addReleaseTag
install-linux.sh
install-windows.bat
addReleaseTag

File diff suppressed because it is too large Load Diff

View File

@ -45,7 +45,7 @@ We're happy to help!:raised_hands:
### Local Setup Prerequisites
- OS: Linux or macOS. Or if using Windows, you need to use [wsl2](https://docs.microsoft.com/en-us/windows/wsl/about).
- JDK: openjdk 21.0.7. Installation instructions for Mac and Linux can be found
- JDK: openjdk 15.0.1. Installation instructions for Mac and Linux can be found
in [our wiki](https://github.com/supertokens/supertokens-core/wiki/Installing-OpenJDK-for-Mac-and-Linux)
- IDE: [IntelliJ](https://www.jetbrains.com/idea/download/)(recommended) or equivalent IDE

View File

@ -7,193 +7,194 @@ Portions of this software are licensed as follows:
* Content outside of the above mentioned directories or restrictions above is available under the "Apache 2.0"
license as defined below.
Apache License
Version 2.0, January 2004
https://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
END OF TERMS AND CONDITIONS
Copyright 2020-2023 SuperTokens, Inc.
Copyright 2020-2023 SuperTokens, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
https://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

139
README.md
View File

@ -7,44 +7,38 @@
alt="chat on Discord"></a>
<span><img src="https://img.shields.io/docker/pulls/supertokens/supertokens-postgresql.svg" alt="Docker pull stats"/></span>
Add **secure login and session management** to your apps. [SDKs available](https://supertokens.com/docs/community/sdks)
for popular languages and front-end frameworks e.g. Node.js, Go, Python, React.js, React Native, Vanilla JS, etc.
Add **secure login and session management** to your apps. [SDKs available](https://supertokens.com/docs/community/sdks) for popular languages and front-end frameworks e.g. Node.js, Go, Python, React.js, React Native, Vanilla JS, etc.
![Architecture Diagram](https://supertokens.com/img/architecture/self_hosted_generic.png)
SuperTokens architecture is optimized to add secure authentication for your users without compromising on user and
developer experience
SuperTokens architecture is optimized to add secure authentication for your users without compromising on user and developer experience
**Three building blocks of SuperTokens architecture**
1. Frontend SDK: Manages session tokens and renders login UI widgets
2. Backend SDK: Provides APIs for sign-up, sign-in, signout, session refreshing, etc. Your Frontend will talk to these
APIs
3. SuperTokens Core: The HTTP service for the core auth logic and database operations. This service is used by the
Backend SDK
2. Backend SDK: Provides APIs for sign-up, sign-in, signout, session refreshing, etc. Your Frontend will talk to these APIs
3. SuperTokens Core: The HTTP service for the core auth logic and database operations. This service is used by the Backend SDK
## Features
## Supports multiple auth strategies
[![Click here to get started](.github/click-here-to-get-started.png)](https://supertokens.com/docs/guides)
![auth recipes](.github/auth_strategies.png)
* Passwordless Login
* Social Login
* Email Password Login
* Phone Password Login
* Session Management
* Multi-Factor Authentication
* Multi Tenancy / Organization Support (Enterprise SSO)
* User Roles
* Microservice Authentication
Guides to setup different recipes
* [Passwordless](https://supertokens.com/docs/passwordless/introduction)
* [Social Login](https://supertokens.com/docs/thirdparty/introduction)
* [Email Password Login](https://supertokens.com/docs/emailpassword/introduction)
* [Phone Password Login](https://supertokens.com/docs/phonepassword/introduction)
* [Passwordless + Social Login](https://supertokens.com/docs/thirdpartypasswordless/introduction)
* [Email Password + Social Login](https://supertokens.com/docs/thirdpartyemailpassword/introduction)
* [Session Management](https://supertokens.com/docs/session/introduction)
## Learn more
- [🚀 What is SuperTokens?](https://github.com/supertokens/supertokens-core#-what-is-supertokens)
- [Philosophy](https://github.com/supertokens/supertokens-core#philosophy)
- [Features + Demo app](https://github.com/supertokens/supertokens-core#features---click-here-to-see-the-demo-app)
- [Documentation](https://github.com/supertokens/supertokens-core#documentation)
- [🏗️ Architecture](https://github.com/supertokens/supertokens-core#%EF%B8%8F-architecture)
- [☕ Why Java?](https://github.com/supertokens/supertokens-core#-why-java)
- [⌨️ User Management Dashboard](https://github.com/supertokens/supertokens-core#-user-management-dashboard)
- [🔥 SuperTokens vs Others](https://github.com/supertokens/supertokens-core#-supertokens-vs-others)
- [🛠️ Building from source](https://github.com/supertokens/supertokens-core#%EF%B8%8F-building-from-source)
- [👥 Community](https://github.com/supertokens/supertokens-core#-community)
@ -55,103 +49,65 @@ developer experience
### If you like our project, please :star2: this repository! For feedback, feel free to join our [Discord](https://supertokens.io/discord), or create an issue on this repo
## 🚀 What is SuperTokens?
SuperTokens is an open-core alternative to proprietary login providers like Auth0 or AWS Cognito. We are
different because we offer:
different because we offer:
- Open source: SuperTokens can be used for free, forever, with no limits on the number of users.
- An on-premises deployment so that you control 100% of your user data, using your own database.
- An end-to-end solution with login, sign-ups, user and session management, without all the complexities of OAuth
protocols.
- An end-to-end solution with login, sign-ups, user and session management, without all the complexities of OAuth protocols.
- Ease of implementation and higher security.
- Extensibility: Anyone can contribute and make SuperTokens better!
### Philosophy
Authentication directly affects the UX, dev experience, and security of any app. We believe that
current solutions cannot optimize for all three "pillars", leading to many
applications hand-rolling their own auth. This not only leads to security issues but is also a massive
time drain.
current solutions cannot optimize for all three "pillars", leading to many
applications hand-rolling their own auth. This not only leads to security issues but is also a massive
time drain.
We want to change that - we believe the only way is to provide a solution that has the right level of
abstraction, gives you maximum control, is secure, and is simple to use - just like if you build it yourself,
from scratch (minus the time to learn, build, and maintain).
We also believe in the principle of least vendor lock-in. Your having full control of your user's data means that you
can switch away from SuperTokens without forcing your existing users to logout, reset their passwords, or in the worst
case, sign up again.
### [Click here](https://thirdpartyemailpassword.demo.supertokens.com/) to see the demo app.
abstraction, gives you maximum control, is secure, and is simple to use - just like if you build it yourself,
from scratch (minus the time to learn, build, and maintain).
We also believe in the principle of least vendor lock-in. Your having full control of your user's data means that you can switch away from SuperTokens without forcing your existing users to logout, reset their passwords, or in the worst case, sign up again.
### Features - [Click here](https://thirdpartyemailpassword.demo.supertokens.io/) to see the demo app.
- Please visit [our website](https://supertokens.io/pricing) to see the list of features.
- We want to make features as decoupled as possible. This means you can use SuperTokens for just login, or just session
management, or both. In fact, we also offer session management integrations with other login providers like Auth0.
- We want to make features as decoupled as possible. This means you can use SuperTokens for just login, or just session management, or both. In fact, we also offer session management integrations with other login providers like Auth0.
### Documentation
The docs can be seen [on our website](https://supertokens.io/docs/community/introduction).
There is more information about SuperTokens on
the [GitHub wiki section](https://github.com/supertokens/supertokens-core/wiki).
There is more information about SuperTokens on the [GitHub wiki section](https://github.com/supertokens/supertokens-core/wiki).
## 🏗️ Architecture
Please find an [architecture diagram here](https://supertokens.io/docs/community/architecture)
**For more information, please visit
our [GitHub wiki section](https://github.com/supertokens/supertokens-core/wiki/SuperTokens-Architecture).**
**For more information, please visit our [GitHub wiki section](https://github.com/supertokens/supertokens-core/wiki/SuperTokens-Architecture).**
## ☕ Why Java?
- ✅ Whilst running Java can seem difficult, we provide the JDK along with the binary/docker image when distributing it.
This makes running SuperTokens just like running any other HTTP microservice.
- ✅ Whilst running Java can seem difficult, we provide the JDK along with the binary/docker image when distributing it. This makes running SuperTokens just like running any other HTTP microservice.
- ✅ Java has a very mature ecosystem. This implies that third-party libraries have been battle-tested.
- ✅ Java's strong type system ensures fewer bugs and easier maintainability. This is especially important when many
people are expected to work on the same project.
- ✅ Java's strong type system ensures fewer bugs and easier maintainability. This is especially important when many people are expected to work on the same project.
- ✅ Our team is most comfortable with Java and hiring great Java developers is relatively easy as well.
- ✅ One of the biggest criticisms of Java is memory usage. We have three solutions to this:
- The most frequent auth-related operation is session verification - this happens within the backend SDK (node,
python, Go) without contacting the Java core. Therefore, a single instance of the core can handle several 10s of
thousands of users fairly easily.
- We have carefully chosen our dependencies. For eg: we use an embedded tomcat server instead of a higher-level web
framework.
- We also plan on using [GraalVM](https://www.graalvm.org/) in the future and this can reduce memory usage by 95%!
- ✅ If you require any modifications to the auth APIs, those would need to be done on the backend SDK level (for example
Node, Golang, Python..). So you'd rarely need to directly modify/work with the Java code in this repo.
## ⌨️ User Management Dashboard
Oversee your users with the [SuperTokens User Management Dashboard](https://supertokens.com/docs/userdashboard/about)
### List users
List all the users who have signed up to your application.
![List SuperTokens users](.github/list-user.png)
### Manage users
Manage users by modifying or deleting their sessions, metadata, roles and account info.
![Manage users](.github/user-info.png)
- ✅ One of the biggest criticisms of Java is memory usage. We have three solutions to this:
- The most frequent auth-related operation is session verification - this happens within the backend SDK (node, python, Go) without contacting the Java core. Therefore, a single instance of the core can handle several 10s of thousands of users fairly easily.
- We have carefully chosen our dependencies. For eg: we use an embedded tomcat server instead of a higher-level web framework.
- We also plan on using [GraalVM](https://www.graalvm.org/) in the future and this can reduce memory usage by 95%!
- ✅ If you require any modifications to the auth APIs, those would need to be done on the backend SDK level (for example Node, Golang, Python..). So you'd rarely need to directly modify/work with the Java code in this repo.
## 🔥 SuperTokens vs others
Please find a detailed comparison chart [on our website](https://supertokens.io/pricing#comparison-chart)
## 🛠️ Building from source
Please see our [wiki](https://github.com/supertokens/supertokens-core/wiki/Building-from-source) for instructions.
## 👥 Community
- [Discord](https://supertokens.io/discord)
- [Email](mailto:team@supertokens.io)
If you think this is a project you could use in the future, please :star2: this repository!
### Contributors (across all SuperTokens repositories)
<table>
<tr>
<td align="center"><a href="https://github.com/rishabhpoddar"><img src="https://avatars1.githubusercontent.com/u/2976287?s=460&u=d0cf2463df96fbdf1138cf74f88d7cf41415b238&v=4" width="100px;" alt=""/><br /><sub><b>Rishabh Poddar</b></sub></a></td>
@ -205,7 +161,8 @@ If you think this is a project you could use in the future, please :star2: this
<td align="center"><a href="https://github.com/taijuten"><img src="https://avatars.githubusercontent.com/u/4288526?v=4" width="100px;" alt=""/><br /><sub><b>Ralph Lawrence</b></sub></a></td>
<td align="center"><a href="https://github.com/christopher-kapic"><img src="https://avatars.githubusercontent.com/u/59740769?v=4" width="100px;" alt=""/><br /><sub><b>Christopher Kapic</b></sub></a></td>
<td align="center"><a href="https://github.com/Hanzyusuf"><img src="https://avatars.githubusercontent.com/u/22171112?v=4" width="100px;" alt=""/><br /><sub><b>Hanzyusuf</b></sub></a></td>
<td align="center"><a href="https://github.com/porcellus"><img src="https://avatars.githubusercontent.com/u/1129990?v=4" width="100px;" alt=""/><br /><sub><b>Mihály Lengyel</b></sub></a></td>
<td align="center"><a href="https://github.com/porcellus"><img src="https://avatars.githubusercontent.com/u/1129990?v=4" width="100px;" alt=""/><br /><sub><b>
Mihály Lengyel</b></sub></a></td>
</tr>
<tr>
<td align="center"><a href="https://github.com/cerino-ligutom"><img src="https://avatars.githubusercontent.com/u/6721822?v=4" width="100px;" alt=""/><br /><sub><b>Cerino O. Ligutom III</b></sub></a></td>
@ -247,27 +204,19 @@ If you think this is a project you could use in the future, please :star2: this
<td align="center"><a href="https://github.com/sublimator"><img src="https://avatars.githubusercontent.com/u/525211?v=4" width="100px;" alt=""/><br /><sub><b>Nicholas Dudfield</b></sub></a></td>
<td align="center"><a href="https://github.com/Qdea"><img src="https://avatars.githubusercontent.com/u/58660439?v=4" width="100px;" alt=""/><br /><sub><b>Qdea</b></sub></a></td>
<td align="center"><a href="https://github.com/LukasKnuth"><img src="https://avatars.githubusercontent.com/u/692211?v=4" width="100px;" alt=""/><br /><sub><b>Lukas Knuth</b></sub></a></td>
<td align="center"><a href="https://github.com/melvynhills"><img src="https://avatars.githubusercontent.com/u/417315?v=4" width="100px;" alt=""/><br /><sub><b>Melvyn Hills</b></sub></a></td>
<td align="center"><a href="https://github.com/melvynhills"><img src="https://avatars.githubusercontent.com/u/417315?v=4" width="100px;" alt=""/><br /><sub><b>
Melvyn Hills</b></sub></a></td>
<tr>
<td align="center"><a href="https://github.com/mattanimation"><img src="https://avatars.githubusercontent.com/u/1426997?v=4" width="100px;" alt=""/><br /><sub><b>Matt Murray</b></sub></a></td>
<td align="center"><a href="https://github.com/constantoine"><img src="https://avatars.githubusercontent.com/u/13930958?v=4" width="100px;" alt=""/><br /><sub><b>Cléo Rebert</b></sub></a></td>
<td align="center"><a href="https://github.com/daniil-borovoy"><img src="https://avatars.githubusercontent.com/u/74528634?v=4" width="100px;" alt=""/><br /><sub><b>Daniil Borovoy</b></sub></a></td>
<td align="center"><a href="https://github.com/kriskw1999"><img src="https://avatars.githubusercontent.com/u/71312948?v=4" width="100px;" alt=""/><br /><sub><b>Krzysztof Witkowski</b></sub></a></td>
</tr>
<tr>
<td align="center"><a href="https://github.com/Lehoczky"><img src="https://avatars.githubusercontent.com/u/31937175?v=4" width="100px;" alt=""/><br /><sub><b>Lehoczky Zoltán</b></sub></a></td>
<td align="center"><a href="https://github.com/mavwolverine"><img src="https://avatars.githubusercontent.com/u/316111?v=4" width="100px;" alt=""/><br /><sub><b>Viraj Kanwade</b></sub></a></td>
<td align="center"><a href="https://github.com/anuragmerndev"><img src="https://avatars.githubusercontent.com/u/144275260?v=4" width="100px;" alt=""/><br /><sub><b>Anurag Srivastava</b></sub></a></td>
Melvyn Hills</b></sub></a></td>
</tr>
</table>
## 👩‍💻 Contributing
Please see the [CONTRIBUTING.md](https://github.com/supertokens/supertokens-core/blob/master/CONTRIBUTING.md) file for
instructions.
Please see the [CONTRIBUTING.md](https://github.com/supertokens/supertokens-core/blob/master/CONTRIBUTING.md) file for instructions.
## 📝 License
&copy; 2020-2023 SuperTokens Inc and its contributors. All rights reserved.
Portions of this software are licensed as follows:

View File

@ -8,8 +8,6 @@
plugins {
id 'application'
id 'java-library'
id "io.freefair.aspectj" version "8.13" //same as gradle version!
}
compileJava { options.encoding = "UTF-8" }
compileTestJava { options.encoding = "UTF-8" }
@ -21,37 +19,26 @@ compileTestJava { options.encoding = "UTF-8" }
// }
//}
java {
toolchain {
languageVersion.set(JavaLanguageVersion.of(21))
}
}
version = "11.3.0"
version = "6.0.21"
repositories {
mavenCentral()
maven { url 'https://build.shibboleth.net/nexus/content/repositories/releases/' }
}
dependencies {
// https://mvnrepository.com/artifact/com.google.code.gson/gson
// if this changes, remember to also change in the ee folder's build.gradle
implementation group: 'com.google.code.gson', name: 'gson', version: '2.13.1'
implementation group: 'com.google.code.gson', name: 'gson', version: '2.3.1'
// https://mvnrepository.com/artifact/com.fasterxml.jackson.dataformat/jackson-dataformat-yaml
implementation group: 'com.fasterxml.jackson.dataformat', name: 'jackson-dataformat-yaml', version: '2.18.2'
// https://mvnrepository.com/artifact/com.fasterxml.jackson.dataformat/jackson-dataformat-cbor
implementation group: 'com.fasterxml.jackson.dataformat', name: 'jackson-dataformat-cbor', version: '2.18.2'
implementation group: 'com.fasterxml.jackson.dataformat', name: 'jackson-dataformat-yaml', version: '2.16.1'
// https://mvnrepository.com/artifact/com.fasterxml.jackson.core/jackson-core
implementation group: 'com.fasterxml.jackson.core', name: 'jackson-databind', version: '2.18.2'
implementation group: 'com.fasterxml.jackson.core', name: 'jackson-databind', version: '2.16.1'
// https://mvnrepository.com/artifact/org.apache.tomcat.embed/tomcat-embed-core
api group: 'org.apache.tomcat.embed', name: 'tomcat-embed-core', version: '11.0.12'
implementation group: 'org.apache.tomcat.embed', name: 'tomcat-embed-core', version: '10.1.18'
// https://mvnrepository.com/artifact/com.google.code.findbugs/jsr305
implementation group: 'com.google.code.findbugs', name: 'jsr305', version: '3.0.2'
@ -83,18 +70,9 @@ dependencies {
// https://mvnrepository.com/artifact/com.googlecode.libphonenumber/libphonenumber/
implementation group: 'com.googlecode.libphonenumber', name: 'libphonenumber', version: '8.13.25'
// https://mvnrepository.com/artifact/com.webauthn4j/webauthn4j-core
implementation group: 'com.webauthn4j', name: 'webauthn4j-core', version: '0.28.6.RELEASE'
implementation platform("io.opentelemetry.instrumentation:opentelemetry-instrumentation-bom-alpha:2.17.0-alpha")
// Open SAML
implementation group: 'org.opensaml', name: 'opensaml-core', version: '4.3.1'
implementation group: 'org.opensaml', name: 'opensaml-saml-impl', version: '4.3.1'
implementation group: 'org.opensaml', name: 'opensaml-security-impl', version: '4.3.1'
implementation group: 'org.opensaml', name: 'opensaml-profile-impl', version: '4.3.1'
implementation group: 'org.opensaml', name: 'opensaml-xmlsec-impl', version: '4.3.1'
implementation("ch.qos.logback:logback-core:1.5.18")
implementation("ch.qos.logback:logback-classic:1.5.18")
@ -106,13 +84,10 @@ dependencies {
implementation("io.opentelemetry.semconv:opentelemetry-semconv")
implementation('org.aspectj:aspectjrt:1.9.24')
compileOnly project(":supertokens-plugin-interface")
testImplementation project(":supertokens-plugin-interface")
// this is so that we can find plugin-interface jar while testing
testImplementation project(":supertokens-plugin-interface")
testImplementation 'junit:junit:4.12'
// https://mvnrepository.com/artifact/org.mockito/mockito-core
@ -123,9 +98,8 @@ dependencies {
testImplementation 'com.tngtech.archunit:archunit-junit4:0.22.0'
// https://mvnrepository.com/artifact/com.webauthn4j/webauthn4j-test
testImplementation group: 'com.webauthn4j', name: 'webauthn4j-test', version: '0.28.6.RELEASE'
}
application {
mainClass.set("io.supertokens.Main")
}
@ -135,47 +109,43 @@ jar {
}
tasks.register('copyJars', Copy) {
task copyJars(type: Copy) {
into "$buildDir/dependencies"
from configurations.runtimeClasspath
into layout.buildDirectory.dir("dependencies")
}
test {
jvmArgs = ['-Djava.security.egd=file:/dev/urandom',
"--add-opens=java.base/java.lang=ALL-UNNAMED",
"--add-opens=java.base/java.util=ALL-UNNAMED",
"--add-opens=java.base/java.util.concurrent=ALL-UNNAMED"]
jvmArgs '-Djava.security.egd=file:/dev/urandom'
testLogging {
outputs.upToDateWhen { false }
showStandardStreams = true
}
maxParallelForks = Runtime.runtime.availableProcessors()
}
import org.gradle.api.tasks.testing.logging.TestExceptionFormat
import org.gradle.api.tasks.testing.logging.TestLogEvent
tasks.withType(Test).configureEach {
tasks.withType(Test) {
testLogging {
// set options for log level LIFECYCLE
events = [TestLogEvent.FAILED,
events TestLogEvent.FAILED,
TestLogEvent.PASSED,
TestLogEvent.SKIPPED,
TestLogEvent.STANDARD_OUT]
exceptionFormat = TestExceptionFormat.FULL
showExceptions = true
showCauses = true
showStackTraces = true
TestLogEvent.STANDARD_OUT
exceptionFormat TestExceptionFormat.FULL
showExceptions true
showCauses true
showStackTraces true
// set options for log level DEBUG and INFO
debug {
events = [TestLogEvent.STARTED,
events TestLogEvent.STARTED,
TestLogEvent.FAILED,
TestLogEvent.PASSED,
TestLogEvent.SKIPPED,
TestLogEvent.STANDARD_ERROR,
TestLogEvent.STANDARD_OUT]
exceptionFormat = TestExceptionFormat.FULL
TestLogEvent.STANDARD_OUT
exceptionFormat TestExceptionFormat.FULL
}
info.events = debug.events
info.exceptionFormat = debug.exceptionFormat

View File

@ -4,8 +4,6 @@ plugins {
repositories {
mavenCentral()
maven { url 'https://build.shibboleth.net/nexus/content/repositories/releases/' }
}
application {
@ -18,13 +16,13 @@ jar {
dependencies {
// https://mvnrepository.com/artifact/com.google.code.gson/gson
implementation group: 'com.google.code.gson', name: 'gson', version: '2.13.1'
implementation group: 'com.google.code.gson', name: 'gson', version: '2.3.1'
// https://mvnrepository.com/artifact/com.fasterxml.jackson.dataformat/jackson-dataformat-yaml
implementation group: 'com.fasterxml.jackson.dataformat', name: 'jackson-dataformat-yaml', version: '2.18.2'
implementation group: 'com.fasterxml.jackson.dataformat', name: 'jackson-dataformat-yaml', version: '2.16.1'
// https://mvnrepository.com/artifact/com.fasterxml.jackson.core/jackson-core
implementation group: 'com.fasterxml.jackson.core', name: 'jackson-databind', version: '2.18.2'
implementation group: 'com.fasterxml.jackson.core', name: 'jackson-databind', version: '2.16.1'
// https://mvnrepository.com/artifact/de.mkammerer/argon2-jvm
implementation group: 'de.mkammerer', name: 'argon2-jvm', version: '2.11'
@ -35,9 +33,9 @@ dependencies {
testImplementation group: 'junit', name: 'junit', version: '4.12'
}
tasks.register('copyJars', Copy) {
task copyJars(type: Copy) {
into "$buildDir/dependencies"
from configurations.runtimeClasspath
into layout.buildDirectory.dir("dependencies")
}
test {
@ -57,10 +55,10 @@ tasks.withType(Test) {
TestLogEvent.PASSED,
TestLogEvent.SKIPPED,
TestLogEvent.STANDARD_OUT
exceptionFormat = TestExceptionFormat.FULL
showExceptions = true
showCauses = true
showStackTraces = true
exceptionFormat TestExceptionFormat.FULL
showExceptions true
showCauses true
showStackTraces true
// set options for log level DEBUG and INFO
debug {
@ -70,7 +68,7 @@ tasks.withType(Test) {
TestLogEvent.SKIPPED,
TestLogEvent.STANDARD_ERROR,
TestLogEvent.STANDARD_OUT
exceptionFormat = TestExceptionFormat.FULL
exceptionFormat TestExceptionFormat.FULL
}
info.events = debug.events
info.exceptionFormat = debug.exceptionFormat

View File

@ -1,40 +1,55 @@
{
"_comment": "Contains list of implementation dependencies URL for this project. This is a generated file, don't modify the contents by hand.",
"list": [
{
"jar":"https://repo.maven.apache.org/maven2/com/google/code/gson/gson/2.13.1/gson-2.13.1.jar",
"name":"gson 2.13.1",
"src":"https://repo.maven.apache.org/maven2/com/google/code/gson/gson/2.13.1/gson-2.13.1-sources.jar"
},
{
"jar":"https://repo.maven.apache.org/maven2/com/google/errorprone/error_prone_annotations/2.38.0/error_prone_annotations-2.38.0.jar",
"name":"error_prone_annotations 2.38.0",
"src":"https://repo.maven.apache.org/maven2/com/google/errorprone/error_prone_annotations/2.38.0/error_prone_annotations-2.38.0-sources.jar"
},
{
"jar":"https://repo.maven.apache.org/maven2/com/fasterxml/jackson/dataformat/jackson-dataformat-yaml/2.18.2/jackson-dataformat-yaml-2.18.2.jar",
"name":"jackson-dataformat-yaml 2.18.2",
"src":"https://repo.maven.apache.org/maven2/com/fasterxml/jackson/dataformat/jackson-dataformat-yaml/2.18.2/jackson-dataformat-yaml-2.18.2-sources.jar"
},
{
"jar":"https://repo.maven.apache.org/maven2/org/yaml/snakeyaml/2.3/snakeyaml-2.3.jar",
"name":"snakeyaml 2.3",
"src":"https://repo.maven.apache.org/maven2/org/yaml/snakeyaml/2.3/snakeyaml-2.3-sources.jar"
},
{
"jar":"https://repo.maven.apache.org/maven2/com/fasterxml/jackson/core/jackson-databind/2.18.2/jackson-databind-2.18.2.jar",
"name":"jackson-databind 2.18.2",
"src":"https://repo.maven.apache.org/maven2/com/fasterxml/jackson/core/jackson-databind/2.18.2/jackson-databind-2.18.2-sources.jar"
},
{
"jar":"https://repo.maven.apache.org/maven2/de/mkammerer/argon2-jvm/2.11/argon2-jvm-2.11.jar",
"name":"argon2-jvm 2.11",
"src":"https://repo.maven.apache.org/maven2/de/mkammerer/argon2-jvm/2.11/argon2-jvm-2.11-sources.jar"
},
{
"jar":"https://repo.maven.apache.org/maven2/org/mindrot/jbcrypt/0.4/jbcrypt-0.4.jar",
"name":"jbcrypt 0.4",
"src":"https://repo.maven.apache.org/maven2/org/mindrot/jbcrypt/0.4/jbcrypt-0.4-sources.jar"
}
]
"_comment": "Contains list of implementation dependencies URL for this project",
"list": [
{
"jar": "https://repo1.maven.org/maven2/com/google/code/gson/gson/2.3.1/gson-2.3.1.jar",
"name": "Gson 2.3.1",
"src": "https://repo1.maven.org/maven2/com/google/code/gson/gson/2.3.1/gson-2.3.1-sources.jar"
},
{
"jar": "https://repo1.maven.org/maven2/com/fasterxml/jackson/dataformat/jackson-dataformat-yaml/2.16.1/jackson-dataformat-yaml-2.16.1.jar",
"name": "Jackson Dataformat 2.16.1",
"src": "https://repo1.maven.org/maven2/com/fasterxml/jackson/dataformat/jackson-dataformat-yaml/2.16.1/jackson-dataformat-yaml-2.16.1-sources.jar"
},
{
"jar": "https://repo1.maven.org/maven2/org/yaml/snakeyaml/2.2/snakeyaml-2.2.jar",
"name": "SnakeYAML 2.2",
"src": "https://repo1.maven.org/maven2/org/yaml/snakeyaml/2.2/snakeyaml-2.2-sources.jar"
},
{
"jar": "https://repo1.maven.org/maven2/com/fasterxml/jackson/core/jackson-core/2.16.1/jackson-core-2.16.1.jar",
"name": "Jackson core 2.16.1",
"src": "https://repo1.maven.org/maven2/com/fasterxml/jackson/core/jackson-core/2.16.1/jackson-core-2.16.1-sources.jar"
},
{
"jar": "https://repo1.maven.org/maven2/com/fasterxml/jackson/core/jackson-databind/2.16.1/jackson-databind-2.16.1.jar",
"name": "Jackson databind 2.16.1",
"src": "https://repo1.maven.org/maven2/com/fasterxml/jackson/core/jackson-databind/2.16.1/jackson-databind-2.16.1-sources.jar"
},
{
"jar": "https://repo1.maven.org/maven2/com/fasterxml/jackson/core/jackson-annotations/2.16.1/jackson-annotations-2.16.1.jar",
"name": "Jackson annotation 2.16.1",
"src": "https://repo1.maven.org/maven2/com/fasterxml/jackson/core/jackson-annotations/2.16.1/jackson-annotations-2.16.1-sources.jar"
},
{
"jar": "https://repo1.maven.org/maven2/de/mkammerer/argon2-jvm/2.11/argon2-jvm-2.11.jar",
"name": "Argon2-jvm 2.11",
"src": "https://repo1.maven.org/maven2/de/mkammerer/argon2-jvm/2.11/argon2-jvm-2.11-sources.jar"
},
{
"jar": "https://repo1.maven.org/maven2/de/mkammerer/argon2-jvm-nolibs/2.11/argon2-jvm-nolibs-2.11.jar",
"name": "Argon2-jvm no libs 2.11",
"src": "https://repo1.maven.org/maven2/de/mkammerer/argon2-jvm-nolibs/2.11/argon2-jvm-nolibs-2.11-sources.jar"
},
{
"jar": "https://repo1.maven.org/maven2/org/mindrot/jbcrypt/0.4/jbcrypt-0.4.jar",
"name": "SQLite JDBC Driver 3.30.1",
"src": "https://repo1.maven.org/maven2/org/mindrot/jbcrypt/0.4/jbcrypt-0.4-sources.jar"
},
{
"jar": "https://repo1.maven.org/maven2/net/java/dev/jna/jna/5.8.0/jna-5.8.0.jar",
"name": "JNA 5.8.0",
"src": "https://repo1.maven.org/maven2/net/java/dev/jna/jna/5.8.0/jna-5.8.0-sources.jar"
}
]
}

Binary file not shown.

View File

@ -87,7 +87,7 @@ public class Main {
installationDir = args[1];
if (args.length == 2) {
args = new String[]{args[0], args[1], "--help"};
args = new String[] { args[0], args[1], "--help" };
}
command = args[2];

View File

@ -109,8 +109,7 @@ public class HashingCalibrateHandler extends CommandHandler {
@Override
public String getUsage() {
return "supertokens hashingCalibrate --with_alg=<argon2 | bcrypt> [--with_argon2_hashing_pool_size=1] "
+
"[--with_argon2_max_memory_mb=1024] [--with_argon2_parallelism=<value>] [--with_time_per_hash_ms=300]";
+ "[--with_argon2_max_memory_mb=1024] [--with_argon2_parallelism=<value>] [--with_time_per_hash_ms=300]";
}
@Override

View File

@ -82,7 +82,7 @@ public class InstallHandler extends CommandHandler {
Process process = pb.start();
String result = "";
try (InputStreamReader in = new InputStreamReader(process.getInputStream());
BufferedReader reader = new BufferedReader(in)) {
BufferedReader reader = new BufferedReader(in)) {
StringBuilder builder = new StringBuilder();
String line = null;
while ((line = reader.readLine()) != null) {

View File

@ -35,7 +35,6 @@ public class StartHandler extends CommandHandler {
public void doCommand(String installationDir, boolean viaInstaller, String[] args) {
String space = CLIOptionsParser.parseOption("--with-space", args);
String configPath = CLIOptionsParser.parseOption("--with-config", args);
String tempDirLocation = CLIOptionsParser.parseOption("--with-temp-dir", args);
if (configPath != null) {
configPath = new File(configPath).getAbsolutePath();
}
@ -43,38 +42,12 @@ public class StartHandler extends CommandHandler {
String host = CLIOptionsParser.parseOption("--host", args);
boolean foreground = CLIOptionsParser.hasKey("--foreground", args);
boolean forceNoInMemDB = CLIOptionsParser.hasKey("--no-in-mem-db", args);
boolean javaagentEnabled = CLIOptionsParser.hasKey("--javaagent", args);
boolean jmxEnabled = CLIOptionsParser.hasKey("--jmx", args);
String jmxPort = CLIOptionsParser.parseOption("--jmx-port", args);
String jmxAuthenticate = CLIOptionsParser.parseOption("--jmx-authenticate", args);
String jmxSSL = CLIOptionsParser.parseOption("--jmx-ssl", args);
List<String> commands = new ArrayList<>();
if (OperatingSystem.getOS() == OperatingSystem.OS.WINDOWS) {
commands.add(installationDir + "jre\\bin\\java.exe");
commands.add("-classpath");
commands.add("\"" + installationDir + "core\\*\";\"" + installationDir + "plugin-interface\\*\"");
if (javaagentEnabled) {
commands.add("-javaagent:\"" + installationDir + "agent\\opentelemetry-javaagent.jar\"");
}
if (jmxEnabled) {
commands.add("-Dcom.sun.management.jmxremote");
if (jmxPort != null) {
commands.add("-Dcom.sun.management.jmxremote.port=" + jmxPort);
} else {
commands.add("-Dcom.sun.management.jmxremote.port=9010");
}
if (jmxAuthenticate != null) {
commands.add("-Dcom.sun.management.jmxremote.authenticate=" + jmxAuthenticate);
} else {
commands.add("-Dcom.sun.management.jmxremote.authenticate=false");
}
if (jmxSSL != null) {
commands.add("-Dcom.sun.management.jmxremote.ssl=" + jmxSSL);
} else {
commands.add("-Dcom.sun.management.jmxremote.ssl=false");
}
}
if (space != null) {
commands.add("-Xmx" + space + "M");
}
@ -94,36 +67,12 @@ public class StartHandler extends CommandHandler {
if (forceNoInMemDB) {
commands.add("forceNoInMemDB=true");
}
if(tempDirLocation != null && !tempDirLocation.isEmpty()) {
commands.add("tempDirLocation=" + tempDirLocation);
}
} else {
commands.add(installationDir + "jre/bin/java");
commands.add("-Djava.security.egd=file:/dev/urandom");
commands.add("-classpath");
commands.add(
installationDir + "core/*:" + installationDir + "plugin-interface/*:" + installationDir + "ee/*");
if (javaagentEnabled) {
commands.add("-javaagent:" + installationDir + "agent/opentelemetry-javaagent.jar");
}
if (jmxEnabled) {
commands.add("-Dcom.sun.management.jmxremote");
if (jmxPort != null) {
commands.add("-Dcom.sun.management.jmxremote.port=" + jmxPort);
} else {
commands.add("-Dcom.sun.management.jmxremote.port=9010");
}
if (jmxAuthenticate != null) {
commands.add("-Dcom.sun.management.jmxremote.authenticate=" + jmxAuthenticate);
} else {
commands.add("-Dcom.sun.management.jmxremote.authenticate=false");
}
if (jmxSSL != null) {
commands.add("-Dcom.sun.management.jmxremote.ssl=" + jmxSSL);
} else {
commands.add("-Dcom.sun.management.jmxremote.ssl=false");
}
}
if (space != null) {
commands.add("-Xmx" + space + "M");
}
@ -141,18 +90,14 @@ public class StartHandler extends CommandHandler {
if (forceNoInMemDB) {
commands.add("forceNoInMemDB=true");
}
if(tempDirLocation != null && !tempDirLocation.isEmpty()) {
commands.add("tempDirLocation=" + tempDirLocation);
}
}
if (!foreground) {
try {
ProcessBuilder pb = new ProcessBuilder(commands);
Logging.info("Command to be run: " + String.join(" ", pb.command()));
pb.redirectErrorStream(true);
Process process = pb.start();
try (InputStreamReader in = new InputStreamReader(process.getInputStream());
BufferedReader reader = new BufferedReader(in)) {
BufferedReader reader = new BufferedReader(in)) {
String line;
boolean success = false;
while ((line = reader.readLine()) != null) {
@ -227,15 +172,6 @@ public class StartHandler extends CommandHandler {
"Sets the host on which this instance of SuperTokens should run. Example: \"--host=192.168.0.1\""));
options.add(
new Option("--foreground", "Runs this instance of SuperTokens in the foreground (not as a daemon)"));
options.add(
new Option("--with-temp-dir", "Uses the passed dir as temp dir, instead of the internal default."));
options.add(new Option("--javaagent", "Enables the OpenTelemetry Javaagent for tracing and metrics."));
options.add(new Option("--jmx", "Enables JMX management and monitoring."));
options.add(new Option("--jmx-port", "Sets the port for JMX. Defaults to 9010 if --jmx is passed."));
options.add(new Option("--jmx-authenticate",
"Sets whether JMX authentication is enabled or not. Defaults to false if --jmx is passed."));
options.add(new Option("--jmx-ssl",
"Sets whether JMX SSL is enabled or not. Defaults to false if --jmx is passed."));
return options;
}

View File

@ -48,7 +48,7 @@ core_config_version: 0
# password_reset_token_lifetime:
# (DIFFERENT_ACROSS_TENANTS | OPTIONAL | Default: 86400000 (1 day)) long value. Time in milliseconds for how long an
# (DIFFERENT_ACROSS_TENANTS | OPTIONAL | Default: 86400000 (1 day)) long value. Time in milliseconds for how long an
# email verification token / link is valid for.
# email_verification_token_lifetime:
@ -139,7 +139,7 @@ core_config_version: 0
# (OPTIONAL | Default: null). This is used when deploying the core in SuperTokens SaaS infrastructure. If set, limits
# what database information is shown to / modifiable by the dev when they query the core to get the information about
# their tenants. It only exposes that information when this key is used instead of the regular api_keys config.
# their tenants. It only exposes that information when this key is used instead of the regular api_keys config.
# supertokens_saas_secret:
# (DIFFERENT_ACROSS_APPS | OPTIONAL | Default: null). This is used when the core needs to assume a specific CDI version
@ -147,57 +147,10 @@ core_config_version: 0
# CDI.
# supertokens_max_cdi_version:
# (OPTIONAL | Default: null) string value. If specified, the supertokens service will only load the specified CUD even
# if there are more CUDs in the database and block all other CUDs from being used from this instance.
# supertokens_saas_load_only_cud:
# (OPTIONAL | Default: null) string value. If specified, the core uses this URL to connect to the OAuth provider
# public service.
# oauth_provider_public_service_url:
# (OPTIONAL | Default: null) string value. If specified, the core uses this URL to connect to the OAuth provider admin
# service.
# oauth_provider_admin_service_url:
# (OPTIONAL | Default: null) string value. If specified, the core uses this URL to replace the default
# consent and login URLs to {apiDomain}.
# oauth_provider_consent_login_base_url:
# (OPTIONAL | Default: oauth_provider_public_service_url) If specified, the core uses this URL to parse responses from
# the oauth provider when the oauth provider's internal address differs from the known public provider address.
# oauth_provider_url_configured_in_oauth_provider:
# (Optional | Default: null) string value. The encryption key used for saving OAuth client secret on the database.
# oauth_client_secret_encryption_key:
# (DIFFERENT_ACROSS_APPS | OPTIONAL | Default: number of available processor cores) int value. If specified,
# the supertokens core will use the specified number of threads to complete the migration of users.
# bulk_migration_parallelism:
# (DIFFERENT_ACROSS_APPS | OPTIONAL | Default: 8000) int value. If specified, the supertokens core will load the
# specified number of users for migrating in one single batch.
# bulk_migration_batch_size:
# (DIFFERENT_ACROSS_APPS | OPTIONAL | Default: 3600000) long value. Time in milliseconds for how long a webauthn
# account recovery token is valid for.
# webauthn_recover_account_token_lifetime:
# (OPTIONAL | Default: null) string value. The URL of the OpenTelemetry collector to which the core
# (OPTIONAL | Default: http://localhost:4317) string value. The URL of the OpenTelemetry collector to which the core
# will send telemetry data. This should be in the format http://<host>:<port> or https://<host>:<port>.
# otel_collector_connection_uri:
# (OPTIONAL | Default: false) boolean value. Enables or disables the deadlock logger.
# deadlock_logger_enable:
# (OPTIONAL | Default: null) string value. If specified, uses this URL as ACS URL for handling legacy SAML clients
# saml_legacy_acs_url:
# (OPTIONAL | Default: https://saml.supertokens.com) string value. Service provider's entity ID.
# saml_sp_entity_id:
# OPTIONAL | Default: 300000) long value. Duration for which SAML claims will be valid before it is consumed
# saml_claims_validity:
# OPTIONAL | Default: 300000) long value. Duration for which SAML relay state will be valid before it is consumed
# saml_relay_state_validity:

View File

@ -16,13 +16,6 @@
"2.19",
"2.20",
"2.21",
"3.0",
"3.1",
"4.0",
"5.0",
"5.1",
"5.2",
"5.3",
"5.4"
"3.0"
]
}
}

View File

@ -49,7 +49,7 @@ core_config_version: 0
# password_reset_token_lifetime:
# (DIFFERENT_ACROSS_TENANTS | OPTIONAL | Default: 86400000 (1 day)) long value. Time in milliseconds for how long an
# (DIFFERENT_ACROSS_TENANTS | OPTIONAL | Default: 86400000 (1 day)) long value. Time in milliseconds for how long an
# email verification token / link is valid for.
# email_verification_token_lifetime:
@ -140,7 +140,7 @@ disable_telemetry: true
# (OPTIONAL | Default: null). This is used when deploying the core in SuperTokens SaaS infrastructure. If set, limits
# what database information is shown to / modifiable by the dev when they query the core to get the information about
# their tenants. It only exposes that information when this key is used instead of the regular api_keys config.
# their tenants. It only exposes that information when this key is used instead of the regular api_keys config.
# supertokens_saas_secret:
# (DIFFERENT_ACROSS_APPS | OPTIONAL | Default: null). This is used when the core needs to assume a specific CDI version
@ -152,52 +152,6 @@ disable_telemetry: true
# if there are more CUDs in the database and block all other CUDs from being used from this instance.
# supertokens_saas_load_only_cud:
# (OPTIONAL | Default: null) string value. If specified, the core uses this URL to connect to the OAuth provider
# public service.
# oauth_provider_public_service_url:
# (OPTIONAL | Default: null) string value. If specified, the core uses this URL to connect to the OAuth provider admin
# service.
# oauth_provider_admin_service_url:
# (OPTIONAL | Default: null) string value. If specified, the core uses this URL to replace the default
# consent and login URLs to {apiDomain}.
# oauth_provider_consent_login_base_url:
# (OPTIONAL | Default: oauth_provider_public_service_url) If specified, the core uses this URL to parse responses from
# the oauth provider when the oauth provider's internal address differs from the known public provider address.
# oauth_provider_url_configured_in_oauth_provider:
# (Optional | Default: null) string value. The encryption key used for saving OAuth client secret on the database.
# oauth_client_secret_encryption_key:
# (DIFFERENT_ACROSS_APPS | OPTIONAL | Default: number of available processor cores) int value. If specified,
# the supertokens core will use the specified number of threads to complete the migration of users.
# bulk_migration_parallelism:
# (DIFFERENT_ACROSS_APPS | OPTIONAL | Default: 8000) int value. If specified, the supertokens core will load the
# specified number of users for migrating in one single batch.
# bulk_migration_batch_size:
# (DIFFERENT_ACROSS_APPS | OPTIONAL | Default: 3600000) long value. Time in milliseconds for how long a webauthn
# account recovery token is valid for.
# webauthn_recover_account_token_lifetime:
# (OPTIONAL | Default: null) string value. The URL of the OpenTelemetry collector to which the core
# (OPTIONAL | Default: http://localhost:4317) string value. The URL of the OpenTelemetry collector to which the core
# will send telemetry data. This should be in the format http://<host>:<port> or https://<host>:<port>.
# otel_collector_connection_uri:
# (OPTIONAL | Default: false) boolean value. Enables or disables the deadlock logger.
# deadlock_logger_enable:
# (OPTIONAL | Default: null) string value. If specified, uses this URL as ACS URL for handling legacy SAML clients
saml_legacy_acs_url: "http://localhost:5225/api/oauth/saml"
# (OPTIONAL | Default: https://saml.supertokens.com) string value. Service provider's entity ID.
# saml_sp_entity_id:
# OPTIONAL | Default: 300000) long value. Duration for which SAML claims will be valid before it is consumed
# saml_claims_validity:
# OPTIONAL | Default: 300000) long value. Duration for which SAML relay state will be valid before it is consumed
# saml_relay_state_validity:

View File

@ -18,9 +18,9 @@ dependencies {
testImplementation group: 'junit', name: 'junit', version: '4.12'
}
tasks.register('copyJars', Copy) {
task copyJars(type: Copy) {
into "$buildDir/dependencies"
from configurations.runtimeClasspath
into layout.buildDirectory.dir("dependencies")
}
test {
@ -56,10 +56,10 @@ tasks.withType(Test) {
TestLogEvent.PASSED,
TestLogEvent.SKIPPED,
TestLogEvent.STANDARD_OUT
exceptionFormat = TestExceptionFormat.FULL
showExceptions = true
showCauses = true
showStackTraces = true
exceptionFormat TestExceptionFormat.FULL
showExceptions true
showCauses true
showStackTraces true
// set options for log level DEBUG and INFO
debug {
@ -69,7 +69,7 @@ tasks.withType(Test) {
TestLogEvent.SKIPPED,
TestLogEvent.STANDARD_ERROR,
TestLogEvent.STANDARD_OUT
exceptionFormat = TestExceptionFormat.FULL
exceptionFormat TestExceptionFormat.FULL
}
info.events = debug.events
info.exceptionFormat = debug.exceptionFormat

Binary file not shown.

View File

@ -14,7 +14,6 @@ exitIfNeeded
exitIfNeeded
(cd ../../ && ./gradlew :$prefix-core:downloader:copyJars < /dev/null)
exitIfNeeded

View File

@ -2,12 +2,10 @@ plugins {
id 'java-library'
}
version = 'unspecified'
version 'unspecified'
repositories {
mavenCentral()
maven { url 'https://build.shibboleth.net/nexus/content/repositories/releases/' }
}
jar {
@ -15,7 +13,7 @@ jar {
}
dependencies {
compileOnly group: 'com.google.code.gson', name: 'gson', version: '2.13.1'
compileOnly group: 'com.google.code.gson', name: 'gson', version: '2.3.1'
compileOnly project(":supertokens-plugin-interface")
testImplementation project(":supertokens-plugin-interface")
@ -37,13 +35,13 @@ dependencies {
testImplementation group: 'org.mockito', name: 'mockito-core', version: '3.1.0'
// https://mvnrepository.com/artifact/org.apache.tomcat.embed/tomcat-embed-core
testImplementation group: 'org.apache.tomcat.embed', name: 'tomcat-embed-core', version: '11.0.5'
testImplementation group: 'org.apache.tomcat.embed', name: 'tomcat-embed-core', version: '10.1.18'
// https://mvnrepository.com/artifact/ch.qos.logback/logback-classic
testImplementation group: 'ch.qos.logback', name: 'logback-classic', version: '1.5.13'
testImplementation group: 'ch.qos.logback', name: 'logback-classic', version: '1.4.14'
// https://mvnrepository.com/artifact/com.google.code.gson/gson
testImplementation group: 'com.google.code.gson', name: 'gson', version: '2.13.1'
testImplementation group: 'com.google.code.gson', name: 'gson', version: '2.3.1'
testImplementation 'com.tngtech.archunit:archunit-junit4:0.22.0'
@ -54,18 +52,17 @@ dependencies {
testImplementation group: 'com.fasterxml.jackson.core', name: 'jackson-databind', version: '2.16.1'
testImplementation group: 'org.jetbrains', name: 'annotations', version: '13.0'
}
tasks.register('copyJars', Copy) {
task copyJars(type: Copy) {
into "$buildDir/dependencies"
from configurations.runtimeClasspath
into layout.buildDirectory.dir("dependencies")
}
def interfaceName = "io.supertokens.featureflag.EEFeatureFlagInterface"
def className = "io.supertokens.ee.EEFeatureFlag"
tasks.register('generateMetaInf') {
task generateMetaInf {
doFirst {
mkdir "src/main/resources/META-INF/services"
file("src/main/resources/META-INF/services/${interfaceName}").text = "${className}"
@ -92,10 +89,10 @@ tasks.withType(Test) {
TestLogEvent.PASSED,
TestLogEvent.SKIPPED,
TestLogEvent.STANDARD_OUT
exceptionFormat = TestExceptionFormat.FULL
showExceptions = true
showCauses = true
showStackTraces = true
exceptionFormat TestExceptionFormat.FULL
showExceptions true
showCauses true
showStackTraces true
// set options for log level DEBUG and INFO
debug {
@ -105,7 +102,7 @@ tasks.withType(Test) {
TestLogEvent.SKIPPED,
TestLogEvent.STANDARD_ERROR,
TestLogEvent.STANDARD_OUT
exceptionFormat = TestExceptionFormat.FULL
exceptionFormat TestExceptionFormat.FULL
}
info.events = debug.events
info.exceptionFormat = debug.exceptionFormat

Binary file not shown.

View File

@ -14,7 +14,6 @@ exitIfNeeded
exitIfNeeded
(cd ../../ && ./gradlew :$prefix-core:ee:copyJars < /dev/null)
exitIfNeeded

View File

@ -24,7 +24,6 @@ import io.supertokens.pluginInterface.ActiveUsersStorage;
import io.supertokens.pluginInterface.KeyValueInfo;
import io.supertokens.pluginInterface.STORAGE_TYPE;
import io.supertokens.pluginInterface.Storage;
import io.supertokens.pluginInterface.StorageUtils;
import io.supertokens.pluginInterface.authRecipe.AuthRecipeStorage;
import io.supertokens.pluginInterface.dashboard.sqlStorage.DashboardSQLStorage;
import io.supertokens.pluginInterface.exceptions.StorageQueryException;
@ -33,8 +32,6 @@ import io.supertokens.pluginInterface.multitenancy.TenantConfig;
import io.supertokens.pluginInterface.multitenancy.TenantIdentifier;
import io.supertokens.pluginInterface.multitenancy.ThirdPartyConfig;
import io.supertokens.pluginInterface.multitenancy.exceptions.TenantOrAppNotFoundException;
import io.supertokens.pluginInterface.oauth.OAuthStorage;
import io.supertokens.pluginInterface.saml.SAMLStorage;
import io.supertokens.pluginInterface.session.sqlStorage.SessionSQLStorage;
import io.supertokens.storageLayer.StorageLayer;
import io.supertokens.utils.Utils;
@ -64,7 +61,7 @@ public class EEFeatureFlag implements io.supertokens.featureflag.EEFeatureFlagIn
private static List<JsonObject> licenseCheckRequests = new ArrayList<>();
private static final String[] ENTERPRISE_THIRD_PARTY_IDS = new String[]{
private static final String[] ENTERPRISE_THIRD_PARTY_IDS = new String[] {
"google-workspaces",
"okta",
"active-directory",
@ -109,8 +106,7 @@ public class EEFeatureFlag implements io.supertokens.featureflag.EEFeatureFlagIn
try {
this.syncFeatureFlagWithLicenseKey();
} catch (HttpResponseException | IOException e) {
Logging.error(main, appIdentifier.getAsPublicTenantIdentifier(), "API Error during constructor sync", false,
e);
Logging.error(main, appIdentifier.getAsPublicTenantIdentifier(), "API Error during constructor sync", false, e);
// server request failed. we ignore for now as later on it will sync up anyway.
} catch (InvalidLicenseKeyException ignored) {
// the license key that was in the db was invalid. If this error is thrown,
@ -183,13 +179,42 @@ public class EEFeatureFlag implements io.supertokens.featureflag.EEFeatureFlagIn
private JsonObject getDashboardLoginStats() throws TenantOrAppNotFoundException, StorageQueryException {
JsonObject stats = new JsonObject();
int userCount = ((DashboardSQLStorage) StorageLayer.getStorage(this.appIdentifier.getAsPublicTenantIdentifier(),
main))
int userCount = ((DashboardSQLStorage) StorageLayer.getStorage(this.appIdentifier.getAsPublicTenantIdentifier(), main))
.getAllDashboardUsers(this.appIdentifier).length;
stats.addProperty("user_count", userCount);
return stats;
}
private JsonObject getTOTPStats() throws StorageQueryException, TenantOrAppNotFoundException {
JsonObject totpStats = new JsonObject();
JsonArray totpMauArr = new JsonArray();
Storage[] storages = StorageLayer.getStoragesForApp(main, this.appIdentifier);
// TODO Active users are present only on public tenant and TOTP users may be present on different storages
Storage publicTenantStorage = StorageLayer.getStorage(this.appIdentifier.getAsPublicTenantIdentifier(), main);
final long now = System.currentTimeMillis();
for (int i = 1; i <= 31; i++) {
long timestamp = now - (i * 24 * 60 * 60 * 1000L);
int totpMau = 0;
// TODO Need to figure out a way to combine the data from different storages to get the final stats
// for (Storage storage : storages) {
totpMau += ((ActiveUsersStorage) publicTenantStorage).countUsersEnabledTotpAndActiveSince(this.appIdentifier, timestamp);
// }
totpMauArr.add(new JsonPrimitive(totpMau));
}
totpStats.add("maus", totpMauArr);
int totpTotalUsers = 0;
for (Storage storage : storages) {
totpTotalUsers += ((ActiveUsersStorage) storage).countUsersEnabledTotp(this.appIdentifier);
}
totpStats.addProperty("total_users", totpTotalUsers);
return totpStats;
}
private boolean isEnterpriseThirdPartyId(String thirdPartyId) {
for (String enterpriseThirdPartyId : ENTERPRISE_THIRD_PARTY_IDS) {
if (thirdPartyId.startsWith(enterpriseThirdPartyId)) {
@ -199,41 +224,6 @@ public class EEFeatureFlag implements io.supertokens.featureflag.EEFeatureFlagIn
return false;
}
private JsonObject getMFAStats() throws StorageQueryException, TenantOrAppNotFoundException {
// TODO: Active users are present only on public tenant and MFA users may be
// present on different storages
JsonObject result = new JsonObject();
// Commenting out these stats for now as they are very CPU intensive and reduces the performance
// of other API calls while this is running.
// Also, we are not currently using these stats.
// Storage[] storages = StorageLayer.getStoragesForApp(main, this.appIdentifier);
// int totalUserCountWithMoreThanOneLoginMethod = 0;
// int[] maus = new int[31];
// long now = System.currentTimeMillis();
// for (Storage storage : storages) {
// totalUserCountWithMoreThanOneLoginMethod += ((AuthRecipeStorage) storage)
// .getUsersCountWithMoreThanOneLoginMethodOrTOTPEnabled(this.appIdentifier);
// for (int i = 1; i <= 31; i++) {
// long timestamp = now - (i * 24 * 60 * 60 * 1000L);
// // `maus[i-1]` since i starts from 1
// maus[i - 1] += ((ActiveUsersStorage) storage)
// .countUsersThatHaveMoreThanOneLoginMethodOrTOTPEnabledAndActiveSince(appIdentifier, timestamp);
// }
// }
// result.addProperty("totalUserCountWithMoreThanOneLoginMethodOrTOTPEnabled",
// totalUserCountWithMoreThanOneLoginMethod);
// result.add("mauWithMoreThanOneLoginMethodOrTOTPEnabled", new Gson().toJsonTree(maus));
return result;
}
private JsonObject getMultiTenancyStats()
throws TenantOrAppNotFoundException, StorageQueryException {
JsonObject stats = new JsonObject();
@ -252,25 +242,9 @@ public class EEFeatureFlag implements io.supertokens.featureflag.EEFeatureFlagIn
Storage storage = StorageLayer.getStorage(tenantConfig.tenantIdentifier, main);
long usersCount = ((AuthRecipeStorage) storage).getUsersCount(tenantConfig.tenantIdentifier, null);
boolean hasUsersOrSessions = (usersCount > 0);
hasUsersOrSessions = hasUsersOrSessions ||
((SessionSQLStorage) storage).getNumberOfSessions(tenantConfig.tenantIdentifier) > 0;
hasUsersOrSessions = hasUsersOrSessions || ((SessionSQLStorage) storage).getNumberOfSessions(tenantConfig.tenantIdentifier) > 0;
tenantStat.addProperty("usersCount", usersCount);
tenantStat.addProperty("hasUsersOrSessions", hasUsersOrSessions);
if (tenantConfig.firstFactors != null) {
JsonArray firstFactors = new JsonArray();
for (String firstFactor : tenantConfig.firstFactors) {
firstFactors.add(new JsonPrimitive(firstFactor));
}
tenantStat.add("firstFactors", firstFactors);
}
if (tenantConfig.requiredSecondaryFactors != null) {
JsonArray requiredSecondaryFactors = new JsonArray();
for (String requiredSecondaryFactor : tenantConfig.requiredSecondaryFactors) {
requiredSecondaryFactors.add(new JsonPrimitive(requiredSecondaryFactor));
}
tenantStat.add("requiredSecondaryFactors", requiredSecondaryFactors);
}
try {
tenantStat.addProperty("userPoolId", Utils.hashSHA256(storage.getUserPoolId()));
@ -280,15 +254,12 @@ public class EEFeatureFlag implements io.supertokens.featureflag.EEFeatureFlagIn
}
{
boolean hasEnterpriseLogin = false;
if (tenantConfig.thirdPartyConfig.providers != null) {
for (ThirdPartyConfig.Provider provider : tenantConfig.thirdPartyConfig.providers) {
if (isEnterpriseThirdPartyId(provider.thirdPartyId)) {
hasEnterpriseLogin = true;
break;
}
for (ThirdPartyConfig.Provider provider : tenantConfig.thirdPartyConfig.providers) {
if (isEnterpriseThirdPartyId(provider.thirdPartyId)) {
hasEnterpriseLogin = true;
break;
}
}
tenantStat.addProperty("hasEnterpriseLogin", hasEnterpriseLogin);
}
@ -300,79 +271,6 @@ public class EEFeatureFlag implements io.supertokens.featureflag.EEFeatureFlagIn
return stats;
}
private JsonObject getAccountLinkingStats() throws StorageQueryException, TenantOrAppNotFoundException {
JsonObject result = new JsonObject();
Storage[] storages = StorageLayer.getStoragesForApp(main, this.appIdentifier);
boolean usesAccountLinking = false;
for (Storage storage : storages) {
if (((AuthRecipeStorage) storage).checkIfUsesAccountLinking(this.appIdentifier)) {
usesAccountLinking = true;
break;
}
}
result.addProperty("usesAccountLinking", usesAccountLinking);
// Commenting out these stats for now as they are very CPU intensive and reduces the performance
// of other API calls while this is running.
// Also, we are not currently using these stats.
// if (!usesAccountLinking) {
// result.addProperty("totalUserCountWithMoreThanOneLoginMethod", 0);
// JsonArray mauArray = new JsonArray();
// for (int i = 0; i < 31; i++) {
// mauArray.add(new JsonPrimitive(0));
// }
// result.add("mauWithMoreThanOneLoginMethod", mauArray);
// return result;
// }
// int totalUserCountWithMoreThanOneLoginMethod = 0;
// int[] maus = new int[31];
// long now = System.currentTimeMillis();
// for (Storage storage : storages) {
// totalUserCountWithMoreThanOneLoginMethod += ((AuthRecipeStorage) storage).getUsersCountWithMoreThanOneLoginMethod(
// this.appIdentifier);
// for (int i = 1; i <= 31; i++) {
// long timestamp = now - (i * 24 * 60 * 60 * 1000L);
// // `maus[i-1]` because i starts from 1
// maus[i - 1] += ((ActiveUsersStorage) storage).countUsersThatHaveMoreThanOneLoginMethodAndActiveSince(
// appIdentifier, timestamp);
// }
// }
// result.addProperty("totalUserCountWithMoreThanOneLoginMethod", totalUserCountWithMoreThanOneLoginMethod);
// result.add("mauWithMoreThanOneLoginMethod", new Gson().toJsonTree(maus));
return result;
}
private JsonObject getOAuthStats() throws StorageQueryException, TenantOrAppNotFoundException {
JsonObject result = new JsonObject();
OAuthStorage oAuthStorage = StorageUtils.getOAuthStorage(StorageLayer.getStorage(
this.appIdentifier.getAsPublicTenantIdentifier(), main));
result.addProperty("totalNumberOfClients", oAuthStorage.countTotalNumberOfOAuthClients(appIdentifier));
result.addProperty("numberOfClientCredentialsOnlyClients", oAuthStorage.countTotalNumberOfClientCredentialsOnlyOAuthClients(appIdentifier));
result.addProperty("numberOfM2MTokensAlive", oAuthStorage.countTotalNumberOfOAuthM2MTokensAlive(appIdentifier));
long now = System.currentTimeMillis();
JsonArray tokensCreatedArray = new JsonArray();
for (int i = 1; i <= 31; i++) {
long timestamp = now - (i * 24 * 60 * 60 * 1000L);
int numberOfTokensCreated = oAuthStorage.countTotalNumberOfOAuthM2MTokensCreatedSince(this.appIdentifier, timestamp);
tokensCreatedArray.add(new JsonPrimitive(numberOfTokensCreated));
}
result.add("numberOfM2MTokensCreated", tokensCreatedArray);
return result;
}
private JsonArray getMAUs() throws StorageQueryException, TenantOrAppNotFoundException {
JsonArray mauArr = new JsonArray();
long now = System.currentTimeMillis();
@ -387,50 +285,20 @@ public class EEFeatureFlag implements io.supertokens.featureflag.EEFeatureFlagIn
return mauArr;
}
private JsonObject getSAMLStats() throws TenantOrAppNotFoundException, StorageQueryException {
JsonObject stats = new JsonObject();
stats.addProperty("connectionUriDomain", this.appIdentifier.getConnectionUriDomain());
stats.addProperty("appId", this.appIdentifier.getAppId());
JsonArray tenantStats = new JsonArray();
TenantConfig[] tenantConfigs = Multitenancy.getAllTenantsForApp(this.appIdentifier, main);
for (TenantConfig tenantConfig : tenantConfigs) {
JsonObject tenantStat = new JsonObject();
tenantStat.addProperty("tenantId", tenantConfig.tenantIdentifier.getTenantId());
{
Storage storage = StorageLayer.getStorage(tenantConfig.tenantIdentifier, main);
SAMLStorage samlStorage = StorageUtils.getSAMLStorage(storage);
JsonObject stat = new JsonObject();
stat.addProperty("numberOfSAMLClients", samlStorage.countSAMLClients(tenantConfig.tenantIdentifier));
stat.add(tenantConfig.tenantIdentifier.getTenantId(), stat);
}
}
stats.add("tenants", tenantStats);
return stats;
}
@Override
public JsonObject getPaidFeatureStats() throws StorageQueryException, TenantOrAppNotFoundException {
JsonObject usageStats = new JsonObject();
if (StorageLayer.getStorage(this.appIdentifier.getAsPublicTenantIdentifier(), main).getType() !=
STORAGE_TYPE.SQL) {
if (StorageLayer.getStorage(this.appIdentifier.getAsPublicTenantIdentifier(), main).getType() != STORAGE_TYPE.SQL) {
return usageStats;
}
EE_FEATURES[] features = getEnabledEEFeaturesFromDbOrCache();
if (!this.appIdentifier.equals(new AppIdentifier(null, null)) && !Arrays.asList(features)
.contains(EE_FEATURES.MULTI_TENANCY)) { // Check for multitenancy on the base app
if (!this.appIdentifier.equals(new AppIdentifier(null, null)) && !Arrays.asList(features).contains(EE_FEATURES.MULTI_TENANCY)) { // Check for multitenancy on the base app
EE_FEATURES[] baseFeatures = FeatureFlag.getInstance(main, new AppIdentifier(null, null))
.getEnabledFeatures();
for (EE_FEATURES feature : baseFeatures) {
for (EE_FEATURES feature: baseFeatures) {
if (feature == EE_FEATURES.MULTI_TENANCY) {
features = Arrays.copyOf(features, features.length + 1);
features[features.length - 1] = EE_FEATURES.MULTI_TENANCY;
@ -443,29 +311,13 @@ public class EEFeatureFlag implements io.supertokens.featureflag.EEFeatureFlagIn
usageStats.add(EE_FEATURES.DASHBOARD_LOGIN.toString(), getDashboardLoginStats());
}
if (feature == EE_FEATURES.MFA) {
usageStats.add(EE_FEATURES.MFA.toString(), getMFAStats());
if (feature == EE_FEATURES.TOTP) {
usageStats.add(EE_FEATURES.TOTP.toString(), getTOTPStats());
}
if (feature == EE_FEATURES.MULTI_TENANCY) {
usageStats.add(EE_FEATURES.MULTI_TENANCY.toString(), getMultiTenancyStats());
}
if (feature == EE_FEATURES.ACCOUNT_LINKING) {
usageStats.add(EE_FEATURES.ACCOUNT_LINKING.toString(), getAccountLinkingStats());
}
if (feature == EE_FEATURES.SECURITY) {
usageStats.add(EE_FEATURES.SECURITY.toString(), new JsonObject());
}
if (feature == EE_FEATURES.OAUTH) {
usageStats.add(EE_FEATURES.OAUTH.toString(), getOAuthStats());
}
if (feature == EE_FEATURES.SAML) {
usageStats.add(EE_FEATURES.SAML.toString(), getSAMLStats());
}
}
usageStats.add("maus", getMAUs());
@ -538,8 +390,7 @@ public class EEFeatureFlag implements io.supertokens.featureflag.EEFeatureFlagIn
throws StorageQueryException, HttpResponseException, IOException, InvalidLicenseKeyException,
TenantOrAppNotFoundException {
try {
Logging.debug(main, appIdentifier.getAsPublicTenantIdentifier(),
"Making API call to server with licenseKey: " + licenseKey);
Logging.debug(main, appIdentifier.getAsPublicTenantIdentifier(), "Making API call to server with licenseKey: " + licenseKey);
JsonObject json = new JsonObject();
KeyValueInfo info = Telemetry.getTelemetryId(main, this.appIdentifier);
String telemetryId = info == null ? null : info.value;
@ -553,10 +404,9 @@ public class EEFeatureFlag implements io.supertokens.featureflag.EEFeatureFlagIn
if (Main.isTesting) {
licenseCheckRequests.add(json);
}
ProcessState.getInstance(main)
.addState(ProcessState.PROCESS_STATE.LICENSE_KEY_CHECK_NETWORK_CALL, null, json);
ProcessState.getInstance(main).addState(ProcessState.PROCESS_STATE.LICENSE_KEY_CHECK_NETWORK_CALL, null);
JsonObject licenseCheckResponse = HttpRequest.sendJsonPOSTRequest(this.main, REQUEST_ID,
"https://api.supertokens.com/0/st/license/check",
"https://api.supertokens.io/0/st/license/check",
json, 10000, 10000, 0);
if (licenseCheckResponse.get("status").getAsString().equalsIgnoreCase("OK")) {
Logging.debug(main, appIdentifier.getAsPublicTenantIdentifier(), "API returned OK");
@ -586,8 +436,7 @@ public class EEFeatureFlag implements io.supertokens.featureflag.EEFeatureFlagIn
throws StorageQueryException, TenantOrAppNotFoundException {
JsonArray json = new JsonArray();
Arrays.stream(features).forEach(ee_features -> json.add(new JsonPrimitive(ee_features.toString())));
Logging.debug(main, appIdentifier.getAsPublicTenantIdentifier(),
"Saving new feature flag in database: " + json);
Logging.debug(main, appIdentifier.getAsPublicTenantIdentifier(), "Saving new feature flag in database: " + json);
StorageLayer.getStorage(this.appIdentifier.getAsPublicTenantIdentifier(), main)
.setKeyValue(this.appIdentifier.getAsPublicTenantIdentifier(), FEATURE_FLAG_KEY_IN_DB,
new KeyValueInfo(json.toString()));

View File

@ -44,14 +44,14 @@ public class TestMultitenancyStats {
String[] args = {"../../"};
TestingProcessManager.TestingProcess process = TestingProcessManager.start(args);
CronTaskTest.getInstance(process.getProcess()).setIntervalInSeconds(EELicenseCheck.RESOURCE_KEY, 1);
CronTaskTest.getInstance(process.main).setIntervalInSeconds(EELicenseCheck.RESOURCE_KEY, 1);
Assert.assertNotNull(process.checkOrWaitForEvent(ProcessState.PROCESS_STATE.STARTED));
if (StorageLayer.getStorage(process.getProcess()).getType() != STORAGE_TYPE.SQL) {
return;
}
if (StorageLayer.isInMemDb(process.getProcess())) {
if (StorageLayer.isInMemDb(process.main)) {
// cause we keep all features enabled in memdb anyway
return;
}
@ -78,7 +78,6 @@ public class TestMultitenancyStats {
new EmailPasswordConfig(true),
new ThirdPartyConfig(true, null),
new PasswordlessConfig(true),
null, null,
config
), false);
@ -87,7 +86,6 @@ public class TestMultitenancyStats {
new EmailPasswordConfig(true),
new ThirdPartyConfig(true, null),
new PasswordlessConfig(true),
null, null,
config
), false);
@ -96,7 +94,6 @@ public class TestMultitenancyStats {
new EmailPasswordConfig(true),
new ThirdPartyConfig(true, null),
new PasswordlessConfig(true),
null, null,
config
), false);
}

View File

@ -16,7 +16,7 @@ public class TestingProcessManager {
String[] args = {"../../"};
TestingProcess process = TestingProcessManager.start(args);
process.checkOrWaitForEvent(PROCESS_STATE.STARTED);
process.getProcess().deleteAllInformationForTesting();
process.main.deleteAllInformationForTesting();
process.kill();
System.out.println("----------DELETE ALL INFORMATION----------");
}

View File

@ -24,8 +24,7 @@ public abstract class Utils extends Mockito {
try {
// remove config.yaml file
String workerId = System.getProperty("org.gradle.test.worker", "");
ProcessBuilder pb = new ProcessBuilder("rm", "config" + workerId + ".yaml");
ProcessBuilder pb = new ProcessBuilder("rm", "config.yaml");
pb.directory(new File(installDir));
Process process = pb.start();
process.waitFor();
@ -59,8 +58,7 @@ public abstract class Utils extends Mockito {
// if the default config is not the same as the current config, we must reset the storage layer
File ogConfig = new File("../../temp/config.yaml");
String workerId = System.getProperty("org.gradle.test.worker", "");
File currentConfig = new File("../../config" + workerId + ".yaml");
File currentConfig = new File("../../config.yaml");
if (currentConfig.isFile()) {
byte[] ogConfigContent = Files.readAllBytes(ogConfig.toPath());
byte[] currentConfigContent = Files.readAllBytes(currentConfig.toPath());
@ -69,7 +67,7 @@ public abstract class Utils extends Mockito {
}
}
ProcessBuilder pb = new ProcessBuilder("cp", "temp/config.yaml", "./config" + workerId + ".yaml");
ProcessBuilder pb = new ProcessBuilder("cp", "temp/config.yaml", "./config.yaml");
pb.directory(new File(installDir));
Process process = pb.start();
process.waitFor();
@ -98,15 +96,14 @@ public abstract class Utils extends Mockito {
String newStr = "\n# " + key + ":";
StringBuilder originalFileContent = new StringBuilder();
String workerId = System.getProperty("org.gradle.test.worker", "");
try (BufferedReader reader = new BufferedReader(new FileReader("../../config" + workerId + ".yaml"))) {
try (BufferedReader reader = new BufferedReader(new FileReader("../../config.yaml"))) {
String currentReadingLine = reader.readLine();
while (currentReadingLine != null) {
originalFileContent.append(currentReadingLine).append(System.lineSeparator());
currentReadingLine = reader.readLine();
}
String modifiedFileContent = originalFileContent.toString().replaceAll(oldStr, newStr);
try (BufferedWriter writer = new BufferedWriter(new FileWriter("../../config" + workerId + ".yaml"))) {
try (BufferedWriter writer = new BufferedWriter(new FileWriter("../../config.yaml"))) {
writer.write(modifiedFileContent);
}
}
@ -120,15 +117,14 @@ public abstract class Utils extends Mockito {
String oldStr = "\n((#\\s)?)" + key + "(:|((:\\s).+))\n";
String newStr = "\n" + key + ": " + value + "\n";
StringBuilder originalFileContent = new StringBuilder();
String workerId = System.getProperty("org.gradle.test.worker", "");
try (BufferedReader reader = new BufferedReader(new FileReader("../../config" + workerId + ".yaml"))) {
try (BufferedReader reader = new BufferedReader(new FileReader("../../config.yaml"))) {
String currentReadingLine = reader.readLine();
while (currentReadingLine != null) {
originalFileContent.append(currentReadingLine).append(System.lineSeparator());
currentReadingLine = reader.readLine();
}
String modifiedFileContent = originalFileContent.toString().replaceAll(oldStr, newStr);
try (BufferedWriter writer = new BufferedWriter(new FileWriter("../../config" + workerId + ".yaml"))) {
try (BufferedWriter writer = new BufferedWriter(new FileWriter("../../config.yaml"))) {
writer.write(modifiedFileContent);
}
}

View File

@ -38,14 +38,14 @@ public class DeleteLicenseKeyAPITest {
@Test
public void testDeletingLicenseKeyWhenItIsNotSet() throws Exception {
String[] args = {"../../"};
String[] args = { "../../" };
TestingProcessManager.TestingProcess process = TestingProcessManager.start(args);
Assert.assertNotNull(process.checkOrWaitForEvent(PROCESS_STATE.STARTED));
// check that no LicenseKey exits
try {
FeatureFlag.getInstance(process.getProcess()).getLicenseKey();
FeatureFlag.getInstance(process.main).getLicenseKey();
fail();
} catch (NoLicenseKeyFoundException ignored) {
}
@ -58,7 +58,7 @@ public class DeleteLicenseKeyAPITest {
// check that no LicenseKey exits
try {
FeatureFlag.getInstance(process.getProcess()).getLicenseKey();
FeatureFlag.getInstance(process.main).getLicenseKey();
fail();
} catch (NoLicenseKeyFoundException ignored) {
}
@ -69,7 +69,7 @@ public class DeleteLicenseKeyAPITest {
@Test
public void testDeletingLicenseKey() throws Exception {
String[] args = {"../../"};
String[] args = { "../../" };
TestingProcessManager.TestingProcess process = TestingProcessManager.start(args);
Assert.assertNotNull(process.checkOrWaitForEvent(PROCESS_STATE.STARTED));
@ -90,7 +90,7 @@ public class DeleteLicenseKeyAPITest {
// check that no LicenseKey exits
try {
FeatureFlag.getInstance(process.getProcess()).getLicenseKey();
FeatureFlag.getInstance(process.main).getLicenseKey();
fail();
} catch (NoLicenseKeyFoundException ignored) {
}

View File

@ -38,7 +38,7 @@ public class GetFeatureFlagAPITest {
TestingProcessManager.TestingProcess process = TestingProcessManager.start(args);
Assert.assertNotNull(process.checkOrWaitForEvent(PROCESS_STATE.STARTED));
if (StorageLayer.isInMemDb(process.getProcess())) {
if (StorageLayer.isInMemDb(process.main)) {
// cause we keep all features enabled in memdb anyway
return;
}
@ -72,7 +72,7 @@ public class GetFeatureFlagAPITest {
TestingProcessManager.TestingProcess process = TestingProcessManager.start(args);
Assert.assertNotNull(process.checkOrWaitForEvent(PROCESS_STATE.STARTED));
if (StorageLayer.isInMemDb(process.getProcess())) {
if (StorageLayer.isInMemDb(process.main)) {
// cause we keep all features enabled in memdb anyway
return;
}

View File

@ -85,7 +85,7 @@ public class GetLicenseKeyAPITest {
assertNotNull(process.checkOrWaitForEvent(PROCESS_STATE.STARTED));
Assert.assertNull(FeatureFlag.getInstance(process.getProcess()).getEeFeatureFlagInstance());
Assert.assertNull(FeatureFlag.getInstance(process.main).getEeFeatureFlagInstance());
Assert.assertEquals(FeatureFlag.getInstance(process.getProcess()).getEnabledFeatures().length, 0);

View File

@ -74,9 +74,9 @@ public class SetLicenseKeyAPITest {
assertNotNull(process.checkOrWaitForEvent(PROCESS_STATE.STARTED));
Assert.assertNull(FeatureFlag.getInstance(process.getProcess()).getEeFeatureFlagInstance());
Assert.assertNull(FeatureFlag.getInstance(process.main).getEeFeatureFlagInstance());
Assert.assertEquals(0, FeatureFlag.getInstance(process.getProcess()).getEnabledFeatures().length);
Assert.assertEquals(FeatureFlag.getInstance(process.getProcess()).getEnabledFeatures().length, 0);
// set license key when ee folder does not exist
JsonObject requestBody = new JsonObject();

View File

@ -2,44 +2,34 @@
"_comment": "Contains list of implementation dependencies URL for this project. This is a generated file, don't modify the contents by hand.",
"list": [
{
"jar":"https://repo.maven.apache.org/maven2/org/apache/tomcat/embed/tomcat-embed-core/11.0.12/tomcat-embed-core-11.0.12.jar",
"name":"tomcat-embed-core 11.0.12",
"src":"https://repo.maven.apache.org/maven2/org/apache/tomcat/embed/tomcat-embed-core/11.0.12/tomcat-embed-core-11.0.12-sources.jar"
"jar":"https://repo.maven.apache.org/maven2/com/google/code/gson/gson/2.3.1/gson-2.3.1.jar",
"name":"gson 2.3.1",
"src":"https://repo.maven.apache.org/maven2/com/google/code/gson/gson/2.3.1/gson-2.3.1-sources.jar"
},
{
"jar":"https://repo.maven.apache.org/maven2/org/apache/tomcat/tomcat-annotations-api/11.0.12/tomcat-annotations-api-11.0.12.jar",
"name":"tomcat-annotations-api 11.0.12",
"src":"https://repo.maven.apache.org/maven2/org/apache/tomcat/tomcat-annotations-api/11.0.12/tomcat-annotations-api-11.0.12-sources.jar"
"jar":"https://repo.maven.apache.org/maven2/com/fasterxml/jackson/dataformat/jackson-dataformat-yaml/2.16.1/jackson-dataformat-yaml-2.16.1.jar",
"name":"jackson-dataformat-yaml 2.16.1",
"src":"https://repo.maven.apache.org/maven2/com/fasterxml/jackson/dataformat/jackson-dataformat-yaml/2.16.1/jackson-dataformat-yaml-2.16.1-sources.jar"
},
{
"jar":"https://repo.maven.apache.org/maven2/com/google/code/gson/gson/2.13.1/gson-2.13.1.jar",
"name":"gson 2.13.1",
"src":"https://repo.maven.apache.org/maven2/com/google/code/gson/gson/2.13.1/gson-2.13.1-sources.jar"
"jar":"https://repo.maven.apache.org/maven2/org/yaml/snakeyaml/2.2/snakeyaml-2.2.jar",
"name":"snakeyaml 2.2",
"src":"https://repo.maven.apache.org/maven2/org/yaml/snakeyaml/2.2/snakeyaml-2.2-sources.jar"
},
{
"jar":"https://repo.maven.apache.org/maven2/com/google/errorprone/error_prone_annotations/2.38.0/error_prone_annotations-2.38.0.jar",
"name":"error_prone_annotations 2.38.0",
"src":"https://repo.maven.apache.org/maven2/com/google/errorprone/error_prone_annotations/2.38.0/error_prone_annotations-2.38.0-sources.jar"
"jar":"https://repo.maven.apache.org/maven2/com/fasterxml/jackson/core/jackson-databind/2.16.1/jackson-databind-2.16.1.jar",
"name":"jackson-databind 2.16.1",
"src":"https://repo.maven.apache.org/maven2/com/fasterxml/jackson/core/jackson-databind/2.16.1/jackson-databind-2.16.1-sources.jar"
},
{
"jar":"https://repo.maven.apache.org/maven2/com/fasterxml/jackson/dataformat/jackson-dataformat-yaml/2.18.2/jackson-dataformat-yaml-2.18.2.jar",
"name":"jackson-dataformat-yaml 2.18.2",
"src":"https://repo.maven.apache.org/maven2/com/fasterxml/jackson/dataformat/jackson-dataformat-yaml/2.18.2/jackson-dataformat-yaml-2.18.2-sources.jar"
"jar":"https://repo.maven.apache.org/maven2/org/apache/tomcat/embed/tomcat-embed-core/10.1.18/tomcat-embed-core-10.1.18.jar",
"name":"tomcat-embed-core 10.1.18",
"src":"https://repo.maven.apache.org/maven2/org/apache/tomcat/embed/tomcat-embed-core/10.1.18/tomcat-embed-core-10.1.18-sources.jar"
},
{
"jar":"https://repo.maven.apache.org/maven2/org/yaml/snakeyaml/2.3/snakeyaml-2.3.jar",
"name":"snakeyaml 2.3",
"src":"https://repo.maven.apache.org/maven2/org/yaml/snakeyaml/2.3/snakeyaml-2.3-sources.jar"
},
{
"jar":"https://repo.maven.apache.org/maven2/com/fasterxml/jackson/dataformat/jackson-dataformat-cbor/2.18.2/jackson-dataformat-cbor-2.18.2.jar",
"name":"jackson-dataformat-cbor 2.18.2",
"src":"https://repo.maven.apache.org/maven2/com/fasterxml/jackson/dataformat/jackson-dataformat-cbor/2.18.2/jackson-dataformat-cbor-2.18.2-sources.jar"
},
{
"jar":"https://repo.maven.apache.org/maven2/com/fasterxml/jackson/core/jackson-databind/2.18.2/jackson-databind-2.18.2.jar",
"name":"jackson-databind 2.18.2",
"src":"https://repo.maven.apache.org/maven2/com/fasterxml/jackson/core/jackson-databind/2.18.2/jackson-databind-2.18.2-sources.jar"
"jar":"https://repo.maven.apache.org/maven2/org/apache/tomcat/tomcat-annotations-api/10.1.18/tomcat-annotations-api-10.1.18.jar",
"name":"tomcat-annotations-api 10.1.18",
"src":"https://repo.maven.apache.org/maven2/org/apache/tomcat/tomcat-annotations-api/10.1.18/tomcat-annotations-api-10.1.18-sources.jar"
},
{
"jar":"https://repo.maven.apache.org/maven2/com/google/code/findbugs/jsr305/3.0.2/jsr305-3.0.2.jar",
@ -96,151 +86,6 @@
"name":"libphonenumber 8.13.25",
"src":"https://repo.maven.apache.org/maven2/com/googlecode/libphonenumber/libphonenumber/8.13.25/libphonenumber-8.13.25-sources.jar"
},
{
"jar":"https://repo.maven.apache.org/maven2/com/webauthn4j/webauthn4j-core/0.28.6.RELEASE/webauthn4j-core-0.28.6.RELEASE.jar",
"name":"webauthn4j-core 0.28.6.RELEASE",
"src":"https://repo.maven.apache.org/maven2/com/webauthn4j/webauthn4j-core/0.28.6.RELEASE/webauthn4j-core-0.28.6.RELEASE-sources.jar"
},
{
"jar":"https://build.shibboleth.net/nexus/content/repositories/releases/org/opensaml/opensaml-core/4.3.1/opensaml-core-4.3.1.jar",
"name":"opensaml-core 4.3.1",
"src":"https://build.shibboleth.net/nexus/content/repositories/releases/org/opensaml/opensaml-core/4.3.1/opensaml-core-4.3.1-sources.jar"
},
{
"jar":"https://build.shibboleth.net/nexus/content/repositories/releases/net/shibboleth/utilities/java-support/8.4.1/java-support-8.4.1.jar",
"name":"java-support 8.4.1",
"src":"https://build.shibboleth.net/nexus/content/repositories/releases/net/shibboleth/utilities/java-support/8.4.1/java-support-8.4.1-sources.jar"
},
{
"jar":"https://repo.maven.apache.org/maven2/com/google/guava/guava/31.1-jre/guava-31.1-jre.jar",
"name":"guava 31.1-jre",
"src":"https://repo.maven.apache.org/maven2/com/google/guava/guava/31.1-jre/guava-31.1-jre-sources.jar"
},
{
"jar":"https://repo.maven.apache.org/maven2/com/google/guava/failureaccess/1.0.1/failureaccess-1.0.1.jar",
"name":"failureaccess 1.0.1",
"src":"https://repo.maven.apache.org/maven2/com/google/guava/failureaccess/1.0.1/failureaccess-1.0.1-sources.jar"
},
{
"jar":"https://repo.maven.apache.org/maven2/com/google/guava/listenablefuture/9999.0-empty-to-avoid-conflict-with-guava/listenablefuture-9999.0-empty-to-avoid-conflict-with-guava.jar",
"name":"listenablefuture 9999.0-empty-to-avoid-conflict-with-guava",
"src":"https://repo.maven.apache.org/maven2/com/google/guava/listenablefuture/9999.0-empty-to-avoid-conflict-with-guava/listenablefuture-9999.0-empty-to-avoid-conflict-with-guava-sources.jar"
},
{
"jar":"https://repo.maven.apache.org/maven2/com/google/j2objc/j2objc-annotations/1.3/j2objc-annotations-1.3.jar",
"name":"j2objc-annotations 1.3",
"src":"https://repo.maven.apache.org/maven2/com/google/j2objc/j2objc-annotations/1.3/j2objc-annotations-1.3-sources.jar"
},
{
"jar":"https://repo.maven.apache.org/maven2/io/dropwizard/metrics/metrics-core/4.2.25/metrics-core-4.2.25.jar",
"name":"metrics-core 4.2.25",
"src":"https://repo.maven.apache.org/maven2/io/dropwizard/metrics/metrics-core/4.2.25/metrics-core-4.2.25-sources.jar"
},
{
"jar":"https://build.shibboleth.net/nexus/content/repositories/releases/org/opensaml/opensaml-saml-impl/4.3.1/opensaml-saml-impl-4.3.1.jar",
"name":"opensaml-saml-impl 4.3.1",
"src":"https://build.shibboleth.net/nexus/content/repositories/releases/org/opensaml/opensaml-saml-impl/4.3.1/opensaml-saml-impl-4.3.1-sources.jar"
},
{
"jar":"https://build.shibboleth.net/nexus/content/repositories/releases/org/opensaml/opensaml-xmlsec-impl/4.3.1/opensaml-xmlsec-impl-4.3.1.jar",
"name":"opensaml-xmlsec-impl 4.3.1",
"src":"https://build.shibboleth.net/nexus/content/repositories/releases/org/opensaml/opensaml-xmlsec-impl/4.3.1/opensaml-xmlsec-impl-4.3.1-sources.jar"
},
{
"jar":"https://build.shibboleth.net/nexus/content/repositories/releases/org/opensaml/opensaml-security-impl/4.3.1/opensaml-security-impl-4.3.1.jar",
"name":"opensaml-security-impl 4.3.1",
"src":"https://build.shibboleth.net/nexus/content/repositories/releases/org/opensaml/opensaml-security-impl/4.3.1/opensaml-security-impl-4.3.1-sources.jar"
},
{
"jar":"https://build.shibboleth.net/nexus/content/repositories/releases/org/opensaml/opensaml-security-api/4.3.1/opensaml-security-api-4.3.1.jar",
"name":"opensaml-security-api 4.3.1",
"src":"https://build.shibboleth.net/nexus/content/repositories/releases/org/opensaml/opensaml-security-api/4.3.1/opensaml-security-api-4.3.1-sources.jar"
},
{
"jar":"https://build.shibboleth.net/nexus/content/repositories/releases/org/opensaml/opensaml-messaging-api/4.3.1/opensaml-messaging-api-4.3.1.jar",
"name":"opensaml-messaging-api 4.3.1",
"src":"https://build.shibboleth.net/nexus/content/repositories/releases/org/opensaml/opensaml-messaging-api/4.3.1/opensaml-messaging-api-4.3.1-sources.jar"
},
{
"jar":"https://repo.maven.apache.org/maven2/org/apache/httpcomponents/httpclient/4.5.14/httpclient-4.5.14.jar",
"name":"httpclient 4.5.14",
"src":"https://repo.maven.apache.org/maven2/org/apache/httpcomponents/httpclient/4.5.14/httpclient-4.5.14-sources.jar"
},
{
"jar":"https://repo.maven.apache.org/maven2/org/apache/httpcomponents/httpcore/4.4.16/httpcore-4.4.16.jar",
"name":"httpcore 4.4.16",
"src":"https://repo.maven.apache.org/maven2/org/apache/httpcomponents/httpcore/4.4.16/httpcore-4.4.16-sources.jar"
},
{
"jar":"https://repo.maven.apache.org/maven2/org/cryptacular/cryptacular/1.2.5/cryptacular-1.2.5.jar",
"name":"cryptacular 1.2.5",
"src":"https://repo.maven.apache.org/maven2/org/cryptacular/cryptacular/1.2.5/cryptacular-1.2.5-sources.jar"
},
{
"jar":"https://repo.maven.apache.org/maven2/org/bouncycastle/bcprov-jdk18on/1.72/bcprov-jdk18on-1.72.jar",
"name":"bcprov-jdk18on 1.72",
"src":"https://repo.maven.apache.org/maven2/org/bouncycastle/bcprov-jdk18on/1.72/bcprov-jdk18on-1.72-sources.jar"
},
{
"jar":"https://repo.maven.apache.org/maven2/org/bouncycastle/bcpkix-jdk18on/1.72/bcpkix-jdk18on-1.72.jar",
"name":"bcpkix-jdk18on 1.72",
"src":"https://repo.maven.apache.org/maven2/org/bouncycastle/bcpkix-jdk18on/1.72/bcpkix-jdk18on-1.72-sources.jar"
},
{
"jar":"https://repo.maven.apache.org/maven2/org/bouncycastle/bcutil-jdk18on/1.72/bcutil-jdk18on-1.72.jar",
"name":"bcutil-jdk18on 1.72",
"src":"https://repo.maven.apache.org/maven2/org/bouncycastle/bcutil-jdk18on/1.72/bcutil-jdk18on-1.72-sources.jar"
},
{
"jar":"https://build.shibboleth.net/nexus/content/repositories/releases/org/opensaml/opensaml-xmlsec-api/4.3.1/opensaml-xmlsec-api-4.3.1.jar",
"name":"opensaml-xmlsec-api 4.3.1",
"src":"https://build.shibboleth.net/nexus/content/repositories/releases/org/opensaml/opensaml-xmlsec-api/4.3.1/opensaml-xmlsec-api-4.3.1-sources.jar"
},
{
"jar":"https://repo.maven.apache.org/maven2/org/apache/santuario/xmlsec/2.3.4/xmlsec-2.3.4.jar",
"name":"xmlsec 2.3.4",
"src":"https://repo.maven.apache.org/maven2/org/apache/santuario/xmlsec/2.3.4/xmlsec-2.3.4-sources.jar"
},
{
"jar":"https://build.shibboleth.net/nexus/content/repositories/releases/org/opensaml/opensaml-saml-api/4.3.1/opensaml-saml-api-4.3.1.jar",
"name":"opensaml-saml-api 4.3.1",
"src":"https://build.shibboleth.net/nexus/content/repositories/releases/org/opensaml/opensaml-saml-api/4.3.1/opensaml-saml-api-4.3.1-sources.jar"
},
{
"jar":"https://build.shibboleth.net/nexus/content/repositories/releases/org/opensaml/opensaml-profile-api/4.3.1/opensaml-profile-api-4.3.1.jar",
"name":"opensaml-profile-api 4.3.1",
"src":"https://build.shibboleth.net/nexus/content/repositories/releases/org/opensaml/opensaml-profile-api/4.3.1/opensaml-profile-api-4.3.1-sources.jar"
},
{
"jar":"https://build.shibboleth.net/nexus/content/repositories/releases/org/opensaml/opensaml-soap-api/4.3.1/opensaml-soap-api-4.3.1.jar",
"name":"opensaml-soap-api 4.3.1",
"src":"https://build.shibboleth.net/nexus/content/repositories/releases/org/opensaml/opensaml-soap-api/4.3.1/opensaml-soap-api-4.3.1-sources.jar"
},
{
"jar":"https://build.shibboleth.net/nexus/content/repositories/releases/org/opensaml/opensaml-soap-impl/4.3.1/opensaml-soap-impl-4.3.1.jar",
"name":"opensaml-soap-impl 4.3.1",
"src":"https://build.shibboleth.net/nexus/content/repositories/releases/org/opensaml/opensaml-soap-impl/4.3.1/opensaml-soap-impl-4.3.1-sources.jar"
},
{
"jar":"https://build.shibboleth.net/nexus/content/repositories/releases/org/opensaml/opensaml-storage-api/4.3.1/opensaml-storage-api-4.3.1.jar",
"name":"opensaml-storage-api 4.3.1",
"src":"https://build.shibboleth.net/nexus/content/repositories/releases/org/opensaml/opensaml-storage-api/4.3.1/opensaml-storage-api-4.3.1-sources.jar"
},
{
"jar":"https://repo.maven.apache.org/maven2/org/apache/velocity/velocity-engine-core/2.3/velocity-engine-core-2.3.jar",
"name":"velocity-engine-core 2.3",
"src":"https://repo.maven.apache.org/maven2/org/apache/velocity/velocity-engine-core/2.3/velocity-engine-core-2.3-sources.jar"
},
{
"jar":"https://repo.maven.apache.org/maven2/org/apache/commons/commons-lang3/3.11/commons-lang3-3.11.jar",
"name":"commons-lang3 3.11",
"src":"https://repo.maven.apache.org/maven2/org/apache/commons/commons-lang3/3.11/commons-lang3-3.11-sources.jar"
},
{
"jar":"https://build.shibboleth.net/nexus/content/repositories/releases/org/opensaml/opensaml-profile-impl/4.3.1/opensaml-profile-impl-4.3.1.jar",
"name":"opensaml-profile-impl 4.3.1",
"src":"https://build.shibboleth.net/nexus/content/repositories/releases/org/opensaml/opensaml-profile-impl/4.3.1/opensaml-profile-impl-4.3.1-sources.jar"
},
{
"jar":"https://repo.maven.apache.org/maven2/ch/qos/logback/logback-core/1.5.18/logback-core-1.5.18.jar",
"name":"logback-core 1.5.18",
@ -251,11 +96,6 @@
"name":"logback-classic 1.5.18",
"src":"https://repo.maven.apache.org/maven2/ch/qos/logback/logback-classic/1.5.18/logback-classic-1.5.18-sources.jar"
},
{
"jar":"https://repo.maven.apache.org/maven2/org/aspectj/aspectjrt/1.9.24/aspectjrt-1.9.24.jar",
"name":"aspectjrt 1.9.24",
"src":"https://repo.maven.apache.org/maven2/org/aspectj/aspectjrt/1.9.24/aspectjrt-1.9.24-sources.jar"
},
{
"jar":"https://repo.maven.apache.org/maven2/io/opentelemetry/opentelemetry-api/1.51.0/opentelemetry-api-1.51.0.jar",
"name":"opentelemetry-api 1.51.0",

Binary file not shown.

BIN
jar/core-6.0.19.jar Normal file

Binary file not shown.

View File

@ -1 +0,0 @@
node_modules

View File

@ -1,143 +0,0 @@
/*
* Copyright (c) 2023, VRAI Labs and/or its affiliates. All rights reserved.
*
* This software is licensed under the Apache License, Version 2.0 (the
* "License") as published by the Apache Software Foundation.
*
* You may not use this file except in compliance with the License. You may
* obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
const libphonenumber = require('libphonenumber-js/max');
// Update the following credentials before running the script
const DB_HOST = "";
const DB_USER = "";
const DB_PASSWORD = "";
const DB_NAME = "";
const CLIENT = ""; // Use "pg" for PostgreSQL and "mysql2" for MySQL DB
const MIN_POOL_SIZE = 0;
const MAX_POOL_SIZE = 5;
const QUERY_TIMEOUT = 60000;
if (!DB_HOST || !CLIENT) {
console.error('Please update the DB_HOST, DB_USER, DB_PASSWORD, DB_DATABASE and CLIENT variables before running the script.');
return;
}
const knex = require('knex')({
client: CLIENT,
connection: {
host: DB_HOST,
user: DB_USER,
password: DB_PASSWORD,
database: DB_NAME,
},
pool: {min: MIN_POOL_SIZE, max: MAX_POOL_SIZE}
});
// Builds (without awaiting) the UPDATE statement(s) that write one row's
// normalised phone number back to the database.
//   table                 - 'passwordless_devices' or 'passwordless_users'; anything else throws.
//   entry                 - the row previously read from that table (supplies the key columns).
//   normalizedPhoneNumber - the E.164 value to store.
// Every statement is capped at QUERY_TIMEOUT ms and cancelled server-side on expiry.
// NOTE(review): `table` is interpolated into the SQL text, which is safe only because
// callers pass values from the fixed list above — keep it that way.
function getUpdatePromise(table, entry, normalizedPhoneNumber) {
if (table === 'passwordless_devices') {
// Devices are keyed per app + tenant by the device id hash.
return knex.raw(`UPDATE ${table}
SET phone_number = ?
WHERE app_id = ?
AND tenant_id = ?
AND device_id_hash = ?`, [normalizedPhoneNumber, entry.app_id, entry.tenant_id, entry.device_id_hash]).timeout(QUERY_TIMEOUT, {cancel: true});
} else if (table === 'passwordless_users') {
// Since passwordless_users and passwordless_user_to_tenant are consistent. We can update both tables at the same time. For consistency, we will use a transaction.
return knex.transaction(async trx => {
await trx.raw(`UPDATE passwordless_users
SET phone_number = ?
WHERE app_id = ?
AND user_id = ?`, [normalizedPhoneNumber, entry.app_id, entry.user_id]).timeout(QUERY_TIMEOUT, {cancel: true});
await trx.raw(`UPDATE passwordless_user_to_tenant
SET phone_number = ?
WHERE app_id = ?
AND user_id = ?`, [normalizedPhoneNumber, entry.app_id, entry.user_id]).timeout(QUERY_TIMEOUT, {cancel: true});
});
} else {
// Guard against future callers passing an unexpected table name.
throw new Error(`Invalid table name: ${table}`);
}
}
// Returns the E.164-normalised form of `phoneNumber`, or null when the value
// cannot be parsed as a phone number (the caller then leaves the row untouched).
function getNormalizedPhoneNumber(phoneNumber) {
    let normalized = null;
    try {
        const parsed = libphonenumber.parsePhoneNumber(phoneNumber, {extract: false});
        normalized = parsed.format('E.164');
    } catch (ignored) {
        // Unparseable input: signal "no change" rather than aborting the batch.
    }
    return normalized;
}
// Walks `table` in pages of 1000 rows and rewrites every phone_number whose
// E.164 normalisation differs from the stored value. Progress is logged per batch.
// Throws (after logging a retry hint) if any query fails.
// NOTE(review): rows updated in one batch can shift LIMIT/OFFSET pagination for the
// next one; assumes normalisation does not move rows in/out of the WHERE clause — confirm.
async function updatePhoneNumbers(table) {
const batchSize = 1000;
let offset = 0;
let totalUpdatedRows = 0;
try {
// Row count is only used for progress reporting below.
let totalRows = await knex.raw(`SELECT COUNT(*) as count
FROM ${table}
WHERE phone_number is NOT NULL`);
totalRows = totalRows.rows ? totalRows.rows[0].count : totalRows[0][0].count;
while (true) {
const entries = await knex.raw(`SELECT *
FROM ${table}
WHERE phone_number is NOT NULL LIMIT ${batchSize}
OFFSET ${offset}`);
// In PostgreSQL, all rows are returned in `entries.rows`, whereas in MySQL, they can be found in `entries[0]`.
const rows = entries.rows ? entries.rows : entries[0];
const batchUpdates = [];
for (const entry of rows) {
const currentPhoneNumber = entry.phone_number;
const normalizedPhoneNumber = getNormalizedPhoneNumber(currentPhoneNumber);
// Only touch rows whose stored value actually changes; unparseable numbers are skipped.
if (normalizedPhoneNumber && normalizedPhoneNumber !== currentPhoneNumber) {
const updatePromise = getUpdatePromise(table, entry, normalizedPhoneNumber);
batchUpdates.push(updatePromise);
}
}
// Fire the whole batch concurrently and wait for it before paging forward.
await Promise.all(batchUpdates);
offset += rows.length;
totalUpdatedRows += batchUpdates.length;
console.log(`Processed ${offset} out of ${totalRows} rows in table ${table}; ${totalUpdatedRows} rows updated`);
// A short page means we have reached the end of the table.
if (rows.length < batchSize) {
break;
}
}
} catch (error) {
console.error(`Error normalising phone numbers for table ${table}: Retry running the script and if the error persists after retrying then create an issue at https://github.com/supertokens/supertokens-core/issues`);
throw error;
}
}
// Entry point: normalises phone numbers table-by-table, logging a blank
// separator between tables, and always tears down the knex pool at the end.
async function runScript() {
    const tablesToMigrate = ['passwordless_users', 'passwordless_devices'];
    try {
        for (let i = 0; i < tablesToMigrate.length; i++) {
            await updatePhoneNumbers(tablesToMigrate[i]);
            console.log(`\n\n\n`);
        }
        console.log('Finished normalising phone numbers!');
    } catch (error) {
        // Errors were already given a retry hint in updatePhoneNumbers; dump and exit.
        console.error(error);
    } finally {
        knex.destroy();
    }
}
runScript();

View File

@ -1,502 +0,0 @@
{
"name": "to_version_7_1",
"version": "1.0.0",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "to_version_7_1",
"version": "1.0.0",
"license": "Apache-2.0",
"dependencies": {
"knex": "^3.0.1",
"libphonenumber-js": "^1.10.49",
"mysql2": "^3.6.3",
"pg": "^8.11.3"
},
"engines": {
"node": ">=16.0.0"
}
},
"node_modules/buffer-writer": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/buffer-writer/-/buffer-writer-2.0.0.tgz",
"integrity": "sha512-a7ZpuTZU1TRtnwyCNW3I5dc0wWNC3VR9S++Ewyk2HHZdrO3CQJqSpd+95Us590V6AL7JqUAH2IwZ/398PmNFgw==",
"engines": {
"node": ">=4"
}
},
"node_modules/colorette": {
"version": "2.0.19",
"resolved": "https://registry.npmjs.org/colorette/-/colorette-2.0.19.tgz",
"integrity": "sha512-3tlv/dIP7FWvj3BsbHrGLJ6l/oKh1O3TcgBqMn+yyCagOxc23fyzDS6HypQbgxWbkpDnf52p1LuR4eWDQ/K9WQ=="
},
"node_modules/commander": {
"version": "10.0.1",
"resolved": "https://registry.npmjs.org/commander/-/commander-10.0.1.tgz",
"integrity": "sha512-y4Mg2tXshplEbSGzx7amzPwKKOCGuoSRP/CjEdwwk0FOGlUbq6lKuoyDZTNZkmxHdJtp54hdfY/JUrdL7Xfdug==",
"engines": {
"node": ">=14"
}
},
"node_modules/debug": {
"version": "4.3.4",
"resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz",
"integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==",
"dependencies": {
"ms": "2.1.2"
},
"engines": {
"node": ">=6.0"
},
"peerDependenciesMeta": {
"supports-color": {
"optional": true
}
}
},
"node_modules/denque": {
"version": "2.1.0",
"resolved": "https://registry.npmjs.org/denque/-/denque-2.1.0.tgz",
"integrity": "sha512-HVQE3AAb/pxF8fQAoiqpvg9i3evqug3hoiwakOyZAwJm+6vZehbkYXZ0l4JxS+I3QxM97v5aaRNhj8v5oBhekw==",
"engines": {
"node": ">=0.10"
}
},
"node_modules/escalade": {
"version": "3.1.1",
"resolved": "https://registry.npmjs.org/escalade/-/escalade-3.1.1.tgz",
"integrity": "sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw==",
"engines": {
"node": ">=6"
}
},
"node_modules/esm": {
"version": "3.2.25",
"resolved": "https://registry.npmjs.org/esm/-/esm-3.2.25.tgz",
"integrity": "sha512-U1suiZ2oDVWv4zPO56S0NcR5QriEahGtdN2OR6FiOG4WJvcjBVFB0qI4+eKoWFH483PKGuLuu6V8Z4T5g63UVA==",
"engines": {
"node": ">=6"
}
},
"node_modules/function-bind": {
"version": "1.1.2",
"resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz",
"integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==",
"funding": {
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/generate-function": {
"version": "2.3.1",
"resolved": "https://registry.npmjs.org/generate-function/-/generate-function-2.3.1.tgz",
"integrity": "sha512-eeB5GfMNeevm/GRYq20ShmsaGcmI81kIX2K9XQx5miC8KdHaC6Jm0qQ8ZNeGOi7wYB8OsdxKs+Y2oVuTFuVwKQ==",
"dependencies": {
"is-property": "^1.0.2"
}
},
"node_modules/get-package-type": {
"version": "0.1.0",
"resolved": "https://registry.npmjs.org/get-package-type/-/get-package-type-0.1.0.tgz",
"integrity": "sha512-pjzuKtY64GYfWizNAJ0fr9VqttZkNiK2iS430LtIHzjBEr6bX8Am2zm4sW4Ro5wjWW5cAlRL1qAMTcXbjNAO2Q==",
"engines": {
"node": ">=8.0.0"
}
},
"node_modules/getopts": {
"version": "2.3.0",
"resolved": "https://registry.npmjs.org/getopts/-/getopts-2.3.0.tgz",
"integrity": "sha512-5eDf9fuSXwxBL6q5HX+dhDj+dslFGWzU5thZ9kNKUkcPtaPdatmUFKwHFrLb/uf/WpA4BHET+AX3Scl56cAjpA=="
},
"node_modules/hasown": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.0.tgz",
"integrity": "sha512-vUptKVTpIJhcczKBbgnS+RtcuYMB8+oNzPK2/Hp3hanz8JmpATdmmgLgSaadVREkDm+e2giHwY3ZRkyjSIDDFA==",
"dependencies": {
"function-bind": "^1.1.2"
},
"engines": {
"node": ">= 0.4"
}
},
"node_modules/iconv-lite": {
"version": "0.6.3",
"resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.6.3.tgz",
"integrity": "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==",
"dependencies": {
"safer-buffer": ">= 2.1.2 < 3.0.0"
},
"engines": {
"node": ">=0.10.0"
}
},
"node_modules/interpret": {
"version": "2.2.0",
"resolved": "https://registry.npmjs.org/interpret/-/interpret-2.2.0.tgz",
"integrity": "sha512-Ju0Bz/cEia55xDwUWEa8+olFpCiQoypjnQySseKtmjNrnps3P+xfpUmGr90T7yjlVJmOtybRvPXhKMbHr+fWnw==",
"engines": {
"node": ">= 0.10"
}
},
"node_modules/is-core-module": {
"version": "2.13.1",
"resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.13.1.tgz",
"integrity": "sha512-hHrIjvZsftOsvKSn2TRYl63zvxsgE0K+0mYMoH6gD4omR5IWB2KynivBQczo3+wF1cCkjzvptnI9Q0sPU66ilw==",
"dependencies": {
"hasown": "^2.0.0"
},
"funding": {
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/is-property": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/is-property/-/is-property-1.0.2.tgz",
"integrity": "sha512-Ks/IoX00TtClbGQr4TWXemAnktAQvYB7HzcCxDGqEZU6oCmb2INHuOoKxbtR+HFkmYWBKv/dOZtGRiAjDhj92g=="
},
"node_modules/knex": {
"version": "3.0.1",
"resolved": "https://registry.npmjs.org/knex/-/knex-3.0.1.tgz",
"integrity": "sha512-ruASxC6xPyDklRdrcDy6a9iqK+R9cGK214aiQa+D9gX2ZnHZKv6o6JC9ZfgxILxVAul4bZ13c3tgOAHSuQ7/9g==",
"dependencies": {
"colorette": "2.0.19",
"commander": "^10.0.0",
"debug": "4.3.4",
"escalade": "^3.1.1",
"esm": "^3.2.25",
"get-package-type": "^0.1.0",
"getopts": "2.3.0",
"interpret": "^2.2.0",
"lodash": "^4.17.21",
"pg-connection-string": "2.6.1",
"rechoir": "^0.8.0",
"resolve-from": "^5.0.0",
"tarn": "^3.0.2",
"tildify": "2.0.0"
},
"bin": {
"knex": "bin/cli.js"
},
"engines": {
"node": ">=16"
},
"peerDependenciesMeta": {
"better-sqlite3": {
"optional": true
},
"mysql": {
"optional": true
},
"mysql2": {
"optional": true
},
"pg": {
"optional": true
},
"pg-native": {
"optional": true
},
"sqlite3": {
"optional": true
},
"tedious": {
"optional": true
}
}
},
"node_modules/libphonenumber-js": {
"version": "1.10.49",
"resolved": "https://registry.npmjs.org/libphonenumber-js/-/libphonenumber-js-1.10.49.tgz",
"integrity": "sha512-gvLtyC3tIuqfPzjvYLH9BmVdqzGDiSi4VjtWe2fAgSdBf0yt8yPmbNnRIHNbR5IdtVkm0ayGuzwQKTWmU0hdjQ=="
},
"node_modules/lodash": {
"version": "4.17.21",
"resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz",
"integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg=="
},
"node_modules/long": {
"version": "5.2.3",
"resolved": "https://registry.npmjs.org/long/-/long-5.2.3.tgz",
"integrity": "sha512-lcHwpNoggQTObv5apGNCTdJrO69eHOZMi4BNC+rTLER8iHAqGrUVeLh/irVIM7zTw2bOXA8T6uNPeujwOLg/2Q=="
},
"node_modules/lru-cache": {
"version": "8.0.5",
"resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-8.0.5.tgz",
"integrity": "sha512-MhWWlVnuab1RG5/zMRRcVGXZLCXrZTgfwMikgzCegsPnG62yDQo5JnqKkrK4jO5iKqDAZGItAqN5CtKBCBWRUA==",
"engines": {
"node": ">=16.14"
}
},
"node_modules/ms": {
"version": "2.1.2",
"resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz",
"integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w=="
},
"node_modules/mysql2": {
"version": "3.6.3",
"resolved": "https://registry.npmjs.org/mysql2/-/mysql2-3.6.3.tgz",
"integrity": "sha512-qYd/1CDuW1KYZjD4tzg2O8YS3X/UWuGH8ZMHyMeggMTXL3yOdMisbwZ5SNkHzDGlZXKYLAvV8tMrEH+NUMz3fw==",
"dependencies": {
"denque": "^2.1.0",
"generate-function": "^2.3.1",
"iconv-lite": "^0.6.3",
"long": "^5.2.1",
"lru-cache": "^8.0.0",
"named-placeholders": "^1.1.3",
"seq-queue": "^0.0.5",
"sqlstring": "^2.3.2"
},
"engines": {
"node": ">= 8.0"
}
},
"node_modules/mysql2/node_modules/sqlstring": {
"version": "2.3.3",
"resolved": "https://registry.npmjs.org/sqlstring/-/sqlstring-2.3.3.tgz",
"integrity": "sha512-qC9iz2FlN7DQl3+wjwn3802RTyjCx7sDvfQEXchwa6CWOx07/WVfh91gBmQ9fahw8snwGEWU3xGzOt4tFyHLxg==",
"engines": {
"node": ">= 0.6"
}
},
"node_modules/named-placeholders": {
"version": "1.1.3",
"resolved": "https://registry.npmjs.org/named-placeholders/-/named-placeholders-1.1.3.tgz",
"integrity": "sha512-eLoBxg6wE/rZkJPhU/xRX1WTpkFEwDJEN96oxFrTsqBdbT5ec295Q+CoHrL9IT0DipqKhmGcaZmwOt8OON5x1w==",
"dependencies": {
"lru-cache": "^7.14.1"
},
"engines": {
"node": ">=12.0.0"
}
},
"node_modules/named-placeholders/node_modules/lru-cache": {
"version": "7.18.3",
"resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-7.18.3.tgz",
"integrity": "sha512-jumlc0BIUrS3qJGgIkWZsyfAM7NCWiBcCDhnd+3NNM5KbBmLTgHVfWBcg6W+rLUsIpzpERPsvwUP7CckAQSOoA==",
"engines": {
"node": ">=12"
}
},
"node_modules/packet-reader": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/packet-reader/-/packet-reader-1.0.0.tgz",
"integrity": "sha512-HAKu/fG3HpHFO0AA8WE8q2g+gBJaZ9MG7fcKk+IJPLTGAD6Psw4443l+9DGRbOIh3/aXr7Phy0TjilYivJo5XQ=="
},
"node_modules/path-parse": {
"version": "1.0.7",
"resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz",
"integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw=="
},
"node_modules/pg": {
"version": "8.11.3",
"resolved": "https://registry.npmjs.org/pg/-/pg-8.11.3.tgz",
"integrity": "sha512-+9iuvG8QfaaUrrph+kpF24cXkH1YOOUeArRNYIxq1viYHZagBxrTno7cecY1Fa44tJeZvaoG+Djpkc3JwehN5g==",
"dependencies": {
"buffer-writer": "2.0.0",
"packet-reader": "1.0.0",
"pg-connection-string": "^2.6.2",
"pg-pool": "^3.6.1",
"pg-protocol": "^1.6.0",
"pg-types": "^2.1.0",
"pgpass": "1.x"
},
"engines": {
"node": ">= 8.0.0"
},
"optionalDependencies": {
"pg-cloudflare": "^1.1.1"
},
"peerDependencies": {
"pg-native": ">=3.0.1"
},
"peerDependenciesMeta": {
"pg-native": {
"optional": true
}
}
},
"node_modules/pg-cloudflare": {
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/pg-cloudflare/-/pg-cloudflare-1.1.1.tgz",
"integrity": "sha512-xWPagP/4B6BgFO+EKz3JONXv3YDgvkbVrGw2mTo3D6tVDQRh1e7cqVGvyR3BE+eQgAvx1XhW/iEASj4/jCWl3Q==",
"optional": true
},
"node_modules/pg-connection-string": {
"version": "2.6.1",
"resolved": "https://registry.npmjs.org/pg-connection-string/-/pg-connection-string-2.6.1.tgz",
"integrity": "sha512-w6ZzNu6oMmIzEAYVw+RLK0+nqHPt8K3ZnknKi+g48Ak2pr3dtljJW3o+D/n2zzCG07Zoe9VOX3aiKpj+BN0pjg=="
},
"node_modules/pg-int8": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/pg-int8/-/pg-int8-1.0.1.tgz",
"integrity": "sha512-WCtabS6t3c8SkpDBUlb1kjOs7l66xsGdKpIPZsg4wR+B3+u9UAum2odSsF9tnvxg80h4ZxLWMy4pRjOsFIqQpw==",
"engines": {
"node": ">=4.0.0"
}
},
"node_modules/pg-pool": {
"version": "3.6.1",
"resolved": "https://registry.npmjs.org/pg-pool/-/pg-pool-3.6.1.tgz",
"integrity": "sha512-jizsIzhkIitxCGfPRzJn1ZdcosIt3pz9Sh3V01fm1vZnbnCMgmGl5wvGGdNN2EL9Rmb0EcFoCkixH4Pu+sP9Og==",
"peerDependencies": {
"pg": ">=8.0"
}
},
"node_modules/pg-protocol": {
"version": "1.6.0",
"resolved": "https://registry.npmjs.org/pg-protocol/-/pg-protocol-1.6.0.tgz",
"integrity": "sha512-M+PDm637OY5WM307051+bsDia5Xej6d9IR4GwJse1qA1DIhiKlksvrneZOYQq42OM+spubpcNYEo2FcKQrDk+Q=="
},
"node_modules/pg-types": {
"version": "2.2.0",
"resolved": "https://registry.npmjs.org/pg-types/-/pg-types-2.2.0.tgz",
"integrity": "sha512-qTAAlrEsl8s4OiEQY69wDvcMIdQN6wdz5ojQiOy6YRMuynxenON0O5oCpJI6lshc6scgAY8qvJ2On/p+CXY0GA==",
"dependencies": {
"pg-int8": "1.0.1",
"postgres-array": "~2.0.0",
"postgres-bytea": "~1.0.0",
"postgres-date": "~1.0.4",
"postgres-interval": "^1.1.0"
},
"engines": {
"node": ">=4"
}
},
"node_modules/pg/node_modules/pg-connection-string": {
"version": "2.6.2",
"resolved": "https://registry.npmjs.org/pg-connection-string/-/pg-connection-string-2.6.2.tgz",
"integrity": "sha512-ch6OwaeaPYcova4kKZ15sbJ2hKb/VP48ZD2gE7i1J+L4MspCtBMAx8nMgz7bksc7IojCIIWuEhHibSMFH8m8oA=="
},
"node_modules/pgpass": {
"version": "1.0.5",
"resolved": "https://registry.npmjs.org/pgpass/-/pgpass-1.0.5.tgz",
"integrity": "sha512-FdW9r/jQZhSeohs1Z3sI1yxFQNFvMcnmfuj4WBMUTxOrAyLMaTcE1aAMBiTlbMNaXvBCQuVi0R7hd8udDSP7ug==",
"dependencies": {
"split2": "^4.1.0"
}
},
"node_modules/postgres-array": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/postgres-array/-/postgres-array-2.0.0.tgz",
"integrity": "sha512-VpZrUqU5A69eQyW2c5CA1jtLecCsN2U/bD6VilrFDWq5+5UIEVO7nazS3TEcHf1zuPYO/sqGvUvW62g86RXZuA==",
"engines": {
"node": ">=4"
}
},
"node_modules/postgres-bytea": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/postgres-bytea/-/postgres-bytea-1.0.0.tgz",
"integrity": "sha512-xy3pmLuQqRBZBXDULy7KbaitYqLcmxigw14Q5sj8QBVLqEwXfeybIKVWiqAXTlcvdvb0+xkOtDbfQMOf4lST1w==",
"engines": {
"node": ">=0.10.0"
}
},
"node_modules/postgres-date": {
"version": "1.0.7",
"resolved": "https://registry.npmjs.org/postgres-date/-/postgres-date-1.0.7.tgz",
"integrity": "sha512-suDmjLVQg78nMK2UZ454hAG+OAW+HQPZ6n++TNDUX+L0+uUlLywnoxJKDou51Zm+zTCjrCl0Nq6J9C5hP9vK/Q==",
"engines": {
"node": ">=0.10.0"
}
},
"node_modules/postgres-interval": {
"version": "1.2.0",
"resolved": "https://registry.npmjs.org/postgres-interval/-/postgres-interval-1.2.0.tgz",
"integrity": "sha512-9ZhXKM/rw350N1ovuWHbGxnGh/SNJ4cnxHiM0rxE4VN41wsg8P8zWn9hv/buK00RP4WvlOyr/RBDiptyxVbkZQ==",
"dependencies": {
"xtend": "^4.0.0"
},
"engines": {
"node": ">=0.10.0"
}
},
"node_modules/rechoir": {
"version": "0.8.0",
"resolved": "https://registry.npmjs.org/rechoir/-/rechoir-0.8.0.tgz",
"integrity": "sha512-/vxpCXddiX8NGfGO/mTafwjq4aFa/71pvamip0++IQk3zG8cbCj0fifNPrjjF1XMXUne91jL9OoxmdykoEtifQ==",
"dependencies": {
"resolve": "^1.20.0"
},
"engines": {
"node": ">= 10.13.0"
}
},
"node_modules/resolve": {
"version": "1.22.8",
"resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.8.tgz",
"integrity": "sha512-oKWePCxqpd6FlLvGV1VU0x7bkPmmCNolxzjMf4NczoDnQcIWrAF+cPtZn5i6n+RfD2d9i0tzpKnG6Yk168yIyw==",
"dependencies": {
"is-core-module": "^2.13.0",
"path-parse": "^1.0.7",
"supports-preserve-symlinks-flag": "^1.0.0"
},
"bin": {
"resolve": "bin/resolve"
},
"funding": {
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/resolve-from": {
"version": "5.0.0",
"resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-5.0.0.tgz",
"integrity": "sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw==",
"engines": {
"node": ">=8"
}
},
"node_modules/safer-buffer": {
"version": "2.1.2",
"resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz",
"integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg=="
},
"node_modules/seq-queue": {
"version": "0.0.5",
"resolved": "https://registry.npmjs.org/seq-queue/-/seq-queue-0.0.5.tgz",
"integrity": "sha512-hr3Wtp/GZIc/6DAGPDcV4/9WoZhjrkXsi5B/07QgX8tsdc6ilr7BFM6PM6rbdAX1kFSDYeZGLipIZZKyQP0O5Q=="
},
"node_modules/split2": {
"version": "4.2.0",
"resolved": "https://registry.npmjs.org/split2/-/split2-4.2.0.tgz",
"integrity": "sha512-UcjcJOWknrNkF6PLX83qcHM6KHgVKNkV62Y8a5uYDVv9ydGQVwAHMKqHdJje1VTWpljG0WYpCDhrCdAOYH4TWg==",
"engines": {
"node": ">= 10.x"
}
},
"node_modules/supports-preserve-symlinks-flag": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz",
"integrity": "sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==",
"engines": {
"node": ">= 0.4"
},
"funding": {
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/tarn": {
"version": "3.0.2",
"resolved": "https://registry.npmjs.org/tarn/-/tarn-3.0.2.tgz",
"integrity": "sha512-51LAVKUSZSVfI05vjPESNc5vwqqZpbXCsU+/+wxlOrUjk2SnFTt97v9ZgQrD4YmxYW1Px6w2KjaDitCfkvgxMQ==",
"engines": {
"node": ">=8.0.0"
}
},
"node_modules/tildify": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/tildify/-/tildify-2.0.0.tgz",
"integrity": "sha512-Cc+OraorugtXNfs50hU9KS369rFXCfgGLpfCfvlc+Ud5u6VWmUQsOAa9HbTvheQdYnrdJqqv1e5oIqXppMYnSw==",
"engines": {
"node": ">=8"
}
},
"node_modules/xtend": {
"version": "4.0.2",
"resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.2.tgz",
"integrity": "sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==",
"engines": {
"node": ">=0.4"
}
}
}
}

View File

@ -1,21 +0,0 @@
{
"name": "to_version_7_0_12",
"version": "1.0.0",
"engines": {
"node": ">=16.0.0"
},
"description": "A migration script that normalizes phone numbers in the database",
"main": "index.js",
"scripts": {
"start": "node index.js"
},
"keywords": [],
"author": "",
"license": "Apache-2.0",
"dependencies": {
"knex": "^3.0.1",
"libphonenumber-js": "^1.10.49",
"mysql2": "^3.6.3",
"pg": "^8.11.3"
}
}

View File

@ -1,6 +1,6 @@
{
"_comment": "contains a list of plugin interfaces branch names that this core supports",
"versions": [
"8.3"
"3.0"
]
}

View File

@ -1,22 +1,17 @@
package io.supertokens;
import io.supertokens.pluginInterface.ActiveUsersSQLStorage;
import io.supertokens.pluginInterface.Storage;
import io.supertokens.pluginInterface.StorageUtils;
import io.supertokens.pluginInterface.exceptions.StorageQueryException;
import io.supertokens.pluginInterface.exceptions.StorageTransactionLogicException;
import io.supertokens.pluginInterface.multitenancy.AppIdentifier;
import io.supertokens.pluginInterface.multitenancy.AppIdentifierWithStorage;
import io.supertokens.pluginInterface.multitenancy.exceptions.TenantOrAppNotFoundException;
import io.supertokens.storageLayer.StorageLayer;
import org.jetbrains.annotations.TestOnly;
public class ActiveUsers {
public static void updateLastActive(AppIdentifier appIdentifier, Main main, String userId)
public static void updateLastActive(AppIdentifierWithStorage appIdentifierWithStorage, Main main, String userId)
throws TenantOrAppNotFoundException {
Storage storage = StorageLayer.getStorage(appIdentifier.getAsPublicTenantIdentifier(), main);
try {
StorageUtils.getActiveUsersStorage(storage).updateLastActive(appIdentifier, userId);
appIdentifierWithStorage.getActiveUsersStorage().updateLastActive(appIdentifierWithStorage, userId);
} catch (StorageQueryException ignored) {
}
}
@ -24,37 +19,22 @@ public class ActiveUsers {
@TestOnly
public static void updateLastActive(Main main, String userId) {
try {
ActiveUsers.updateLastActive(ResourceDistributor.getAppForTesting().toAppIdentifier(),
main, userId);
ActiveUsers.updateLastActive(new AppIdentifierWithStorage(null, null, StorageLayer.getStorage(main)), main,
userId);
} catch (TenantOrAppNotFoundException e) {
throw new IllegalStateException(e);
}
}
public static int countUsersActiveSince(Main main, AppIdentifier appIdentifier, long time)
public static int countUsersActiveSince(AppIdentifierWithStorage appIdentifierWithStorage, Main main, long time)
throws StorageQueryException, TenantOrAppNotFoundException {
Storage storage = StorageLayer.getStorage(appIdentifier.getAsPublicTenantIdentifier(), main);
return StorageUtils.getActiveUsersStorage(storage).countUsersActiveSince(appIdentifier, time);
}
public static void updateLastActiveAfterLinking(Main main, AppIdentifier appIdentifier, String primaryUserId,
String recipeUserId)
throws StorageQueryException, TenantOrAppNotFoundException, StorageTransactionLogicException {
ActiveUsersSQLStorage activeUsersStorage =
(ActiveUsersSQLStorage) StorageUtils.getActiveUsersStorage(
StorageLayer.getStorage(appIdentifier.getAsPublicTenantIdentifier(), main));
activeUsersStorage.startTransaction(con -> {
activeUsersStorage.deleteUserActive_Transaction(con, appIdentifier, recipeUserId);
return null;
});
updateLastActive(appIdentifier, main, primaryUserId);
return appIdentifierWithStorage.getActiveUsersStorage().countUsersActiveSince(appIdentifierWithStorage, time);
}
@TestOnly
public static int countUsersActiveSince(Main main, long time)
throws StorageQueryException, TenantOrAppNotFoundException {
return countUsersActiveSince(main, ResourceDistributor.getAppForTesting().toAppIdentifier(), time);
return countUsersActiveSince(new AppIdentifierWithStorage(null, null, StorageLayer.getStorage(main)), main,
time);
}
}

View File

@ -16,21 +16,23 @@
package io.supertokens;
import io.supertokens.pluginInterface.Storage;
import io.supertokens.pluginInterface.multitenancy.AppIdentifierWithStorage;
import io.supertokens.pluginInterface.useridmapping.UserIdMapping;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
public class StorageAndUserIdMapping {
public class AppIdentifierWithStorageAndUserIdMapping {
@Nullable
public final io.supertokens.pluginInterface.useridmapping.UserIdMapping userIdMapping;
@Nonnull
public final Storage storage;
public final AppIdentifierWithStorage appIdentifierWithStorage;
public StorageAndUserIdMapping(Storage storage, UserIdMapping userIdMapping) {
this.storage = storage;
public AppIdentifierWithStorageAndUserIdMapping(AppIdentifierWithStorage appIdentifierWithStorage, UserIdMapping userIdMapping) {
this.appIdentifierWithStorage = appIdentifierWithStorage;
this.userIdMapping = userIdMapping;
assert(this.appIdentifierWithStorage != null);
}
}

View File

@ -20,11 +20,6 @@ import io.supertokens.cliOptions.CLIOptions;
import io.supertokens.config.Config;
import io.supertokens.config.CoreConfig;
import io.supertokens.cronjobs.Cronjobs;
import io.supertokens.cronjobs.bulkimport.ProcessBulkImportUsers;
import io.supertokens.cronjobs.cleanupOAuthSessionsAndChallenges.CleanupOAuthSessionsAndChallenges;
import io.supertokens.cronjobs.deleteExpiredSAMLData.DeleteExpiredSAMLData;
import io.supertokens.cronjobs.cleanupWebauthnExpiredData.CleanUpWebauthNExpiredDataCron;
import io.supertokens.cronjobs.deadlocklogger.DeadlockLogger;
import io.supertokens.cronjobs.deleteExpiredAccessTokenSigningKeys.DeleteExpiredAccessTokenSigningKeys;
import io.supertokens.cronjobs.deleteExpiredDashboardSessions.DeleteExpiredDashboardSessions;
import io.supertokens.cronjobs.deleteExpiredEmailVerificationTokens.DeleteExpiredEmailVerificationTokens;
@ -44,7 +39,10 @@ import io.supertokens.pluginInterface.exceptions.DbInitException;
import io.supertokens.pluginInterface.exceptions.InvalidConfigException;
import io.supertokens.pluginInterface.exceptions.StorageQueryException;
import io.supertokens.pluginInterface.multitenancy.TenantIdentifier;
import io.supertokens.saml.SAMLBootstrap;
import io.supertokens.session.refreshToken.RefreshTokenKey;
import io.supertokens.signingkeys.AccessTokenSigningKey;
import io.supertokens.signingkeys.JWTSigningKey;
import io.supertokens.signingkeys.SigningKeys;
import io.supertokens.storageLayer.StorageLayer;
import io.supertokens.telemetry.TelemetryProvider;
import io.supertokens.version.Version;
@ -67,8 +65,6 @@ public class Main {
// this is a special variable that will be set to true by TestingProcessManager
public static boolean isTesting = false;
// this flag is used in ProcessBulkImportUsersCronJobTest to skip the user validation
public static boolean isTesting_skipBulkImportUserValidationInCronJob = false;
// this is a special variable that will be set to true by TestingProcessManager
public static boolean makeConsolePrintSilent = false;
@ -95,9 +91,6 @@ public class Main {
private boolean waitToEnableFeatureFlag = false;
private final Object waitToEnableFeatureFlagLock = new Object();
//setting to true by default
private final Boolean bulkMigrationCronEnabled = System.getenv("BULK_MIGRATION_CRON_ENABLED") == null || Boolean.parseBoolean(System.getenv("BULK_MIGRATION_CRON_ENABLED"));
private boolean forceInMemoryDB = false;
@ -124,8 +117,6 @@ public class Main {
CLIOptions.load(this, args);
init();
} catch (Exception e) {
Logging.error(this, TenantIdentifier.BASE_TENANT, "What caused the crash: " + e.getMessage(), true,
e);
ProcessState.getInstance(this).addState(ProcessState.PROCESS_STATE.INIT_FAILURE, e);
throw e;
}
@ -160,12 +151,21 @@ public class Main {
// Handle kill signal gracefully
handleKillSignalForWhenItHappens();
StorageLayer.loadStorageUCL(CLIOptions.get(this).getInstallationPath() + "plugin/");
// loading configs for core from config.yaml file.
try {
Config.loadBaseConfig(this);
Logging.info(this, TenantIdentifier.BASE_TENANT, "Completed config.yaml loading.", true);
} catch (InvalidConfigException e) {
throw new QuitProgramException(e);
}
Logging.info(this, TenantIdentifier.BASE_TENANT, "Completed config.yaml loading.", true);
TelemetryProvider.initialize(this);
// loading storage layer
try {
StorageLayer.initPrimary(this, CLIOptions.get(this).getInstallationPath() + "plugin/",
Config.getBaseConfigAsJsonObject(this));
} catch (InvalidConfigException e) {
throw new QuitProgramException(e);
}
@ -173,21 +173,9 @@ public class Main {
// loading version file
Version.loadVersion(this, CLIOptions.get(this).getInstallationPath() + "version.yaml");
TelemetryProvider.initialize(this);
// loading storage layer
try {
StorageLayer.initPrimary(this, Config.getBaseConfigAsJsonObject(this));
} catch (InvalidConfigException e) {
throw new QuitProgramException(e);
}
// init file logging
Logging.initFileLogging(this);
// Required for SAML related stuff
SAMLBootstrap.initialize();
// initialise cron job handler
Cronjobs.init(this);
@ -202,7 +190,7 @@ public class Main {
}
}
try {
StorageLayer.getBaseStorage(this).initStorage(true, List.of());
StorageLayer.getBaseStorage(this).initStorage(true);
} catch (DbInitException e) {
throw new QuitProgramException(e);
}
@ -223,13 +211,7 @@ public class Main {
// load all configs for each of the tenants.
MultitenancyHelper.getInstance(this).loadConfig(new ArrayList<>());
if (!StorageLayer.isInMemDb(this)) {
// we want to init storage connection once again so that the base storage also contains the right
// tenant identifier set passed to the init. So we call the
// resetPostConnectCallbackForBaseTenantStorage.
StorageLayer.getBaseStorage(this).close();
}
// init storage layers for each unique db connection based on unique (user pool ID, connection pool ID).
MultitenancyHelper.getInstance(this).loadStorageLayer();
} catch (InvalidConfigException e) {
throw new QuitProgramException(e);
@ -275,22 +257,6 @@ public class Main {
// starts DeleteExpiredAccessTokenSigningKeys cronjob if the access token signing keys can change
Cronjobs.addCronjob(this, DeleteExpiredAccessTokenSigningKeys.init(this, uniqueUserPoolIdsTenants));
// initializes ProcessBulkImportUsers cronjob to process bulk import users
if(bulkMigrationCronEnabled) {
Cronjobs.addCronjob(this, ProcessBulkImportUsers.init(this, uniqueUserPoolIdsTenants));
}
Cronjobs.addCronjob(this, CleanupOAuthSessionsAndChallenges.init(this, uniqueUserPoolIdsTenants));
Cronjobs.addCronjob(this, CleanUpWebauthNExpiredDataCron.init(this, uniqueUserPoolIdsTenants));
// starts the DeadlockLogger if
if (Config.getBaseConfig(this).isDeadlockLoggerEnabled()) {
DeadlockLogger.getInstance().start();
}
Cronjobs.addCronjob(this, DeleteExpiredSAMLData.init(this, uniqueUserPoolIdsTenants));
// this is to ensure tenantInfos are in sync for the new cron job as well
MultitenancyHelper.getInstance(this).refreshCronjobs();
@ -301,7 +267,6 @@ public class Main {
Webserver.getInstance(this).start();
// this is a sign to the controlling script that this process has started.
createDotStartedFileForThisProcess();
// NOTE: If the message below is changed, make sure to also change the corresponding check in the CLI program
@ -380,16 +345,11 @@ public class Main {
}
private void createDotStartedFileForThisProcess() throws IOException {
String startedDir = ".started";
if (isTesting) {
startedDir = ".started" + System.getProperty("org.gradle.test.worker", "");
}
CoreConfig config = Config.getBaseConfig(this);
String fileLocation = CLIOptions.get(this).getTempDirLocation() == null ? CLIOptions.get(this).getInstallationPath() : CLIOptions.get(this).getTempDirLocation();
String fileName = OperatingSystem.getOS() == OperatingSystem.OS.WINDOWS
? fileLocation + startedDir + "\\" + config.getHost(this) + "-"
? CLIOptions.get(this).getInstallationPath() + ".started\\" + config.getHost(this) + "-"
+ config.getPort(this)
: fileLocation + startedDir + "/" + config.getHost(this) + "-"
: CLIOptions.get(this).getInstallationPath() + ".started/" + config.getHost(this) + "-"
+ config.getPort(this);
File dotStarted = new File(fileName);
if (!dotStarted.exists()) {
@ -432,10 +392,9 @@ public class Main {
@TestOnly
public void killForTestingAndWaitForShutdown() throws InterruptedException {
// Do not kill for now
assertIsTesting();
wakeUpMainThreadToShutdown();
mainThread.join();
assertIsTesting();
wakeUpMainThreadToShutdown();
mainThread.join();
}
// must not throw any error

View File

@ -16,7 +16,6 @@
package io.supertokens;
import com.google.gson.JsonObject;
import io.supertokens.pluginInterface.multitenancy.TenantIdentifier;
import io.supertokens.pluginInterface.multitenancy.exceptions.TenantOrAppNotFoundException;
@ -52,12 +51,8 @@ public class ProcessState extends ResourceDistributor.SingletonResource {
}
public synchronized void addState(PROCESS_STATE processState, Exception e) {
addState(processState, e, null);
}
public synchronized void addState(PROCESS_STATE processState, Exception e, JsonObject data) {
if (Main.isTesting) {
history.add(new EventAndException(processState, e, data));
history.add(new EventAndException(processState, e));
}
}
@ -68,8 +63,7 @@ public class ProcessState extends ResourceDistributor.SingletonResource {
/**
* INIT: Initialization started INIT_FAILURE: Initialization failed
* STARTED: Initialized successfully SHUTTING_DOWN: Shut down signal received STOPPED
* RETRYING_ACCESS_TOKEN_JWT_VERIFICATION: When access token verification fails due to change in signing key, so
* we retry it
* RETRYING_ACCESS_TOKEN_JWT_VERIFICATION: When access token verification fails due to change in signing key, so we retry it
* CRON_TASK_ERROR_LOGGING: When an exception is thrown from a Cronjob
* DEVICE_DRIVER_INFO_LOGGED:When program is saving deviceDriverInfo into ping
* SERVER_PING: When program is pinging the server with information
@ -103,19 +97,11 @@ public class ProcessState extends ResourceDistributor.SingletonResource {
public static class EventAndException {
public Exception exception;
public JsonObject data;
public PROCESS_STATE state;
PROCESS_STATE state;
public EventAndException(PROCESS_STATE state, Exception e) {
this.state = state;
this.exception = e;
this.data = null;
}
public EventAndException(PROCESS_STATE state, Exception e, JsonObject data) {
this.state = state;
this.exception = e;
this.data = data;
}
}

View File

@ -35,28 +35,16 @@ public class ResourceDistributor {
private final Map<KeyClass, SingletonResource> resources = new HashMap<>(1);
private final Main main;
private static TenantIdentifier appUsedForTesting = TenantIdentifier.BASE_TENANT;
public ResourceDistributor(Main main) {
this.main = main;
}
@TestOnly
public static void setAppForTesting(TenantIdentifier app) {
appUsedForTesting = app;
}
@TestOnly
public static TenantIdentifier getAppForTesting() {
return appUsedForTesting;
}
public SingletonResource getResource(AppIdentifier appIdentifier, @Nonnull String key)
public synchronized SingletonResource getResource(AppIdentifier appIdentifier, @Nonnull String key)
throws TenantOrAppNotFoundException {
return getResource(appIdentifier.getAsPublicTenantIdentifier(), key);
}
public SingletonResource getResource(TenantIdentifier tenantIdentifier, @Nonnull String key)
public synchronized SingletonResource getResource(TenantIdentifier tenantIdentifier, @Nonnull String key)
throws TenantOrAppNotFoundException {
// first we do exact match
SingletonResource resource = resources.get(new KeyClass(tenantIdentifier, key));
@ -70,6 +58,14 @@ public class ResourceDistributor {
throw new TenantOrAppNotFoundException(tenantIdentifier);
}
MultitenancyHelper.getInstance(main).refreshTenantsInCoreBasedOnChangesInCoreConfigOrIfTenantListChanged(true);
// we try again..
resource = resources.get(new KeyClass(tenantIdentifier, key));
if (resource != null) {
return resource;
}
// then we see if the user has configured anything to do with connectionUriDomain, and if they have,
// then we must return null cause the user has not specifically added tenantId to it
for (KeyClass currKey : resources.keySet()) {
@ -93,11 +89,11 @@ public class ResourceDistributor {
}
@TestOnly
public SingletonResource getResource(@Nonnull String key) {
return resources.get(new KeyClass(appUsedForTesting, key));
public synchronized SingletonResource getResource(@Nonnull String key) {
return resources.get(new KeyClass(new TenantIdentifier(null, null, null), key));
}
public SingletonResource setResource(TenantIdentifier tenantIdentifier,
public synchronized SingletonResource setResource(TenantIdentifier tenantIdentifier,
@Nonnull String key,
SingletonResource resource) {
SingletonResource alreadyExists = resources.get(new KeyClass(tenantIdentifier, key));
@ -108,8 +104,8 @@ public class ResourceDistributor {
return resource;
}
public SingletonResource removeResource(TenantIdentifier tenantIdentifier,
@Nonnull String key) {
public synchronized SingletonResource removeResource(TenantIdentifier tenantIdentifier,
@Nonnull String key) {
SingletonResource singletonResource = resources.get(new KeyClass(tenantIdentifier, key));
if (singletonResource == null) {
return null;
@ -118,18 +114,18 @@ public class ResourceDistributor {
return singletonResource;
}
public SingletonResource setResource(AppIdentifier appIdentifier,
public synchronized SingletonResource setResource(AppIdentifier appIdentifier,
@Nonnull String key,
SingletonResource resource) {
return setResource(appIdentifier.getAsPublicTenantIdentifier(), key, resource);
}
public SingletonResource removeResource(AppIdentifier appIdentifier,
public synchronized SingletonResource removeResource(AppIdentifier appIdentifier,
@Nonnull String key) {
return removeResource(appIdentifier.getAsPublicTenantIdentifier(), key);
}
public void clearAllResourcesWithResourceKey(String inputKey) {
public synchronized void clearAllResourcesWithResourceKey(String inputKey) {
List<KeyClass> toRemove = new ArrayList<>();
resources.forEach((key, value) -> {
if (key.key.equals(inputKey)) {
@ -141,7 +137,7 @@ public class ResourceDistributor {
}
}
public Map<KeyClass, SingletonResource> getAllResourcesWithResourceKey(String inputKey) {
public synchronized Map<KeyClass, SingletonResource> getAllResourcesWithResourceKey(String inputKey) {
Map<KeyClass, SingletonResource> result = new HashMap<>();
resources.forEach((key, value) -> {
if (key.key.equals(inputKey)) {
@ -152,9 +148,9 @@ public class ResourceDistributor {
}
@TestOnly
public SingletonResource setResource(@Nonnull String key,
public synchronized SingletonResource setResource(@Nonnull String key,
SingletonResource resource) {
return setResource(appUsedForTesting, key, resource);
return setResource(new TenantIdentifier(null, null, null), key, resource);
}
public interface Func<T> {

View File

@ -1,31 +0,0 @@
/*
* Copyright (c) 2024, VRAI Labs and/or its affiliates. All rights reserved.
*
* This software is licensed under the Apache License, Version 2.0 (the
* "License") as published by the Apache Software Foundation.
*
* You may not use this file except in compliance with the License. You may
* obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package io.supertokens;
import io.supertokens.pluginInterface.Storage;
import io.supertokens.pluginInterface.useridmapping.UserIdMapping;
public class StorageAndUserIdMappingForBulkImport extends StorageAndUserIdMapping {
public String userIdInQuestion;
public StorageAndUserIdMappingForBulkImport(Storage storage,
UserIdMapping userIdMapping, String userIdInQuestion) {
super(storage, userIdMapping);
this.userIdInQuestion = userIdInQuestion;
}
}

View File

@ -0,0 +1,41 @@
/*
* Copyright (c) 2023, VRAI Labs and/or its affiliates. All rights reserved.
*
* This software is licensed under the Apache License, Version 2.0 (the
* "License") as published by the Apache Software Foundation.
*
* You may not use this file except in compliance with the License. You may
* obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package io.supertokens;
import io.supertokens.pluginInterface.Storage;
import io.supertokens.pluginInterface.multitenancy.TenantIdentifier;
import io.supertokens.pluginInterface.multitenancy.TenantIdentifierWithStorage;
import io.supertokens.pluginInterface.useridmapping.UserIdMapping;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
public class TenantIdentifierWithStorageAndUserIdMapping {
@Nullable
public final io.supertokens.pluginInterface.useridmapping.UserIdMapping userIdMapping;
@Nonnull
public final TenantIdentifierWithStorage tenantIdentifierWithStorage;
public TenantIdentifierWithStorageAndUserIdMapping(TenantIdentifierWithStorage tenantIdentifierWithStorage,
UserIdMapping userIdMapping) {
this.tenantIdentifierWithStorage = tenantIdentifierWithStorage;
this.userIdMapping = userIdMapping;
assert(this.tenantIdentifierWithStorage != null);
}
}

File diff suppressed because it is too large Load Diff

View File

@ -22,11 +22,24 @@ import javax.annotation.Nonnull;
import javax.annotation.Nullable;
public class UserPaginationContainer {
public final AuthRecipeUserInfo[] users;
public final UsersContainer[] users;
public final String nextPaginationToken;
public UserPaginationContainer(@Nonnull AuthRecipeUserInfo[] users, @Nullable String nextPaginationToken) {
this.users = users;
this.users = new UsersContainer[users.length];
for (int i = 0; i < users.length; i++) {
this.users[i] = new UsersContainer(users[i]);
}
this.nextPaginationToken = nextPaginationToken;
}
public static class UsersContainer {
public final AuthRecipeUserInfo user;
public final String recipeId;
public UsersContainer(AuthRecipeUserInfo user) {
this.user = user;
this.recipeId = user.getRecipeId().toString();
}
}
}

View File

@ -1,26 +0,0 @@
/*
* Copyright (c) 2023, VRAI Labs and/or its affiliates. All rights reserved.
*
* This software is licensed under the Apache License, Version 2.0 (the
* "License") as published by the Apache Software Foundation.
*
* You may not use this file except in compliance with the License. You may
* obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package io.supertokens.authRecipe.exception;
public class AccountInfoAlreadyAssociatedWithAnotherPrimaryUserIdException extends Exception {
public final String primaryUserId;
public AccountInfoAlreadyAssociatedWithAnotherPrimaryUserIdException(String primaryUserId, String description) {
super(description);
this.primaryUserId = primaryUserId;
}
}

View File

@ -1,26 +0,0 @@
/*
* Copyright (c) 2023, VRAI Labs and/or its affiliates. All rights reserved.
*
* This software is licensed under the Apache License, Version 2.0 (the
* "License") as published by the Apache Software Foundation.
*
* You may not use this file except in compliance with the License. You may
* obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package io.supertokens.authRecipe.exception;
public class InputUserIdIsNotAPrimaryUserException extends Exception {
public final String userId;
public InputUserIdIsNotAPrimaryUserException(String userId) {
super();
this.userId = userId;
}
}

View File

@ -1,29 +0,0 @@
/*
* Copyright (c) 2023, VRAI Labs and/or its affiliates. All rights reserved.
*
* This software is licensed under the Apache License, Version 2.0 (the
* "License") as published by the Apache Software Foundation.
*
* You may not use this file except in compliance with the License. You may
* obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package io.supertokens.authRecipe.exception;
import io.supertokens.pluginInterface.authRecipe.AuthRecipeUserInfo;
public class RecipeUserIdAlreadyLinkedWithAnotherPrimaryUserIdException extends Exception {
public final AuthRecipeUserInfo recipeUser;
public RecipeUserIdAlreadyLinkedWithAnotherPrimaryUserIdException(AuthRecipeUserInfo recipeUser,
String description) {
super(description);
this.recipeUser = recipeUser;
}
}

View File

@ -1,26 +0,0 @@
/*
* Copyright (c) 2023, VRAI Labs and/or its affiliates. All rights reserved.
*
* This software is licensed under the Apache License, Version 2.0 (the
* "License") as published by the Apache Software Foundation.
*
* You may not use this file except in compliance with the License. You may
* obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package io.supertokens.authRecipe.exception;
public class RecipeUserIdAlreadyLinkedWithPrimaryUserIdException extends Exception {
public final String primaryUserId;
public RecipeUserIdAlreadyLinkedWithPrimaryUserIdException(String primaryUserId, String description) {
super(description);
this.primaryUserId = primaryUserId;
}
}

View File

@ -1,845 +0,0 @@
/*
* Copyright (c) 2024, VRAI Labs and/or its affiliates. All rights reserved.
*
* This software is licensed under the Apache License, Version 2.0 (the
* "License") as published by the Apache Software Foundation.
*
* You may not use this file except in compliance with the License. You may
* obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package io.supertokens.bulkimport;
import com.google.gson.JsonObject;
import io.supertokens.Main;
import io.supertokens.ResourceDistributor;
import io.supertokens.authRecipe.AuthRecipe;
import io.supertokens.authRecipe.exception.AccountInfoAlreadyAssociatedWithAnotherPrimaryUserIdException;
import io.supertokens.authRecipe.exception.InputUserIdIsNotAPrimaryUserException;
import io.supertokens.authRecipe.exception.RecipeUserIdAlreadyLinkedWithAnotherPrimaryUserIdException;
import io.supertokens.authRecipe.exception.RecipeUserIdAlreadyLinkedWithPrimaryUserIdException;
import io.supertokens.config.Config;
import io.supertokens.emailpassword.EmailPassword;
import io.supertokens.emailpassword.PasswordHashing;
import io.supertokens.featureflag.exceptions.FeatureNotEnabledException;
import io.supertokens.multitenancy.Multitenancy;
import io.supertokens.multitenancy.exception.AnotherPrimaryUserWithEmailAlreadyExistsException;
import io.supertokens.multitenancy.exception.AnotherPrimaryUserWithPhoneNumberAlreadyExistsException;
import io.supertokens.multitenancy.exception.AnotherPrimaryUserWithThirdPartyInfoAlreadyExistsException;
import io.supertokens.output.Logging;
import io.supertokens.passwordless.Passwordless;
import io.supertokens.pluginInterface.Storage;
import io.supertokens.pluginInterface.StorageUtils;
import io.supertokens.pluginInterface.authRecipe.AuthRecipeUserInfo;
import io.supertokens.pluginInterface.bulkimport.BulkImportStorage.BULK_IMPORT_USER_STATUS;
import io.supertokens.pluginInterface.bulkimport.BulkImportUser;
import io.supertokens.pluginInterface.bulkimport.BulkImportUser.LoginMethod;
import io.supertokens.pluginInterface.bulkimport.BulkImportUser.TotpDevice;
import io.supertokens.pluginInterface.bulkimport.BulkImportUser.UserRole;
import io.supertokens.pluginInterface.bulkimport.ImportUserBase;
import io.supertokens.pluginInterface.bulkimport.exceptions.BulkImportBatchInsertException;
import io.supertokens.pluginInterface.bulkimport.sqlStorage.BulkImportSQLStorage;
import io.supertokens.pluginInterface.emailpassword.EmailPasswordImportUser;
import io.supertokens.pluginInterface.emailpassword.exceptions.DuplicateEmailException;
import io.supertokens.pluginInterface.emailpassword.exceptions.UnknownUserIdException;
import io.supertokens.pluginInterface.emailverification.sqlStorage.EmailVerificationSQLStorage;
import io.supertokens.pluginInterface.exceptions.DbInitException;
import io.supertokens.pluginInterface.exceptions.InvalidConfigException;
import io.supertokens.pluginInterface.exceptions.StorageQueryException;
import io.supertokens.pluginInterface.exceptions.StorageTransactionLogicException;
import io.supertokens.pluginInterface.multitenancy.AppIdentifier;
import io.supertokens.pluginInterface.multitenancy.TenantConfig;
import io.supertokens.pluginInterface.multitenancy.TenantIdentifier;
import io.supertokens.pluginInterface.multitenancy.exceptions.TenantOrAppNotFoundException;
import io.supertokens.pluginInterface.passwordless.PasswordlessImportUser;
import io.supertokens.pluginInterface.passwordless.exception.DuplicatePhoneNumberException;
import io.supertokens.pluginInterface.sqlStorage.SQLStorage;
import io.supertokens.pluginInterface.thirdparty.ThirdPartyImportUser;
import io.supertokens.pluginInterface.thirdparty.exception.DuplicateThirdPartyUserException;
import io.supertokens.pluginInterface.totp.TOTPDevice;
import io.supertokens.pluginInterface.useridmapping.exception.UnknownSuperTokensUserIdException;
import io.supertokens.pluginInterface.useridmapping.exception.UserIdMappingAlreadyExistsException;
import io.supertokens.pluginInterface.userroles.exception.UnknownRoleException;
import io.supertokens.storageLayer.StorageLayer;
import io.supertokens.thirdparty.ThirdParty;
import io.supertokens.totp.Totp;
import io.supertokens.useridmapping.UserIdMapping;
import io.supertokens.usermetadata.UserMetadata;
import io.supertokens.userroles.UserRoles;
import io.supertokens.utils.Utils;
import jakarta.servlet.ServletException;
import org.jetbrains.annotations.NotNull;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.annotation.Nullable;
import java.io.IOException;
import java.util.*;
import java.util.stream.Collectors;
// Error codes ensure globally unique and identifiable errors in Bulk Import.
// Current range: E001 to E046.
public class BulkImport {
// Maximum number of users that can be added in a single /bulk-import/users POST request
public static final int MAX_USERS_TO_ADD = 10000;
// Maximum number of users to return in a single page when calling /bulk-import/users GET
public static final int GET_USERS_PAGINATION_MAX_LIMIT = 500;
// Default number of users to return when no specific limit is given in /bulk-import/users GET
public static final int GET_USERS_DEFAULT_LIMIT = 100;
// Maximum number of users that can be deleted in a single operation
public static final int DELETE_USERS_MAX_LIMIT = 500;
// Time interval in seconds between two consecutive runs of ProcessBulkImportUsers Cron Job
public static final int PROCESS_USERS_INTERVAL_SECONDS = 5*60; // 5 minutes
private static final Logger log = LoggerFactory.getLogger(BulkImport.class);
// This map allows reusing proxy storage for all tenants in the app and closing connections after import.
private static Map<String, SQLStorage> userPoolToStorageMap = new HashMap<>();
public static void addUsers(AppIdentifier appIdentifier, Storage storage, List<BulkImportUser> users)
throws StorageQueryException, TenantOrAppNotFoundException {
while (true) {
try {
StorageUtils.getBulkImportStorage(storage).addBulkImportUsers(appIdentifier, users);
break;
} catch (StorageQueryException sqe) {
if (sqe.getCause() instanceof io.supertokens.pluginInterface.bulkimport.exceptions.DuplicateUserIdException) {
// We re-generate the user id for every user and retry
for (BulkImportUser user : users) {
user.id = Utils.getUUID();
}
} else {
throw sqe;
}
}
}
}
/**
 * Returns one page of bulk import users, optionally filtered by status.
 * Fetches limit + 1 rows so the presence of a further page can be detected;
 * when one exists, a pagination token pointing at the first user of the next
 * page is included in the result.
 */
public static BulkImportUserPaginationContainer getUsers(AppIdentifier appIdentifier, Storage storage,
        int limit, @Nullable BULK_IMPORT_USER_STATUS status, @Nullable String paginationToken)
        throws StorageQueryException, BulkImportUserPaginationToken.InvalidTokenException {
    BulkImportSQLStorage bulkImportStorage = StorageUtils.getBulkImportStorage(storage);

    List<BulkImportUser> users;
    if (paginationToken != null) {
        // Resume from the position encoded in the token.
        BulkImportUserPaginationToken tokenInfo = BulkImportUserPaginationToken.extractTokenInfo(paginationToken);
        users = bulkImportStorage.getBulkImportUsers(appIdentifier, limit + 1, status,
                tokenInfo.bulkImportUserId, tokenInfo.createdAt);
    } else {
        users = bulkImportStorage.getBulkImportUsers(appIdentifier, limit + 1, status, null, null);
    }

    // The extra (limit + 1)-th row, if present, becomes the anchor of the next page.
    boolean hasNextPage = users.size() == limit + 1;
    String nextPaginationToken = null;
    if (hasNextPage) {
        BulkImportUser firstUserOfNextPage = users.get(limit);
        nextPaginationToken = new BulkImportUserPaginationToken(firstUserOfNextPage.id,
                firstUserOfNextPage.createdAt).generateToken();
    }

    List<BulkImportUser> pageUsers = hasNextPage ? users.subList(0, limit) : users;
    return new BulkImportUserPaginationContainer(pageUsers, nextPaginationToken);
}
/**
 * Deletes the given bulk import queue entries.
 *
 * @return the ids that were actually deleted
 */
public static List<String> deleteUsers(AppIdentifier appIdentifier, Storage storage, String[] userIds)
        throws StorageQueryException {
    BulkImportSQLStorage bulkImportStorage = StorageUtils.getBulkImportStorage(storage);
    return bulkImportStorage.deleteBulkImportUsers(appIdentifier, userIds);
}
/**
 * Counts the bulk import queue entries for the app, optionally filtered
 * by status (null means all statuses).
 */
public static long getBulkImportUsersCount(AppIdentifier appIdentifier, Storage storage,
        @Nullable BULK_IMPORT_USER_STATUS status)
        throws StorageQueryException {
    BulkImportSQLStorage bulkImportStorage = StorageUtils.getBulkImportStorage(storage);
    return bulkImportStorage.getBulkImportUsersCount(appIdentifier, status);
}
/**
 * Synchronously imports a single bulk import user inside one transaction on a
 * dedicated proxy storage, running every import step (login methods, account
 * linking, id mappings, email verification, TOTP, metadata, roles).
 *
 * Commit and rollback are invoked manually because the proxy storage overrides
 * the default transaction handling; all cached proxy storages are closed when
 * the transaction body finishes, success or failure.
 *
 * @return the imported user, with its external user id populated
 * @throws BulkImportBatchInsertException per-user import errors surfaced from the steps
 * @throws StorageQueryException any other failure, wrapping the underlying cause
 */
public static synchronized AuthRecipeUserInfo importUser(Main main, AppIdentifier appIdentifier,
        BulkImportUser user)
        throws StorageQueryException, InvalidConfigException, IOException, TenantOrAppNotFoundException,
        DbInitException, BulkImportBatchInsertException {
    // Since all the tenants of a user must share the storage, we will just use the
    // storage of the first tenantId of the first loginMethod
    TenantIdentifier firstTenantIdentifier = new TenantIdentifier(appIdentifier.getConnectionUriDomain(),
            appIdentifier.getAppId(), user.loginMethods.get(0).tenantIds.get(0));

    SQLStorage bulkImportProxyStorage = (SQLStorage) getBulkImportProxyStorage(main, firstTenantIdentifier);

    LoginMethod primaryLM = BulkImportUserUtils.getPrimaryLoginMethod(user);

    try {
        return bulkImportProxyStorage.startTransaction(con -> {
            try {
                Storage[] allStoragesForApp = getAllProxyStoragesForApp(main, appIdentifier);
                processUsersImportSteps(main, appIdentifier, bulkImportProxyStorage, List.of(user), allStoragesForApp);

                // Manual commit: the proxy storage overrides the normal commit behaviour.
                bulkImportProxyStorage.commitTransactionForBulkImportProxyStorage();

                AuthRecipeUserInfo importedUser = AuthRecipe.getUserById(appIdentifier, bulkImportProxyStorage,
                        primaryLM.superTokensUserId);

                io.supertokens.useridmapping.UserIdMapping.populateExternalUserIdForUsers(appIdentifier,
                        bulkImportProxyStorage, new AuthRecipeUserInfo[] { importedUser });

                return importedUser;
            } catch (StorageTransactionLogicException e) {
                // We need to rollback the transaction manually because we have overridden that in the proxy storage
                bulkImportProxyStorage.rollbackTransactionForBulkImportProxyStorage();
                throw e;
            } finally {
                closeAllProxyStorages();
            }
        });
    } catch (StorageTransactionLogicException e) {
        // Per-user batch errors are rethrown as-is for the API layer; anything else is wrapped.
        if (e.actualException instanceof BulkImportBatchInsertException) {
            throw (BulkImportBatchInsertException) e.actualException;
        }
        throw new StorageQueryException(e.actualException);
    }
}
/**
 * Runs every bulk import step for the given users, in a fixed order:
 * login methods, primary-user creation + account linking, user id mappings,
 * email verification, TOTP devices, user metadata, user roles.
 *
 * The order matters: later steps key their data by the external user id when a
 * mapping exists, so id mappings must be created before them.
 *
 * @param bulkImportProxyStorage storage whose transaction the steps run in
 * @param allStoragesForApp proxy storages of every tenant of the app (for id mappings)
 * @throws StorageTransactionLogicException wrapping any failure of a step
 */
public static void processUsersImportSteps(Main main, AppIdentifier appIdentifier,
        Storage bulkImportProxyStorage, List<BulkImportUser> users, Storage[] allStoragesForApp)
        throws StorageTransactionLogicException {
    try {
        Logging.debug(main, TenantIdentifier.BASE_TENANT, "Processing login methods..");
        processUsersLoginMethods(main, appIdentifier, bulkImportProxyStorage, users);
        Logging.debug(main, TenantIdentifier.BASE_TENANT, "Processing login methods DONE");
        Logging.debug(main, TenantIdentifier.BASE_TENANT, "Creating Primary users and linking accounts..");
        createPrimaryUsersAndLinkAccounts(main, appIdentifier, bulkImportProxyStorage, users);
        Logging.debug(main, TenantIdentifier.BASE_TENANT, "Creating Primary users and linking accounts DONE");
        Logging.debug(main, TenantIdentifier.BASE_TENANT, "Creating user id mappings..");
        createMultipleUserIdMapping(appIdentifier, users, allStoragesForApp);
        Logging.debug(main, TenantIdentifier.BASE_TENANT, "Creating user id mappings DONE");
        Logging.debug(main, TenantIdentifier.BASE_TENANT, "Verifying email addresses..");
        verifyMultipleEmailForAllLoginMethods(appIdentifier, bulkImportProxyStorage, users);
        Logging.debug(main, TenantIdentifier.BASE_TENANT, "Verifying email addresses DONE");
        Logging.debug(main, TenantIdentifier.BASE_TENANT, "Creating TOTP devices..");
        createMultipleTotpDevices(main, appIdentifier, bulkImportProxyStorage, users);
        Logging.debug(main, TenantIdentifier.BASE_TENANT, "Creating TOTP devices DONE");
        Logging.debug(main, TenantIdentifier.BASE_TENANT, "Creating user metadata..");
        createMultipleUserMetadata(appIdentifier, bulkImportProxyStorage, users);
        Logging.debug(main, TenantIdentifier.BASE_TENANT, "Creating user metadata DONE");
        Logging.debug(main, TenantIdentifier.BASE_TENANT, "Creating user roles..");
        createMultipleUserRoles(main, appIdentifier, bulkImportProxyStorage, users);
        Logging.debug(main, TenantIdentifier.BASE_TENANT, "Creating user roles DONE");
        Logging.debug(main, TenantIdentifier.BASE_TENANT, "Effective processUsersImportSteps DONE");
    } catch ( StorageQueryException | FeatureNotEnabledException |
             TenantOrAppNotFoundException e) {
        throw new StorageTransactionLogicException(e);
    }
}
/**
 * Creates the auth-recipe users for every login method of the given users and then
 * associates each login method with all of its remaining tenants.
 *
 * Login methods are grouped by recipeId ("emailpassword", "thirdparty", "passwordless")
 * so each recipe can be inserted in one batch call; any other recipeId is rejected
 * with error code E001.
 *
 * @throws StorageTransactionLogicException wrapping an IllegalArgumentException for
 *         unknown recipeIds, or a BulkImportBatchInsertException keyed by superTokens
 *         user id for per-user tenant association failures
 */
public static void processUsersLoginMethods(Main main, AppIdentifier appIdentifier, Storage storage,
        List<BulkImportUser> users) throws StorageTransactionLogicException {
    // Group login methods by recipeId so each recipe is bulk-inserted in a single call.
    Logging.debug(main, TenantIdentifier.BASE_TENANT, "Sorting login methods by recipeId..");
    Map<String, List<LoginMethod>> sortedLoginMethods = new HashMap<>();
    for (BulkImportUser user : users) {
        for (LoginMethod loginMethod : user.loginMethods) {
            sortedLoginMethods.computeIfAbsent(loginMethod.recipeId, k -> new ArrayList<>()).add(loginMethod);
        }
    }

    List<ImportUserBase> importedUsers = new ArrayList<>();
    if (sortedLoginMethods.containsKey("emailpassword")) {
        Logging.debug(main, TenantIdentifier.BASE_TENANT, "Processing emailpassword login methods..");
        importedUsers.addAll(
                processEmailPasswordLoginMethods(main, storage, sortedLoginMethods.get("emailpassword"),
                        appIdentifier));
        Logging.debug(main, TenantIdentifier.BASE_TENANT, "Processing emailpassword login methods DONE");
    }
    if (sortedLoginMethods.containsKey("thirdparty")) {
        Logging.debug(main, TenantIdentifier.BASE_TENANT, "Processing thirdparty login methods..");
        importedUsers.addAll(
                processThirdpartyLoginMethods(main, storage, sortedLoginMethods.get("thirdparty"),
                        appIdentifier));
        Logging.debug(main, TenantIdentifier.BASE_TENANT, "Processing thirdparty login methods DONE");
    }
    if (sortedLoginMethods.containsKey("passwordless")) {
        Logging.debug(main, TenantIdentifier.BASE_TENANT, "Processing passwordless login methods..");
        importedUsers.addAll(processPasswordlessLoginMethods(main, appIdentifier, storage,
                sortedLoginMethods.get("passwordless")));
        Logging.debug(main, TenantIdentifier.BASE_TENANT, "Processing passwordless login methods DONE");
    }

    // Reject recipeIds we do not know how to import.
    Set<String> unknownRecipeIds = new HashSet<>(sortedLoginMethods.keySet());
    List.of("emailpassword", "thirdparty", "passwordless").forEach(unknownRecipeIds::remove);
    if (!unknownRecipeIds.isEmpty()) {
        // BUGFIX: the previous message concatenated an uncollected Stream object
        // (actualKeys.stream().map(...)), printing e.g. "ReferencePipeline$3@1a2b"
        // instead of the offending recipeIds.
        throw new StorageTransactionLogicException(
                new IllegalArgumentException("E001: Unknown recipeId(s) [" +
                        String.join(", ", unknownRecipeIds) + "] for loginMethod."));
    }

    // The batch inserts above associate each login method with its first tenant;
    // associate the remaining tenants here and collect per-user failures.
    Map<String, Exception> errorsById = new HashMap<>();
    for (Map.Entry<String, List<LoginMethod>> loginMethodEntries : sortedLoginMethods.entrySet()) {
        for (LoginMethod loginMethod : loginMethodEntries.getValue()) {
            try {
                associateUserToTenants(main, appIdentifier, storage, loginMethod, loginMethod.tenantIds.get(0));
            } catch (StorageTransactionLogicException e) {
                errorsById.put(loginMethod.superTokensUserId, e.actualException);
            }
        }
    }
    if (!errorsById.isEmpty()) {
        throw new StorageTransactionLogicException(
                new BulkImportBatchInsertException("tenant association errors", errorsById));
    }
}
/**
 * Batch-creates passwordless users for the given login methods.
 *
 * When the storage reports per-user failures (a BulkImportBatchInsertException as the
 * cause), known duplicates are translated to user-facing messages keyed by superTokens
 * user id: E006 for duplicate email, E007 for duplicate phone number.
 *
 * @return the created users, one entry per login method
 * @throws StorageTransactionLogicException wrapping the translated batch errors, E008
 *         for an unknown tenant/app, or the original exception otherwise
 */
private static List<? extends ImportUserBase> processPasswordlessLoginMethods(Main main, AppIdentifier appIdentifier, Storage storage,
        List<LoginMethod> loginMethods)
        throws StorageTransactionLogicException {
    try {
        List<PasswordlessImportUser> usersToImport = new ArrayList<>();
        for (LoginMethod loginMethod : loginMethods) {
            TenantIdentifier tenantIdentifierForLoginMethod = new TenantIdentifier(
                    appIdentifier.getConnectionUriDomain(),
                    appIdentifier.getAppId(), loginMethod.tenantIds.get(
                    0)); // the cron runs per app. The app stays the same, the tenant can change
            usersToImport.add(new PasswordlessImportUser(loginMethod.superTokensUserId, loginMethod.phoneNumber,
                    loginMethod.email, tenantIdentifierForLoginMethod, loginMethod.timeJoinedInMSSinceEpoch));
        }
        Passwordless.createPasswordlessUsers(storage, usersToImport);
        return usersToImport;
    } catch (StorageQueryException | StorageTransactionLogicException e) {
        Logging.debug(main, TenantIdentifier.BASE_TENANT, "exception: " + e.getMessage());
        if (e.getCause() instanceof BulkImportBatchInsertException) {
            // Translate each per-user failure into a stable, user-facing error message.
            Map<String, Exception> errorsByPosition = ((BulkImportBatchInsertException) e.getCause()).exceptionByUserId;
            for (String userid : errorsByPosition.keySet()) {
                Exception exception = errorsByPosition.get(userid);
                if (exception instanceof DuplicateEmailException) {
                    String message = "E006: A user with email "
                            + loginMethods.stream()
                            .filter(loginMethod -> loginMethod.superTokensUserId.equals(userid))
                            .findFirst().get().email + " already exists in passwordless loginMethod.";
                    errorsByPosition.put(userid, new Exception(message));
                } else if (exception instanceof DuplicatePhoneNumberException) {
                    String message = "E007: A user with phoneNumber "
                            + loginMethods.stream()
                            .filter(loginMethod -> loginMethod.superTokensUserId.equals(userid))
                            .findFirst().get().phoneNumber + " already exists in passwordless loginMethod.";
                    errorsByPosition.put(userid, new Exception(message));
                }
            }
            throw new StorageTransactionLogicException(
                    new BulkImportBatchInsertException("translated", errorsByPosition));
        }
        throw new StorageTransactionLogicException(e);
    } catch (TenantOrAppNotFoundException e) {
        throw new StorageTransactionLogicException(new Exception("E008: " + e.getMessage()));
    }
}
/**
 * Batch-creates thirdparty users for the given login methods.
 *
 * Per-user duplicate (thirdPartyId, thirdPartyUserId) failures reported by the storage
 * are translated to error E005, keyed by superTokens user id.
 *
 * @return the created users, one entry per login method
 * @throws StorageTransactionLogicException wrapping the translated batch errors, E004
 *         for an unknown tenant/app, or the original exception otherwise
 */
private static List<? extends ImportUserBase> processThirdpartyLoginMethods(Main main, Storage storage, List<LoginMethod> loginMethods,
        AppIdentifier appIdentifier)
        throws StorageTransactionLogicException {
    try {
        List<ThirdPartyImportUser> usersToImport = new ArrayList<>();
        for (LoginMethod loginMethod : loginMethods) {
            TenantIdentifier tenantIdentifierForLoginMethod = new TenantIdentifier(appIdentifier.getConnectionUriDomain(),
                    appIdentifier.getAppId(), loginMethod.tenantIds.get(0)); // the cron runs per app. The app stays the same, the tenant can change
            usersToImport.add(new ThirdPartyImportUser(loginMethod.email, loginMethod.superTokensUserId, loginMethod.thirdPartyId,
                    loginMethod.thirdPartyUserId, tenantIdentifierForLoginMethod, loginMethod.timeJoinedInMSSinceEpoch));
        }
        ThirdParty.createMultipleThirdPartyUsers(storage, usersToImport);
        return usersToImport;
    } catch (StorageQueryException | StorageTransactionLogicException e) {
        if (e.getCause() instanceof BulkImportBatchInsertException) {
            // Translate each per-user failure into a stable, user-facing error message.
            Map<String, Exception> errorsByPosition = ((BulkImportBatchInsertException) e.getCause()).exceptionByUserId;
            for (String userid : errorsByPosition.keySet()) {
                Exception exception = errorsByPosition.get(userid);
                if (exception instanceof DuplicateThirdPartyUserException) {
                    LoginMethod loginMethodForError = loginMethods.stream()
                            .filter(loginMethod -> loginMethod.superTokensUserId.equals(userid))
                            .findFirst().get();
                    String message = "E005: A user with thirdPartyId " + loginMethodForError.thirdPartyId
                            + " and thirdPartyUserId " + loginMethodForError.thirdPartyUserId
                            + " already exists in thirdparty loginMethod.";
                    errorsByPosition.put(userid, new Exception(message));
                }
            }
            throw new StorageTransactionLogicException(
                    new BulkImportBatchInsertException("translated", errorsByPosition));
        }
        throw new StorageTransactionLogicException(e);
    } catch (TenantOrAppNotFoundException e) {
        throw new StorageTransactionLogicException(new Exception("E004: " + e.getMessage()));
    }
}
/**
 * Batch-creates emailpassword users for the given login methods.
 *
 * Login methods that carry a plainTextPassword but no passwordHash get hashed here
 * (using the tenant's configured hashing algorithm) before the batch insert; the
 * computed hash is written back onto the login method.
 *
 * Per-user duplicate email failures reported by the storage are translated to error
 * E003, keyed by superTokens user id.
 *
 * @return the created users, one entry per login method
 * @throws StorageTransactionLogicException wrapping the translated batch errors, E002
 *         for an unknown tenant/app, or the original exception otherwise
 */
private static List<? extends ImportUserBase> processEmailPasswordLoginMethods(Main main, Storage storage, List<LoginMethod> loginMethods,
        AppIdentifier appIdentifier)
        throws StorageTransactionLogicException {
    try {
        //prepare data for batch import
        List<EmailPasswordImportUser> usersToImport = new ArrayList<>();
        for (LoginMethod emailPasswordLoginMethod : loginMethods) {
            TenantIdentifier tenantIdentifierForLoginMethod = new TenantIdentifier(appIdentifier.getConnectionUriDomain(),
                    appIdentifier.getAppId(), emailPasswordLoginMethod.tenantIds.get(0)); // the cron runs per app. The app stays the same, the tenant can change
            String passwordHash = emailPasswordLoginMethod.passwordHash;
            if (passwordHash == null && emailPasswordLoginMethod.plainTextPassword != null) {
                // Only plaintext was supplied: hash it now so the import stores a hash.
                passwordHash = PasswordHashing.getInstance(main)
                        .createHashWithSalt(tenantIdentifierForLoginMethod.toAppIdentifier(), emailPasswordLoginMethod.plainTextPassword);
            }
            emailPasswordLoginMethod.passwordHash = passwordHash;
            usersToImport.add(new EmailPasswordImportUser(emailPasswordLoginMethod.superTokensUserId, emailPasswordLoginMethod.email,
                    emailPasswordLoginMethod.passwordHash, tenantIdentifierForLoginMethod, emailPasswordLoginMethod.timeJoinedInMSSinceEpoch));
        }
        EmailPassword.createMultipleUsersWithPasswordHash(storage, usersToImport);
        return usersToImport;
    } catch (StorageQueryException | StorageTransactionLogicException e) {
        if (e.getCause() instanceof BulkImportBatchInsertException) {
            // Translate each per-user failure into a stable, user-facing error message.
            Map<String, Exception> errorsByPosition = ((BulkImportBatchInsertException) e.getCause()).exceptionByUserId;
            for (String userid : errorsByPosition.keySet()) {
                Exception exception = errorsByPosition.get(userid);
                if (exception instanceof DuplicateEmailException) {
                    String message = "E003: A user with email "
                            + loginMethods.stream().filter(loginMethod -> loginMethod.superTokensUserId.equals(userid))
                            .findFirst().get().email + " already exists in emailpassword loginMethod.";
                    errorsByPosition.put(userid, new Exception(message));
                }
            }
            throw new StorageTransactionLogicException(new BulkImportBatchInsertException("translated", errorsByPosition));
        }
        throw new StorageTransactionLogicException(e);
    } catch (TenantOrAppNotFoundException e) {
        throw new StorageTransactionLogicException(new Exception("E002: " + e.getMessage()));
    }
}
/**
 * Associates the login method's user with every tenant in its tenantIds list except
 * firstTenant (the creation step already associated that one).
 *
 * Each known failure is translated into a stable, user-facing error code (E009-E017)
 * wrapped in a StorageTransactionLogicException; the first failing tenant aborts the
 * remaining associations for this login method.
 */
private static void associateUserToTenants(Main main, AppIdentifier appIdentifier, Storage storage, LoginMethod lm,
        String firstTenant) throws StorageTransactionLogicException {
    for (String tenantId : lm.tenantIds) {
        try {
            if (tenantId.equals(firstTenant)) {
                // Already associated when the user was created.
                continue;
            }
            TenantIdentifier tenantIdentifier = new TenantIdentifier(appIdentifier.getConnectionUriDomain(),
                    appIdentifier.getAppId(), tenantId);
            Multitenancy.addUserIdToTenant(main, tenantIdentifier, storage, lm.superTokensUserId);
        } catch (TenantOrAppNotFoundException e) {
            throw new StorageTransactionLogicException(new Exception("E009: " + e.getMessage()));
        } catch (StorageQueryException e) {
            throw new StorageTransactionLogicException(e);
        } catch (UnknownUserIdException e) {
            throw new StorageTransactionLogicException(new Exception("E010: " + "We tried to add the userId "
                    + lm.getSuperTokenOrExternalUserId() + " to the tenantId " + tenantId
                    + " but it doesn't exist. This should not happen. Please contact support."));
        } catch (AnotherPrimaryUserWithEmailAlreadyExistsException e) {
            throw new StorageTransactionLogicException(new Exception("E011: " + "We tried to add the userId "
                    + lm.getSuperTokenOrExternalUserId() + " to the tenantId " + tenantId
                    + " but another primary user with email " + lm.email + " already exists."));
        } catch (AnotherPrimaryUserWithPhoneNumberAlreadyExistsException e) {
            throw new StorageTransactionLogicException(new Exception("E012: " + "We tried to add the userId "
                    + lm.getSuperTokenOrExternalUserId() + " to the tenantId " + tenantId
                    + " but another primary user with phoneNumber " + lm.phoneNumber + " already exists."));
        } catch (AnotherPrimaryUserWithThirdPartyInfoAlreadyExistsException e) {
            throw new StorageTransactionLogicException(new Exception("E013: " + "We tried to add the userId "
                    + lm.getSuperTokenOrExternalUserId() + " to the tenantId " + tenantId
                    + " but another primary user with thirdPartyId " + lm.thirdPartyId + " and thirdPartyUserId "
                    + lm.thirdPartyUserId + " already exists."));
        } catch (DuplicateEmailException e) {
            throw new StorageTransactionLogicException(new Exception("E014: " + "We tried to add the userId "
                    + lm.getSuperTokenOrExternalUserId() + " to the tenantId " + tenantId
                    + " but another user with email " + lm.email + " already exists."));
        } catch (DuplicatePhoneNumberException e) {
            throw new StorageTransactionLogicException(new Exception("E015: " + "We tried to add the userId "
                    + lm.getSuperTokenOrExternalUserId() + " to the tenantId " + tenantId
                    + " but another user with phoneNumber " + lm.phoneNumber + " already exists."));
        } catch (DuplicateThirdPartyUserException e) {
            throw new StorageTransactionLogicException(new Exception("E016: " + "We tried to add the userId "
                    + lm.getSuperTokenOrExternalUserId() + " to the tenantId " + tenantId
                    + " but another user with thirdPartyId " + lm.thirdPartyId + " and thirdPartyUserId "
                    + lm.thirdPartyUserId + " already exists."));
        } catch (FeatureNotEnabledException e) {
            throw new StorageTransactionLogicException(new Exception("E017: " + e.getMessage()));
        }
    }
}
/**
 * Creates primary users for every bulk import user that needs account linking (has an
 * explicit primary login method or more than one login method) and then links the
 * remaining login methods to them.
 *
 * Per-user failures from the primary-user creation batch are translated to error codes
 * E020-E022, keyed by superTokens user id; tenant/feature failures map to E018/E019.
 *
 * @throws StorageTransactionLogicException wrapping the translated batch errors or the
 *         original exception
 */
private static void createPrimaryUsersAndLinkAccounts(Main main,
        AppIdentifier appIdentifier, Storage storage,
        List<BulkImportUser> users)
        throws StorageTransactionLogicException, StorageQueryException, FeatureNotEnabledException,
        TenantOrAppNotFoundException {

    List<BulkImportUser> usersForAccountLinking = filterUsersInNeedOfAccountLinking(users);
    if (usersForAccountLinking.isEmpty()) {
        return;
    }

    AuthRecipe.CreatePrimaryUsersResultHolder resultHolder;
    try {
        resultHolder = AuthRecipe.createPrimaryUsersForBulkImport(main, appIdentifier, storage, usersForAccountLinking);
    } catch (StorageQueryException e) {
        if (e.getCause() instanceof BulkImportBatchInsertException) {
            // Translate each per-user failure into a stable, user-facing error message.
            Map<String, Exception> errorsByPosition = ((BulkImportBatchInsertException) e.getCause()).exceptionByUserId;
            for (String userid : errorsByPosition.keySet()) {
                Exception exception = errorsByPosition.get(userid);
                if (exception instanceof UnknownUserIdException) {
                    String message = "E020: We tried to create the primary user for the userId "
                            + userid
                            + " but it doesn't exist. This should not happen. Please contact support.";
                    errorsByPosition.put(userid, new Exception(message));
                } else if (exception instanceof RecipeUserIdAlreadyLinkedWithPrimaryUserIdException) {
                    String message = "E021: We tried to create the primary user for the userId "
                            + userid
                            + " but it is already linked with another primary user.";
                    errorsByPosition.put(userid, new Exception(message));
                } else if (exception instanceof AccountInfoAlreadyAssociatedWithAnotherPrimaryUserIdException) {
                    String message = "E022: We tried to create the primary user for the userId "
                            + userid
                            + " but the account info is already associated with another primary user.";
                    errorsByPosition.put(userid, new Exception(message));
                }
            }
            throw new StorageTransactionLogicException(
                    new BulkImportBatchInsertException("translated", errorsByPosition));
        }
        throw new StorageTransactionLogicException(e);
    } catch (TenantOrAppNotFoundException e) {
        throw new StorageTransactionLogicException(new Exception("E018: " + e.getMessage()));
    } catch (FeatureNotEnabledException e) {
        throw new StorageTransactionLogicException(new Exception("E019: " + e.getMessage()));
    }

    if (resultHolder != null && resultHolder.usersWithSameExtraData != null) {
        linkAccountsForMultipleUser(main, appIdentifier, storage, usersForAccountLinking, resultHolder.usersWithSameExtraData);
    }
}
/**
 * Returns the users that require account linking: those with more than one
 * login method, or with a login method explicitly marked primary.
 * A null or empty input yields an empty list.
 */
private static List<BulkImportUser> filterUsersInNeedOfAccountLinking(List<BulkImportUser> allUsers) {
    if (allUsers == null || allUsers.isEmpty()) {
        return Collections.emptyList();
    }
    List<BulkImportUser> needsLinking = new ArrayList<>();
    for (BulkImportUser bulkImportUser : allUsers) {
        boolean hasExplicitPrimary = false;
        for (LoginMethod loginMethod : bulkImportUser.loginMethods) {
            if (loginMethod.isPrimary) {
                hasExplicitPrimary = true;
                break;
            }
        }
        if (hasExplicitPrimary || bulkImportUser.loginMethods.size() > 1) {
            needsLinking.add(bulkImportUser);
        }
    }
    return needsLinking;
}
/**
 * Links the recipe users of each bulk import user to their primary user.
 *
 * Per-user failures are translated to error codes E025-E028, keyed by primary user id;
 * tenant/feature failures map to E023/E024.
 *
 * NOTE(review): the E027 message interpolates userId and recipeUID in the opposite
 * order to the sibling messages — confirm which ordering is intended.
 */
private static void linkAccountsForMultipleUser(Main main, AppIdentifier appIdentifier, Storage storage,
        List<BulkImportUser> users, List<AuthRecipeUserInfo> allUsersWithSameExtraData)
        throws StorageTransactionLogicException {
    try {
        AuthRecipe.linkMultipleAccountsForBulkImport(main, appIdentifier, storage,
                users, allUsersWithSameExtraData);
    } catch (TenantOrAppNotFoundException e) {
        throw new StorageTransactionLogicException(new Exception("E023: " + e.getMessage()));
    } catch (FeatureNotEnabledException e) {
        throw new StorageTransactionLogicException(new Exception("E024: " + e.getMessage()));
    } catch (StorageQueryException e) {
        if (e.getCause() instanceof BulkImportBatchInsertException) {
            // Map primary user ids back to their recipe user ids for readable messages.
            Map<String, String> recipeUserIdByPrimaryUserId = BulkImportUserUtils.collectRecipeIdsToPrimaryIds(users);
            Map<String, Exception> errorByPosition = ((BulkImportBatchInsertException) e.getCause()).exceptionByUserId;
            for (String userId : errorByPosition.keySet()) {
                Exception currentException = errorByPosition.get(userId);
                String recipeUID = recipeUserIdByPrimaryUserId.get(userId);
                if (currentException instanceof UnknownUserIdException) {
                    String message = "E025: We tried to link the userId " + recipeUID
                            + " to the primary userId " + userId
                            + " but it doesn't exist.";
                    errorByPosition.put(userId, new Exception(message));
                } else if (currentException instanceof InputUserIdIsNotAPrimaryUserException) {
                    String message = "E026: We tried to link the userId " + recipeUID
                            + " to the primary userId " + userId
                            + " but it is not a primary user.";
                    errorByPosition.put(userId, new Exception(message));
                } else if (currentException instanceof AccountInfoAlreadyAssociatedWithAnotherPrimaryUserIdException) {
                    String message = "E027: We tried to link the userId " + userId
                            + " to the primary userId " + recipeUID
                            + " but the account info is already associated with another primary user.";
                    errorByPosition.put(userId, new Exception(message));
                } else if (currentException instanceof RecipeUserIdAlreadyLinkedWithAnotherPrimaryUserIdException) {
                    String message = "E028: We tried to link the userId " + recipeUID
                            + " to the primary userId " + userId
                            + " but it is already linked with another primary user.";
                    errorByPosition.put(userId, new Exception(message));
                }
            }
            throw new StorageTransactionLogicException(
                    new BulkImportBatchInsertException("link accounts translated", errorByPosition));
        }
        throw new StorageTransactionLogicException(e);
    }
}
/**
 * Creates superTokensUserId -> externalUserId mappings for every user that supplies an
 * externalUserId. The mapping is keyed by the primary login method's superTokens user
 * id, and that login method's externalUserId field is updated in place as a side effect
 * (later import steps rely on it).
 *
 * @param storages proxy storages of every tenant of the app
 * @throws StorageTransactionLogicException wrapping a BulkImportBatchInsertException
 *         with translated messages (E030/E031/E032) keyed by superTokens user id
 */
public static void createMultipleUserIdMapping(AppIdentifier appIdentifier,
        List<BulkImportUser> users, Storage[] storages) throws StorageTransactionLogicException {
    Map<String, String> superTokensUserIdToExternalUserId = new HashMap<>();
    for (BulkImportUser user : users) {
        if (user.externalUserId != null) {
            LoginMethod primaryLoginMethod = BulkImportUserUtils.getPrimaryLoginMethod(user);
            superTokensUserIdToExternalUserId.put(primaryLoginMethod.superTokensUserId, user.externalUserId);
            primaryLoginMethod.externalUserId = user.externalUserId;
        }
    }
    if (superTokensUserIdToExternalUserId.isEmpty()) {
        return; // nothing to map
    }
    try {
        // The returned mapping results were previously captured into an unused local; dropped.
        UserIdMapping.createMultipleUserIdMappings(
                appIdentifier, storages,
                superTokensUserIdToExternalUserId,
                false, true);
    } catch (StorageQueryException e) {
        if (e.getCause() instanceof BulkImportBatchInsertException) {
            // Translate each per-user failure into a stable, user-facing error message.
            Map<String, Exception> errorsByPosition = ((BulkImportBatchInsertException) e.getCause()).exceptionByUserId;
            for (String userid : errorsByPosition.keySet()) {
                Exception exception = errorsByPosition.get(userid);
                if (exception instanceof ServletException) {
                    // BUGFIX: include the per-user exception's message; previously the outer
                    // wrapper's e.getMessage() was used, hiding the actual cause.
                    String message = "E030: " + exception.getMessage();
                    errorsByPosition.put(userid, new Exception(message));
                } else if (exception instanceof UserIdMappingAlreadyExistsException) {
                    String message = "E031: A user with externalId " + superTokensUserIdToExternalUserId.get(userid) + " already exists";
                    errorsByPosition.put(userid, new Exception(message));
                } else if (exception instanceof UnknownSuperTokensUserIdException) {
                    String message = "E032: We tried to create the externalUserId mapping for the superTokenUserId "
                            + userid
                            + " but it doesn't exist. This should not happen. Please contact support.";
                    errorsByPosition.put(userid, new Exception(message));
                }
            }
            throw new StorageTransactionLogicException(
                    new BulkImportBatchInsertException("translated", errorsByPosition));
        }
        throw new StorageTransactionLogicException(e);
    }
}
/**
 * Stores the metadata of every user that supplies some, keyed by the primary
 * login method's (external, if mapped) user id. No-op when no user has metadata.
 */
public static void createMultipleUserMetadata(AppIdentifier appIdentifier, Storage storage, List<BulkImportUser> users)
        throws StorageTransactionLogicException {
    Map<String, JsonObject> usersMetadata = new HashMap<>();
    for (BulkImportUser user : users) {
        if (user.userMetadata == null) {
            continue;
        }
        String metadataOwnerId = BulkImportUserUtils.getPrimaryLoginMethod(user).getSuperTokenOrExternalUserId();
        usersMetadata.put(metadataOwnerId, user.userMetadata);
    }
    if (usersMetadata.isEmpty()) {
        return;
    }
    try {
        UserMetadata.updateMultipleUsersMetadata(appIdentifier, storage, usersMetadata);
    } catch (TenantOrAppNotFoundException e) {
        throw new StorageTransactionLogicException(new Exception("E040: " + e.getMessage()));
    } catch (StorageQueryException e) {
        throw new StorageTransactionLogicException(e);
    }
}
/**
 * Assigns the users' declared roles per tenant. Roles must already exist;
 * an unknown role is translated to error E034, keyed by user id.
 *
 * @throws StorageTransactionLogicException wrapping the translated batch errors,
 *         E033 for an unknown tenant/app, or the original exception otherwise
 */
public static void createMultipleUserRoles(Main main, AppIdentifier appIdentifier, Storage storage,
        List<BulkImportUser> users) throws StorageTransactionLogicException {
    Map<TenantIdentifier, Map<String, List<String>>> rolesToUserByTenant = gatherRolesForUsersByTenant(appIdentifier, users);
    if (rolesToUserByTenant.isEmpty()) {
        return; // no roles to assign
    }
    try {
        UserRoles.addMultipleRolesToMultipleUsers(main, appIdentifier, storage, rolesToUserByTenant);
    } catch (TenantOrAppNotFoundException e) {
        throw new StorageTransactionLogicException(new Exception("E033: " + e.getMessage()));
    } catch (StorageTransactionLogicException e) {
        if (e.actualException instanceof BulkImportBatchInsertException) {
            // BUGFIX: cast e.actualException — the object the instanceof check above
            // inspected. The previous code cast e.getCause(), which may be null or a
            // different object, risking an NPE/ClassCastException here.
            Map<String, Exception> errorsByPosition =
                    ((BulkImportBatchInsertException) e.actualException).exceptionByUserId;
            for (String userid : errorsByPosition.keySet()) {
                Exception exception = errorsByPosition.get(userid);
                if (exception instanceof UnknownRoleException) {
                    String message = "E034: Role does not exist! You need to pre-create the role before " +
                            "assigning it to the user.";
                    errorsByPosition.put(userid, new Exception(message));
                }
            }
            throw new StorageTransactionLogicException(new BulkImportBatchInsertException("roles errors translated", errorsByPosition));
        } else {
            // Rethrow as-is instead of wrapping a StorageTransactionLogicException
            // inside another one (previous behaviour double-wrapped it).
            throw e;
        }
    }
}
/**
 * Builds a tenant -> (userId -> role names) view of the users' declared roles.
 * The external user id is preferred over the superTokens id when present.
 * Users without roles contribute nothing.
 */
private static Map<TenantIdentifier, Map<String, List<String>>> gatherRolesForUsersByTenant(AppIdentifier appIdentifier, List<BulkImportUser> users) {
    Map<TenantIdentifier, Map<String, List<String>>> rolesToUserByTenant = new HashMap<>();
    for (BulkImportUser user : users) {
        if (user.userRoles == null) {
            continue;
        }
        String effectiveUserId = user.externalUserId != null ? user.externalUserId : user.id;
        for (UserRole userRole : user.userRoles) {
            for (String tenantId : userRole.tenantIds) {
                TenantIdentifier tenantIdentifier = new TenantIdentifier(
                        appIdentifier.getConnectionUriDomain(), appIdentifier.getAppId(),
                        tenantId);
                rolesToUserByTenant
                        .computeIfAbsent(tenantIdentifier, tenant -> new HashMap<>())
                        .computeIfAbsent(effectiveUserId, id -> new ArrayList<>())
                        .add(userRole.role);
            }
        }
    }
    return rolesToUserByTenant;
}
/**
 * Marks the users' already-verified email addresses as verified in storage.
 *
 * Per-user failures are translated to error codes E043/E044, keyed by user id.
 *
 * @throws StorageTransactionLogicException wrapping the translated batch errors or
 *         the original exception
 */
public static void verifyMultipleEmailForAllLoginMethods(AppIdentifier appIdentifier, Storage storage,
        List<BulkImportUser> users)
        throws StorageTransactionLogicException {
    // Maps userId -> verified email address (see collectVerifiedEmailAddressesByUserIds).
    Map<String, String> emailToUserId = collectVerifiedEmailAddressesByUserIds(users);

    try {
        verifyCollectedEmailAddressesForUsers(appIdentifier, storage, emailToUserId);
    } catch (StorageQueryException | StorageTransactionLogicException e) {
        if (e.getCause() instanceof BulkImportBatchInsertException) {
            Map<String, Exception> errorsByPosition =
                    ((BulkImportBatchInsertException) e.getCause()).exceptionByUserId;
            for (String userid : errorsByPosition.keySet()) {
                Exception exception = errorsByPosition.get(userid);
                if (exception instanceof DuplicateEmailException) {
                    // BUGFIX: report the email address itself; the previous code
                    // concatenated errorsByPosition.get(userid) — the Exception object —
                    // into the message instead of the email.
                    String message =
                            "E043: Email " + emailToUserId.get(userid) + " is already verified for the user";
                    errorsByPosition.put(userid, new Exception(message));
                } else if (exception instanceof NullPointerException) {
                    String message = "E044: null email address was found for the userId " + userid +
                            " while verifying the email";
                    errorsByPosition.put(userid, new Exception(message));
                }
            }
            throw new StorageTransactionLogicException(
                    new BulkImportBatchInsertException("translated", errorsByPosition));
        }
        throw new StorageTransactionLogicException(e);
    }
}
/**
 * Flags all collected email addresses as verified inside a single transaction.
 * No-op when the map is empty.
 *
 * @param emailToUserId map of userId -> email; must contain only addresses that
 *                      should be marked verified
 */
private static void verifyCollectedEmailAddressesForUsers(AppIdentifier appIdentifier, Storage storage,
        Map<String, String> emailToUserId)
        throws StorageQueryException, StorageTransactionLogicException {
    if (!emailToUserId.isEmpty()) {
        EmailVerificationSQLStorage emailVerificationSQLStorage = StorageUtils
                .getEmailVerificationStorage(storage);
        emailVerificationSQLStorage.startTransaction(con -> {
            emailVerificationSQLStorage
                    .updateMultipleIsEmailVerified_Transaction(appIdentifier, con,
                            emailToUserId, true); //only the verified email addresses are expected to be in the map
            emailVerificationSQLStorage.commitTransaction(con);
            return null;
        });
    }
}
/**
 * Collects, per user id, the email address of every verified login method.
 * Passwordless login methods without an email are skipped. The returned map
 * keeps insertion order (userId -> email) for deterministic processing.
 */
@NotNull
private static Map<String, String> collectVerifiedEmailAddressesByUserIds(List<BulkImportUser> users) {
    Map<String, String> emailToUserId = new LinkedHashMap<>();
    for (BulkImportUser user : users) {
        for (LoginMethod lm : user.loginMethods) {
            // Passwordless login methods may legitimately have a null email (phone-only).
            boolean isNullPasswordlessEmail = lm.recipeId.equals("passwordless") && lm.email == null;
            if (!lm.isVerified || isNullPasswordlessEmail) {
                continue;
            }
            emailToUserId.put(lm.getSuperTokenOrExternalUserId(), lm.email);
        }
    }
    return emailToUserId;
}
/**
 * Creates every user's declared TOTP devices in one batch. Devices are stored as
 * already verified and owned by the (external, if mapped) primary user id.
 * Failures map to E036 (storage) and E037 (feature disabled).
 */
public static void createMultipleTotpDevices(Main main, AppIdentifier appIdentifier,
        Storage storage, List<BulkImportUser> users)
        throws StorageTransactionLogicException {
    List<TOTPDevice> devices = new ArrayList<>();
    for (BulkImportUser user : users) {
        if (user.totpDevices == null) {
            continue;
        }
        String deviceOwnerId = BulkImportUserUtils.getPrimaryLoginMethod(user).getSuperTokenOrExternalUserId();
        for (TotpDevice importedDevice : user.totpDevices) {
            devices.add(new TOTPDevice(deviceOwnerId,
                    importedDevice.deviceName, importedDevice.secretKey, importedDevice.period,
                    importedDevice.skew, true, System.currentTimeMillis()));
        }
    }
    if (devices.isEmpty()) {
        return;
    }
    try {
        Totp.createDevices(main, appIdentifier, storage, devices);
    } catch (StorageQueryException e) {
        throw new StorageTransactionLogicException(new Exception("E036: " + e.getMessage()));
    } catch (FeatureNotEnabledException e) {
        throw new StorageTransactionLogicException(new Exception("E037: " + e.getMessage()));
    }
}
/**
 * Returns (creating and caching on first use) a bulk-import proxy storage for the
 * tenant's user pool. Proxy storages are cached per user pool id in
 * userPoolToStorageMap so all tenants sharing a pool reuse one connection; they are
 * released via closeAllProxyStorages().
 *
 * @throws TenantOrAppNotFoundException when the tenant has no normalised config entry
 */
private static synchronized Storage getBulkImportProxyStorage(Main main, TenantIdentifier tenantIdentifier)
        throws InvalidConfigException, IOException, TenantOrAppNotFoundException, DbInitException {
    String userPoolId = StorageLayer.getStorage(tenantIdentifier, main).getUserPoolId();
    if (userPoolToStorageMap.containsKey(userPoolId)) {
        return userPoolToStorageMap.get(userPoolId);
    }

    TenantConfig[] allTenants = Multitenancy.getAllTenants(main);

    Map<ResourceDistributor.KeyClass, JsonObject> normalisedConfigs = Config.getNormalisedConfigsForAllTenants(
            allTenants,
            Config.getBaseConfigAsJsonObject(main));

    // Find this tenant's normalised config and spin up a proxy storage with it.
    for (ResourceDistributor.KeyClass key : normalisedConfigs.keySet()) {
        if (key.getTenantIdentifier().equals(tenantIdentifier)) {
            SQLStorage bulkImportProxyStorage = (SQLStorage) StorageLayer.getNewBulkImportProxyStorageInstance(main,
                    normalisedConfigs.get(key), tenantIdentifier, true);

            userPoolToStorageMap.put(userPoolId, bulkImportProxyStorage);
            bulkImportProxyStorage.initStorage(false, new ArrayList<>());
            return bulkImportProxyStorage;
        }
    }
    throw new TenantOrAppNotFoundException(tenantIdentifier);
}
// Builds the array of bulk-import proxy storages for every tenant of the app,
// wrapping checked failures into StorageTransactionLogicException using the
// file's E039..E042 error-code convention.
private static Storage[] getAllProxyStoragesForApp(Main main, AppIdentifier appIdentifier)
        throws StorageTransactionLogicException {
    try {
        List<Storage> allProxyStorages = new ArrayList<>();
        TenantConfig[] tenantConfigs = Multitenancy.getAllTenantsForApp(appIdentifier, main);
        for (TenantConfig tenantConfig : tenantConfigs) {
            allProxyStorages.add(getBulkImportProxyStorage(main, tenantConfig.tenantIdentifier));
        }
        return allProxyStorages.toArray(new Storage[0]);
    } catch (TenantOrAppNotFoundException e) {
        // NOTE(review): re-wrapping keeps only the message; the original stack
        // trace is lost. Consider passing `e` as the cause if the wrapped
        // exception types support it.
        throw new StorageTransactionLogicException(new Exception("E039: " + e.getMessage()));
    } catch (InvalidConfigException e) {
        throw new StorageTransactionLogicException(new InvalidConfigException("E040: " + e.getMessage()));
    } catch (DbInitException e) {
        throw new StorageTransactionLogicException(new DbInitException("E041: " + e.getMessage()));
    } catch (IOException e) {
        throw new StorageTransactionLogicException(new IOException("E042: " + e.getMessage()));
    }
}
// Closes every cached proxy storage and empties the cache.
// NOTE(review): if closing one storage throws, the remaining storages stay open
// and the map is not cleared — confirm whether partial cleanup is acceptable here.
private static void closeAllProxyStorages() throws StorageQueryException {
    for (SQLStorage storage : userPoolToStorageMap.values()) {
        storage.closeConnectionForBulkImportProxyStorage();
        storage.close();
    }
    userPoolToStorageMap.clear();
}
}

View File

@ -1,53 +0,0 @@
/*
* Copyright (c) 2024, VRAI Labs and/or its affiliates. All rights reserved.
*
* This software is licensed under the Apache License, Version 2.0 (the
* "License") as published by the Apache Software Foundation.
*
* You may not use this file except in compliance with the License. You may
* obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package io.supertokens.bulkimport;
import java.util.Base64;
/**
 * Opaque pagination cursor for listing bulk-import users. The token is the
 * base64 encoding of {@code "<bulkImportUserId>;<createdAt>"}.
 */
public class BulkImportUserPaginationToken {
    public final String bulkImportUserId;
    public final long createdAt;

    public BulkImportUserPaginationToken(String bulkImportUserId, long createdAt) {
        this.bulkImportUserId = bulkImportUserId;
        this.createdAt = createdAt;
    }

    /**
     * Decodes a token previously produced by {@link #generateToken()}.
     *
     * @throws InvalidTokenException if the token is not valid base64, does not
     *         contain exactly one ';' separator, or the timestamp is not a long
     */
    public static BulkImportUserPaginationToken extractTokenInfo(String token) throws InvalidTokenException {
        try {
            String decoded = new String(Base64.getDecoder().decode(token));
            String[] parts = decoded.split(";");
            if (parts.length != 2) {
                throw new InvalidTokenException();
            }
            return new BulkImportUserPaginationToken(parts[0], Long.parseLong(parts[1]));
        } catch (Exception e) {
            // Any decode/parse failure means the caller supplied a bad token.
            throw new InvalidTokenException();
        }
    }

    /** Encodes this cursor as base64("<bulkImportUserId>;<createdAt>"). */
    public String generateToken() {
        String raw = this.bulkImportUserId + ";" + this.createdAt;
        return Base64.getEncoder().encodeToString(raw.getBytes());
    }

    public static class InvalidTokenException extends Exception {
        private static final long serialVersionUID = 6289026174830695478L;
    }
}

View File

@ -1,654 +0,0 @@
/*
* Copyright (c) 2024, VRAI Labs and/or its affiliates. All rights reserved.
*
* This software is licensed under the Apache License, Version 2.0 (the
* "License") as published by the Apache Software Foundation.
*
* You may not use this file except in compliance with the License. You may
* obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package io.supertokens.bulkimport;
import com.google.gson.JsonArray;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import io.supertokens.Main;
import io.supertokens.bulkimport.exceptions.InvalidBulkImportDataException;
import io.supertokens.config.CoreConfig;
import io.supertokens.emailpassword.PasswordHashingUtils;
import io.supertokens.emailpassword.exceptions.UnsupportedPasswordHashingFormatException;
import io.supertokens.featureflag.EE_FEATURES;
import io.supertokens.featureflag.FeatureFlag;
import io.supertokens.multitenancy.Multitenancy;
import io.supertokens.pluginInterface.Storage;
import io.supertokens.pluginInterface.bulkimport.BulkImportUser;
import io.supertokens.pluginInterface.bulkimport.BulkImportUser.LoginMethod;
import io.supertokens.pluginInterface.bulkimport.BulkImportUser.TotpDevice;
import io.supertokens.pluginInterface.bulkimport.BulkImportUser.UserRole;
import io.supertokens.pluginInterface.exceptions.StorageQueryException;
import io.supertokens.pluginInterface.multitenancy.AppIdentifier;
import io.supertokens.pluginInterface.multitenancy.TenantConfig;
import io.supertokens.pluginInterface.multitenancy.TenantIdentifier;
import io.supertokens.pluginInterface.multitenancy.exceptions.TenantOrAppNotFoundException;
import io.supertokens.storageLayer.StorageLayer;
import io.supertokens.utils.JsonValidatorUtils.ValueType;
import io.supertokens.utils.Utils;
import java.util.*;
import static io.supertokens.utils.JsonValidatorUtils.parseAndValidateFieldType;
import static io.supertokens.utils.JsonValidatorUtils.validateJsonFieldType;
public class BulkImportUserUtils {
// All role names that exist in the app; used to validate per-user role entries.
private String[] allUserRoles;
// externalUserIds seen so far in this import run; used to reject duplicates.
private Set<String> allExternalUserIds;
// Creates a parser/validator for one bulk-import run.
public BulkImportUserUtils(String[] allUserRoles) {
    this.allUserRoles = allUserRoles;
    this.allExternalUserIds = new HashSet<>();
}
/**
 * Parses and validates one raw bulk-import JSON entry into a BulkImportUser.
 * Validation problems are accumulated into one list so the caller receives
 * every error at once.
 *
 * @param idMode whether superTokensUserIds are read from the JSON (READ_STORED)
 *               or freshly generated (GENERATE)
 * @throws InvalidBulkImportDataException if any field failed validation
 */
public BulkImportUser createBulkImportUserFromJSON(Main main, AppIdentifier appIdentifier, JsonObject userData, IDMode idMode)
        throws InvalidBulkImportDataException, StorageQueryException, TenantOrAppNotFoundException {
    List<String> errors = new ArrayList<>();
    String externalUserId = parseAndValidateFieldType(userData, "externalUserId", ValueType.STRING, false,
            String.class,
            errors, ".");
    JsonObject userMetadata = parseAndValidateFieldType(userData, "userMetadata", ValueType.OBJECT, false,
            JsonObject.class, errors, ".");
    List<UserRole> userRoles = getParsedUserRoles(main, appIdentifier, userData, errors);
    List<TotpDevice> totpDevices = getParsedTotpDevices(main, appIdentifier, userData, errors);
    List<LoginMethod> loginMethods = getParsedLoginMethods(main, appIdentifier, userData, errors, idMode);
    externalUserId = validateAndNormaliseExternalUserId(externalUserId, errors);
    validateTenantIdsForRoleAndLoginMethods(main, appIdentifier, userRoles, loginMethods, errors);
    if (!errors.isEmpty()) {
        throw new InvalidBulkImportDataException(errors);
    }
    // The user's id is the superTokensUserId of its primary login method.
    String id = getPrimaryLoginMethod(loginMethods).superTokensUserId;
    return new BulkImportUser(id, externalUserId, userMetadata, userRoles, totpDevices, loginMethods);
}
// Parses the optional "userRoles" array. Returns null when the field is absent;
// only entries whose role and tenantIds both validated are kept.
private List<UserRole> getParsedUserRoles(Main main, AppIdentifier appIdentifier, JsonObject userData,
        List<String> errors) throws StorageQueryException, TenantOrAppNotFoundException {
    JsonArray jsonUserRoles = parseAndValidateFieldType(userData, "userRoles", ValueType.ARRAY_OF_OBJECT, false,
            JsonArray.class, errors, ".");
    if (jsonUserRoles == null) {
        return null;
    }
    List<UserRole> userRoles = new ArrayList<>();
    for (JsonElement jsonUserRoleEl : jsonUserRoles) {
        JsonObject jsonUserRole = jsonUserRoleEl.getAsJsonObject();
        String role = parseAndValidateFieldType(jsonUserRole, "role", ValueType.STRING, true, String.class, errors,
                " for a user role.");
        JsonArray jsonTenantIds = parseAndValidateFieldType(jsonUserRole, "tenantIds", ValueType.ARRAY_OF_STRING,
                true, JsonArray.class, errors, " for a user role.");
        role = validateAndNormaliseUserRole(role, errors);
        List<String> normalisedTenantIds = validateAndNormaliseTenantIds(main, appIdentifier, jsonTenantIds, errors,
                " for a user role.");
        // Skip the entry if either the role or its tenantIds failed validation.
        if (role != null && normalisedTenantIds != null) {
            userRoles.add(new UserRole(role, normalisedTenantIds));
        }
    }
    return userRoles;
}
// Parses the optional "totpDevices" array. Returns null when the field is
// absent or the MFA feature is disabled; otherwise returns the devices whose
// required fields validated.
private List<TotpDevice> getParsedTotpDevices(Main main, AppIdentifier appIdentifier, JsonObject userData,
        List<String> errors) throws StorageQueryException, TenantOrAppNotFoundException {
    JsonArray jsonTotpDevices = parseAndValidateFieldType(userData, "totpDevices", ValueType.ARRAY_OF_OBJECT, false,
            JsonArray.class, errors, ".");
    if (jsonTotpDevices == null) {
        return null;
    }
    // Importing TOTP devices requires the MFA enterprise feature.
    if (Arrays.stream(FeatureFlag.getInstance(main, appIdentifier).getEnabledFeatures())
            .noneMatch(t -> t == EE_FEATURES.MFA)) {
        errors.add("MFA must be enabled to import totp devices.");
        return null;
    }
    List<TotpDevice> totpDevices = new ArrayList<>();
    for (JsonElement jsonTotpDeviceEl : jsonTotpDevices) {
        JsonObject jsonTotpDevice = jsonTotpDeviceEl.getAsJsonObject();
        String secretKey = parseAndValidateFieldType(jsonTotpDevice, "secretKey", ValueType.STRING, true,
                String.class, errors, " for a totp device.");
        Integer period = parseAndValidateFieldType(jsonTotpDevice, "period", ValueType.INTEGER, false,
                Integer.class, errors, " for a totp device.");
        Integer skew = parseAndValidateFieldType(jsonTotpDevice, "skew", ValueType.INTEGER, false, Integer.class,
                errors, " for a totp device.");
        String deviceName = parseAndValidateFieldType(jsonTotpDevice, "deviceName", ValueType.STRING, false,
                String.class, errors, " for a totp device.");
        secretKey = validateAndNormaliseTotpSecretKey(secretKey, errors);
        period = validateAndNormaliseTotpPeriod(period, errors);
        skew = validateAndNormaliseTotpSkew(skew, errors);
        deviceName = validateAndNormaliseTotpDeviceName(deviceName, errors);
        // Keep the device only if all required fields validated (deviceName may be null).
        if (secretKey != null && period != null && skew != null) {
            totpDevices.add(new TotpDevice(secretKey, period, skew, deviceName));
        }
    }
    return totpDevices;
}
// Parses the required "loginMethods" array into LoginMethod objects. Supports
// the emailpassword, thirdparty and passwordless recipes. Importing more than
// one loginMethod requires account linking to be enabled.
private List<LoginMethod> getParsedLoginMethods(Main main, AppIdentifier appIdentifier, JsonObject userData,
        List<String> errors, IDMode idMode)
        throws StorageQueryException, TenantOrAppNotFoundException {
    JsonArray jsonLoginMethods = parseAndValidateFieldType(userData, "loginMethods", ValueType.ARRAY_OF_OBJECT,
            true, JsonArray.class, errors, ".");
    if (jsonLoginMethods == null) {
        return new ArrayList<>();
    }
    if (jsonLoginMethods.size() == 0) {
        errors.add("At least one loginMethod is required.");
        return new ArrayList<>();
    }
    if (jsonLoginMethods.size() > 1) {
        if (!Utils.isAccountLinkingEnabled(main, appIdentifier)) {
            errors.add("Account linking must be enabled to import multiple loginMethods.");
        }
    }
    // At most one loginMethod may carry isPrimary == true.
    validateAndNormaliseIsPrimaryField(jsonLoginMethods, errors);
    List<LoginMethod> loginMethods = new ArrayList<>();
    for (JsonElement jsonLoginMethod : jsonLoginMethods) {
        JsonObject jsonLoginMethodObj = jsonLoginMethod.getAsJsonObject();
        String recipeId = parseAndValidateFieldType(jsonLoginMethodObj, "recipeId", ValueType.STRING, true,
                String.class, errors, " for a loginMethod.");
        JsonArray tenantIds = parseAndValidateFieldType(jsonLoginMethodObj, "tenantIds", ValueType.ARRAY_OF_STRING,
                false, JsonArray.class, errors, " for a loginMethod.");
        Boolean isVerified = parseAndValidateFieldType(jsonLoginMethodObj, "isVerified", ValueType.BOOLEAN, false,
                Boolean.class, errors, " for a loginMethod.");
        Boolean isPrimary = parseAndValidateFieldType(jsonLoginMethodObj, "isPrimary", ValueType.BOOLEAN, false,
                Boolean.class, errors, " for a loginMethod.");
        Long timeJoined = parseAndValidateFieldType(jsonLoginMethodObj, "timeJoinedInMSSinceEpoch", ValueType.LONG,
                false, Long.class, errors, " for a loginMethod");
        recipeId = validateAndNormaliseRecipeId(recipeId, errors);
        List<String> normalisedTenantIds = validateAndNormaliseTenantIds(main, appIdentifier, tenantIds, errors,
                " for " + recipeId + " recipe.");
        isPrimary = validateAndNormaliseIsPrimary(isPrimary);
        isVerified = validateAndNormaliseIsVerified(isVerified);
        long timeJoinedInMSSinceEpoch = validateAndNormaliseTimeJoined(timeJoined, errors);
        // The id is read from the payload (READ_STORED) or freshly generated (GENERATE).
        String supertokensUserId = switch (idMode) {
            case READ_STORED -> parseAndValidateFieldType(jsonLoginMethodObj, "superTokensUserId", ValueType.STRING,
                    true, String.class, errors, " for a loginMethod");
            case GENERATE -> Utils.getUUID();
        };
        if ("emailpassword".equals(recipeId)) {
            // emailpassword requires an email plus either (passwordHash, hashingAlgorithm)
            // or a plainTextPassword.
            String email = parseAndValidateFieldType(jsonLoginMethodObj, "email", ValueType.STRING, true,
                    String.class, errors, " for an emailpassword recipe.");
            String passwordHash = parseAndValidateFieldType(jsonLoginMethodObj, "passwordHash", ValueType.STRING,
                    false, String.class, errors, " for an emailpassword recipe.");
            String hashingAlgorithm = parseAndValidateFieldType(jsonLoginMethodObj, "hashingAlgorithm",
                    ValueType.STRING, false, String.class, errors, " for an emailpassword recipe.");
            String plainTextPassword = parseAndValidateFieldType(jsonLoginMethodObj, "plainTextPassword",
                    ValueType.STRING, false, String.class, errors, " for an emailpassword recipe.");
            if ((passwordHash == null || hashingAlgorithm == null) && plainTextPassword == null) {
                errors.add("Either (passwordHash, hashingAlgorithm) or plainTextPassword is required for an emailpassword recipe.");
            }
            email = validateAndNormaliseEmail(email, errors);
            CoreConfig.PASSWORD_HASHING_ALG normalisedHashingAlgorithm = validateAndNormaliseHashingAlgorithm(
                    hashingAlgorithm, errors);
            hashingAlgorithm = normalisedHashingAlgorithm != null ? normalisedHashingAlgorithm.toString()
                    : hashingAlgorithm;
            passwordHash = validateAndNormalisePasswordHash(main, appIdentifier, normalisedHashingAlgorithm,
                    passwordHash, errors);
            loginMethods.add(new LoginMethod(normalisedTenantIds, recipeId, isVerified, isPrimary,
                    timeJoinedInMSSinceEpoch, email, passwordHash, hashingAlgorithm, plainTextPassword,
                    null, null, null, supertokensUserId));
        } else if ("thirdparty".equals(recipeId)) {
            // thirdparty requires email, thirdPartyId and thirdPartyUserId.
            String email = parseAndValidateFieldType(jsonLoginMethodObj, "email", ValueType.STRING, true,
                    String.class, errors, " for a thirdparty recipe.");
            String thirdPartyId = parseAndValidateFieldType(jsonLoginMethodObj, "thirdPartyId", ValueType.STRING,
                    true, String.class, errors, " for a thirdparty recipe.");
            String thirdPartyUserId = parseAndValidateFieldType(jsonLoginMethodObj, "thirdPartyUserId",
                    ValueType.STRING, true, String.class, errors, " for a thirdparty recipe.");
            email = validateAndNormaliseEmail(email, errors);
            thirdPartyId = validateAndNormaliseThirdPartyId(thirdPartyId, errors);
            thirdPartyUserId = validateAndNormaliseThirdPartyUserId(thirdPartyUserId, errors);
            loginMethods.add(new LoginMethod(normalisedTenantIds, recipeId, isVerified, isPrimary,
                    timeJoinedInMSSinceEpoch, email, null, null, null,
                    thirdPartyId, thirdPartyUserId, null, supertokensUserId));
        } else if ("passwordless".equals(recipeId)) {
            // passwordless requires at least one of email / phoneNumber.
            String email = parseAndValidateFieldType(jsonLoginMethodObj, "email", ValueType.STRING, false,
                    String.class, errors, " for a passwordless recipe.");
            String phoneNumber = parseAndValidateFieldType(jsonLoginMethodObj, "phoneNumber", ValueType.STRING,
                    false, String.class, errors, " for a passwordless recipe.");
            email = validateAndNormaliseEmail(email, errors);
            phoneNumber = validateAndNormalisePhoneNumber(phoneNumber, errors);
            if (email == null && phoneNumber == null) {
                errors.add("Either email or phoneNumber is required for a passwordless recipe.");
            }
            loginMethods.add(new LoginMethod(normalisedTenantIds, recipeId, isVerified, isPrimary,
                    timeJoinedInMSSinceEpoch, email, null, null, null,
                    null, null, phoneNumber, supertokensUserId));
        }
    }
    return loginMethods;
}
// Validates length and per-batch uniqueness of externalUserId; returns the trimmed id.
private String validateAndNormaliseExternalUserId(String externalUserId, List<String> errors) {
    if (externalUserId == null) {
        return null;
    }
    if (externalUserId.length() > 128) {
        errors.add("externalUserId " + externalUserId + " is too long. Max length is 128.");
    }
    // Set.add returns false when the id was already registered by another user in this run.
    boolean isFirstOccurrence = allExternalUserIds.add(externalUserId);
    if (!isFirstOccurrence) {
        errors.add("externalUserId " + externalUserId + " is not unique. It is already used by another user.");
    }
    // We just trim the externalUserId as per the UpdateExternalUserIdInfoAPI.java
    return externalUserId.trim();
}
// Validates a role name against the app's known roles; returns the trimmed role,
// or null when the field was missing.
private String validateAndNormaliseUserRole(String role, List<String> errors) {
    if (role == null) {
        // "role" is a required field; parseAndValidateFieldType has already recorded
        // the missing-field error, so bail out instead of throwing an NPE below.
        // The caller skips entries whose role is null.
        return null;
    }
    if (role.length() > 255) {
        errors.add("role " + role + " is too long. Max length is 255.");
    }
    // We just trim the role as per the CreateRoleAPI.java
    String normalisedRole = role.trim();
    if (!Arrays.asList(allUserRoles).contains(normalisedRole)) {
        errors.add("Role " + normalisedRole + " does not exist.");
    }
    return normalisedRole;
}
// Checks the TOTP secret key length; the key itself is returned unchanged.
private String validateAndNormaliseTotpSecretKey(String secretKey, List<String> errors) {
    if (secretKey == null) {
        return null;
    }
    if (secretKey.length() > 256) {
        errors.add("TOTP secretKey " + secretKey + " is too long. Max length is 256.");
    }
    // No normalisation is applied, mirroring ImportTotpDeviceAPI.java.
    return secretKey;
}
// Normalises the TOTP period: null defaults to 30; values below 1 are rejected.
private Integer validateAndNormaliseTotpPeriod(Integer period, List<String> errors) {
    if (period == null) {
        return 30; // default period
    }
    if (period >= 1) {
        return period;
    }
    errors.add("period should be > 0 for a totp device.");
    return null;
}
// Normalises the TOTP skew: null defaults to 1; negative values are rejected.
private Integer validateAndNormaliseTotpSkew(Integer skew, List<String> errors) {
    if (skew == null) {
        return 1; // default skew
    }
    if (skew >= 0) {
        return skew;
    }
    errors.add("skew should be >= 0 for a totp device.");
    return null;
}
// Validates device-name length and returns the trimmed name (null stays null).
private String validateAndNormaliseTotpDeviceName(String deviceName, List<String> errors) {
    if (deviceName == null) {
        return null;
    }
    String trimmedName = deviceName.trim();
    if (deviceName.length() > 256) {
        errors.add("TOTP deviceName " + deviceName + " is too long. Max length is 256.");
    }
    // Trimmed to match ImportTotpDeviceAPI.java.
    return trimmedName;
}
// Ensures at most one loginMethod in the array has isPrimary == true; records
// one error for every extra primary flag found.
private void validateAndNormaliseIsPrimaryField(JsonArray jsonLoginMethods, List<String> errors) {
    int primaryCount = 0;
    for (JsonElement jsonLoginMethod : jsonLoginMethods) {
        JsonObject loginMethodObj = jsonLoginMethod.getAsJsonObject();
        if (!validateJsonFieldType(loginMethodObj, "isPrimary", ValueType.BOOLEAN)) {
            continue;
        }
        if (loginMethodObj.get("isPrimary").getAsBoolean()) {
            primaryCount++;
            if (primaryCount > 1) {
                errors.add("No two loginMethods can have isPrimary as true.");
            }
        }
    }
}
// Validates recipeId against the supported recipes; no normalisation is applied
// because the value is used exactly as it arrives in the request.
private String validateAndNormaliseRecipeId(String recipeId, List<String> errors) {
    if (recipeId == null) {
        return null;
    }
    switch (recipeId) {
        case "emailpassword":
        case "thirdparty":
        case "passwordless":
            break;
        default:
            errors.add("Invalid recipeId for loginMethod. Pass one of emailpassword, thirdparty or, passwordless!");
    }
    return recipeId;
}
// Normalises a "tenantIds" JSON array. A missing array defaults to the "public"
// tenant; entries that fail validation are dropped after an error is recorded.
private List<String> validateAndNormaliseTenantIds(Main main, AppIdentifier appIdentifier,
        JsonArray tenantIds, List<String> errors, String errorSuffix)
        throws StorageQueryException, TenantOrAppNotFoundException {
    if (tenantIds == null) {
        return List.of(TenantIdentifier.DEFAULT_TENANT_ID); // Default to DEFAULT_TENANT_ID ("public")
    }
    List<String> normalisedTenantIds = new ArrayList<>();
    for (JsonElement tenantIdEl : tenantIds) {
        String tenantId = tenantIdEl.getAsString();
        tenantId = validateAndNormaliseTenantId(main, appIdentifier, tenantId, errors, errorSuffix);
        if (tenantId != null) {
            normalisedTenantIds.add(tenantId);
        }
    }
    return normalisedTenantIds;
}
// Validates a single tenantId. The default "public" tenant (and null) passes
// through unchanged; any other tenant requires the MULTI_TENANCY feature and
// must exist in the app.
private String validateAndNormaliseTenantId(Main main, AppIdentifier appIdentifier, String tenantId,
        List<String> errors, String errorSuffix)
        throws StorageQueryException, TenantOrAppNotFoundException {
    if (tenantId == null || tenantId.equals(TenantIdentifier.DEFAULT_TENANT_ID)) {
        return tenantId;
    }
    if (Arrays.stream(FeatureFlag.getInstance(main, appIdentifier).getEnabledFeatures())
            .noneMatch(t -> t == EE_FEATURES.MULTI_TENANCY)) {
        errors.add("Multitenancy must be enabled before importing users to a different tenant.");
        return null;
    }
    // We make the tenantId lowercase while parsing from the request in WebserverAPI.java
    String normalisedTenantId = tenantId.trim().toLowerCase();
    TenantConfig[] allTenantConfigs = Multitenancy.getAllTenantsForApp(appIdentifier, main);
    Set<String> validTenantIds = new HashSet<>();
    Arrays.stream(allTenantConfigs)
            .forEach(tenantConfig -> validTenantIds.add(tenantConfig.tenantIdentifier.getTenantId()));
    if (!validTenantIds.contains(normalisedTenantId)) {
        errors.add("Invalid tenantId: " + tenantId + errorSuffix);
        return null;
    }
    return normalisedTenantId;
}
// Defaults a missing isPrimary flag to false.
private Boolean validateAndNormaliseIsPrimary(Boolean isPrimary) {
    return Boolean.TRUE.equals(isPrimary);
}
// Defaults a missing isVerified flag to false.
private Boolean validateAndNormaliseIsVerified(Boolean isVerified) {
    return Boolean.TRUE.equals(isVerified);
}
// Normalises timeJoined: null defaults to "now"; future or negative values are flagged.
private long validateAndNormaliseTimeJoined(Long timeJoined, List<String> errors) {
    if (timeJoined == null) {
        // Default to the current time when the field was omitted.
        return System.currentTimeMillis();
    }
    long value = timeJoined;
    if (value > System.currentTimeMillis()) {
        errors.add("timeJoined cannot be in future for a loginMethod.");
    }
    if (value < 0) {
        errors.add("timeJoined cannot be < 0 for a loginMethod.");
    }
    return value;
}
// Validates email length and normalises it the same way SignUpAPI.java does.
private String validateAndNormaliseEmail(String email, List<String> errors) {
    if (email == null) {
        return null;
    }
    if (email.length() > 255) {
        // The enforced limit is 255 (the condition above); the message used to
        // claim 256, contradicting the check and the file's other validators.
        errors.add("email " + email + " is too long. Max length is 255.");
    }
    // We normalise the email as per the SignUpAPI.java
    return Utils.normaliseEmail(email);
}
// Maps the free-form hashingAlgorithm string onto the PASSWORD_HASHING_ALG enum
// (trimmed, upper-cased); records an error for unknown algorithms.
private CoreConfig.PASSWORD_HASHING_ALG validateAndNormaliseHashingAlgorithm(String hashingAlgorithm,
        List<String> errors) {
    if (hashingAlgorithm == null) {
        return null;
    }
    try {
        // We trim the hashingAlgorithm and make it uppercase as per the ImportUserWithPasswordHashAPI.java
        return CoreConfig.PASSWORD_HASHING_ALG.valueOf(hashingAlgorithm.trim().toUpperCase());
    } catch (IllegalArgumentException e) {
        errors.add(
                "Invalid hashingAlgorithm for emailpassword recipe. Pass one of bcrypt, argon2 or, firebase_scrypt!");
        return null;
    }
}
// Validates the passwordHash format for the resolved algorithm. Skipped (hash
// returned as-is) when either the algorithm or the hash is absent.
private String validateAndNormalisePasswordHash(Main main, AppIdentifier appIdentifier,
        CoreConfig.PASSWORD_HASHING_ALG hashingAlgorithm, String passwordHash, List<String> errors)
        throws TenantOrAppNotFoundException {
    if (hashingAlgorithm == null || passwordHash == null) {
        return passwordHash;
    }
    if (passwordHash.length() > 256) {
        errors.add("passwordHash is too long. Max length is 256.");
    }
    // We trim the passwordHash and validate it as per ImportUserWithPasswordHashAPI.java
    passwordHash = passwordHash.trim();
    try {
        PasswordHashingUtils.assertSuperTokensSupportInputPasswordHashFormat(appIdentifier, main, passwordHash,
                hashingAlgorithm);
    } catch (UnsupportedPasswordHashingFormatException e) {
        // Surface the format problem as a validation error rather than failing the whole user.
        errors.add(e.getMessage());
    }
    return passwordHash;
}
// Validates thirdPartyId length; no normalisation, matching SignInUpAPI.java.
private String validateAndNormaliseThirdPartyId(String thirdPartyId, List<String> errors) {
    if (thirdPartyId != null && thirdPartyId.length() > 28) {
        errors.add("thirdPartyId " + thirdPartyId + " is too long. Max length is 28.");
    }
    return thirdPartyId;
}
// Validates thirdPartyUserId length; no normalisation, matching SignInUpAPI.java.
private String validateAndNormaliseThirdPartyUserId(String thirdPartyUserId, List<String> errors) {
    if (thirdPartyUserId != null && thirdPartyUserId.length() > 256) {
        errors.add("thirdPartyUserId " + thirdPartyUserId + " is too long. Max length is 256.");
    }
    return thirdPartyUserId;
}
// Validates phone-number length and normalises it via Utils, as CreateCodeAPI.java does.
private String validateAndNormalisePhoneNumber(String phoneNumber, List<String> errors) {
    if (phoneNumber == null) {
        return null;
    }
    if (phoneNumber.length() > 256) {
        errors.add("phoneNumber " + phoneNumber + " is too long. Max length is 256.");
    }
    // We normalise the phoneNumber as per the CreateCodeAPI.java
    return Utils.normalizeIfPhoneNumber(phoneNumber);
}
// Cross-field validation: (1) every non-default tenantId used by a userRole must
// also appear in some loginMethod; (2) all tenants referenced by loginMethods
// must live in the same user pool (same database).
private void validateTenantIdsForRoleAndLoginMethods(Main main, AppIdentifier appIdentifier,
        List<UserRole> userRoles, List<LoginMethod> loginMethods, List<String> errors)
        throws TenantOrAppNotFoundException {
    if (loginMethods == null) {
        return;
    }
    // First validate that tenantIds provided for userRoles also exist in the loginMethods
    if (userRoles != null) {
        for (UserRole userRole : userRoles) {
            for (String tenantId : userRole.tenantIds) {
                if (!tenantId.equals(TenantIdentifier.DEFAULT_TENANT_ID) && loginMethods.stream()
                        .noneMatch(loginMethod -> loginMethod.tenantIds.contains(tenantId))) {
                    errors.add("TenantId " + tenantId + " for a user role does not exist in loginMethods.");
                }
            }
        }
    }
    // Now validate that all the tenants share the same storage
    String commonTenantUserPoolId = null;
    for (LoginMethod loginMethod : loginMethods) {
        for (String tenantId : loginMethod.tenantIds) {
            TenantIdentifier tenantIdentifier = new TenantIdentifier(appIdentifier.getConnectionUriDomain(),
                    appIdentifier.getAppId(), tenantId);
            Storage storage = StorageLayer.getStorage(tenantIdentifier, main);
            String tenantUserPoolId = storage.getUserPoolId();
            if (commonTenantUserPoolId == null) {
                // First tenant seen establishes the expected pool.
                commonTenantUserPoolId = tenantUserPoolId;
            } else if (!commonTenantUserPoolId.equals(tenantUserPoolId)) {
                errors.add("All tenants for a user must share the same database for " + loginMethod.recipeId
                        + " recipe.");
                break; // Break to avoid adding the same error multiple times for the same loginMethod
            }
        }
    }
}
// Convenience overload: resolves the primary loginMethod of a parsed user.
public static BulkImportUser.LoginMethod getPrimaryLoginMethod(BulkImportUser user) {
    return getPrimaryLoginMethod(user.loginMethods);
}
/**
 * Returns the loginMethod flagged isPrimary; if none is flagged, returns the
 * loginMethod with the smallest timeJoined (the oldest one).
 */
public static BulkImportUser.LoginMethod getPrimaryLoginMethod(List<LoginMethod> loginMethods) {
    for (BulkImportUser.LoginMethod lm : loginMethods) {
        if (lm.isPrimary) {
            return lm;
        }
    }
    BulkImportUser.LoginMethod oldest = loginMethods.get(0);
    for (BulkImportUser.LoginMethod candidate : loginMethods) {
        if (candidate.timeJoinedInMSSinceEpoch < oldest.timeJoinedInMSSinceEpoch) {
            oldest = candidate;
        }
    }
    return oldest;
}
// Controls where superTokensUserIds come from during parsing:
// GENERATE creates fresh UUIDs; READ_STORED requires them in the JSON payload.
public enum IDMode {
    GENERATE,
    READ_STORED;
}
// Maps every non-primary recipe user id to the primary user id it should be
// linked to. Returns an empty map for a null input.
public static Map<String, String> collectRecipeIdsToPrimaryIds(List<BulkImportUser> users) {
    Map<String, String> primaryIdByRecipeUserId = new HashMap<>();
    if (users == null) {
        return primaryIdByRecipeUserId;
    }
    for (BulkImportUser user : users) {
        LoginMethod primary = BulkImportUserUtils.getPrimaryLoginMethod(user);
        String primaryId = primary.getSuperTokenOrExternalUserId();
        for (LoginMethod lm : user.loginMethods) {
            String recipeUserId = lm.getSuperTokenOrExternalUserId();
            // The primary loginMethod itself is not an entry in the map.
            if (!recipeUserId.equals(primaryId)) {
                primaryIdByRecipeUserId.put(recipeUserId, primaryId);
            }
        }
    }
    return primaryIdByRecipeUserId;
}
// Looks up a loginMethod by its superTokensUserId across all users; null if absent.
public static LoginMethod findLoginMethodByRecipeUserId(List<BulkImportUser> users, String recipeUserId) {
    if (users == null || recipeUserId == null) {
        return null;
    }
    for (BulkImportUser user : users) {
        for (LoginMethod lm : user.loginMethods) {
            if (recipeUserId.equals(lm.superTokensUserId)) {
                return lm;
            }
        }
    }
    return null;
}
// Finds the user whose primaryUserId matches; null when not found or input is null.
public static BulkImportUser findUserByPrimaryId(List<BulkImportUser> users, String primaryUserId) {
    if (users == null || primaryUserId == null) {
        return null;
    }
    for (BulkImportUser user : users) {
        if (primaryUserId.equals(user.primaryUserId)) {
            return user;
        }
    }
    return null;
}
}

View File

@ -1,33 +0,0 @@
/*
* Copyright (c) 2024, VRAI Labs and/or its affiliates. All rights reserved.
*
* This software is licensed under the Apache License, Version 2.0 (the
* "License") as published by the Apache Software Foundation.
*
* You may not use this file except in compliance with the License. You may
* obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package io.supertokens.bulkimport.exceptions;
import java.util.List;
/**
 * Thrown when a bulk-import user entry fails validation. Every individual
 * validation failure is collected in {@link #errors} so the caller can report
 * all problems at once.
 */
public class InvalidBulkImportDataException extends Exception {
    private static final long serialVersionUID = 1L;
    public List<String> errors;

    public InvalidBulkImportDataException(List<String> errors) {
        super("Data has missing or invalid fields. Please check the errors field for more details.");
        this.errors = errors;
    }

    /** Appends one more validation error to this exception. */
    public void addError(String error) {
        errors.add(error);
    }
}

View File

@ -32,13 +32,10 @@ public class CLIOptions extends ResourceDistributor.SingletonResource {
private static final String HOST_FILE_KEY = "host=";
private static final String TEST_MODE = "test_mode";
private static final String FORCE_NO_IN_MEM_DB = "forceNoInMemDB=true";
private static final String TEMP_DIR_LOCATION_KEY = "tempDirLocation=";
private final String installationPath;
private final String configFilePath;
private final Integer port;
private final String host;
private final String tempDirLocation;
// if this is true, then even in DEV mode, we will not use in memory db, even if there is an error in the plugin
private final boolean forceNoInMemoryDB;
@ -47,7 +44,6 @@ public class CLIOptions extends ResourceDistributor.SingletonResource {
checkIfArgsIsCorrect(args);
String installationPath = args[0];
String configFilePathTemp = null;
String tempDirLocationPath = null;
Integer portTemp = null;
String hostTemp = null;
boolean forceNoInMemoryDBTemp = false;
@ -58,16 +54,7 @@ public class CLIOptions extends ResourceDistributor.SingletonResource {
if (!new File(configFilePathTemp).isAbsolute()) {
throw new QuitProgramException("configPath option must be an absolute path only");
}
} else if (curr.startsWith(TEMP_DIR_LOCATION_KEY)) {
tempDirLocationPath = curr.split(TEMP_DIR_LOCATION_KEY)[1];
if (!new File(tempDirLocationPath).isAbsolute()) {
throw new QuitProgramException("tempDirLocation option must be an absolute path only");
}
if(!tempDirLocationPath.isEmpty() && !tempDirLocationPath.endsWith(File.separator)){
tempDirLocationPath = tempDirLocationPath + File.separator;
}
}
else if (curr.startsWith(PORT_FILE_KEY)) {
} else if (curr.startsWith(PORT_FILE_KEY)) {
portTemp = Integer.parseInt(curr.split(PORT_FILE_KEY)[1]);
} else if (curr.startsWith(HOST_FILE_KEY)) {
hostTemp = curr.split(HOST_FILE_KEY)[1];
@ -82,7 +69,6 @@ public class CLIOptions extends ResourceDistributor.SingletonResource {
this.port = portTemp;
this.host = hostTemp;
this.forceNoInMemoryDB = forceNoInMemoryDBTemp;
this.tempDirLocation = tempDirLocationPath;
}
private static CLIOptions getInstance(Main main) {
@ -137,8 +123,4 @@ public class CLIOptions extends ResourceDistributor.SingletonResource {
public boolean isForceNoInMemoryDB() {
return this.forceNoInMemoryDB;
}
public String getTempDirLocation() {
return tempDirLocation;
}
}

View File

@ -18,7 +18,7 @@ package io.supertokens.config;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.dataformat.yaml.YAMLFactory;
import com.google.gson.GsonBuilder;
import com.google.gson.Gson;
import com.google.gson.JsonObject;
import io.supertokens.Main;
import io.supertokens.ProcessState;
@ -31,7 +31,6 @@ import io.supertokens.pluginInterface.multitenancy.TenantConfig;
import io.supertokens.pluginInterface.multitenancy.TenantIdentifier;
import io.supertokens.pluginInterface.multitenancy.exceptions.TenantOrAppNotFoundException;
import io.supertokens.storageLayer.StorageLayer;
import io.supertokens.utils.ConfigMapper;
import org.jetbrains.annotations.TestOnly;
import java.io.File;
@ -50,19 +49,16 @@ public class Config extends ResourceDistributor.SingletonResource {
private Config(Main main, String configFilePath) throws InvalidConfigException, IOException {
this.main = main;
final ObjectMapper mapper = new ObjectMapper(new YAMLFactory());
Object configObj = mapper.readValue(new File(configFilePath), Object.class);
JsonObject jsonConfig = new GsonBuilder().serializeNulls().create().toJsonTree(configObj).getAsJsonObject();
CoreConfig.updateConfigJsonFromEnv(jsonConfig);
StorageLayer.updateConfigJsonFromEnv(main, jsonConfig);
CoreConfig config = ConfigMapper.mapConfig(jsonConfig, CoreConfig.class);
config.normalizeAndValidate(main, true);
CoreConfig config = mapper.readValue(new File(configFilePath), CoreConfig.class);
config.normalizeAndValidate(main);
this.core = config;
}
private Config(Main main, JsonObject jsonConfig) throws IOException, InvalidConfigException {
this.main = main;
CoreConfig config = ConfigMapper.mapConfig(jsonConfig, CoreConfig.class);
config.normalizeAndValidate(main, false);
final ObjectMapper mapper = new ObjectMapper(new YAMLFactory());
CoreConfig config = mapper.readValue(jsonConfig.toString(), CoreConfig.class);
config.normalizeAndValidate(main);
this.core = config;
}
@ -93,20 +89,12 @@ public class Config extends ResourceDistributor.SingletonResource {
// omit them from the output json.
ObjectMapper yamlReader = new ObjectMapper(new YAMLFactory());
Object obj = yamlReader.readValue(new File(getConfigFilePath(main)), Object.class);
JsonObject configJson = new GsonBuilder().serializeNulls().create().toJsonTree(obj).getAsJsonObject();
CoreConfig.updateConfigJsonFromEnv(configJson);
StorageLayer.updateConfigJsonFromEnv(main, configJson);
return configJson;
return new Gson().toJsonTree(obj).getAsJsonObject();
}
private static String getConfigFilePath(Main main) {
String configFile = "config.yaml";
if (Main.isTesting) {
String workerId = System.getProperty("org.gradle.test.worker", "");
configFile = "config" + workerId + ".yaml";
}
return CLIOptions.get(main).getConfigFilePath() == null
? CLIOptions.get(main).getInstallationPath() + configFile
? CLIOptions.get(main).getInstallationPath() + "config.yaml"
: CLIOptions.get(main).getConfigFilePath();
}
@ -128,12 +116,12 @@ public class Config extends ResourceDistributor.SingletonResource {
// At this point, we know that all configs are valid.
try {
main.getResourceDistributor().withResourceDistributorLock(() -> {
Map<ResourceDistributor.KeyClass, ResourceDistributor.SingletonResource> existingResources =
main.getResourceDistributor()
.getAllResourcesWithResourceKey(RESOURCE_KEY);
main.getResourceDistributor().clearAllResourcesWithResourceKey(RESOURCE_KEY);
for (ResourceDistributor.KeyClass key : normalisedConfigs.keySet()) {
try {
try {
Map<ResourceDistributor.KeyClass, ResourceDistributor.SingletonResource> existingResources =
main.getResourceDistributor()
.getAllResourcesWithResourceKey(RESOURCE_KEY);
main.getResourceDistributor().clearAllResourcesWithResourceKey(RESOURCE_KEY);
for (ResourceDistributor.KeyClass key : normalisedConfigs.keySet()) {
ResourceDistributor.SingletonResource resource = existingResources.get(
new ResourceDistributor.KeyClass(
key.getTenantIdentifier(),
@ -147,16 +135,19 @@ public class Config extends ResourceDistributor.SingletonResource {
main.getResourceDistributor()
.setResource(key.getTenantIdentifier(), RESOURCE_KEY,
new Config(main, normalisedConfigs.get(key)));
}
} catch (Exception e) {
Logging.error(main, key.getTenantIdentifier(), e.getMessage(), false);
// continue loading other resources
}
} catch (InvalidConfigException | IOException e) {
throw new ResourceDistributor.FuncException(e);
}
return null;
});
} catch (ResourceDistributor.FuncException e) {
throw new IllegalStateException("should never happen", e);
if (e.getCause() instanceof InvalidConfigException) {
throw (InvalidConfigException) e.getCause();
}
throw new RuntimeException(e);
}
}
@ -315,7 +306,7 @@ public class Config extends ResourceDistributor.SingletonResource {
@TestOnly
public static CoreConfig getConfig(Main main) {
try {
return getConfig(ResourceDistributor.getAppForTesting(), main);
return getConfig(new TenantIdentifier(null, null, null), main);
} catch (TenantOrAppNotFoundException e) {
throw new IllegalStateException(e);
}

View File

@ -19,18 +19,16 @@ package io.supertokens.config;
import com.fasterxml.jackson.annotation.JsonAlias;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import io.supertokens.Main;
import io.supertokens.cliOptions.CLIOptions;
import io.supertokens.config.annotations.*;
import io.supertokens.pluginInterface.ConfigFieldInfo;
import io.supertokens.config.annotations.ConfigYamlOnly;
import io.supertokens.config.annotations.IgnoreForAnnotationCheck;
import io.supertokens.config.annotations.NotConflictingInApp;
import io.supertokens.pluginInterface.LOG_LEVEL;
import io.supertokens.pluginInterface.exceptions.InvalidConfigException;
import io.supertokens.pluginInterface.multitenancy.TenantIdentifier;
import io.supertokens.pluginInterface.multitenancy.exceptions.TenantOrAppNotFoundException;
import io.supertokens.utils.SemVer;
import io.supertokens.webserver.Utils;
import io.supertokens.webserver.WebserverAPI;
@ -41,224 +39,124 @@ import org.jetbrains.annotations.TestOnly;
import java.io.File;
import java.io.IOException;
import java.lang.reflect.Field;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.*;
import java.util.regex.PatternSyntaxException;
@JsonIgnoreProperties(ignoreUnknown = true)
public class CoreConfig {
// Annotations and their meaning
// @ConfigDescription: This is a description of the config field. Note that this description should match with the
// description in the config.yaml and devConfig.yaml file.
// @EnumProperty: The property has fixed set of values (like an enum)
// @ConfigYamlOnly: The property is configurable only from the config.yaml file.
// @NotConflictingInApp: The property cannot have different values for tenants within an app
// @IgnoreForAnnotationCheck: Set this if the property is neither @ConfigYamlOnly nor @NotConflictingInApp, or should
// simply be ignored by the test (if the property is just an internal member and not an exposed config) that checks
// for annotations on all properties.
// @HideFromDashboard: The property should not be shown in the dashboard
@IgnoreForAnnotationCheck
public static final String[] PROTECTED_CONFIGS = new String[]{
"ip_allow_regex",
"ip_deny_regex",
"oauth_provider_public_service_url",
"oauth_provider_admin_service_url",
"oauth_provider_consent_login_base_url",
"oauth_provider_url_configured_in_oauth_provider",
"saml_legacy_acs_url"
};
@IgnoreForAnnotationCheck
@JsonProperty
@ConfigDescription("The version of the core config.")
private int core_config_version = -1;
@EnvName("ACCESS_TOKEN_VALIDITY")
@NotConflictingInApp
@JsonProperty
@ConfigDescription("Time in seconds for how long an access token is valid for. [Default: 3600 (1 hour)]")
private long access_token_validity = 3600; // in seconds
@EnvName("ACCESS_TOKEN_BLACKLISTING")
@NotConflictingInApp
@JsonProperty
@ConfigDescription(
"Deprecated, please see changelog. Only used in CDI<=2.18 If true, allows for immediate revocation of any" +
" access token. Keep in mind that setting this to true will result in a db query for each API " +
"call that requires authentication. (Default: false)")
private boolean access_token_blacklisting = false;
@EnvName("REFRESH_TOKEN_VALIDITY")
@NotConflictingInApp
@JsonProperty
@ConfigDescription("Time in mins for how long a refresh token is valid for. [Default: 60 * 2400 (100 days)]")
private double refresh_token_validity = 60 * 2400; // in mins
@EnvName("PASSWORD_RESET_TOKEN_LIFETIME")
@IgnoreForAnnotationCheck
@JsonProperty
@ConfigDescription(
"Time in milliseconds for how long a password reset token / link is valid for. [Default: 3600000 (1 hour)]")
private long password_reset_token_lifetime = 3600000; // in MS
@EnvName("EMAIL_VERIFICATION_TOKEN_LIFETIME")
@IgnoreForAnnotationCheck
@JsonProperty
@ConfigDescription(
"Time in milliseconds for how long an email verification token / link is valid for. [Default: 24 * 3600 *" +
" 1000 (1 day)]")
private long email_verification_token_lifetime = 24 * 3600 * 1000; // in MS
@EnvName("PASSWORDLESS_MAX_CODE_INPUT_ATTEMPTS")
@IgnoreForAnnotationCheck
@JsonProperty
@ConfigDescription(
"The maximum number of code input attempts per login before the user needs to restart. (Default: 5)")
private int passwordless_max_code_input_attempts = 5;
@EnvName("PASSWORDLESS_CODE_LIFETIME")
@IgnoreForAnnotationCheck
@JsonProperty
@ConfigDescription(
"Time in milliseconds for how long a passwordless code is valid for. [Default: 900000 (15 mins)]")
private long passwordless_code_lifetime = 900000; // in MS
@EnvName("TOTP_MAX_ATTEMPTS")
@IgnoreForAnnotationCheck
@JsonProperty
@ConfigDescription("The maximum number of invalid TOTP attempts that will trigger rate limiting. (Default: 5)")
private int totp_max_attempts = 5;
@EnvName("TOTP_RATE_LIMIT_COOLDOWN_SEC")
@IgnoreForAnnotationCheck
@JsonProperty
@ConfigDescription(
"The time in seconds for which the user will be rate limited once totp_max_attempts is crossed. [Default:" +
" 900 (15 mins)]")
private int totp_rate_limit_cooldown_sec = 900; // in seconds (Default 15 mins)
@IgnoreForAnnotationCheck
private final String logDefault = "asdkfahbdfk3kjHS";
@EnvName("INFO_LOG_PATH")
@ConfigYamlOnly
@JsonProperty
@ConfigDescription(
"Give the path to a file (on your local system) in which the SuperTokens service can write INFO logs to. " +
"Set it to \"null\" if you want it to log to standard output instead. (Default: installation " +
"directory/logs/info.log)")
private String info_log_path = logDefault;
@EnvName("ERROR_LOG_PATH")
@ConfigYamlOnly
@JsonProperty
@ConfigDescription(
"Give the path to a file (on your local system) in which the SuperTokens service can write ERROR logs to." +
" Set it to \"null\" if you want it to log to standard error instead. (Default: installation " +
"directory/logs/error.log)")
private String error_log_path = logDefault;
@EnvName("ACCESS_TOKEN_SIGNING_KEY_DYNAMIC")
@NotConflictingInApp
@JsonProperty
@ConfigDescription(
"Deprecated, please see changelog. If this is set to true, the access tokens created using CDI<=2.18 will" +
" be signed using a static signing key. (Default: true)")
private boolean access_token_signing_key_dynamic = true;
@EnvName("ACCESS_TOKEN_DYNAMIC_SIGNING_KEY_UPDATE_INTERVAL")
@NotConflictingInApp
@JsonProperty("access_token_dynamic_signing_key_update_interval")
@JsonAlias({"access_token_dynamic_signing_key_update_interval", "access_token_signing_key_update_interval"})
@ConfigDescription("Time in hours for how frequently the dynamic signing key will change. [Default: 168 (1 week)]")
private double access_token_dynamic_signing_key_update_interval = 168; // in hours
@EnvName("SUPERTOKENS_PORT")
@ConfigYamlOnly
@JsonProperty
@ConfigDescription("The port at which SuperTokens service runs. (Default: 3567)")
private int port = 3567;
@EnvName("SUPERTOKENS_HOST")
@ConfigYamlOnly
@JsonProperty
@ConfigDescription(
"The host on which SuperTokens service runs. Values here can be localhost, example.com, 0.0.0.0 or any IP" +
" address associated with your machine. (Default: localhost)")
private String host = "localhost";
@EnvName("MAX_SERVER_POOL_SIZE")
@ConfigYamlOnly
@JsonProperty
@ConfigDescription("Sets the max thread pool size for incoming http server requests. (Default: 10)")
private int max_server_pool_size = 10;
@EnvName("API_KEYS")
@NotConflictingInApp
@JsonProperty
@HideFromDashboard
@ConfigDescription(
"The API keys to query an instance using this config file. The format is \"key1,key2,key3\". Keys can " +
"only contain '=', '-' and alpha-numeric (including capital) chars. Each key must have a minimum " +
"length of 20 chars. (Default: null)")
private String api_keys = null;
@EnvName("DISABLE_TELEMETRY")
@NotConflictingInApp
@JsonProperty
@ConfigDescription(
"Learn more about Telemetry here: https://github.com/supertokens/supertokens-core/wiki/Telemetry. " +
"(Default: false)")
private boolean disable_telemetry = false;
@EnvName("PASSWORD_HASHING_ALG")
@NotConflictingInApp
@JsonProperty
@ConfigDescription("The password hashing algorithm to use. Values are \"ARGON2\" | \"BCRYPT\". (Default: BCRYPT)")
@EnumProperty({"ARGON2", "BCRYPT"})
private String password_hashing_alg = "BCRYPT";
@EnvName("ARGON2_ITERATIONS")
@ConfigYamlOnly
@JsonProperty
@ConfigDescription("Number of iterations for argon2 password hashing. (Default: 1)")
private int argon2_iterations = 1;
@EnvName("ARGON2_MEMORY_KB")
@ConfigYamlOnly
@JsonProperty
@ConfigDescription("Amount of memory in kb for argon2 password hashing. [Default: 87795 (85 mb)]")
private int argon2_memory_kb = 87795; // 85 mb
@EnvName("ARGON2_PARALLELISM")
@ConfigYamlOnly
@JsonProperty
@ConfigDescription("Amount of parallelism for argon2 password hashing. (Default: 2)")
private int argon2_parallelism = 2;
@EnvName("ARGON2_HASHING_POOL_SIZE")
@ConfigYamlOnly
@JsonProperty
@ConfigDescription(
"Number of concurrent argon2 hashes that can happen at the same time for sign up or sign in requests. " +
"(Default: 1)")
private int argon2_hashing_pool_size = 1;
@EnvName("FIREBASE_PASSWORD_HASHING_POOL_SIZE")
@ConfigYamlOnly
@JsonProperty
@ConfigDescription(
"Number of concurrent firebase scrypt hashes that can happen at the same time for sign in requests. " +
"(Default: 1)")
private int firebase_password_hashing_pool_size = 1;
@EnvName("BCRYPT_LOG_ROUNDS")
@ConfigYamlOnly
@JsonProperty
@ConfigDescription("Number of rounds to set for bcrypt password hashing. (Default: 11)")
private int bcrypt_log_rounds = 11;
// TODO: add https in later version
@ -271,190 +169,49 @@ public class CoreConfig {
// # webserver_https_enabled:
@ConfigYamlOnly
@JsonProperty
@IgnoreForAnnotationCheck
private boolean webserver_https_enabled = false;
@EnvName("BASE_PATH")
@ConfigYamlOnly
@JsonProperty
@ConfigDescription("Used to prepend a base path to all APIs when querying the core.")
private String base_path = "";
@EnvName("LOG_LEVEL")
@ConfigYamlOnly
@JsonProperty
@ConfigDescription(
"Logging level for the core. Values are \"DEBUG\" | \"INFO\" | \"WARN\" | \"ERROR\" | \"NONE\". (Default:" +
" INFO)")
@EnumProperty({"DEBUG", "INFO", "WARN", "ERROR", "NONE"})
private String log_level = "INFO";
@EnvName("FIREBASE_PASSWORD_HASHING_SIGNER_KEY")
@NotConflictingInApp
@JsonProperty
@ConfigDescription("The signer key used for firebase scrypt password hashing. (Default: null)")
private String firebase_password_hashing_signer_key = null;
@EnvName("IP_ALLOW_REGEX")
@IgnoreForAnnotationCheck
@JsonProperty
@ConfigDescription(
"Regex for allowing requests from IP addresses that match with the value. For example, use the value of " +
"127\\.\\d+\\.\\d+\\.\\d+|::1|0:0:0:0:0:0:0:1 to allow only localhost to query the core")
private String ip_allow_regex = null;
@EnvName("IP_DENY_REGEX")
@IgnoreForAnnotationCheck
@JsonProperty
@ConfigDescription(
"Regex for denying requests from IP addresses that match with the value. Comment this value to deny no IP" +
" address.")
private String ip_deny_regex = null;
@EnvName("OAUTH_PROVIDER_PUBLIC_SERVICE_URL")
@NotConflictingInApp
@JsonProperty
@HideFromDashboard
@ConfigDescription(
"If specified, the core uses this URL to connect to the OAuth provider public service.")
private String oauth_provider_public_service_url = null;
@EnvName("OAUTH_PROVIDER_ADMIN_SERVICE_URL")
@NotConflictingInApp
@JsonProperty
@HideFromDashboard
@ConfigDescription(
"If specified, the core uses this URL to connect to the OAuth provider admin service.")
private String oauth_provider_admin_service_url = null;
@EnvName("OAUTH_PROVIDER_CONSENT_LOGIN_BASE_URL")
@NotConflictingInApp
@JsonProperty
@HideFromDashboard
@ConfigDescription(
"If specified, the core uses this URL to replace the default consent and login URLs to {apiDomain}.")
private String oauth_provider_consent_login_base_url = null;
@EnvName("OAUTH_PROVIDER_URL_CONFIGURED_IN_OAUTH_PROVIDER")
@NotConflictingInApp
@JsonProperty
@HideFromDashboard
@ConfigDescription(
"If specified, the core uses this URL to parse responses from the oauth provider when the oauth provider's internal address differs from the known public provider address.")
private String oauth_provider_url_configured_in_oauth_provider = null;
@EnvName("OAUTH_CLIENT_SECRET_ENCRYPTION_KEY")
@ConfigYamlOnly
@JsonProperty
@HideFromDashboard
@ConfigDescription("The encryption key used for saving OAuth client secret on the database.")
private String oauth_client_secret_encryption_key = null;
@EnvName("SUPERTOKENS_SAAS_SECRET")
@ConfigYamlOnly
@JsonProperty
@ConfigDescription(
"This is used when deploying the core in SuperTokens SaaS infrastructure. If set, limits what database " +
"information is shown to / modifiable by the dev when they query the core to get the information " +
"about their tenants. It only exposes that information when this key is used instead of the " +
"regular api_keys config.")
private String supertokens_saas_secret = null;
@EnvName("SUPERTOKENS_MAX_CDI_VERSION")
@NotConflictingInApp
@JsonProperty
@HideFromDashboard
@ConfigDescription(
"This is used when the core needs to assume a specific CDI version when CDI version is not specified in " +
"the request. When set to null, the core will assume the latest version of the CDI. (Default: " +
"null)")
private String supertokens_max_cdi_version = null;
@EnvName("SUPERTOKENS_SAAS_LOAD_ONLY_CUD")
@ConfigYamlOnly
@JsonProperty
@ConfigDescription(
"If specified, the supertokens service will only load the specified CUD even if there are more CUDs in " +
"the database and block all other CUDs from being used from this instance.")
private String supertokens_saas_load_only_cud = null;
@EnvName("SAML_LEGACY_ACS_URL")
@NotConflictingInApp
@JsonProperty
@ConfigDescription("If specified, uses this URL as ACS URL for handling legacy SAML clients")
@HideFromDashboard
private String saml_legacy_acs_url = null;
@EnvName("SAML_SP_ENTITY_ID")
@JsonProperty
@IgnoreForAnnotationCheck
@ConfigDescription("Service provider's entity ID")
private String saml_sp_entity_id = null;
@EnvName("SAML_CLAIMS_VALIDITY")
@JsonProperty
@IgnoreForAnnotationCheck
@ConfigDescription("Duration for which SAML claims will be valid before it is consumed")
private long saml_claims_validity = 300000;
@EnvName("SAML_RELAY_STATE_VALIDITY")
@JsonProperty
@IgnoreForAnnotationCheck
@ConfigDescription("Duration for which SAML relay state will be valid before it is consumed")
private long saml_relay_state_validity = 300000;
@IgnoreForAnnotationCheck
private Set<LOG_LEVEL> allowedLogLevels = null;
@IgnoreForAnnotationCheck
private boolean isNormalizedAndValid = false;
@EnvName("BULK_MIGRATION_PARALLELISM")
@NotConflictingInApp
@JsonProperty
@ConfigDescription("If specified, the supertokens core will use the specified number of threads to complete the " +
"migration of users. (Default: number of available processor cores).")
private int bulk_migration_parallelism = Runtime.getRuntime().availableProcessors();
@EnvName("BULK_MIGRATION_BATCH_SIZE")
@NotConflictingInApp
@JsonProperty
@ConfigDescription("If specified, the supertokens core will load the specified number of users for migrating in " +
"one single batch. (Default: 8000)")
private int bulk_migration_batch_size = 8000;
@EnvName("WEBAUTHN_RECOVER_ACCOUNT_TOKEN_LIFETIME")
@NotConflictingInApp
@JsonProperty
@ConfigDescription("Time in milliseconds for how long a webauthn account recovery token is valid for. [Default: 3600000 (1 hour)]")
private long webauthn_recover_account_token_lifetime = 3600000; // in MS;
@EnvName("OTEL_COLLECTOR_CONNECTION_URI")
@ConfigYamlOnly
@JsonProperty
@ConfigDescription(
"The URL of the OpenTelemetry collector to which the core will send telemetry data. " +
"This should be in the format http://<host>:<port> or https://<host>:<port>. (Default: " +
"null)")
private String otel_collector_connection_uri = null;
@EnvName("DEADLOCK_LOGGER_ENABLE")
@ConfigYamlOnly
@JsonProperty
@ConfigDescription(
"Enables or disables the deadlock logger. (Default: false)")
private boolean deadlock_logger_enable = false;
@IgnoreForAnnotationCheck
private static boolean disableOAuthValidationForTest = false;
@TestOnly
public static void setDisableOAuthValidationForTest(boolean val) {
if (!Main.isTesting) {
throw new IllegalStateException("This method can only be called during testing");
}
disableOAuthValidationForTest = val;
}
private String otel_collector_connection_uri = "http://localhost:4317";
public static Set<String> getValidFields() {
CoreConfig coreConfig = new CoreConfig();
@ -470,41 +227,6 @@ public class CoreConfig {
return validFields;
}
public String getOAuthProviderPublicServiceUrl() throws InvalidConfigException {
if (oauth_provider_public_service_url == null) {
throw new InvalidConfigException("oauth_provider_public_service_url is not set");
}
return oauth_provider_public_service_url;
}
public String getOAuthProviderAdminServiceUrl() throws InvalidConfigException {
if (oauth_provider_admin_service_url == null) {
throw new InvalidConfigException("oauth_provider_public_service_url is not set");
}
return oauth_provider_admin_service_url;
}
public String getOauthProviderConsentLoginBaseUrl() throws InvalidConfigException {
if(oauth_provider_consent_login_base_url == null){
throw new InvalidConfigException("oauth_provider_consent_login_base_url is not set");
}
return oauth_provider_consent_login_base_url;
}
public String getOAuthProviderUrlConfiguredInOAuthProvider() throws InvalidConfigException {
if(oauth_provider_url_configured_in_oauth_provider == null) {
throw new InvalidConfigException("oauth_provider_url_configured_in_oauth_provider is not set");
}
return oauth_provider_url_configured_in_oauth_provider;
}
public String getOAuthClientSecretEncryptionKey() throws InvalidConfigException {
if(oauth_client_secret_encryption_key == null) {
throw new InvalidConfigException("oauth_client_secret_encryption_key is not set");
}
return oauth_client_secret_encryption_key;
}
public String getIpAllowRegex() {
return ip_allow_regex;
}
@ -513,10 +235,6 @@ public class CoreConfig {
return ip_deny_regex;
}
public String getLogLevel() {
return log_level;
}
public Set<LOG_LEVEL> getLogLevels(Main main) {
if (allowedLogLevels != null) {
return allowedLogLevels;
@ -598,16 +316,16 @@ public class CoreConfig {
return core_config_version;
}
public long getAccessTokenValidityInMillis() {
return access_token_validity * 1000;
public long getAccessTokenValidity() {
return access_token_validity;
}
public boolean getAccessTokenBlacklisting() {
return access_token_blacklisting;
}
public long getRefreshTokenValidityInMillis() {
return (long) (refresh_token_validity * 60 * 1000);
public long getRefreshTokenValidity() {
return (long) (refresh_token_validity);
}
public long getPasswordResetTokenLifetime() {
@ -653,8 +371,8 @@ public class CoreConfig {
return access_token_signing_key_dynamic;
}
public long getAccessTokenDynamicSigningKeyUpdateIntervalInMillis() {
return (long) (access_token_dynamic_signing_key_update_interval * 3600 * 1000);
public long getAccessTokenDynamicSigningKeyUpdateInterval() {
return (long) (access_token_dynamic_signing_key_update_interval);
}
public String[] getAPIKeys() {
@ -684,89 +402,17 @@ public class CoreConfig {
return webserver_https_enabled;
}
public int getBulkMigrationParallelism() {
return bulk_migration_parallelism;
}
public long getWebauthnRecoverAccountTokenLifetime() {
return webauthn_recover_account_token_lifetime;
}
public int getBulkMigrationBatchSize() {
return bulk_migration_batch_size;
}
public String getOtelCollectorConnectionURI() {
return otel_collector_connection_uri;
}
public boolean isDeadlockLoggerEnabled() {
return deadlock_logger_enable;
}
public String getSAMLLegacyACSURL() {
return saml_legacy_acs_url;
}
public String getSAMLSPEntityID() {
return saml_sp_entity_id;
}
public long getSAMLClaimsValidity() {
return saml_claims_validity;
}
public long getSAMLRelayStateValidity() {
return saml_relay_state_validity;
}
private String getConfigFileLocation(Main main) {
return new File(CLIOptions.get(main).getConfigFilePath() == null
? CLIOptions.get(main).getInstallationPath() + "config.yaml"
: CLIOptions.get(main).getConfigFilePath()).getAbsolutePath();
}
public static void updateConfigJsonFromEnv(JsonObject configJson) {
Map<String, String> env = System.getenv();
for (Field field : CoreConfig.class.getDeclaredFields()) {
if (field.isAnnotationPresent(EnvName.class)) {
String envName = field.getAnnotation(EnvName.class).value();
String stringValue = env.get(envName);
if (stringValue == null || stringValue.isEmpty()) {
continue;
}
if (stringValue.startsWith("\"") && stringValue.endsWith("\"")) {
stringValue = stringValue.substring(1, stringValue.length() - 1);
stringValue = stringValue
.replace("\\n", "\n")
.replace("\\t", "\t")
.replace("\\r", "\r")
.replace("\\\"", "\"")
.replace("\\'", "'")
.replace("\\\\", "\\");
}
if (field.getType().equals(String.class)) {
configJson.addProperty(field.getName(), stringValue);
} else if (field.getType().equals(int.class)) {
configJson.addProperty(field.getName(), Integer.parseInt(stringValue));
} else if (field.getType().equals(long.class)) {
configJson.addProperty(field.getName(), Long.parseLong(stringValue));
} else if (field.getType().equals(boolean.class)) {
configJson.addProperty(field.getName(), Boolean.parseBoolean(stringValue));
} else if (field.getType().equals(float.class)) {
configJson.addProperty(field.getName(), Float.parseFloat(stringValue));
} else if (field.getType().equals(double.class)) {
configJson.addProperty(field.getName(), Double.parseDouble(stringValue));
}
}
}
}
void normalizeAndValidate(Main main, boolean includeConfigFilePath) throws InvalidConfigException {
void normalizeAndValidate(Main main) throws InvalidConfigException {
if (isNormalizedAndValid) {
return;
}
@ -779,9 +425,8 @@ public class CoreConfig {
}
if (access_token_validity < 1 || access_token_validity > 86400000) {
throw new InvalidConfigException(
"'access_token_validity' must be between 1 and 86400000 seconds inclusive." +
(includeConfigFilePath ? " The config file can be"
+ " found here: " + getConfigFileLocation(main) : ""));
"'access_token_validity' must be between 1 and 86400000 seconds inclusive. The config file can be"
+ " found here: " + getConfigFileLocation(main));
}
Boolean validityTesting = CoreConfigTestContent.getInstance(main)
.getValue(CoreConfigTestContent.VALIDITY_TESTING);
@ -790,18 +435,16 @@ public class CoreConfig {
if ((refresh_token_validity * 60) <= access_token_validity) {
if (!Main.isTesting || validityTesting) {
throw new InvalidConfigException(
"'refresh_token_validity' must be strictly greater than 'access_token_validity'." +
(includeConfigFilePath ? " The config file can be"
+ " found here: " + getConfigFileLocation(main) : ""));
"'refresh_token_validity' must be strictly greater than 'access_token_validity'. The config "
+ "file can be found here: " + getConfigFileLocation(main));
}
}
if (!Main.isTesting || validityTesting) { // since in testing we make this really small
if (access_token_dynamic_signing_key_update_interval < 1) {
throw new InvalidConfigException(
"'access_token_dynamic_signing_key_update_interval' must be greater than, equal to 1 hour." +
(includeConfigFilePath ? " The config file can be"
+ " found here: " + getConfigFileLocation(main) : ""));
"'access_token_dynamic_signing_key_update_interval' must be greater than, equal to 1 hour. The "
+ "config file can be found here: " + getConfigFileLocation(main));
}
}
@ -831,9 +474,8 @@ public class CoreConfig {
if (max_server_pool_size <= 0) {
throw new InvalidConfigException(
"'max_server_pool_size' must be >= 1." +
(includeConfigFilePath ? " The config file can be"
+ " found here: " + getConfigFileLocation(main) : ""));
"'max_server_pool_size' must be >= 1. The config file can be found here: "
+ getConfigFileLocation(main));
}
if (api_keys != null) {
@ -928,8 +570,7 @@ public class CoreConfig {
try {
filter.setAllow(ip_allow_regex);
} catch (PatternSyntaxException e) {
throw new InvalidConfigException(
"Provided regular expression is invalid for ip_allow_regex config");
throw new InvalidConfigException("Provided regular expression is invalid for ip_allow_regex config");
}
}
if (ip_deny_regex != null) {
@ -953,45 +594,7 @@ public class CoreConfig {
}
}
if (bulk_migration_parallelism < 1) {
throw new InvalidConfigException("Provided bulk_migration_parallelism must be >= 1");
}
if (bulk_migration_batch_size < 1) {
throw new InvalidConfigException("Provided bulk_migration_batch_size must be >= 1");
}
if (webauthn_recover_account_token_lifetime <= 0) {
throw new InvalidConfigException("Provided webauthn_recover_account_token_lifetime must be > 0");
}
for (String fieldId : CoreConfig.getValidFields()) {
try {
Field field = CoreConfig.class.getDeclaredField(fieldId);
if (field.isAnnotationPresent(EnumProperty.class)) {
String[] allowedValues = field.getAnnotation(EnumProperty.class).value();
try {
String value = field.get(this) != null ? field.get(this).toString() : null;
if (!Arrays.asList(Arrays.stream(allowedValues).map(str -> str.toLowerCase()).toArray())
.contains(value.toLowerCase())) {
throw new InvalidConfigException(
fieldId + " property is not set correctly. It must be one of "
+ Arrays.toString(allowedValues));
}
} catch (IllegalAccessException e) {
throw new InvalidConfigException("Could not access field " + fieldId);
}
}
} catch (NoSuchFieldException e) {
continue;
}
}
// Normalize
if (saml_sp_entity_id == null) {
saml_sp_entity_id = "https://saml.supertokens.com";
}
if (ip_allow_regex != null) {
ip_allow_regex = ip_allow_regex.trim();
if (ip_allow_regex.equals("")) {
@ -1080,72 +683,16 @@ public class CoreConfig {
if (supertokens_saas_load_only_cud != null) {
try {
supertokens_saas_load_only_cud = Utils
.normalizeAndValidateConnectionUriDomain(supertokens_saas_load_only_cud, true);
supertokens_saas_load_only_cud =
Utils.normalizeAndValidateConnectionUriDomain(supertokens_saas_load_only_cud, true);
} catch (ServletException e) {
throw new InvalidConfigException("supertokens_saas_load_only_cud is invalid");
}
}
if(oauth_provider_public_service_url != null) {
try {
URL url = new URL(oauth_provider_public_service_url);
} catch (MalformedURLException malformedURLException){
throw new InvalidConfigException("oauth_provider_public_service_url is not a valid URL");
}
}
if(oauth_provider_admin_service_url != null) {
try {
URL url = new URL(oauth_provider_admin_service_url);
} catch (MalformedURLException malformedURLException){
throw new InvalidConfigException("oauth_provider_admin_service_url is not a valid URL");
}
}
if(oauth_provider_consent_login_base_url != null) {
try {
URL url = new URL(oauth_provider_consent_login_base_url);
} catch (MalformedURLException malformedURLException){
throw new InvalidConfigException("oauth_provider_consent_login_base_url is not a valid URL");
}
}
if(oauth_provider_url_configured_in_oauth_provider == null) {
oauth_provider_url_configured_in_oauth_provider = oauth_provider_public_service_url;
} else {
try {
URL url = new URL(oauth_provider_url_configured_in_oauth_provider);
} catch (MalformedURLException malformedURLException){
throw new InvalidConfigException("oauth_provider_url_configured_in_oauth_provider is not a valid URL");
}
}
if (!disableOAuthValidationForTest) {
List<String> configsTogetherSet = Arrays.asList(oauth_provider_public_service_url, oauth_provider_admin_service_url, oauth_provider_consent_login_base_url);
if(isAnySet(configsTogetherSet) && !isAllSet(configsTogetherSet)) {
throw new InvalidConfigException("If any of the following is set, all of them has to be set: oauth_provider_public_service_url, oauth_provider_admin_service_url, oauth_provider_consent_login_base_url");
}
}
if (Main.isTesting) {
if (oauth_provider_public_service_url == null) {
oauth_provider_public_service_url = "http://localhost:" + System.getProperty("ST_OAUTH_PROVIDER_SERVICE_PORT");
}
if (oauth_provider_admin_service_url == null) {
oauth_provider_admin_service_url = "http://localhost:" + System.getProperty("ST_OAUTH_PROVIDER_ADMIN_PORT");
}
if (oauth_provider_url_configured_in_oauth_provider == null) {
oauth_provider_url_configured_in_oauth_provider = "http://localhost:4444";
}
if (oauth_client_secret_encryption_key == null) {
oauth_client_secret_encryption_key = "clientsecretencryptionkey";
}
if (oauth_provider_consent_login_base_url == null) {
oauth_provider_consent_login_base_url = "http://localhost:3001/auth";
}
}
access_token_validity = access_token_validity * 1000;
access_token_dynamic_signing_key_update_interval = access_token_dynamic_signing_key_update_interval * 3600 * 1000;
refresh_token_validity = refresh_token_validity * 60 * 1000;
isNormalizedAndValid = true;
}
@ -1175,88 +722,16 @@ public class CoreConfig {
}
/**
 * Asserts that none of the {@code @ConfigYamlOnly} CoreConfig fields appear in the given
 * per-app / per-tenant config JSON.
 *
 * @param config the dynamic (app- or tenant-level) config to validate
 * @throws InvalidConfigException if a yaml-only field is present in {@code config}
 */
static void assertThatCertainConfigIsNotSetForAppOrTenants(JsonObject config) throws InvalidConfigException {
    // these are all configs that are per core. So we do not allow the developer to set these dynamically.
    // NOTE: the diff-merged original contained a duplicated comment and a second, unreachable
    // throw statement (a compile error in Java); this is the deduplicated form.
    for (Field field : CoreConfig.class.getDeclaredFields()) {
        if (field.isAnnotationPresent(ConfigYamlOnly.class)) {
            if (config.has(field.getName())) {
                throw new InvalidConfigException(field.getName() + " can only be set via the core's base config setting");
            }
        }
    }
}
/**
 * Builds the list of config-field descriptors shown on the dashboard for the given tenant.
 * Fields without {@code @JsonProperty}, fields marked {@code @ConfigYamlOnly} or
 * {@code @HideFromDashboard}, the {@code core_config_version} field, and fields whose
 * description contains "Deprecated" are omitted.
 *
 * @param main             the core instance
 * @param tenantIdentifier tenant whose effective config values are reported
 * @return descriptors (current value, default value, type, description, …) per visible field
 * @throws IOException                  propagated from config loading
 * @throws TenantOrAppNotFoundException if the tenant does not exist
 */
public static ArrayList<ConfigFieldInfo> getConfigFieldsInfoForDashboard(Main main,
        TenantIdentifier tenantIdentifier)
        throws IOException, TenantOrAppNotFoundException {
    // Serialise both the tenant's effective config and a default-constructed config so the
    // current and default value of each field can be looked up by name.
    JsonObject currentValues = new Gson().toJsonTree(Config.getConfig(tenantIdentifier, main)).getAsJsonObject();
    JsonObject defaultValues = new Gson().toJsonTree(new CoreConfig()).getAsJsonObject();

    ArrayList<ConfigFieldInfo> fields = new ArrayList<ConfigFieldInfo>();

    for (String fieldId : CoreConfig.getValidFields()) {
        Field field;
        try {
            field = CoreConfig.class.getDeclaredField(fieldId);
        } catch (NoSuchFieldException e) {
            // Valid field id without a matching declared field — nothing to report.
            continue;
        }

        // Skip fields that must not be shown on the dashboard.
        if (!field.isAnnotationPresent(JsonProperty.class)
                || field.isAnnotationPresent(ConfigYamlOnly.class)
                || field.isAnnotationPresent(HideFromDashboard.class)
                || fieldId.equals("core_config_version")) {
            continue;
        }

        String description = "";
        if (field.isAnnotationPresent(ConfigDescription.class)) {
            description = field.getAnnotation(ConfigDescription.class).value();
        }
        if (description.contains("Deprecated")) {
            // Deprecated options are intentionally hidden from the dashboard.
            continue;
        }

        // Map the Java field type onto the dashboard's value-type vocabulary.
        String valueType;
        Class<?> fieldType = field.getType();
        if (fieldType == String.class) {
            valueType = "string";
        } else if (fieldType == boolean.class) {
            valueType = "boolean";
        } else if (fieldType == int.class || fieldType == long.class || fieldType == double.class) {
            valueType = "number";
        } else {
            throw new RuntimeException("Unknown field type " + fieldType.getName());
        }

        String[] possibleValues = null;
        if (field.isAnnotationPresent(EnumProperty.class)) {
            // Enum fields override the primitive type and carry their allowed values.
            valueType = "enum";
            possibleValues = field.getAnnotation(EnumProperty.class).value();
        }

        JsonElement value = currentValues.get(field.getName());
        JsonElement defaultValue = defaultValues.get(field.getName());

        fields.add(new ConfigFieldInfo(
                field.getName(), valueType, value, description,
                !field.isAnnotationPresent(NotConflictingInApp.class),
                possibleValues, defaultValue == null, defaultValue, false, false));
    }
    return fields;
}
void assertThatConfigFromSameAppIdAreNotConflicting(CoreConfig other) throws InvalidConfigException {
// we do not allow different values for this across tenants in the same app
for (Field field : CoreConfig.class.getDeclaredFields()) {
@ -1277,24 +752,4 @@ public class CoreConfig {
/**
 * Returns the maximum CDI version this core should serve, as configured via
 * {@code supertokens_max_cdi_version}; presumably null when not configured — TODO confirm
 * how callers handle the null case.
 */
public String getMaxCDIVersion() {
    return this.supertokens_max_cdi_version;
}
// Returns true when at least one of the given config values is set (non-null).
private boolean isAnySet(List<String> configs) {
    return configs.stream().anyMatch(value -> value != null);
}
// Returns true only when every one of the given config values is set (non-null).
private boolean isAllSet(List<String> configs) {
    for (String value : configs) {
        if (value == null) {
            return false;
        }
    }
    return true;
}
}

View File

@ -1,33 +0,0 @@
/*
* Copyright (c) 2024, VRAI Labs and/or its affiliates. All rights reserved.
*
* This software is licensed under the Apache License, Version 2.0 (the
* "License") as published by the Apache Software Foundation.
*
* You may not use this file except in compliance with the License. You may
* obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package io.supertokens.config.annotations;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
 * Annotation to provide a human-readable description for a configuration field. To be used on the
 * fields of `CoreConfig` and the config class in the plugin like `PostgreSQLConfig`, `MysqlConfig`,
 * etc. The description is read reflectively and exposed via the dashboard config API.
 */
@Retention(RetentionPolicy.RUNTIME)
// Make annotation accessible at runtime so that config descriptions can be read from API
@Target(ElementType.FIELD) // Annotation can only be applied to fields
public @interface ConfigDescription {
    String value(); // String value that provides a description for the configuration field
}

View File

@ -1,32 +0,0 @@
/*
* Copyright (c) 2024, VRAI Labs and/or its affiliates. All rights reserved.
*
* This software is licensed under the Apache License, Version 2.0 (the
* "License") as published by the Apache Software Foundation.
*
* You may not use this file except in compliance with the License. You may
* obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package io.supertokens.config.annotations;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
 * This annotation is used to mark a config field as an enum property. The value should be an array
 * of strings which represent all possible values for the annotated config field. Annotated fields
 * are reported with value type "enum" (plus their possible values) by the dashboard config API.
 */
@Retention(RetentionPolicy.RUNTIME) // read reflectively at runtime
@Target(ElementType.FIELD) // can only be applied to fields
public @interface EnumProperty {
    String[] value(); // all allowed values for the annotated config field
}

View File

@ -1,29 +0,0 @@
/*
* Copyright (c) 2025, VRAI Labs and/or its affiliates. All rights reserved.
*
* This software is licensed under the Apache License, Version 2.0 (the
* "License") as published by the Apache Software Foundation.
*
* You may not use this file except in compliance with the License. You may
* obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package io.supertokens.config.annotations;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
@Retention(RetentionPolicy.RUNTIME)
// Make annotation accessible at runtime so that config can be read from env
@Target(ElementType.FIELD) // Annotation can only be applied to fields
public @interface EnvName {
    String value(); // name of the environment variable that supplies the value for the annotated config field
}

View File

@ -1,27 +0,0 @@
/*
* Copyright (c) 2023, VRAI Labs and/or its affiliates. All rights reserved.
*
* This software is licensed under the Apache License, Version 2.0 (the
* "License") as published by the Apache Software Foundation.
*
* You may not use this file except in compliance with the License. You may
* obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package io.supertokens.config.annotations;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
 * Marker annotation for config fields that must not be exposed on the dashboard; annotated fields
 * are skipped by the dashboard config-field listing (CoreConfig.getConfigFieldsInfoForDashboard).
 */
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.FIELD)
public @interface HideFromDashboard {
}

View File

@ -82,8 +82,7 @@ public abstract class CronTask extends ResourceDistributor.SingletonResource imp
doTaskForTargetTenant(this.targetTenant);
} catch (Exception e) {
ProcessState.getInstance(main).addState(ProcessState.PROCESS_STATE.CRON_TASK_ERROR_LOGGING, e);
Logging.error(main, this.targetTenant, "Cronjob threw an exception: " + this.jobName, Main.isTesting,
e);
Logging.error(main, this.targetTenant, "Cronjob threw an exception: " + this.jobName, Main.isTesting, e);
if (e instanceof QuitProgramException) {
main.wakeUpMainThreadToShutdown();
}
@ -115,8 +114,7 @@ public abstract class CronTask extends ResourceDistributor.SingletonResource imp
doTaskPerApp(app);
} catch (Exception e) {
ProcessState.getInstance(main).addState(ProcessState.PROCESS_STATE.CRON_TASK_ERROR_LOGGING, e);
Logging.error(main, app.getAsPublicTenantIdentifier(),
"Cronjob threw an exception: " + this.jobName, Main.isTesting, e);
Logging.error(main, app.getAsPublicTenantIdentifier(), "Cronjob threw an exception: " + this.jobName, Main.isTesting, e);
if (e instanceof QuitProgramException) {
main.wakeUpMainThreadToShutdown();
}
@ -133,8 +131,7 @@ public abstract class CronTask extends ResourceDistributor.SingletonResource imp
} catch (Exception e) {
ProcessState.getInstance(main)
.addState(ProcessState.PROCESS_STATE.CRON_TASK_ERROR_LOGGING, e);
Logging.error(main, t.get(0), "Cronjob threw an exception: " + this.jobName, Main.isTesting,
e);
Logging.error(main, t.get(0), "Cronjob threw an exception: " + this.jobName, Main.isTesting, e);
if (e instanceof QuitProgramException) {
threwQuitProgramException.set(true);
}
@ -146,8 +143,7 @@ public abstract class CronTask extends ResourceDistributor.SingletonResource imp
} catch (Exception e) {
ProcessState.getInstance(main)
.addState(ProcessState.PROCESS_STATE.CRON_TASK_ERROR_LOGGING, e);
Logging.error(main, tenant, "Cronjob threw an exception: " + this.jobName,
Main.isTesting, e);
Logging.error(main, tenant, "Cronjob threw an exception: " + this.jobName, Main.isTesting, e);
if (e instanceof QuitProgramException) {
threwQuitProgramException.set(true);
}

View File

@ -28,7 +28,6 @@ import java.util.Map;
public class CronTaskTest extends SingletonResource {
private static final String RESOURCE_ID = "io.supertokens.cronjobs.CronTaskTest";
private Map<String, Integer> cronTaskToInterval = new HashMap<String, Integer>();
private Map<String, Integer> cronTaskToWaitTime = new HashMap<String, Integer>();
private CronTaskTest() {
@ -52,13 +51,4 @@ public class CronTaskTest extends SingletonResource {
// Returns the test-overridden run interval (in seconds) registered for the given cron task
// resource id, or null when no override was set.
public Integer getIntervalInSeconds(String resourceId) {
    return cronTaskToInterval.get(resourceId);
}
// Registers a test override for a cron task's initial wait time (in seconds).
// NOTE(review): the parameter is named "interval" but actually carries a wait time.
@TestOnly
public void setInitialWaitTimeInSeconds(String resourceId, int interval) {
    cronTaskToWaitTime.put(resourceId, interval);
}
// Returns the test-overridden initial wait time (in seconds) for the given cron task
// resource id, or null when no override was set.
public Integer getInitialWaitTimeInSeconds(String resourceId) {
    return cronTaskToWaitTime.get(resourceId);
}
}

View File

@ -99,16 +99,6 @@ public class Cronjobs extends ResourceDistributor.SingletonResource {
}
}
// Reports whether the given cron task has already been registered with this core instance,
// lazily initialising the Cronjobs singleton first if needed.
public static boolean isCronjobLoaded(Main main, CronTask task) {
    Cronjobs cronjobs = getInstance(main);
    if (cronjobs == null) {
        init(main);
        cronjobs = getInstance(main);
    }
    synchronized (cronjobs.lock) {
        return cronjobs.tasks.contains(task);
    }
}
@TestOnly
public List<CronTask> getTasks() {
return this.tasks;

View File

@ -1,194 +0,0 @@
/*
* Copyright (c) 2024. VRAI Labs and/or its affiliates. All rights reserved.
*
* This software is licensed under the Apache License, Version 2.0 (the
* "License") as published by the Apache Software Foundation.
*
* You may not use this file except in compliance with the License. You may
* obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package io.supertokens.cronjobs.bulkimport;
import io.supertokens.Main;
import io.supertokens.bulkimport.BulkImport;
import io.supertokens.bulkimport.BulkImportUserUtils;
import io.supertokens.config.Config;
import io.supertokens.cronjobs.CronTask;
import io.supertokens.cronjobs.CronTaskTest;
import io.supertokens.output.Logging;
import io.supertokens.pluginInterface.STORAGE_TYPE;
import io.supertokens.pluginInterface.StorageUtils;
import io.supertokens.pluginInterface.bulkimport.BulkImportStorage;
import io.supertokens.pluginInterface.bulkimport.BulkImportUser;
import io.supertokens.pluginInterface.bulkimport.sqlStorage.BulkImportSQLStorage;
import io.supertokens.pluginInterface.exceptions.StorageQueryException;
import io.supertokens.pluginInterface.multitenancy.AppIdentifier;
import io.supertokens.pluginInterface.multitenancy.TenantIdentifier;
import io.supertokens.pluginInterface.multitenancy.exceptions.TenantOrAppNotFoundException;
import io.supertokens.storageLayer.StorageLayer;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.stream.Collectors;
import java.util.stream.Stream;
/**
 * Cron task that drains the bulk-import user queue for each app: it repeatedly loads batches of
 * queued users, splits each batch into chunks, and processes the chunks in parallel via
 * {@link ProcessBulkUsersImportWorker} threads.
 *
 * Fixes over the previous revision: the null/empty check on a loaded batch now happens BEFORE
 * dereferencing it (the old code called {@code users.size()} first and could NPE), the executor
 * pool is always shut down via try/finally (it previously leaked when an exception aborted the
 * round), and the interrupt flag is restored on {@link InterruptedException}.
 */
public class ProcessBulkImportUsers extends CronTask {

    public static final String RESOURCE_KEY = "io.supertokens.cronjobs.ProcessBulkImportUsers";

    // Pool running the per-chunk import workers; (re)created on every doTaskPerApp invocation.
    private ExecutorService executorService;

    private ProcessBulkImportUsers(Main main, List<List<TenantIdentifier>> tenantsInfo) {
        super("ProcessBulkImportUsers", main, tenantsInfo, true);
    }

    /** Registers this cron task as a singleton resource on the core and returns the instance. */
    public static ProcessBulkImportUsers init(Main main, List<List<TenantIdentifier>> tenantsInfo) {
        return (ProcessBulkImportUsers) main.getResourceDistributor()
                .setResource(new TenantIdentifier(null, null, null), RESOURCE_KEY,
                        new ProcessBulkImportUsers(main, tenantsInfo));
    }

    /**
     * Processes, for one app, as many bulk-import users as were queued when this round started
     * ("snapshot" semantics), then returns so the other apps get their turn.
     *
     * @throws TenantOrAppNotFoundException if the app's storage cannot be resolved
     * @throws StorageQueryException        on storage errors while counting/loading users
     */
    @Override
    protected void doTaskPerApp(AppIdentifier app)
            throws TenantOrAppNotFoundException, StorageQueryException {
        // Bulk import requires a real SQL storage; skip entirely for the in-memory db.
        if (StorageLayer.getBaseStorage(main).getType() != STORAGE_TYPE.SQL || StorageLayer.isInMemDb(main)) {
            return;
        }

        BulkImportSQLStorage bulkImportSQLStorage = (BulkImportSQLStorage) StorageLayer
                .getStorage(app.getAsPublicTenantIdentifier(), main);

        // Sizing from the app's config: each loaded batch of `bulkMigrationBatchSize` users is
        // split into `numberOfBatchChunks` chunks processed concurrently.
        int numberOfBatchChunks = Config.getConfig(app.getAsPublicTenantIdentifier(), main)
                .getBulkMigrationParallelism();
        int bulkMigrationBatchSize = Config.getConfig(app.getAsPublicTenantIdentifier(), main)
                .getBulkMigrationBatchSize();

        Logging.debug(main, app.getAsPublicTenantIdentifier(), "CronTask starts. Instance: " + this);
        Logging.debug(main, app.getAsPublicTenantIdentifier(),
                "CronTask starts. Processing bulk import users with " + bulkMigrationBatchSize
                        + " batch size, one batch split into " + numberOfBatchChunks + " chunks");

        executorService = Executors.newFixedThreadPool(numberOfBatchChunks);
        try {
            String[] allUserRoles = StorageUtils.getUserRolesStorage(bulkImportSQLStorage).getRoles(app);
            BulkImportUserUtils bulkImportUserUtils = new BulkImportUserUtils(allUserRoles);

            long newUsers = bulkImportSQLStorage.getBulkImportUsersCount(app,
                    BulkImportStorage.BULK_IMPORT_USER_STATUS.NEW);
            long processingUsers = bulkImportSQLStorage.getBulkImportUsersCount(app,
                    BulkImportStorage.BULK_IMPORT_USER_STATUS.PROCESSING);
            long failedUsers = 0;

            // Taking a "snapshot" here and processing in this round as many users as there are
            // uploaded now. After this the processing will go on with another app and gets back
            // here when all the apps had a chance.
            long usersProcessed = 0;
            Logging.debug(main, app.getAsPublicTenantIdentifier(),
                    "Found " + (newUsers + processingUsers) + " waiting for processing"
                            + " (" + newUsers + " new, " + processingUsers + " processing)");

            while (usersProcessed < (newUsers + processingUsers)) {
                List<BulkImportUser> users = bulkImportSQLStorage.getBulkImportUsersAndChangeStatusToProcessing(app,
                        bulkMigrationBatchSize);
                // Check BEFORE dereferencing: the previous code logged users.size() first and
                // would NPE on a null result from storage.
                if (users == null || users.isEmpty()) {
                    // No more users to process!
                    break;
                }
                Logging.debug(main, app.getAsPublicTenantIdentifier(),
                        "Loaded " + users.size() + " users to process");

                List<List<BulkImportUser>> loadedUsersChunks = makeChunksOf(users, numberOfBatchChunks);
                for (List<BulkImportUser> chunk : loadedUsersChunks) {
                    Logging.debug(main, app.getAsPublicTenantIdentifier(), "Chunk size: " + chunk.size());
                }

                try {
                    List<Future<?>> tasks = new ArrayList<>();
                    for (int i = 0; i < numberOfBatchChunks && i < loadedUsersChunks.size(); i++) {
                        tasks.add(executorService.submit(new ProcessBulkUsersImportWorker(main, app,
                                loadedUsersChunks.get(i), bulkImportSQLStorage, bulkImportUserUtils)));
                    }

                    for (Future<?> task : tasks) {
                        while (!task.isDone()) {
                            Logging.debug(main, app.getAsPublicTenantIdentifier(),
                                    "Waiting for task " + task + " to finish");
                            Thread.sleep(1000);
                        }
                        Logging.debug(main, app.getAsPublicTenantIdentifier(), "Task " + task + " finished");
                        try {
                            // task.get() surfaces any exception the worker threw; it also guarantees
                            // the chunk completed before we count its users as processed.
                            Void result = (Void) task.get();
                            Logging.debug(main, app.getAsPublicTenantIdentifier(),
                                    "Task " + task + " finished with result: " + result);
                        } catch (ExecutionException executionException) {
                            Logging.error(main, app.getAsPublicTenantIdentifier(),
                                    "Error while processing bulk import users", true, executionException);
                            throw new RuntimeException(executionException);
                        }
                        usersProcessed += loadedUsersChunks.get(tasks.indexOf(task)).size();
                        failedUsers = bulkImportSQLStorage.getBulkImportUsersCount(app,
                                BulkImportStorage.BULK_IMPORT_USER_STATUS.FAILED);
                        Logging.debug(main, app.getAsPublicTenantIdentifier(),
                                "Chunk " + tasks.indexOf(task) + " finished processing, all chunks processed: "
                                        + usersProcessed + " users (" + failedUsers + " failed)");
                    }
                    Logging.debug(main, app.getAsPublicTenantIdentifier(), "Processing round finished");
                } catch (InterruptedException e) {
                    // Restore the interrupt flag so upstream code can observe the interruption.
                    Thread.currentThread().interrupt();
                    Logging.error(main, app.getAsPublicTenantIdentifier(),
                            "Error while processing bulk import users", true, e);
                    throw new RuntimeException(e);
                }
            }
        } finally {
            // Always release the worker threads — the previous code only shut the pool down on the
            // success path and leaked it whenever an exception aborted the round.
            executorService.shutdownNow();
        }
    }

    @Override
    public int getIntervalTimeSeconds() {
        if (Main.isTesting) {
            // Tests may override the interval via CronTaskTest.
            Integer interval = CronTaskTest.getInstance(main).getIntervalInSeconds(RESOURCE_KEY);
            if (interval != null) {
                return interval;
            }
        }
        return BulkImport.PROCESS_USERS_INTERVAL_SECONDS;
    }

    @Override
    public int getInitialWaitTimeSeconds() {
        if (Main.isTesting) {
            // Tests may override the initial wait via CronTaskTest.
            Integer waitTime = CronTaskTest.getInstance(main).getInitialWaitTimeInSeconds(RESOURCE_KEY);
            if (waitTime != null) {
                return waitTime;
            }
        }
        return 0; // run immediately on startup in production
    }

    /**
     * Splits {@code users} into at most {@code numberOfChunks} contiguous, order-preserving chunks.
     * Returns an empty list for null/empty input or a non-positive chunk count.
     */
    private List<List<BulkImportUser>> makeChunksOf(List<BulkImportUser> users, int numberOfChunks) {
        List<List<BulkImportUser>> chunks = new ArrayList<>();
        if (users != null && !users.isEmpty() && numberOfChunks > 0) {
            AtomicInteger index = new AtomicInteger(0);
            int chunkSize = users.size() / numberOfChunks + 1;
            Stream<List<BulkImportUser>> listStream = users.stream()
                    .collect(Collectors.groupingBy(x -> index.getAndIncrement() / chunkSize))
                    .entrySet().stream()
                    .sorted(Map.Entry.comparingByKey()).map(Map.Entry::getValue);
            listStream.forEach(chunks::add);
        }
        return chunks;
    }
}

View File

@ -1,327 +0,0 @@
/*
* Copyright (c) 2024, VRAI Labs and/or its affiliates. All rights reserved.
*
* This software is licensed under the Apache License, Version 2.0 (the
* "License") as published by the Apache Software Foundation.
*
* You may not use this file except in compliance with the License. You may
* obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package io.supertokens.cronjobs.bulkimport;
import com.google.gson.JsonObject;
import io.supertokens.Main;
import io.supertokens.ResourceDistributor;
import io.supertokens.bulkimport.BulkImport;
import io.supertokens.bulkimport.BulkImportUserUtils;
import io.supertokens.bulkimport.exceptions.InvalidBulkImportDataException;
import io.supertokens.config.Config;
import io.supertokens.multitenancy.Multitenancy;
import io.supertokens.output.Logging;
import io.supertokens.pluginInterface.Storage;
import io.supertokens.pluginInterface.bulkimport.BulkImportUser;
import io.supertokens.pluginInterface.bulkimport.exceptions.BulkImportBatchInsertException;
import io.supertokens.pluginInterface.bulkimport.exceptions.BulkImportTransactionRolledBackException;
import io.supertokens.pluginInterface.bulkimport.sqlStorage.BulkImportSQLStorage;
import io.supertokens.pluginInterface.exceptions.DbInitException;
import io.supertokens.pluginInterface.exceptions.InvalidConfigException;
import io.supertokens.pluginInterface.exceptions.StorageQueryException;
import io.supertokens.pluginInterface.exceptions.StorageTransactionLogicException;
import io.supertokens.pluginInterface.multitenancy.AppIdentifier;
import io.supertokens.pluginInterface.multitenancy.TenantConfig;
import io.supertokens.pluginInterface.multitenancy.TenantIdentifier;
import io.supertokens.pluginInterface.multitenancy.exceptions.TenantOrAppNotFoundException;
import io.supertokens.pluginInterface.sqlStorage.SQLStorage;
import io.supertokens.storageLayer.StorageLayer;
import java.io.IOException;
import java.util.*;
public class ProcessBulkUsersImportWorker implements Runnable {
private final Map<String, SQLStorage> userPoolToStorageMap = new HashMap<>();
private final Main main;
private final AppIdentifier app;
private final BulkImportSQLStorage bulkImportSQLStorage;
private final BulkImportUserUtils bulkImportUserUtils;
private final List<BulkImportUser> usersToProcess;
ProcessBulkUsersImportWorker(Main main, AppIdentifier app, List<BulkImportUser> usersToProcess, BulkImportSQLStorage bulkImportSQLStorage, BulkImportUserUtils bulkImportUserUtils){
this.main = main;
this.app = app;
this.usersToProcess = usersToProcess;
this.bulkImportSQLStorage = bulkImportSQLStorage;
this.bulkImportUserUtils = bulkImportUserUtils;
}
@Override
public void run() {
try {
processMultipleUsers(app, usersToProcess, bulkImportUserUtils, bulkImportSQLStorage);
} catch (TenantOrAppNotFoundException | DbInitException | IOException | StorageQueryException e) {
throw new RuntimeException(e);
}
}
private void processMultipleUsers(AppIdentifier appIdentifier, List<BulkImportUser> users,
BulkImportUserUtils bulkImportUserUtils,
BulkImportSQLStorage baseTenantStorage)
throws TenantOrAppNotFoundException, StorageQueryException, IOException,
DbInitException {
BulkImportUser user = null;
try {
Logging.debug(main, appIdentifier.getAsPublicTenantIdentifier(),
"Processing bulk import users: " + users.size());
final Storage[] allStoragesForApp = getAllProxyStoragesForApp(main, appIdentifier);
int userIndexPointer = 0;
List<BulkImportUser> validUsers = new ArrayList<>();
Map<String, Exception> validationErrorsBeforeActualProcessing = new HashMap<>();
while(userIndexPointer < users.size()) {
user = users.get(userIndexPointer);
if (Main.isTesting && Main.isTesting_skipBulkImportUserValidationInCronJob) {
// Skip validation when the flag is enabled during testing
// Skip validation if it's a retry run. This already passed validation. A revalidation triggers
// an invalid external user id already exists validation error - which is not true!
validUsers.add(user);
} else {
// Validate the user
try {
validUsers.add(bulkImportUserUtils.createBulkImportUserFromJSON(main, appIdentifier,
user.toJsonObject(), BulkImportUserUtils.IDMode.READ_STORED));
} catch (InvalidBulkImportDataException exception) {
validationErrorsBeforeActualProcessing.put(user.id, new Exception(
String.valueOf(exception.errors)));
}
}
userIndexPointer+=1;
}
if(!validationErrorsBeforeActualProcessing.isEmpty()) {
throw new BulkImportBatchInsertException("Invalid input data", validationErrorsBeforeActualProcessing);
}
// Since all the tenants of a user must share the storage, we will just use the
// storage of the first tenantId of the first loginMethod
Map<SQLStorage, List<BulkImportUser>> partitionedUsers = partitionUsersByStorage(appIdentifier, validUsers);
for(SQLStorage bulkImportProxyStorage : partitionedUsers.keySet()) {
boolean shouldRetryImmediatley = true;
while (shouldRetryImmediatley) {
shouldRetryImmediatley = bulkImportProxyStorage.startTransaction(con -> {
try {
BulkImport.processUsersImportSteps(main, appIdentifier, bulkImportProxyStorage,
partitionedUsers.get(bulkImportProxyStorage),
allStoragesForApp);
bulkImportProxyStorage.commitTransactionForBulkImportProxyStorage();
String[] toDelete = new String[validUsers.size()];
for (int i = 0; i < validUsers.size(); i++) {
toDelete[i] = validUsers.get(i).id;
}
while (true){
try {
List<String> deletedIds = baseTenantStorage.deleteBulkImportUsers(appIdentifier,
toDelete);
break;
} catch (Exception e) {
// ignore and retry delete. The import transaction is already committed, the delete should happen no matter what
Logging.debug(main, app.getAsPublicTenantIdentifier(),
"Exception while deleting bulk import users: " + e.getMessage());
}
}
} catch (StorageTransactionLogicException | StorageQueryException e) {
// We need to rollback the transaction manually because we have overridden that in the proxy
// storage
bulkImportProxyStorage.rollbackTransactionForBulkImportProxyStorage();
if (isBulkImportTransactionRolledBackIsTheRealCause(e)) {
return true;
//@see BulkImportTransactionRolledBackException for explanation
}
handleProcessUserExceptions(app, validUsers, e, baseTenantStorage);
}
return false;
});
}
}
} catch (StorageTransactionLogicException | InvalidConfigException e) {
Logging.error(main, app.getAsPublicTenantIdentifier(),
"Error while processing bulk import users: " + e.getMessage(), true, e);
throw new RuntimeException(e);
} catch (BulkImportBatchInsertException insertException) {
handleProcessUserExceptions(app, users, insertException, baseTenantStorage);
} catch (Exception e) {
Logging.error(main, app.getAsPublicTenantIdentifier(),
"Error while processing bulk import users: " + e.getMessage(), true, e);
throw e;
} finally {
closeAllProxyStorages(); //closing it here to reuse the existing connection with all the users
}
}
private boolean isBulkImportTransactionRolledBackIsTheRealCause(Throwable exception) {
if(exception instanceof BulkImportTransactionRolledBackException){
return true;
} else if(exception.getCause()!=null){
return isBulkImportTransactionRolledBackIsTheRealCause(exception.getCause());
}
return false;
}
private void handleProcessUserExceptions(AppIdentifier appIdentifier, List<BulkImportUser> usersBatch, Exception e,
BulkImportSQLStorage baseTenantStorage)
throws StorageQueryException {
// Java doesn't allow us to reassign local variables inside a lambda expression
// so we have to use an array.
String[] errorMessage = { e.getMessage() };
Map<String, String> bulkImportUserIdToErrorMessage = new HashMap<>();
switch (e) {
case StorageTransactionLogicException exception -> {
// If the exception is due to a StorageQueryException, we want to retry the entry after sometime instead
// of marking it as FAILED. We will return early in that case.
if (exception.actualException instanceof StorageQueryException) {
Logging.error(main, null,
"We got an StorageQueryException while processing a bulk import user entry. It will be " +
"retried again. Error Message: " +
e.getMessage(), true);
return;
}
if (exception.actualException instanceof BulkImportBatchInsertException) {
handleBulkImportException(usersBatch, (BulkImportBatchInsertException) exception.actualException,
bulkImportUserIdToErrorMessage);
} else {
//fail the whole batch
errorMessage[0] = exception.actualException.getMessage();
for (BulkImportUser user : usersBatch) {
bulkImportUserIdToErrorMessage.put(user.id, errorMessage[0]);
}
}
}
case InvalidBulkImportDataException invalidBulkImportDataException ->
errorMessage[0] = invalidBulkImportDataException.errors.toString();
case InvalidConfigException invalidConfigException -> errorMessage[0] = e.getMessage();
case BulkImportBatchInsertException bulkImportBatchInsertException ->
handleBulkImportException(usersBatch, bulkImportBatchInsertException,
bulkImportUserIdToErrorMessage);
default -> {
Logging.error(main, null,
"We got an error while processing a bulk import user entry. It will be " +
"retried again. Error Message: " +
e.getMessage(), true);
}
}
try {
baseTenantStorage.startTransaction(con -> {
baseTenantStorage.updateMultipleBulkImportUsersStatusToError_Transaction(appIdentifier, con,
bulkImportUserIdToErrorMessage);
return null;
});
} catch (StorageTransactionLogicException e1) {
throw new StorageQueryException(e1.actualException);
}
}
private static void handleBulkImportException(List<BulkImportUser> usersBatch, BulkImportBatchInsertException exception,
Map<String, String> bulkImportUserIdToErrorMessage) {
Map<String, Exception> userIndexToError = exception.exceptionByUserId;
for(String userid : userIndexToError.keySet()){
Optional<BulkImportUser> userWithId = usersBatch.stream()
.filter(bulkImportUser -> userid.equals(bulkImportUser.id) || userid.equals(bulkImportUser.externalUserId)).findFirst();
String id = null;
if(userWithId.isPresent()){
id = userWithId.get().id;
}
if(id == null) {
userWithId = usersBatch.stream()
.filter(bulkImportUser ->
bulkImportUser.loginMethods.stream()
.map(loginMethod -> loginMethod.superTokensUserId)
.anyMatch(s -> s!= null && s.equals(userid))).findFirst();
if(userWithId.isPresent()){
id = userWithId.get().id;
}
}
bulkImportUserIdToErrorMessage.put(id, userIndexToError.get(userid).getMessage());
}
}
private synchronized Storage getBulkImportProxyStorage(TenantIdentifier tenantIdentifier)
throws InvalidConfigException, IOException, TenantOrAppNotFoundException, DbInitException {
String userPoolId = StorageLayer.getStorage(tenantIdentifier, main).getUserPoolId();
if (userPoolToStorageMap.containsKey(userPoolId)) {
return userPoolToStorageMap.get(userPoolId);
}
TenantConfig[] allTenants = Multitenancy.getAllTenants(main);
Map<ResourceDistributor.KeyClass, JsonObject> normalisedConfigs = Config.getNormalisedConfigsForAllTenants(
allTenants,
Config.getBaseConfigAsJsonObject(main));
for (ResourceDistributor.KeyClass key : normalisedConfigs.keySet()) {
if (key.getTenantIdentifier().equals(tenantIdentifier)) {
SQLStorage bulkImportProxyStorage = (SQLStorage) StorageLayer.getNewBulkImportProxyStorageInstance(main,
normalisedConfigs.get(key), tenantIdentifier, true);
userPoolToStorageMap.put(userPoolId, bulkImportProxyStorage);
bulkImportProxyStorage.initStorage(false, new ArrayList<>());
return bulkImportProxyStorage;
}
}
throw new TenantOrAppNotFoundException(tenantIdentifier);
}
private synchronized Storage[] getAllProxyStoragesForApp(Main main, AppIdentifier appIdentifier)
throws StorageTransactionLogicException {
try {
List<Storage> allProxyStorages = new ArrayList<>();
TenantConfig[] tenantConfigs = Multitenancy.getAllTenantsForApp(appIdentifier, main);
for (TenantConfig tenantConfig : tenantConfigs) {
allProxyStorages.add(getBulkImportProxyStorage(tenantConfig.tenantIdentifier));
}
return allProxyStorages.toArray(new Storage[0]);
} catch (TenantOrAppNotFoundException e) {
throw new StorageTransactionLogicException(new Exception("E043: " + e.getMessage()));
} catch (InvalidConfigException e) {
throw new StorageTransactionLogicException(new InvalidConfigException("E044: " + e.getMessage()));
} catch (DbInitException e) {
throw new StorageTransactionLogicException(new DbInitException("E045: " + e.getMessage()));
} catch (IOException e) {
throw new StorageTransactionLogicException(new IOException("E046: " + e.getMessage()));
}
}
/**
 * Closes every cached bulk-import proxy storage and empties the cache.
 * Close is attempted on EVERY storage even if an earlier close fails (the old
 * code stopped at the first failure and leaked the remaining connections, and
 * never cleared the map). The first failure, if any, is rethrown after the
 * cache has been cleared.
 *
 * @throws StorageQueryException the first close failure encountered
 */
private void closeAllProxyStorages() throws StorageQueryException {
    StorageQueryException firstFailure = null;
    try {
        for (SQLStorage storage : userPoolToStorageMap.values()) {
            try {
                storage.closeConnectionForBulkImportProxyStorage();
            } catch (StorageQueryException e) {
                if (firstFailure == null) {
                    firstFailure = e;
                }
            }
        }
    } finally {
        // Always drop the references, even on failure, so broken storages are
        // not handed out again by getBulkImportProxyStorage.
        userPoolToStorageMap.clear();
    }
    if (firstFailure != null) {
        throw firstFailure;
    }
}
/**
 * Groups the given bulk-import users by the proxy storage that will hold them.
 * A user's storage is determined by the first tenant id of their first login
 * method (assumes every user has at least one login method with at least one
 * tenant — TODO confirm this is validated upstream).
 *
 * @param appIdentifier app the users belong to
 * @param users         users to partition
 * @return map from proxy storage to the users that go into it
 * @throws TenantOrAppNotFoundException if a user's tenant cannot be resolved
 * @throws InvalidConfigException       if a tenant's config is invalid
 * @throws DbInitException              if creating a proxy storage fails
 * @throws IOException                  if loading a storage plugin fails
 */
private Map<SQLStorage, List<BulkImportUser>> partitionUsersByStorage(AppIdentifier appIdentifier, List<BulkImportUser> users)
        throws DbInitException, TenantOrAppNotFoundException, InvalidConfigException, IOException {
    Map<SQLStorage, List<BulkImportUser>> result = new HashMap<>();
    for (BulkImportUser user : users) {
        TenantIdentifier firstTenantIdentifier = new TenantIdentifier(appIdentifier.getConnectionUriDomain(),
                appIdentifier.getAppId(), user.loginMethods.getFirst().tenantIds.getFirst());
        SQLStorage bulkImportProxyStorage = (SQLStorage) getBulkImportProxyStorage(firstTenantIdentifier);
        // computeIfAbsent replaces the containsKey/put/get sequence in one lookup.
        result.computeIfAbsent(bulkImportProxyStorage, k -> new ArrayList<>()).add(user);
    }
    return result;
}
}

View File

@ -1,63 +0,0 @@
package io.supertokens.cronjobs.cleanupOAuthSessionsAndChallenges;
import io.supertokens.Main;
import io.supertokens.cronjobs.CronTask;
import io.supertokens.cronjobs.CronTaskTest;
import io.supertokens.pluginInterface.STORAGE_TYPE;
import io.supertokens.pluginInterface.Storage;
import io.supertokens.pluginInterface.StorageUtils;
import io.supertokens.pluginInterface.multitenancy.TenantIdentifier;
import io.supertokens.pluginInterface.oauth.OAuthStorage;
import java.util.List;
/**
 * Daily cron task that purges stale OAuth data on SQL storages: sessions and
 * M2M tokens whose expiry is more than 31 days in the past, and logout
 * challenges older than 48 hours.
 */
public class CleanupOAuthSessionsAndChallenges extends CronTask {
    public static final String RESOURCE_KEY = "io.supertokens.cronjobs.cleanupOAuthSessionsAndChallenges" +
            ".CleanupOAuthSessionsAndChallenges";

    private CleanupOAuthSessionsAndChallenges(Main main, List<List<TenantIdentifier>> tenantsInfo) {
        super("CleanupOAuthSessionsAndChallenges", main, tenantsInfo, true);
    }

    /** Registers this task with the resource distributor and returns it. */
    public static CleanupOAuthSessionsAndChallenges init(Main main, List<List<TenantIdentifier>> tenantsInfo) {
        CleanupOAuthSessionsAndChallenges task = new CleanupOAuthSessionsAndChallenges(main, tenantsInfo);
        return (CleanupOAuthSessionsAndChallenges) main.getResourceDistributor()
                .setResource(new TenantIdentifier(null, null, null), RESOURCE_KEY, task);
    }

    @Override
    protected void doTaskPerStorage(Storage storage) throws Exception {
        if (storage.getType() != STORAGE_TYPE.SQL) {
            // Only SQL storages are handled by this task.
            return;
        }
        OAuthStorage oauthStorage = StorageUtils.getOAuthStorage(storage);
        // Cutoff in SECONDS: 31 days before now.
        long cutoffSeconds = System.currentTimeMillis() / 1000 - 31 * 24 * 3600;
        oauthStorage.deleteExpiredOAuthSessions(cutoffSeconds);
        oauthStorage.deleteExpiredOAuthM2MTokens(cutoffSeconds);
        // Logout challenges use a MILLISECOND cutoff: 48 hours before now.
        long challengeCutoffMillis = System.currentTimeMillis() - 1000 * 60 * 60 * 48; // 48 hours
        oauthStorage.deleteOAuthLogoutChallengesBefore(challengeCutoffMillis);
    }

    @Override
    public int getIntervalTimeSeconds() {
        if (Main.isTesting) {
            Integer overrideSeconds = CronTaskTest.getInstance(main).getIntervalInSeconds(RESOURCE_KEY);
            if (overrideSeconds != null) {
                return overrideSeconds;
            }
        }
        // Every 24 hours.
        return 24 * 3600;
    }

    @Override
    public int getInitialWaitTimeSeconds() {
        // No initial delay under test; otherwise wait one full interval first.
        return Main.isTesting ? 0 : getIntervalTimeSeconds();
    }
}

View File

@ -1,76 +0,0 @@
/*
* Copyright (c) 2025, VRAI Labs and/or its affiliates. All rights reserved.
*
* This software is licensed under the Apache License, Version 2.0 (the
* "License") as published by the Apache Software Foundation.
*
* You may not use this file except in compliance with the License. You may
* obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package io.supertokens.cronjobs.cleanupWebauthnExpiredData;
import io.supertokens.Main;
import io.supertokens.cronjobs.CronTask;
import io.supertokens.cronjobs.CronTaskTest;
import io.supertokens.pluginInterface.STORAGE_TYPE;
import io.supertokens.pluginInterface.Storage;
import io.supertokens.pluginInterface.StorageUtils;
import io.supertokens.pluginInterface.multitenancy.TenantIdentifier;
import io.supertokens.pluginInterface.webauthn.WebAuthNStorage;
import java.util.List;
/**
 * Daily cron task that removes expired WebAuthN data from SQL storages:
 * account-recovery tokens and generated credential options past their expiry.
 */
public class CleanUpWebauthNExpiredDataCron extends CronTask {
    public static final String RESOURCE_KEY = "io.supertokens.cronjobs.cleanupWebauthnExpiredData" +
            ".CleanUpWebauthnExpiredDataCron";

    private CleanUpWebauthNExpiredDataCron(Main main, List<List<TenantIdentifier>> tenantsInfo) {
        super("CleanUpWebauthnExpiredDataCron", main, tenantsInfo, true);
    }

    /** Registers this task with the resource distributor and returns it. */
    public static CleanUpWebauthNExpiredDataCron init(Main main, List<List<TenantIdentifier>> tenantsInfo) {
        CleanUpWebauthNExpiredDataCron task = new CleanUpWebauthNExpiredDataCron(main, tenantsInfo);
        return (CleanUpWebauthNExpiredDataCron) main.getResourceDistributor()
                .setResource(new TenantIdentifier(null, null, null), RESOURCE_KEY, task);
    }

    @Override
    protected void doTaskPerStorage(Storage storage) throws Exception {
        if (storage.getType() != STORAGE_TYPE.SQL) {
            // Skip non-SQL storages.
            return;
        }
        WebAuthNStorage webAuthNStorage = StorageUtils.getWebAuthNStorage(storage);
        webAuthNStorage.deleteExpiredAccountRecoveryTokens();
        webAuthNStorage.deleteExpiredGeneratedOptions();
    }

    @Override
    public int getIntervalTimeSeconds() {
        if (Main.isTesting) {
            Integer overrideSeconds = CronTaskTest.getInstance(main).getIntervalInSeconds(RESOURCE_KEY);
            if (overrideSeconds != null) {
                return overrideSeconds;
            }
        }
        // Every 24 hours.
        return 24 * 3600;
    }

    @Override
    public int getInitialWaitTimeSeconds() {
        // No initial delay under test; otherwise wait one full interval first.
        return Main.isTesting ? 0 : getIntervalTimeSeconds();
    }
}

View File

@ -1,84 +0,0 @@
/*
* Copyright (c) 2025, VRAI Labs and/or its affiliates. All rights reserved.
*
* This software is licensed under the Apache License, Version 2.0 (the
* "License") as published by the Apache Software Foundation.
*
* You may not use this file except in compliance with the License. You may
* obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package io.supertokens.cronjobs.deadlocklogger;
import java.lang.management.ManagementFactory;
import java.lang.management.ThreadInfo;
import java.lang.management.ThreadMXBean;
import java.util.Arrays;
/**
 * Debug utility that polls the JVM for deadlocked threads on a background
 * daemon thread, printing full details of the involved threads (plus a dump of
 * all threads) to stdout when a deadlock is found, then stops checking.
 */
public class DeadlockLogger {
    private static final DeadlockLogger INSTANCE = new DeadlockLogger();

    // Guards start() so repeated calls cannot spawn duplicate watcher threads.
    private boolean started = false;

    private DeadlockLogger() {
    }

    public static DeadlockLogger getInstance() {
        return INSTANCE;
    }

    /**
     * Starts the watcher thread. Subsequent calls are no-ops (previously each
     * call spawned another watcher thread).
     */
    public synchronized void start() {
        if (started) {
            return;
        }
        started = true;
        Thread deadlockLoggerThread = new Thread(deadlockDetector, "DeadlockLoggerThread");
        deadlockLoggerThread.setDaemon(true);
        deadlockLoggerThread.start();
    }

    private final Runnable deadlockDetector = new Runnable() {
        @Override
        public void run() {
            System.out.println("DeadlockLogger started!");
            while (true) {
                System.out.println("DeadlockLogger - checking");
                ThreadMXBean bean = ManagementFactory.getThreadMXBean();
                long[] threadIds = bean.findDeadlockedThreads(); // Returns null if no threads are deadlocked.
                System.out.println("DeadlockLogger - DeadlockedThreads: " + Arrays.toString(threadIds));
                if (threadIds != null) {
                    ThreadInfo[] infos = bean.getThreadInfo(threadIds);
                    boolean deadlockFound = false;
                    System.out.println("DEADLOCK found!");
                    for (ThreadInfo info : infos) {
                        if (info == null) {
                            // Thread terminated between detection and lookup.
                            continue;
                        }
                        System.out.println("ThreadName: " + info.getThreadName());
                        System.out.println("Thread ID: " + info.getThreadId());
                        System.out.println("LockName: " + info.getLockName());
                        System.out.println("LockOwnerName: " + info.getLockOwnerName());
                        System.out.println("LockedMonitors: " + Arrays.toString(info.getLockedMonitors()));
                        System.out.println("LockInfo: " + info.getLockInfo());
                        System.out.println("Stack: " + Arrays.toString(info.getStackTrace()));
                        System.out.println();
                        deadlockFound = true;
                    }
                    System.out.println("*******************************");
                    if (deadlockFound) {
                        System.out.println(" ==== ALL THREAD INFO ===");
                        ThreadInfo[] allThreads = bean.dumpAllThreads(true, true, 100);
                        for (ThreadInfo threadInfo : allThreads) {
                            System.out.println("THREAD: " + threadInfo.getThreadName());
                            System.out.println("StackTrace: " + Arrays.toString(threadInfo.getStackTrace()));
                        }
                        break;
                    }
                }
                try {
                    Thread.sleep(10000);
                } catch (InterruptedException e) {
                    // Restore the interrupt flag and exit the watcher instead of
                    // crashing it with a RuntimeException (old behavior).
                    Thread.currentThread().interrupt();
                    return;
                }
            }
        }
    };
}

View File

@ -1,53 +0,0 @@
package io.supertokens.cronjobs.deleteExpiredSAMLData;
import java.util.List;
import io.supertokens.Main;
import io.supertokens.cronjobs.CronTask;
import io.supertokens.cronjobs.CronTaskTest;
import io.supertokens.pluginInterface.Storage;
import io.supertokens.pluginInterface.StorageUtils;
import io.supertokens.pluginInterface.multitenancy.TenantIdentifier;
import io.supertokens.pluginInterface.saml.SAMLStorage;
/**
 * Hourly cron task that removes expired SAML codes and relay states.
 */
public class DeleteExpiredSAMLData extends CronTask {
    public static final String RESOURCE_KEY = "io.supertokens.cronjobs.deleteExpiredSAMLData" +
            ".DeleteExpiredSAMLData";

    private DeleteExpiredSAMLData(Main main, List<List<TenantIdentifier>> tenantsInfo) {
        super("DeleteExpiredSAMLData", main, tenantsInfo, false);
    }

    /** Registers this task with the resource distributor and returns it. */
    public static DeleteExpiredSAMLData init(Main main, List<List<TenantIdentifier>> tenantsInfo) {
        DeleteExpiredSAMLData task = new DeleteExpiredSAMLData(main, tenantsInfo);
        return (DeleteExpiredSAMLData) main.getResourceDistributor()
                .setResource(new TenantIdentifier(null, null, null), RESOURCE_KEY, task);
    }

    @Override
    protected void doTaskPerStorage(Storage storage) throws Exception {
        // NOTE(review): unlike sibling cron tasks there is no SQL-only guard
        // here — confirm this is intentional for SAML storages.
        StorageUtils.getSAMLStorage(storage).removeExpiredSAMLCodesAndRelayStates();
    }

    @Override
    public int getIntervalTimeSeconds() {
        if (Main.isTesting) {
            Integer overrideSeconds = CronTaskTest.getInstance(main).getIntervalInSeconds(RESOURCE_KEY);
            if (overrideSeconds != null) {
                return overrideSeconds;
            }
        }
        // Every hour
        return 3600;
    }

    @Override
    public int getInitialWaitTimeSeconds() {
        // No initial delay under test; otherwise wait one full interval first.
        return Main.isTesting ? 0 : getIntervalTimeSeconds();
    }
}

Some files were not shown because too many files have changed in this diff Show More