Compare commits


2 Commits
master ... 9.1

Author SHA1 Message Date
Sattvik Chakravarthy 763616fe37 fix: flaky test 2025-08-19 12:12:28 +05:30
Tamas Soltesz 0d7160d1fc
backport: logs to otel (#1165)
* backport: logs to otel

* fix: for backport release 9.1

* fix: config description

---------

Co-authored-by: Sattvik Chakravarthy <sattvik@gmail.com>
2025-08-18 14:00:19 +05:30
558 changed files with 4308 additions and 42423 deletions

View File

@@ -1,64 +0,0 @@
FROM ubuntu:22.04
RUN apt-get update -y
#&& apt-get upgrade -y
RUN apt-get install build-essential -y --fix-missing
RUN echo "mysql-server mysql-server/root_password password root" | debconf-set-selections
RUN echo "mysql-server mysql-server/root_password_again password root" | debconf-set-selections
RUN apt install mysql-server -y
RUN usermod -d /var/lib/mysql/ mysql
RUN [ -d /var/run/mysqld ] || mkdir -p /var/run/mysqld
ADD ./runMySQL.sh /runMySQL.sh
RUN chmod +x /runMySQL.sh
RUN apt-get install -y git-core
RUN apt-get install -y wget
RUN mkdir /usr/java
RUN apt-get install jq -y
RUN apt-get install curl -y
RUN apt-get install unzip -y
# Install OpenJDK 21.0.7
RUN wget https://github.com/adoptium/temurin21-binaries/releases/download/jdk-21.0.7%2B6/OpenJDK21U-jdk_x64_linux_hotspot_21.0.7_6.tar.gz
RUN mv OpenJDK21U-jdk_x64_linux_hotspot_21.0.7_6.tar.gz /usr/java
RUN cd /usr/java && tar -xzvf OpenJDK21U-jdk_x64_linux_hotspot_21.0.7_6.tar.gz -C /usr/java/
RUN mv /usr/java/jdk-21.0.7+6 /usr/java/jdk-21.0.7
RUN echo 'JAVA_HOME=/usr/java/jdk-21.0.7' >> /etc/profile
RUN echo 'JRE_HOME=/usr/java/jdk-21.0.7' >> /etc/profile
RUN echo 'PATH=$PATH:$HOME/bin:$JAVA_HOME/bin' >> /etc/profile
RUN echo 'export JAVA_HOME' >> /etc/profile
RUN echo 'export JRE_HOME' >> /etc/profile
RUN echo 'export PATH' >> /etc/profile
RUN update-alternatives --install "/usr/bin/java" "java" "/usr/java/jdk-21.0.7/bin/java" 1
RUN update-alternatives --install "/usr/bin/javac" "javac" "/usr/java/jdk-21.0.7/bin/javac" 1
#install postgres 13
# Import Repository Signing Key
RUN wget --quiet -O - https://www.postgresql.org/media/keys/ACCC4CF8.asc | apt-key add -
RUN DEBIAN_FRONTEND=noninteractive TZ=Etc/UTC apt-get -y install tzdata
RUN apt install curl gpg gnupg2 software-properties-common apt-transport-https lsb-release ca-certificates sudo -y
# Add PostgreSQL repository
RUN echo "deb http://apt.postgresql.org/pub/repos/apt/ `lsb_release -cs`-pgdg main" | tee /etc/apt/sources.list.d/pgdg.list
# Update again
RUN apt update
# Install PostgreSQL 13
RUN apt install -y postgresql-13
# Verify PostgreSQL 13 Installation on Ubuntu 22.04|20.04|18.04
RUN psql --version
# Manage PostgreSQL 13 service
#you can manage with `service postgresql start`
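For context, this removed Dockerfile appears to build the CI base image (presumably the `tamassupertokens/supertokens_core_testing` image referenced in the CircleCI config later in this diff; that mapping is an assumption). A minimal sketch of building and entering it, with the Dockerfile path left as a placeholder since the filename is not shown here:

```bash
# Illustrative only: build the CI base image from this Dockerfile and open a shell in it.
docker build -f <path-to-this-Dockerfile> -t tamassupertokens/supertokens_core_testing .
docker run --rm -it tamassupertokens/supertokens_core_testing bash
```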

View File

@@ -1,57 +0,0 @@
FROM ubuntu:16.04
RUN apt-get update && apt-get upgrade -y
RUN apt-get install build-essential -y
RUN echo "mysql-server mysql-server/root_password password root" | debconf-set-selections
RUN echo "mysql-server mysql-server/root_password_again password root" | debconf-set-selections
RUN apt install mysql-server -y
RUN usermod -d /var/lib/mysql/ mysql
RUN mkdir /var/run/mysqld
ADD ./runMySQL.sh /runMySQL.sh
RUN chmod +x /runMySQL.sh
RUN apt-get install -y git-core
RUN apt-get install -y wget
# Install OpenJDK 12
RUN wget https://download.java.net/java/GA/jdk12.0.2/e482c34c86bd4bf8b56c0b35558996b9/10/GPL/openjdk-12.0.2_linux-x64_bin.tar.gz
RUN mkdir /usr/java
RUN mv openjdk-12.0.2_linux-x64_bin.tar.gz /usr/java
RUN cd /usr/java && tar -xzvf openjdk-12.0.2_linux-x64_bin.tar.gz
RUN echo 'JAVA_HOME=/usr/java/jdk-12.0.2' >> /etc/profile
RUN echo 'PATH=$PATH:$HOME/bin:$JAVA_HOME/bin' >> /etc/profile
RUN apt-get install jq -y
RUN apt-get install curl -y
RUN apt-get install unzip -y
# Install OpenJDK 21.0.7
RUN wget https://download.java.net/java/GA/jdk21.0.7/51f4f36ad4ef43e39d0dfdbaf6549e32/9/GPL/openjdk-21.0.7_linux-x64_bin.tar.gz
RUN mv openjdk-21.0.7_linux-x64_bin.tar.gz /usr/java
RUN cd /usr/java && tar -xzvf openjdk-21.0.7_linux-x64_bin.tar.gz
RUN echo 'JAVA_HOME=/usr/java/jdk-21.0.7' >> /etc/profile
RUN echo 'PATH=$PATH:$HOME/bin:$JAVA_HOME/bin' >> /etc/profile
RUN echo 'export JAVA_HOME' >> /etc/profile
RUN echo 'export JRE_HOME' >> /etc/profile
RUN echo 'export PATH' >> /etc/profile
RUN update-alternatives --install "/usr/bin/java" "java" "/usr/java/jdk-12.0.2/bin/java" 1
RUN update-alternatives --install "/usr/bin/javac" "javac" "/usr/java/jdk-12.0.2/bin/javac" 1

View File

@@ -1,57 +0,0 @@
FROM ubuntu:18.04
RUN apt-get update && apt-get upgrade -y
RUN apt-get install build-essential -y
RUN echo "mysql-server mysql-server/root_password password root" | debconf-set-selections
RUN echo "mysql-server mysql-server/root_password_again password root" | debconf-set-selections
RUN apt install mysql-server -y
RUN usermod -d /var/lib/mysql/ mysql
RUN mkdir /var/run/mysqld
ADD ./runMySQL.sh /runMySQL.sh
RUN chmod +x /runMySQL.sh
RUN apt-get install -y git-core
RUN apt-get install -y wget
# Install OpenJDK 12
RUN wget https://download.java.net/java/GA/jdk12.0.2/e482c34c86bd4bf8b56c0b35558996b9/10/GPL/openjdk-12.0.2_linux-x64_bin.tar.gz
RUN mkdir /usr/java
RUN mv openjdk-12.0.2_linux-x64_bin.tar.gz /usr/java
RUN cd /usr/java && tar -xzvf openjdk-12.0.2_linux-x64_bin.tar.gz
RUN echo 'JAVA_HOME=/usr/java/jdk-12.0.2' >> /etc/profile
RUN echo 'PATH=$PATH:$HOME/bin:$JAVA_HOME/bin' >> /etc/profile
RUN apt-get install jq -y
RUN apt-get install curl -y
RUN apt-get install unzip -y
# Install OpenJDK 15.0.1
RUN wget https://download.java.net/java/GA/jdk15.0.1/51f4f36ad4ef43e39d0dfdbaf6549e32/9/GPL/openjdk-15.0.1_linux-x64_bin.tar.gz
RUN mv openjdk-15.0.1_linux-x64_bin.tar.gz /usr/java
RUN cd /usr/java && tar -xzvf openjdk-15.0.1_linux-x64_bin.tar.gz
RUN echo 'JAVA_HOME=/usr/java/jdk-15.0.1' >> /etc/profile
RUN echo 'PATH=$PATH:$HOME/bin:$JAVA_HOME/bin' >> /etc/profile
RUN echo 'export JAVA_HOME' >> /etc/profile
RUN echo 'export JRE_HOME' >> /etc/profile
RUN echo 'export PATH' >> /etc/profile
RUN update-alternatives --install "/usr/bin/java" "java" "/usr/java/jdk-12.0.2/bin/java" 1
RUN update-alternatives --install "/usr/bin/javac" "javac" "/usr/java/jdk-12.0.2/bin/javac" 1

View File

@@ -1,63 +0,0 @@
FROM ubuntu:22.04
RUN apt-get update && apt-get upgrade -y
RUN apt-get install build-essential -y
RUN echo "mysql-server mysql-server/root_password password root" | debconf-set-selections
RUN echo "mysql-server mysql-server/root_password_again password root" | debconf-set-selections
RUN apt install mysql-server -y
RUN usermod -d /var/lib/mysql/ mysql
RUN [ -d /var/run/mysqld ] || mkdir -p /var/run/mysqld
ADD ./runMySQL.sh /runMySQL.sh
RUN chmod +x /runMySQL.sh
RUN apt-get install -y git-core
RUN apt-get install -y wget
RUN mkdir /usr/java
RUN apt-get install jq -y
RUN apt-get install curl -y
RUN apt-get install unzip -y
# Install OpenJDK 21.0.7
RUN wget https://github.com/adoptium/temurin21-binaries/releases/download/jdk-21.0.7%2B6/OpenJDK21U-jdk_x64_linux_hotspot_21.0.7_6.tar.gz
RUN mv OpenJDK21U-jdk_x64_linux_hotspot_21.0.7_6.tar.gz /usr/java
RUN mkdir -p /usr/java/jdk-21.0.7
RUN cd /usr/java && tar -xzvf OpenJDK21U-jdk_x64_linux_hotspot_21.0.7_6.tar.gz -C /usr/java/jdk-21.0.7
RUN echo 'JAVA_HOME=/usr/java/jdk-21.0.7' >> /etc/profile
RUN echo 'PATH=$PATH:$HOME/bin:$JAVA_HOME/bin' >> /etc/profile
RUN echo 'export JAVA_HOME' >> /etc/profile
RUN echo 'export JRE_HOME' >> /etc/profile
RUN echo 'export PATH' >> /etc/profile
RUN update-alternatives --install "/usr/bin/java" "java" "/usr/java/jdk-21.0.7/bin/java" 1
RUN update-alternatives --install "/usr/bin/javac" "javac" "/usr/java/jdk-21.0.7/bin/javac" 1
#install postgres 13
# Import Repository Signing Key
RUN wget --quiet -O - https://www.postgresql.org/media/keys/ACCC4CF8.asc | apt-key add -
RUN DEBIAN_FRONTEND=noninteractive TZ=Etc/UTC apt-get -y install tzdata
RUN apt install curl gpg gnupg2 software-properties-common apt-transport-https lsb-release ca-certificates sudo -y
# Add PostgreSQL repository
RUN echo "deb http://apt.postgresql.org/pub/repos/apt/ `lsb_release -cs`-pgdg main" | tee /etc/apt/sources.list.d/pgdg.list
# Update again
RUN apt update
# Install PostgreSQL 13
RUN apt install -y postgresql-13
# Verify PostgreSQL 13 Installation on Ubuntu 22.04|20.04|18.04
RUN psql --version
# Manage PostgreSQL 13 service
#you can manage with `service postgresql start`

View File

@@ -1,94 +0,0 @@
version: 2.1
orbs:
slack: circleci/slack@3.4.2
jobs:
test:
docker:
- image: tamassupertokens/supertokens_core_testing
- image: rishabhpoddar/oauth-server-cicd
- image: mongo
environment:
MONGO_INITDB_ROOT_USERNAME: root
MONGO_INITDB_ROOT_PASSWORD: root
resource_class: large
parallelism: 4
parameters:
plugin:
type: string
steps:
- checkout
- run: mkdir ~/junit
- run: echo $'\n[mysqld]\ncharacter_set_server=utf8mb4\nmax_connections=10000' >> /etc/mysql/mysql.cnf
- run: echo "host all all 0.0.0.0/0 md5" >> /etc/postgresql/13/main/pg_hba.conf
- run: echo "listen_addresses='*'" >> /etc/postgresql/13/main/postgresql.conf
- run: sed -i 's/^#*\s*max_connections\s*=.*/max_connections = 10000/' /etc/postgresql/13/main/postgresql.conf
- run: (cd .circleci/ && ./doTests.sh << parameters.plugin >>)
- store_test_results:
path: ~/junit
- slack/status
mark-passed:
docker:
- image: tamassupertokens/supertokens_core_testing
steps:
- checkout
- run: (cd .circleci && ./markPassed.sh)
- slack/status
workflows:
version: 2
tagged-build:
jobs:
- test:
plugin: sqlite
name: test-sqlite
context:
- slack-notification
filters:
tags:
only: /dev-v[0-9]+(\.[0-9]+)*/
branches:
only: /test-cicd\/.*/
- test:
plugin: mongodb
name: test-mongodb
context:
- slack-notification
filters:
tags:
only: /dev-v[0-9]+(\.[0-9]+)*/
branches:
only: /test-cicd\/.*/
- test:
plugin: postgresql
name: test-postgresql
context:
- slack-notification
filters:
tags:
only: /dev-v[0-9]+(\.[0-9]+)*/
branches:
only: /test-cicd\/.*/
- test:
plugin: mysql
name: test-mysql
context:
- slack-notification
filters:
tags:
only: /dev-v[0-9]+(\.[0-9]+)*/
branches:
only: /test-cicd\/.*/
- mark-passed:
context:
- slack-notification
filters:
tags:
only: /dev-v[0-9]+(\.[0-9]+)*/
branches:
ignore: /.*/
requires:
- test-sqlite
- test-mongodb
- test-postgresql
- test-mysql
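The tag and branch filters above restricted this (now removed) pipeline to `dev-v*` release tags and `test-cicd/*` branches. A minimal sketch of triggering it by pushing a matching tag; the version number is illustrative, not taken from this diff:

```bash
# The workflows above only run for tags matching /dev-v[0-9]+(\.[0-9]+)*/ .
git tag dev-v9.1.3            # hypothetical version, for illustration only
git push origin dev-v9.1.3
```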

View File

@@ -1,260 +0,0 @@
function cleanup {
if test -f "pluginInterfaceExactVersionsOutput"; then
rm pluginInterfaceExactVersionsOutput
fi
}
trap cleanup EXIT
cleanup
pluginToTest=$1
pinnedDBJson=$(curl -s -X GET \
'https://api.supertokens.io/0/plugin/pinned?planType=FREE' \
-H 'api-version: 0')
pinnedDBLength=$(echo "$pinnedDBJson" | jq ".plugins | length")
pinnedDBArray=$(echo "$pinnedDBJson" | jq ".plugins")
echo "got pinned dbs..."
pluginInterfaceJson=$(cat ../pluginInterfaceSupported.json)
pluginInterfaceLength=$(echo "$pluginInterfaceJson" | jq ".versions | length")
pluginInterfaceArray=$(echo "$pluginInterfaceJson" | jq ".versions")
echo "got plugin interface relations"
coreDriverJson=$(cat ../coreDriverInterfaceSupported.json)
coreDriverArray=$(echo "$coreDriverJson" | jq ".versions")
echo "got core driver relations"
./getPluginInterfaceExactVersions.sh "$pluginInterfaceLength" "$pluginInterfaceArray"
if [[ $? -ne 0 ]]
then
echo "all plugin interfaces found... failed. exiting!"
exit 1
else
echo "all plugin interfaces found..."
fi
# get core version
coreVersion=$(cat ../build.gradle | grep -e "version =" -e "version=")
while IFS='"' read -ra ADDR; do
counter=0
for i in "${ADDR[@]}"; do
if [ $counter == 1 ]
then
coreVersion=$i
fi
counter=$(($counter+1))
done
done <<< "$coreVersion"
responseStatus=$(curl -s -o /dev/null -w "%{http_code}" -X PUT \
https://api.supertokens.io/0/core \
-H 'Content-Type: application/json' \
-H 'api-version: 0' \
-d "{
\"password\": \"$SUPERTOKENS_API_KEY\",
\"planType\":\"FREE\",
\"version\":\"$coreVersion\",
\"pluginInterfaces\": $pluginInterfaceArray,
\"coreDriverInterfaces\": $coreDriverArray
}")
if [ "$responseStatus" -ne "200" ]
then
echo "failed core PUT API status code: $responseStatus. Exiting!"
exit 1
fi
mkdir -p ~/junit
someTestsRan=false
while read -u 10 line
do
if [[ $line = "" ]]; then
continue
fi
i=0
currTag=$(echo "$line" | jq .tag)
currTag=$(echo "$currTag" | tr -d '"')
currVersion=$(echo "$line" | jq .version)
currVersion=$(echo "$currVersion" | tr -d '"')
piX=$(cut -d'.' -f1 <<<"$currVersion")
piY=$(cut -d'.' -f2 <<<"$currVersion")
piVersion="$piX.$piY"
while [ $i -lt "$pinnedDBLength" ]; do
someTestsRan=true
currPinnedDb=$(echo "$pinnedDBArray" | jq ".[$i]")
currPinnedDb=$(echo "$currPinnedDb" | tr -d '"')
i=$((i+1))
if [[ $currPinnedDb == $pluginToTest ]]
then
echo ""
echo ""
echo ""
echo ""
echo ""
echo "===== testing $currPinnedDb with plugin-interface $currVersion ====="
echo ""
echo ""
echo ""
echo ""
echo ""
if [[ $currPinnedDb == "sqlite" ]]
then
# shellcheck disable=SC2034
continue=1
else
response=$(curl -s -X GET \
"https://api.supertokens.io/0/plugin-interface/dependency/plugin/latest?password=$SUPERTOKENS_API_KEY&planType=FREE&mode=DEV&version=$piVersion&pluginName=$currPinnedDb" \
-H 'api-version: 0')
if [[ $(echo "$response" | jq .plugin) == "null" ]]
then
echo "fetching latest X.Y version for $currPinnedDb given plugin-interface X.Y version: $piVersion gave response: $response"
exit 1
fi
pinnedDbVersionX2=$(echo $response | jq .plugin | tr -d '"')
response=$(curl -s -X GET \
"https://api.supertokens.io/0/plugin/latest?password=$SUPERTOKENS_API_KEY&planType=FREE&mode=DEV&version=$pinnedDbVersionX2&name=$currPinnedDb" \
-H 'api-version: 0')
if [[ $(echo "$response" | jq .tag) == "null" ]]
then
echo "fetching latest X.Y.Z version for $currPinnedDb, X.Y version: $pinnedDbVersionX2 gave response: $response"
exit 1
fi
pinnedDbVersionTag=$(echo "$response" | jq .tag | tr -d '"')
pinnedDbVersion=$(echo "$response" | jq .version | tr -d '"')
./startDb.sh "$currPinnedDb"
fi
cd ../../
git clone git@github.com:supertokens/supertokens-root.git
cd supertokens-root
rm gradle.properties
update-alternatives --install "/usr/bin/java" "java" "/usr/java/jdk-21.0.7/bin/java" 2
update-alternatives --install "/usr/bin/javac" "javac" "/usr/java/jdk-21.0.7/bin/javac" 2
coreX=$(cut -d'.' -f1 <<<"$coreVersion")
coreY=$(cut -d'.' -f2 <<<"$coreVersion")
if [[ $currPinnedDb == "sqlite" ]]
then
echo -e "core,$coreX.$coreY\nplugin-interface,$piVersion" > modules.txt
else
echo -e "core,$coreX.$coreY\nplugin-interface,$piVersion\n$currPinnedDb-plugin,$pinnedDbVersionX2" > modules.txt
fi
./loadModules
cd supertokens-core
git checkout dev-v$coreVersion
cd ../supertokens-plugin-interface
git checkout $currTag
if [[ $currPinnedDb == "sqlite" ]]
then
# shellcheck disable=SC2034
continue=1
else
cd ../supertokens-$currPinnedDb-plugin
git checkout $pinnedDbVersionTag
fi
cd ../
echo $SUPERTOKENS_API_KEY > apiPassword
./startTestingEnv --cicd
TEST_EXIT_CODE=$?
if [ -d ~/junit ]
then
echo "Copying output from core"
cp ~/supertokens-root/supertokens-core/build/test-results/test/*.xml ~/junit/
if [[ $pluginToTest != "sqlite" ]]
then
echo "Copying output from plugin"
cp ~/supertokens-root/supertokens-$pluginToTest-plugin/build/test-results/test/*.xml ~/junit/
fi
fi
if [[ $TEST_EXIT_CODE -ne 0 ]]
then
echo ""
echo ""
echo ""
echo ""
echo ""
echo "===== testing $currPinnedDb with plugin-interface $currVersion FAILED ====="
echo ""
echo ""
echo ""
echo ""
echo ""
cat logs/*
cd ../project/
echo "test failed... exiting!"
exit 1
fi
echo ""
echo ""
echo ""
echo ""
echo ""
echo "===== testing $currPinnedDb with plugin-interface $currVersion SUCCEEDED ====="
echo ""
echo ""
echo ""
echo ""
echo ""
cd ..
rm -rf supertokens-root
if [[ $currPinnedDb == "sqlite" ]]
then
# shellcheck disable=SC2034
continue=1
else
curl -o supertokens.zip -s -X GET \
"https://api.supertokens.io/0/app/download?pluginName=$currPinnedDb&os=linux&mode=DEV&binary=FREE&targetCore=$coreVersion&targetPlugin=$pinnedDbVersion" \
-H 'api-version: 0'
unzip supertokens.zip -d .
rm supertokens.zip
cd supertokens
../project/.circleci/testCli.sh
if [[ $? -ne 0 ]]
then
echo "cli testing failed... exiting!"
exit 1
fi
cd ../
fi
rm -rf supertokens
cd project/.circleci
if [[ $currPinnedDb == "sqlite" ]]
then
# shellcheck disable=SC2034
continue=1
else
./stopDb.sh $currPinnedDb
fi
fi
done
done 10<pluginInterfaceExactVersionsOutput
if [[ $someTestsRan = "true" ]]
then
echo "tests ran successfully"
else
echo "no test ran"
exit 1
fi
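A minimal sketch of how this script was driven, mirroring the CircleCI test job above (`(cd .circleci/ && ./doTests.sh << parameters.plugin >>)`); the API key value is a placeholder:

```bash
# Run the removed CI test driver for one plugin, as the CircleCI job did.
export SUPERTOKENS_API_KEY="<placeholder>"   # consumed by the curl calls inside the script
cd .circleci/
./doTests.sh postgresql                      # plugin: sqlite | mongodb | postgresql | mysql
```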

View File

@@ -1,19 +0,0 @@
# args: <length of array> <array like ["0.0", "0.1"]>
touch pluginInterfaceExactVersionsOutput
i=0
while [ $i -lt $1 ]; do
currVersion=`echo $2 | jq ".[$i]"`
currVersion=`echo $currVersion | tr -d '"'`
i=$((i+1))
# now we have the current version like 0.0.
# We now have to find something that matches dev-v0.0.* or v0.0.*
response=`curl -s -X GET \
"https://api.supertokens.io/0/plugin-interface/latest?password=$SUPERTOKENS_API_KEY&planType=FREE&mode=DEV&version=$currVersion" \
-H 'api-version: 0'`
if [[ `echo $response | jq .tag` == "null" ]]
then
echo $response
exit 1
fi
echo $response >> pluginInterfaceExactVersionsOutput
done
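For context, `doTests.sh` above feeds this helper the length and JSON array read from `pluginInterfaceSupported.json`; an equivalent stand-alone invocation, copied from that script:

```bash
# Same invocation as in doTests.sh earlier in this diff.
pluginInterfaceJson=$(cat ../pluginInterfaceSupported.json)
pluginInterfaceLength=$(echo "$pluginInterfaceJson" | jq ".versions | length")
pluginInterfaceArray=$(echo "$pluginInterfaceJson" | jq ".versions")
./getPluginInterfaceExactVersions.sh "$pluginInterfaceLength" "$pluginInterfaceArray"
```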

View File

@@ -1,29 +0,0 @@
coreVersion=$(cat ../build.gradle | grep -e "version =" -e "version=")
while IFS='"' read -ra ADDR; do
counter=0
for i in "${ADDR[@]}"; do
if [ $counter == 1 ]
then
coreVersion=$i
fi
counter=$(($counter+1))
done
done <<< "$coreVersion"
echo "calling /core PATCH to make testing passed"
responseStatus=$(curl -s -o /dev/null -w "%{http_code}" -X PATCH \
https://api.supertokens.io/0/core \
-H 'Content-Type: application/json' \
-H 'api-version: 0' \
-d "{
\"password\": \"$SUPERTOKENS_API_KEY\",
\"planType\":\"FREE\",
\"version\":\"$coreVersion\",
\"testPassed\": true
}")
if [ "$responseStatus" -ne "200" ]
then
echo "patch api failed"
exit 1
fi
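As with `doTests.sh`, this script reads `SUPERTOKENS_API_KEY` from the environment; a sketch of the invocation used by the `mark-passed` CircleCI job above, with the key left as a placeholder:

```bash
export SUPERTOKENS_API_KEY="<placeholder>"
(cd .circleci && ./markPassed.sh)            # as in the mark-passed job above
```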

View File

@@ -1 +0,0 @@
chown -R mysql:mysql /var/lib/mysql /var/run/mysqld && service mysql start

View File

@@ -1,113 +0,0 @@
case $1 in
mysql)
(cd / && ./runMySQL.sh)
mysql -u root --password=root -e "CREATE DATABASE supertokens;"
mysql -u root --password=root -e "CREATE DATABASE st0;"
mysql -u root --password=root -e "CREATE DATABASE st1;"
mysql -u root --password=root -e "CREATE DATABASE st2;"
mysql -u root --password=root -e "CREATE DATABASE st3;"
mysql -u root --password=root -e "CREATE DATABASE st4;"
mysql -u root --password=root -e "CREATE DATABASE st5;"
mysql -u root --password=root -e "CREATE DATABASE st6;"
mysql -u root --password=root -e "CREATE DATABASE st7;"
mysql -u root --password=root -e "CREATE DATABASE st8;"
mysql -u root --password=root -e "CREATE DATABASE st9;"
mysql -u root --password=root -e "CREATE DATABASE st10;"
mysql -u root --password=root -e "CREATE DATABASE st11;"
mysql -u root --password=root -e "CREATE DATABASE st12;"
mysql -u root --password=root -e "CREATE DATABASE st13;"
mysql -u root --password=root -e "CREATE DATABASE st14;"
mysql -u root --password=root -e "CREATE DATABASE st15;"
mysql -u root --password=root -e "CREATE DATABASE st16;"
mysql -u root --password=root -e "CREATE DATABASE st17;"
mysql -u root --password=root -e "CREATE DATABASE st18;"
mysql -u root --password=root -e "CREATE DATABASE st19;"
mysql -u root --password=root -e "CREATE DATABASE st20;"
mysql -u root --password=root -e "CREATE DATABASE st21;"
mysql -u root --password=root -e "CREATE DATABASE st22;"
mysql -u root --password=root -e "CREATE DATABASE st23;"
mysql -u root --password=root -e "CREATE DATABASE st24;"
mysql -u root --password=root -e "CREATE DATABASE st25;"
mysql -u root --password=root -e "CREATE DATABASE st26;"
mysql -u root --password=root -e "CREATE DATABASE st27;"
mysql -u root --password=root -e "CREATE DATABASE st28;"
mysql -u root --password=root -e "CREATE DATABASE st29;"
mysql -u root --password=root -e "CREATE DATABASE st30;"
mysql -u root --password=root -e "CREATE DATABASE st31;"
mysql -u root --password=root -e "CREATE DATABASE st32;"
mysql -u root --password=root -e "CREATE DATABASE st33;"
mysql -u root --password=root -e "CREATE DATABASE st34;"
mysql -u root --password=root -e "CREATE DATABASE st35;"
mysql -u root --password=root -e "CREATE DATABASE st36;"
mysql -u root --password=root -e "CREATE DATABASE st37;"
mysql -u root --password=root -e "CREATE DATABASE st38;"
mysql -u root --password=root -e "CREATE DATABASE st39;"
mysql -u root --password=root -e "CREATE DATABASE st40;"
mysql -u root --password=root -e "CREATE DATABASE st41;"
mysql -u root --password=root -e "CREATE DATABASE st42;"
mysql -u root --password=root -e "CREATE DATABASE st43;"
mysql -u root --password=root -e "CREATE DATABASE st44;"
mysql -u root --password=root -e "CREATE DATABASE st45;"
mysql -u root --password=root -e "CREATE DATABASE st46;"
mysql -u root --password=root -e "CREATE DATABASE st47;"
mysql -u root --password=root -e "CREATE DATABASE st48;"
mysql -u root --password=root -e "CREATE DATABASE st49;"
mysql -u root --password=root -e "CREATE DATABASE st50;"
;;
postgresql)
service postgresql start
sudo -u postgres psql --command "CREATE USER root WITH SUPERUSER PASSWORD 'root';"
createdb
psql -c "create database supertokens;"
psql -c "create database st0;"
psql -c "create database st1;"
psql -c "create database st2;"
psql -c "create database st3;"
psql -c "create database st4;"
psql -c "create database st5;"
psql -c "create database st6;"
psql -c "create database st7;"
psql -c "create database st8;"
psql -c "create database st9;"
psql -c "create database st10;"
psql -c "create database st11;"
psql -c "create database st12;"
psql -c "create database st13;"
psql -c "create database st14;"
psql -c "create database st15;"
psql -c "create database st16;"
psql -c "create database st17;"
psql -c "create database st18;"
psql -c "create database st19;"
psql -c "create database st20;"
psql -c "create database st21;"
psql -c "create database st22;"
psql -c "create database st23;"
psql -c "create database st24;"
psql -c "create database st25;"
psql -c "create database st26;"
psql -c "create database st27;"
psql -c "create database st28;"
psql -c "create database st29;"
psql -c "create database st30;"
psql -c "create database st31;"
psql -c "create database st32;"
psql -c "create database st33;"
psql -c "create database st34;"
psql -c "create database st35;"
psql -c "create database st36;"
psql -c "create database st37;"
psql -c "create database st38;"
psql -c "create database st39;"
psql -c "create database st40;"
psql -c "create database st41;"
psql -c "create database st42;"
psql -c "create database st43;"
psql -c "create database st44;"
psql -c "create database st45;"
psql -c "create database st46;"
psql -c "create database st47;"
psql -c "create database st48;"
psql -c "create database st49;"
psql -c "create database st50;"
esac
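The repeated `CREATE DATABASE stN` statements above could equivalently be generated in a loop; a behavior-preserving sketch for the `mysql)` branch (the `postgresql)` branch would be analogous with `psql -c`):

```bash
# Same effect as the hand-written statements in the mysql) branch above.
mysql -u root --password=root -e "CREATE DATABASE supertokens;"
for i in $(seq 0 50); do
  mysql -u root --password=root -e "CREATE DATABASE st$i;"
done
```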

View File

@@ -1,8 +0,0 @@
case $1 in
mysql)
service mysql stop
;;
postgresql)
service postgresql stop
;;
esac

View File

@@ -1,71 +0,0 @@
# inside supertokens downloaded zip
./install
if [[ $? -ne 0 ]]
then
echo "cli testing failed... exiting!"
exit 1
fi
supertokens start --port=8888
if [[ $? -ne 0 ]]
then
echo "cli testing failed... exiting!"
exit 1
fi
supertokens list
if [[ $? -ne 0 ]]
then
echo "cli testing failed... exiting!"
exit 1
fi
sed -i 's/# mysql_connection_uri:/mysql_connection_uri: "mysql:\/\/root:root@localhost:3306?rewriteBatchedStatements=true"/g' /usr/lib/supertokens/config.yaml
sed -i 's/# mongodb_connection_uri:/mongodb_connection_uri: mongodb:\/\/root:root@localhost:27017/g' /usr/lib/supertokens/config.yaml
sed -i 's/# disable_telemetry:/disable_telemetry: true/g' /usr/lib/supertokens/config.yaml
supertokens start --port=8889
supertokens list
if [[ $? -ne 0 ]]
then
echo "cli testing failed... exiting!"
exit 1
fi
curl http://localhost:8889/hello
if [[ $? -ne 0 ]]
then
echo "cli testing failed... exiting!"
exit 1
fi
curl http://localhost:8888/hello
if [[ $? -ne 0 ]]
then
echo "cli testing failed... exiting!"
exit 1
fi
supertokens stop
if [[ $? -ne 0 ]]
then
echo "cli testing failed... exiting!"
exit 1
fi
supertokens uninstall
if [[ $? -ne 0 ]]
then
echo "cli testing failed... exiting!"
exit 1
fi
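For reference, `doTests.sh` earlier in this diff runs this script from inside the unzipped download, as sketched below:

```bash
# Invocation as it appears in doTests.sh.
unzip supertokens.zip -d .
cd supertokens
../project/.circleci/testCli.sh
```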

View File

@@ -37,7 +37,6 @@ highlighting the necessary changes)
- If no such branch exists, then create one from the latest released branch.
- [ ] If added a foreign key constraint on `app_id_to_user_id` table, make sure to delete from this table when deleting
the user as well if `deleteUserIdMappingToo` is false.
- [ ] If added a new recipe, then make sure to update the bulk import API to include the new recipe.
## Remaining TODOs for this PR

View File

@@ -1,76 +0,0 @@
FROM ubuntu:22.04 AS tmp
RUN apt-get update && apt-get upgrade -y
RUN apt-get install build-essential -y
RUN apt-get install -y git-core wget unzip jq curl
# Install OpenJDK 21.0.7
RUN wget https://github.com/adoptium/temurin21-binaries/releases/download/jdk-21.0.7%2B6/OpenJDK21U-jdk_x64_linux_hotspot_21.0.7_6.tar.gz
RUN mv OpenJDK21U-jdk_x64_linux_hotspot_21.0.7_6.tar.gz /usr/java
RUN mkdir -p /usr/java/
RUN cd /usr/java && tar -xzvf OpenJDK21U-jdk_x64_linux_hotspot_21.0.7_6.tar.gz
RUN mv /usr/java/jdk-21.0.7+6 /usr/java/jdk-21.0.7
RUN echo 'JAVA_HOME=/usr/java/jdk-21.0.7' >> /etc/profile
RUN echo 'PATH=$PATH:$HOME/bin:$JAVA_HOME/bin' >> /etc/profile
RUN echo 'export JAVA_HOME' >> /etc/profile
RUN echo 'export JRE_HOME' >> /etc/profile
RUN echo 'export PATH' >> /etc/profile
RUN update-alternatives --install "/usr/bin/java" "java" "/usr/java/jdk-21.0.7/bin/java" 1
RUN update-alternatives --install "/usr/bin/javac" "javac" "/usr/java/jdk-21.0.7/bin/javac" 1
RUN wget -O docker-entrypoint.sh https://raw.githubusercontent.com/supertokens/supertokens-docker-postgresql/master/docker-entrypoint.sh
# RUN wget https://services.gradle.org/distributions/gradle-7.0-all.zip
# RUN unzip gradle-7.0-all.zip
# ENV GRADLE_HOME=/gradle-7.0
# ENV PATH=$PATH:$GRADLE_HOME/bin
RUN git clone https://github.com/supertokens/supertokens-root.git
WORKDIR /supertokens-root
COPY ./pluginInterfaceSupported.json pluginInterfaceSupported.json
RUN git clone --single-branch --branch "$(cat pluginInterfaceSupported.json | jq '.versions[-1]' | tr -d '"')" "https://github.com/supertokens/supertokens-plugin-interface.git"
RUN mkdir -p supertokens-core
COPY ./ supertokens-core
RUN echo "org.gradle.vfs.watch=false" >> ./gradle.properties
RUN ./loadModules
RUN ./utils/setupTestEnv --local
FROM debian:bookworm-slim
RUN groupadd supertokens && useradd -m -s /bin/bash -g supertokens supertokens
RUN apt-get update && apt-get install -y --no-install-recommends gnupg dirmngr curl unzip && rm -rf /var/lib/apt/lists/*
ENV GOSU_VERSION=1.7
RUN set -x \
&& apt-get update && apt-get install -y --no-install-recommends ca-certificates wget && rm -rf /var/lib/apt/lists/* \
&& wget -O /usr/local/bin/gosu "https://github.com/tianon/gosu/releases/download/$GOSU_VERSION/gosu-$(dpkg --print-architecture)" \
&& wget -O /usr/local/bin/gosu.asc "https://github.com/tianon/gosu/releases/download/$GOSU_VERSION/gosu-$(dpkg --print-architecture).asc" \
&& export GNUPGHOME="$(mktemp -d)" \
&& gpg --batch --keyserver hkps://keys.openpgp.org --recv-keys B42F6819007F00F88E364FD4036A9C25BF357DD4 \
&& gpg --batch --verify /usr/local/bin/gosu.asc /usr/local/bin/gosu \
&& gpgconf --kill all \
&& rm -rf "$GNUPGHOME" /usr/local/bin/gosu.asc \
&& chmod +x /usr/local/bin/gosu \
&& wget -O jre.zip "https://raw.githubusercontent.com/supertokens/jre/master/jre-21.0.7-linux.zip" \
&& mkdir -p /usr/lib/supertokens/jre \
&& unzip jre.zip \
&& mv jre-*/* /usr/lib/supertokens/jre \
&& apt-get purge -y --auto-remove ca-certificates wget unzip \
&& rm -rf jre.zip
COPY --from=tmp --chown=supertokens /supertokens-root/core /usr/lib/supertokens/core
COPY --from=tmp --chown=supertokens /supertokens-root/plugin-interface /usr/lib/supertokens/plugin-interface
COPY --from=tmp --chown=supertokens /supertokens-root/ee /usr/lib/supertokens/ee
COPY --from=tmp --chown=supertokens /supertokens-root/temp/config.yaml /usr/lib/supertokens/config.yaml
COPY --from=tmp --chown=supertokens /supertokens-root/version.yaml /usr/lib/supertokens/version.yaml
COPY --from=tmp --chown=supertokens /docker-entrypoint.sh /usr/local/bin/
RUN mkdir -p /lib/supertokens
RUN chown -R supertokens:supertokens /lib/supertokens
RUN chmod +x /usr/local/bin/docker-entrypoint.sh
RUN echo "$(md5sum /usr/lib/supertokens/config.yaml | awk '{ print $1 }')" >> /CONFIG_HASH
RUN ln -s /usr/local/bin/docker-entrypoint.sh /entrypoint.sh # backwards compat
EXPOSE 3567
USER "supertokens"
CMD ["/usr/lib/supertokens/jre/bin/java", "-classpath", "/usr/lib/supertokens/core/*:/usr/lib/supertokens/plugin-interface/*:/usr/lib/supertokens/ee/*", "io.supertokens.Main", "/usr/lib/supertokens", "DEV", "host=0.0.0.0", "test_mode", "tempDirLocation=/usr/lib/supertokens/temp", "configFile=/usr/lib/supertokens/temp/config.yaml"]
ENTRYPOINT ["/usr/local/bin/docker-entrypoint.sh"]
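A minimal sketch of building and smoke-testing the image defined above; the tag is an illustrative assumption, and the `/hello` probe mirrors the check used in `testCli.sh` earlier in this diff (a reachable database may still be required, depending on the baked-in `config.yaml`):

```bash
docker build -t supertokens-core-test .       # assumes this file is the default ./Dockerfile; tag is illustrative
docker run -d -p 3567:3567 --name st-core supertokens-core-test
curl http://localhost:3567/hello              # same health probe as in testCli.sh
```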

View File

@@ -65,4 +65,4 @@ register_plugin_version(
plugin_version=plugin_version,
plugin_interface_array=plugin_interface_array,
plugin_name=os.environ.get("PLUGIN_NAME")
)
)

View File

@@ -45,16 +45,16 @@ jobs:
runs-on: ubuntu-latest
needs: dependency-branches
steps:
- name: Set up JDK 21.0.7
- name: Set up JDK 15.0.1
uses: actions/setup-java@v2
with:
java-version: 21.0.7
java-version: 15.0.1
distribution: zulu
- uses: actions/checkout@v2
with:
repository: supertokens/supertokens-root
path: ./supertokens-root
ref: master
ref: for_jdk_15_releases
- name: Checkout supertokens-core
run: |
cd supertokens-root

View File

@@ -1,134 +0,0 @@
name: Container Security Scan
on:
# Allow manual triggering
workflow_dispatch:
# Run automatically once a day at 2 AM UTC
schedule:
- cron: '0 2 * * *'
jobs:
container-scan:
name: Scan SuperTokens PostgreSQL Container
runs-on: ubuntu-latest
steps:
- name: Run Azure Container Scan
id: container-scan
uses: Azure/container-scan@v0
continue-on-error: true
with:
image-name: supertokens/supertokens-postgresql:latest
severity-threshold: LOW
run-quality-checks: false
env:
DOCKER_CONTENT_TRUST: 1
- name: Upload scan results
id: upload-scan-results
uses: actions/upload-artifact@v4
with:
name: container-scan-results
path: |
${{ steps.container-scan.outputs.scan-report-path }}
retention-days: 30
- name: Generate Security Summary
id: security-summary
run: |
echo "summary<<EOF" >> $GITHUB_OUTPUT
echo "**Image:** \`supertokens/supertokens-postgresql:latest\`\n" >> $GITHUB_OUTPUT
echo "**Scan Date:** \`$(date -u)\`\n" >> $GITHUB_OUTPUT
echo "\n" >> $GITHUB_OUTPUT
# Get the scan report path from the container scan output
SCAN_REPORT_PATH="${{ steps.container-scan.outputs.scan-report-path }}"
if [ -f "$SCAN_REPORT_PATH" ]; then
# Count vulnerabilities by severity using the correct JSON structure
critical=$(jq '[.vulnerabilities[]? | select(.severity == "CRITICAL")] | length' "$SCAN_REPORT_PATH" 2>/dev/null || echo "0")
high=$(jq '[.vulnerabilities[]? | select(.severity == "HIGH")] | length' "$SCAN_REPORT_PATH" 2>/dev/null || echo "0")
medium=$(jq '[.vulnerabilities[]? | select(.severity == "MEDIUM")] | length' "$SCAN_REPORT_PATH" 2>/dev/null || echo "0")
low=$(jq '[.vulnerabilities[]? | select(.severity == "LOW")] | length' "$SCAN_REPORT_PATH" 2>/dev/null || echo "0")
total_vulns=$(jq '[.vulnerabilities[]?] | length' "$SCAN_REPORT_PATH" 2>/dev/null || echo "0")
echo "**Total Vulnerabilities:** $total_vulns\n" >> $GITHUB_OUTPUT
echo "\n" >> $GITHUB_OUTPUT
echo "- 🔴 **Critical**: $critical\n" >> $GITHUB_OUTPUT
echo "- 🟠 **High**: $high\n" >> $GITHUB_OUTPUT
echo "- 🟡 **Medium**: $medium\n" >> $GITHUB_OUTPUT
echo "- 🟢 **Low**: $low\n" >> $GITHUB_OUTPUT
echo "\n" >> $GITHUB_OUTPUT
else
echo "❌ **Scan results not found or scan failed**" >> $GITHUB_OUTPUT
fi
echo "\n" >> $GITHUB_OUTPUT
echo "[📃 Download the full report](${{ steps.upload-scan-results.outputs.artifact-url }})\n" >> $GITHUB_OUTPUT
echo "EOF" >> $GITHUB_OUTPUT
- name: Add to Action Summary
run: |
echo "**Image:** \`supertokens/supertokens-postgresql:latest\`" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
echo "**Scan Date:** \`$(date -u)\`" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
# Get the scan report path from the container scan output
SCAN_REPORT_PATH="${{ steps.container-scan.outputs.scan-report-path }}"
if [ -f "$SCAN_REPORT_PATH" ]; then
# Count vulnerabilities by severity using the correct JSON structure
critical=$(jq '[.vulnerabilities[]? | select(.severity == "CRITICAL")] | length' "$SCAN_REPORT_PATH" 2>/dev/null || echo "0")
high=$(jq '[.vulnerabilities[]? | select(.severity == "HIGH")] | length' "$SCAN_REPORT_PATH" 2>/dev/null || echo "0")
medium=$(jq '[.vulnerabilities[]? | select(.severity == "MEDIUM")] | length' "$SCAN_REPORT_PATH" 2>/dev/null || echo "0")
low=$(jq '[.vulnerabilities[]? | select(.severity == "LOW")] | length' "$SCAN_REPORT_PATH" 2>/dev/null || echo "0")
total_vulns=$(jq '[.vulnerabilities[]?] | length' "$SCAN_REPORT_PATH" 2>/dev/null || echo "0")
echo "**Total Vulnerabilities:** $total_vulns" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
echo "- 🔴 **Critical**: $critical" >> $GITHUB_STEP_SUMMARY
echo "- 🟠 **High**: $high" >> $GITHUB_STEP_SUMMARY
echo "- 🟡 **Medium**: $medium" >> $GITHUB_STEP_SUMMARY
echo "- 🟢 **Low**: $low" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
echo "**Vulnerabilities:**" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
echo "| ID | Package | Severity | | Description |" >> $GITHUB_STEP_SUMMARY
echo "|----|---------|----------|-|-------------|" >> $GITHUB_STEP_SUMMARY
# Extract and format vulnerabilities into a table with colored severity indicators, excluding LOW severity
jq -r '.vulnerabilities[]? | select(.severity != "LOW") | "| \(.vulnerabilityId // "N/A") | \(.packageName // "N/A") | \(.severity // "UNKNOWN") | \(if .severity == "CRITICAL" then "🔴" elif .severity == "HIGH" then "🟠" elif .severity == "MEDIUM" then "🟡" else "🟢" end) | \((.description // "No description available") | gsub("\n"; " ")) |"' "$SCAN_REPORT_PATH" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
else
echo "❌ **Scan results not found or scan failed**" >> $GITHUB_STEP_SUMMARY
fi
echo "" >> $GITHUB_STEP_SUMMARY
echo "[📃 Download the full report](${{ steps.upload-scan-results.outputs.artifact-url }})" >> $GITHUB_STEP_SUMMARY
- name: Post notification on Slack channel
id: deployment_message
uses: slackapi/slack-github-action@v2.1.0
with:
method: chat.postMessage
token: ${{ secrets.SLACK_BOT_TOKEN }}
payload: |
channel: ${{ secrets.SLACK_CHANNEL_ID }}
text: ""
blocks:
- type: "header"
text:
type: "plain_text"
text: "${{ steps.container-scan.outcome == 'success' && '✅' || '❌' }} Vulnerability Report: ${{ steps.container-scan.outcome == 'success' && 'All okay' || 'Needs attention' }}"
- type: "markdown"
text: "${{ steps.security-summary.outputs.summary }}"

View File

@@ -53,10 +53,10 @@ jobs:
steps:
- name: Checkout
uses: actions/checkout@v2
- name: Set up JDK 21.0.7
- name: Set up JDK 15.0.1
uses: actions/setup-java@v2
with:
java-version: 21.0.7
java-version: 15.0.1
distribution: zulu
- name: Login to Docker Hub
uses: docker/login-action@v3
@@ -81,16 +81,16 @@ jobs:
runs-on: ubuntu-latest
needs: [dependency-branches, release-docker]
steps:
- name: Set up JDK 21.0.7
- name: Set up JDK 15.0.1
uses: actions/setup-java@v2
with:
java-version: 21.0.7
java-version: 15.0.1
distribution: zulu
- uses: actions/checkout@v2
with:
repository: supertokens/supertokens-root
path: ./supertokens-root
ref: master
ref: for_jdk_15_releases
- name: Checkout supertokens-core
run: |
cd supertokens-root

View File

@@ -8,20 +8,20 @@ jobs:
name: Lint PR title
runs-on: ubuntu-latest
steps:
- uses: amannn/action-semantic-pull-request@v3
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
validateSingleCommit: true
- uses: amannn/action-semantic-pull-request@v3
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
validateSingleCommit: true
changelog:
name: Enforce Changelog
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: dangoslen/changelog-enforcer@v2
with:
changeLogPath: 'CHANGELOG.md'
skipLabels: 'Skip-Changelog'
- uses: actions/checkout@v2
- uses: dangoslen/changelog-enforcer@v2
with:
changeLogPath: 'CHANGELOG.md'
skipLabels: 'Skip-Changelog'
unit-tests:
name: Run unit tests
uses: ./.github/workflows/unit-test.yml
uses: ./.github/workflows/unit-test.yml

View File

@@ -19,8 +19,6 @@ jobs:
id: result
with:
run-for: PR
core-branch: ${{ github.ref_name }}
docker:
name: Docker
runs-on: ubuntu-latest
@@ -36,16 +34,16 @@
# - mysql
# - mongodb
steps:
- name: Set up JDK 21.0.7
- name: Set up JDK 15.0.1
uses: actions/setup-java@v2
with:
java-version: 21.0.7
java-version: 15.0.1
distribution: zulu
- uses: actions/checkout@v2
with:
repository: supertokens/supertokens-root
path: ./supertokens-root
ref: master
ref: for_jdk_15_releases
- uses: actions/checkout@v2
with:
path: ./supertokens-root/supertokens-core
@@ -101,4 +99,4 @@ jobs:
context: ./supertokens-root
tags: supertokens/supertokens-dev-${{ matrix.plugin }}:${{ steps.set_tag.outputs.TAG }}
file: ./supertokens-root/supertokens-${{ matrix.plugin }}-plugin/.github/helpers/docker/Dockerfile
platforms: linux/amd64,linux/arm64
platforms: linux/amd64,linux/arm64

View File

@@ -10,7 +10,7 @@ on:
jobs:
stress-tests:
runs-on: ubuntu-22.04
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Set up Node.js
@@ -44,4 +44,4 @@ jobs:
echo "## Stress Test Results" >> $GITHUB_STEP_SUMMARY
echo "| Test | Duration |" >> $GITHUB_STEP_SUMMARY
echo "|------|----------|" >> $GITHUB_STEP_SUMMARY
jq -r '.measurements[] | "| \(.title) | \(.formatted) |"' stress-tests/stats.json >> $GITHUB_STEP_SUMMARY
jq -r '.measurements[] | "| \(.title) | \(.formatted) |"' stress-tests/stats.json >> $GITHUB_STEP_SUMMARY

View File

@@ -3,10 +3,13 @@ name: Unit Tests
on:
workflow_call:
env:
total-runners: 12
jobs:
dependency-branches:
name: Dependency Branches
runs-on: ubuntu-22.04
runs-on: ubuntu-latest
outputs:
branches: ${{ steps.result.outputs.branches }}
@@ -16,33 +19,45 @@ jobs:
id: result
with:
run-for: PR
core-branch: ${{ github.head_ref }}
test:
name: Unit tests
runner-indexes:
runs-on: ubuntu-latest
name: Generate runner indexes
needs: dependency-branches
outputs:
json: ${{ steps.generate-index-list.outputs.json }}
steps:
- id: generate-index-list
run: |
MAX_INDEX=$((${{ env.total-runners }}-1))
INDEX_LIST=$(seq 0 ${MAX_INDEX})
INDEX_JSON=$(jq --null-input --compact-output '. |= [inputs]' <<< ${INDEX_LIST})
echo "::set-output name=json::${INDEX_JSON}"
unit-tests:
runs-on: ubuntu-latest
name: "Unit tests: ${{ matrix.plugin }} plugin, runner #${{ matrix.runner-index }}"
needs:
- dependency-branches
- runner-indexes
strategy:
fail-fast: false
matrix:
runner-index: ${{ fromjson(needs.runner-indexes.outputs.json) }}
plugin:
- sqlite
- postgresql
# no longer supported
# - mysql
# - mongodb
runs-on: ubuntu-22.04
steps:
- name: Set up JDK 21.0.7
- name: Set up JDK 15.0.1
uses: actions/setup-java@v2
with:
java-version: 21.0.7
java-version: 15.0.1
distribution: zulu
- uses: actions/checkout@v2
with:
repository: supertokens/supertokens-root
path: ./supertokens-root
ref: master
ref: for_jdk_15_releases
- uses: actions/checkout@v2
with:
path: ./supertokens-root/supertokens-core
@@ -71,12 +86,33 @@ jobs:
- name: Start ${{ matrix.plugin }} server
if: matrix.plugin != 'sqlite'
run: cd supertokens-root/supertokens-${{ matrix.plugin }}-plugin && ./startDb.sh
- uses: chaosaffe/split-tests@v1-alpha.1
id: split-tests
name: Split tests
with:
glob: 'supertokens-root/*/src/test/java/**/*.java'
split-total: ${{ env.total-runners }}
split-index: ${{ matrix.runner-index }}
- run: 'echo "This runner will execute the following tests: ${{ steps.split-tests.outputs.test-suite }}"'
- name: Run tests
env:
ST_PLUGIN_NAME: ${{ matrix.plugin }}
run: |
cd supertokens-root
./gradlew test
echo "./gradlew test \\" > test.sh
chmod +x test.sh
IFS=' ' read -ra TESTS <<< "${{ steps.split-tests.outputs.test-suite }}"
for test in "${TESTS[@]}"; do
test_name="${test%.java}"
test_name="${test_name#supertokens-root/supertokens-core/src/test/java/}"
test_name="${test_name//\//.}"
echo " --tests $test_name \\" >> test.sh
done
echo "" >> test.sh
echo "this is the test command:"
cat test.sh
echo "--------------------------------"
./test.sh
- name: Publish Test Report
uses: mikepenz/action-junit-report@v5
if: always()
@@ -84,4 +120,4 @@ jobs:
report_paths: '**/build/test-results/test/TEST-*.xml'
detailed_summary: true
include_passed: false
annotate_notice: true
annotate_notice: true

6
.gitignore vendored
View File

@@ -12,7 +12,6 @@ gradle-app.setting
!cli/jar/**/*.jar
!downloader/jar/**/*.jar
!ee/jar/**/*.jar
!src/main/resources/**/*.jar
*target*
*.war
@@ -48,7 +47,4 @@ local.properties
*.iml
ee/bin
addDevTag
addReleaseTag
install-linux.sh
install-windows.bat
addReleaseTag

View File

@@ -5,652 +5,18 @@ All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), and this project adheres
to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
## [Unreleased]
## Unreleased
## [11.3.0]
## [9.1.3]
- Adds SAML features
- Fixes potential deadlock issue with `TelemetryProvider`
- Adds DeadlockLogger as a utility for discovering deadlock issues
- Adds internal opentelemetry support for logging
### Migration
```sql
CREATE TABLE IF NOT EXISTS saml_clients (
app_id VARCHAR(64) NOT NULL DEFAULT 'public',
tenant_id VARCHAR(64) NOT NULL DEFAULT 'public',
client_id VARCHAR(256) NOT NULL,
client_secret TEXT,
sso_login_url TEXT NOT NULL,
redirect_uris TEXT NOT NULL,
default_redirect_uri TEXT NOT NULL,
idp_entity_id VARCHAR(256) NOT NULL,
idp_signing_certificate TEXT NOT NULL,
allow_idp_initiated_login BOOLEAN NOT NULL DEFAULT FALSE,
enable_request_signing BOOLEAN NOT NULL DEFAULT FALSE,
created_at BIGINT NOT NULL,
updated_at BIGINT NOT NULL,
CONSTRAINT saml_clients_pkey PRIMARY KEY(app_id, tenant_id, client_id),
CONSTRAINT saml_clients_idp_entity_id_key UNIQUE (app_id, tenant_id, idp_entity_id),
CONSTRAINT saml_clients_app_id_fkey FOREIGN KEY(app_id) REFERENCES apps (app_id) ON DELETE CASCADE,
CONSTRAINT saml_clients_tenant_id_fkey FOREIGN KEY(app_id, tenant_id) REFERENCES tenants (app_id, tenant_id) ON DELETE CASCADE
);
CREATE INDEX IF NOT EXISTS saml_clients_app_id_tenant_id_index ON saml_clients (app_id, tenant_id);
CREATE TABLE IF NOT EXISTS saml_relay_state (
app_id VARCHAR(64) NOT NULL DEFAULT 'public',
tenant_id VARCHAR(64) NOT NULL DEFAULT 'public',
relay_state VARCHAR(256) NOT NULL,
client_id VARCHAR(256) NOT NULL,
state TEXT NOT NULL,
redirect_uri TEXT NOT NULL,
created_at BIGINT NOT NULL,
CONSTRAINT saml_relay_state_pkey PRIMARY KEY(app_id, tenant_id, relay_state),
CONSTRAINT saml_relay_state_app_id_fkey FOREIGN KEY(app_id) REFERENCES apps (app_id) ON DELETE CASCADE,
CONSTRAINT saml_relay_state_tenant_id_fkey FOREIGN KEY(app_id, tenant_id) REFERENCES tenants (app_id, tenant_id) ON DELETE CASCADE
);
CREATE INDEX IF NOT EXISTS saml_relay_state_app_id_tenant_id_index ON saml_relay_state (app_id, tenant_id);
CREATE INDEX IF NOT EXISTS saml_relay_state_expires_at_index ON saml_relay_state (expires_at);
CREATE TABLE IF NOT EXISTS saml_claims (
app_id VARCHAR(64) NOT NULL DEFAULT 'public',
tenant_id VARCHAR(64) NOT NULL DEFAULT 'public',
client_id VARCHAR(256) NOT NULL,
code VARCHAR(256) NOT NULL,
claims TEXT NOT NULL,
created_at BIGINT NOT NULL,
CONSTRAINT saml_claims_pkey PRIMARY KEY(app_id, tenant_id, code),
CONSTRAINT saml_claims_app_id_fkey FOREIGN KEY(app_id) REFERENCES apps (app_id) ON DELETE CASCADE,
CONSTRAINT saml_claims_tenant_id_fkey FOREIGN KEY(app_id, tenant_id) REFERENCES tenants (app_id, tenant_id) ON DELETE CASCADE
);
CREATE INDEX IF NOT EXISTS saml_claims_app_id_tenant_id_index ON saml_claims (app_id, tenant_id);
CREATE INDEX IF NOT EXISTS saml_claims_expires_at_index ON saml_claims (expires_at);
```
## [11.2.1]
- Fixes deadlock issue with `ResourceDistributor`
- Fixes race issues with Refreshing OAuth token
## [11.2.0]
- Adds opentelemetry-javaagent to the core distribution
## [11.1.1]
- Updates tomcat-embed to 11.0.12 because of security vulnerabilities
## [11.1.0]
- Adds hikari logs to opentelemetry
- Fetches core and plugin config from env
- Open Telemetry configuration is now optional
- Migrates API calls from supertokens.io to supertokens.com
## [11.0.5]
- Adds all logs that were logged via `io/supertokens/output/Logging.java` to telemetry
- Upgrades the embedded tomcat to 11.0.8 because of security vulnerabilities
- Adds back previously removed `implementationDependencies.json`, but now it is generated by the build process
## [11.0.4]
- Fixes user to roles association in bulk import users when the user is not a primary user
## [11.0.3]
- Fixes BatchUpdateException checks and error handling to prevent bulk import users stuck in `PROCESSING` state
- Adds more DEBUG logging to the bulk import users process
## [11.0.2]
- Fixes `AuthRecipe#getUserByAccountInfo` to consider the tenantId instead of the appId when fetching the webauthn user
## [11.0.1]
- Upgrades the embedded tomcat 11.0.6 and logback classic to 1.5.13 because of security vulnerabilities
## [11.0.0]
- Migrates tests to Github Actions
- Updates JRE to 21.
## [10.1.4]
- Fixes bulk migration user roles association when there is no external userId assigned to the user
- Bulk migration now actually uses the `isVerified` field's value in the loginMethod input
- Fixes a NullPointerException in bulk migration error handling in case of a null external user id
## [10.1.3]
- Version bumped for re-release
## [10.1.2]
- Adds user_id index to the user roles table
- Adds more debug logging to bulk migration
- Adds more tests to bulk migration
### Migration
If using PostgreSQL, run the following SQL script:
```sql
CREATE INDEX IF NOT EXISTS user_roles_app_id_user_id_index ON user_roles (app_id, user_id);
```
If using MySQL, run the following SQL script:
```sql
CREATE INDEX user_roles_app_id_user_id_index ON user_roles (app_id, user_id);
```
## [10.1.1]
- Adds debug logging for the bulk migration process
- Bulk migration users upload now returns the ids of the users.
- Bulk Migration now requires Account Linking to be enabled only if the input data justifies it
- Speed up Bulk Migration's account linking and primary user making
## [10.1.0]
- Adds Webauthn (Passkeys) support to core
- Adds APIs:
- GET `/recipe/webauthn/user/credential/`
- GET `/recipe/webauthn/user/credential/list`
- GET `/recipe/webauthn/options`
- GET `/recipe/webauthn/user/recover`
- POST `/recipe/webauthn/options/register`
- POST `/recipe/webauthn/options/signin`
- POST `/recipe/webauthn/user/credential/register`
- POST `/recipe/webauthn/signup`
- POST `/recipe/webauthn/signin`
- POST `/recipe/webauthn/user/recover/token`
- POST `/recipe/webauthn/user/recover/token/consume`
- PUT `/recipe/webauthn/user/email`
- DELETE `/recipe/webauthn/user/credential/remove`
- DELETE `/recipe/webauthn/options/remove`
- Adds additional indexing for `emailverification_verified_emails`
- Introduces `bulk_migration_batch_size` core config
- Introduces `BULK_MIGRATION_CRON_ENABLED` environment variable to control the bulk migration cron job
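A rough sketch of wiring up the two knobs introduced above, in the style of the `sed` config edits used in `testCli.sh` earlier in this diff; the commented-out key in `config.yaml`, the batch-size value, and the env-var value are assumptions rather than documented defaults:

```bash
# Assumption: config.yaml ships with a commented-out "# bulk_migration_batch_size:" line,
# like the keys edited in testCli.sh; 1000 is an illustrative value.
sed -i 's/# bulk_migration_batch_size:/bulk_migration_batch_size: 1000/g' /usr/lib/supertokens/config.yaml
# Assumption: setting this to false disables the ProcessBulkImportUsers cron job.
export BULK_MIGRATION_CRON_ENABLED=false
```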
### Migration
If using PostgreSQL, run the following SQL script:
```sql
CREATE INDEX IF NOT EXISTS emailverification_verified_emails_app_id_email_index ON emailverification_verified_emails
(app_id, email);
CREATE TABLE IF NOT EXISTS webauthn_account_recovery_tokens (
app_id VARCHAR(64) DEFAULT 'public' NOT NULL,
tenant_id VARCHAR(64) DEFAULT 'public' NOT NULL,
user_id CHAR(36) NOT NULL,
email VARCHAR(256) NOT NULL,
token VARCHAR(256) NOT NULL,
expires_at BIGINT NOT NULL,
CONSTRAINT webauthn_account_recovery_token_pkey PRIMARY KEY (app_id, tenant_id, user_id, token),
CONSTRAINT webauthn_account_recovery_token_user_id_fkey FOREIGN KEY (app_id, tenant_id, user_id) REFERENCES
all_auth_recipe_users(app_id, tenant_id, user_id) ON DELETE CASCADE
);
CREATE TABLE IF NOT EXISTS webauthn_credentials (
id VARCHAR(256) NOT NULL,
app_id VARCHAR(64) DEFAULT 'public' NOT NULL,
rp_id VARCHAR(256) NOT NULL,
user_id CHAR(36),
counter BIGINT NOT NULL,
public_key BYTEA NOT NULL,
transports TEXT NOT NULL,
created_at BIGINT NOT NULL,
updated_at BIGINT NOT NULL,
CONSTRAINT webauthn_credentials_pkey PRIMARY KEY (app_id, rp_id, id),
CONSTRAINT webauthn_credentials_user_id_fkey FOREIGN KEY (app_id, user_id) REFERENCES webauthn_users
(app_id, user_id) ON DELETE CASCADE
);
CREATE TABLE IF NOT EXISTS webauthn_generated_options (
app_id VARCHAR(64) DEFAULT 'public' NOT NULL,
tenant_id VARCHAR(64) DEFAULT 'public' NOT NULL,
id CHAR(36) NOT NULL,
challenge VARCHAR(256) NOT NULL,
email VARCHAR(256),
rp_id VARCHAR(256) NOT NULL,
rp_name VARCHAR(256) NOT NULL,
origin VARCHAR(256) NOT NULL,
expires_at BIGINT NOT NULL,
created_at BIGINT NOT NULL,
user_presence_required BOOLEAN DEFAULT false NOT NULL,
user_verification VARCHAR(12) DEFAULT 'preferred' NOT NULL,
CONSTRAINT webauthn_generated_options_pkey PRIMARY KEY (app_id, tenant_id, id),
CONSTRAINT webauthn_generated_options_tenant_id_fkey FOREIGN KEY (app_id, tenant_id) REFERENCES tenants
(app_id, tenant_id) ON DELETE CASCADE
);
CREATE TABLE IF NOT EXISTS webauthn_user_to_tenant (
app_id VARCHAR(64) DEFAULT 'public' NOT NULL,
tenant_id VARCHAR(64) DEFAULT 'public' NOT NULL,
user_id CHAR(36) NOT NULL,
email VARCHAR(256) NOT NULL,
CONSTRAINT webauthn_user_to_tenant_email_key UNIQUE (app_id, tenant_id, email),
CONSTRAINT webauthn_user_to_tenant_pkey PRIMARY KEY (app_id, tenant_id, user_id),
CONSTRAINT webauthn_user_to_tenant_user_id_fkey FOREIGN KEY (app_id, tenant_id, user_id) REFERENCES
all_auth_recipe_users(app_id, tenant_id, user_id) ON DELETE CASCADE
);
CREATE TABLE IF NOT EXISTS webauthn_users (
app_id VARCHAR(64) DEFAULT 'public' NOT NULL,
user_id CHAR(36) NOT NULL,
email VARCHAR(256) NOT NULL,
rp_id VARCHAR(256) NOT NULL,
time_joined BIGINT NOT NULL,
CONSTRAINT webauthn_users_pkey PRIMARY KEY (app_id, user_id),
CONSTRAINT webauthn_users_user_id_fkey FOREIGN KEY (app_id, user_id) REFERENCES app_id_to_user_id(app_id,
user_id) ON DELETE CASCADE
);
CREATE INDEX IF NOT EXISTS webauthn_user_to_tenant_email_index ON webauthn_user_to_tenant (app_id, email);
CREATE INDEX IF NOT EXISTS webauthn_user_challenges_expires_at_index ON webauthn_generated_options (app_id, tenant_id, expires_at);
CREATE INDEX IF NOT EXISTS webauthn_credentials_user_id_index ON webauthn_credentials (user_id);
CREATE INDEX IF NOT EXISTS webauthn_account_recovery_token_token_index ON webauthn_account_recovery_tokens (app_id, tenant_id, token);
CREATE INDEX IF NOT EXISTS webauthn_account_recovery_token_expires_at_index ON webauthn_account_recovery_tokens (expires_at DESC);
CREATE INDEX IF NOT EXISTS webauthn_account_recovery_token_email_index ON webauthn_account_recovery_tokens (app_id, tenant_id, email);
```
If using MySQL, run the following SQL script:
```sql
CREATE INDEX emailverification_verified_emails_app_id_email_index ON emailverification_verified_emails
(app_id, email);
CREATE TABLE IF NOT EXISTS webauthn_account_recovery_tokens (
app_id VARCHAR(64) DEFAULT 'public' NOT NULL,
tenant_id VARCHAR(64) DEFAULT 'public' NOT NULL,
user_id CHAR(36) NOT NULL,
email VARCHAR(256) NOT NULL,
token VARCHAR(256) NOT NULL,
expires_at BIGINT NOT NULL,
CONSTRAINT webauthn_account_recovery_token_pkey PRIMARY KEY (app_id, tenant_id, user_id, token),
CONSTRAINT webauthn_account_recovery_token_user_id_fkey FOREIGN KEY (app_id, tenant_id, user_id) REFERENCES
all_auth_recipe_users(app_id, tenant_id, user_id) ON DELETE CASCADE
);
CREATE TABLE IF NOT EXISTS webauthn_credentials (
id VARCHAR(256) NOT NULL,
app_id VARCHAR(64) DEFAULT 'public' NOT NULL,
rp_id VARCHAR(256) NOT NULL,
user_id CHAR(36),
counter BIGINT NOT NULL,
public_key BLOB NOT NULL,
transports TEXT NOT NULL,
created_at BIGINT NOT NULL,
updated_at BIGINT NOT NULL,
CONSTRAINT webauthn_credentials_pkey PRIMARY KEY (app_id, rp_id, id),
CONSTRAINT webauthn_credentials_user_id_fkey FOREIGN KEY (app_id, user_id) REFERENCES webauthn_users
(app_id, user_id) ON DELETE CASCADE
);
CREATE TABLE IF NOT EXISTS webauthn_generated_options (
app_id VARCHAR(64) DEFAULT 'public' NOT NULL,
tenant_id VARCHAR(64) DEFAULT 'public' NOT NULL,
id CHAR(36) NOT NULL,
challenge VARCHAR(256) NOT NULL,
email VARCHAR(256),
rp_id VARCHAR(256) NOT NULL,
rp_name VARCHAR(256) NOT NULL,
origin VARCHAR(256) NOT NULL,
expires_at BIGINT NOT NULL,
created_at BIGINT NOT NULL,
user_presence_required BOOLEAN DEFAULT false NOT NULL,
user_verification VARCHAR(12) DEFAULT 'preferred' NOT NULL,
CONSTRAINT webauthn_generated_options_pkey PRIMARY KEY (app_id, tenant_id, id),
CONSTRAINT webauthn_generated_options_tenant_id_fkey FOREIGN KEY (app_id, tenant_id) REFERENCES tenants
(app_id, tenant_id) ON DELETE CASCADE
);
CREATE TABLE IF NOT EXISTS webauthn_user_to_tenant (
app_id VARCHAR(64) DEFAULT 'public' NOT NULL,
tenant_id VARCHAR(64) DEFAULT 'public' NOT NULL,
user_id CHAR(36) NOT NULL,
email VARCHAR(256) NOT NULL,
CONSTRAINT webauthn_user_to_tenant_email_key UNIQUE (app_id, tenant_id, email),
CONSTRAINT webauthn_user_to_tenant_pkey PRIMARY KEY (app_id, tenant_id, user_id),
CONSTRAINT webauthn_user_to_tenant_user_id_fkey FOREIGN KEY (app_id, tenant_id, user_id) REFERENCES
all_auth_recipe_users(app_id, tenant_id, user_id) ON DELETE CASCADE
);
CREATE TABLE IF NOT EXISTS webauthn_users (
app_id VARCHAR(64) DEFAULT 'public' NOT NULL,
user_id CHAR(36) NOT NULL,
email VARCHAR(256) NOT NULL,
rp_id VARCHAR(256) NOT NULL,
time_joined BIGINT NOT NULL,
CONSTRAINT webauthn_users_pkey PRIMARY KEY (app_id, user_id),
CONSTRAINT webauthn_users_user_id_fkey FOREIGN KEY (app_id, user_id) REFERENCES app_id_to_user_id (app_id,
user_id) ON DELETE CASCADE
);
CREATE INDEX webauthn_user_to_tenant_email_index ON webauthn_user_to_tenant (app_id, email);
CREATE INDEX webauthn_user_challenges_expires_at_index ON webauthn_generated_options (app_id, tenant_id, expires_at);
CREATE INDEX webauthn_credentials_user_id_index ON webauthn_credentials (user_id);
CREATE INDEX webauthn_account_recovery_token_token_index ON webauthn_account_recovery_tokens (app_id, tenant_id, token);
CREATE INDEX webauthn_account_recovery_token_expires_at_index ON webauthn_account_recovery_tokens (expires_at DESC);
CREATE INDEX webauthn_account_recovery_token_email_index ON webauthn_account_recovery_tokens (app_id, tenant_id, email);
```
## [10.0.3]
- Fixes `StorageTransactionLogicException` in bulk import when not using userRoles and totpDevices in import json.
- MFA is now only required in Bulk Import if it is used in the input data
- Fixes an issue where an exception while loading a resource caused the other valid resources to be offloaded from memory; the exception is now logged and the remaining resources continue to load.
- Adds `USE_STRUCTURED_LOGGING` environment variable to control the logging format.
## [10.0.2]
- Fixes `NullPointerException` in user search API.
## [10.0.1]
- Fixes slow queries for account linking
- Masks db password in 500 response
### Migration
If using PostgreSQL, run the following SQL script:
```sql
CREATE INDEX IF NOT EXISTS emailpassword_users_email_index ON emailpassword_users (app_id, email);
CREATE INDEX IF NOT EXISTS emailpassword_user_to_tenant_email_index ON emailpassword_user_to_tenant (app_id, tenant_id, email);
CREATE INDEX IF NOT EXISTS passwordless_users_email_index ON passwordless_users (app_id, email);
CREATE INDEX IF NOT EXISTS passwordless_users_phone_number_index ON passwordless_users (app_id, phone_number);
CREATE INDEX IF NOT EXISTS passwordless_user_to_tenant_email_index ON passwordless_user_to_tenant (app_id, tenant_id, email);
CREATE INDEX IF NOT EXISTS passwordless_user_to_tenant_phone_number_index ON passwordless_user_to_tenant (app_id, tenant_id, phone_number);
CREATE INDEX IF NOT EXISTS thirdparty_user_to_tenant_third_party_user_id_index ON thirdparty_user_to_tenant (app_id, tenant_id, third_party_id, third_party_user_id);
```
If using MySQL, run the following SQL script:
```sql
CREATE INDEX emailpassword_users_email_index ON emailpassword_users (app_id, email);
CREATE INDEX emailpassword_user_to_tenant_email_index ON emailpassword_user_to_tenant (app_id, tenant_id, email);
CREATE INDEX passwordless_users_email_index ON passwordless_users (app_id, email);
CREATE INDEX passwordless_users_phone_number_index ON passwordless_users (app_id, phone_number);
CREATE INDEX passwordless_user_to_tenant_email_index ON passwordless_user_to_tenant (app_id, tenant_id, email);
CREATE INDEX passwordless_user_to_tenant_phone_number_index ON passwordless_user_to_tenant (app_id, tenant_id, phone_number);
CREATE INDEX thirdparty_user_to_tenant_third_party_user_id_index ON thirdparty_user_to_tenant (app_id, tenant_id, third_party_id, third_party_user_id);
```
## [10.0.0]
### Added
- Optimize getUserIdMappingWithEitherSuperTokensUserIdOrExternalUserId query
- Adds property `bulk_migration_parallelism` for fine-tuning the worker threads number
- Adds APIs to bulk import users
- GET `/bulk-import/users`
- POST `/bulk-import/users`
- GET `/bulk-import/users/count`
- POST `/bulk-import/users/remove`
- POST `/bulk-import/users/import`
- Adds `ProcessBulkImportUsers` cron job to process bulk import users
- Adds multithreaded worker support for the `ProcessBulkImportUsers` cron job for faster bulk imports
- Adds support for lazily importing users
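
A minimal call sketch for one of the new endpoints, assuming the core runs locally on the default port 3567 and no API key is configured; the path comes from the list above, while the host, port, and response handling are assumptions:
```java
import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class BulkImportCountSketch {
    public static void main(String[] args) throws Exception {
        // Assumption: core reachable at the default local address; add an "api-key" header if one is configured.
        HttpClient client = HttpClient.newHttpClient();
        HttpRequest request = HttpRequest.newBuilder()
                .uri(URI.create("http://localhost:3567/bulk-import/users/count"))
                .GET()
                .build();
        HttpResponse<String> response = client.send(request, HttpResponse.BodyHandlers.ofString());
        // Expected to return a JSON body describing how many bulk-import users are still queued.
        System.out.println(response.statusCode() + " " + response.body());
    }
}
```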
### Breaking changes
- Includes the CUD (connection URI domain) in the owner field for OAuth clients
### Fixes
- Fixes an issue with user ID mapping while refreshing a session
- Adds an index on the `session_info` table over the `(user_id, app_id)` columns
### Migrations
For PostgreSQL, run the following SQL script:
```sql
CREATE TABLE IF NOT EXISTS bulk_import_users (
id CHAR(36),
app_id VARCHAR(64) NOT NULL DEFAULT 'public',
primary_user_id VARCHAR(36),
raw_data TEXT NOT NULL,
status VARCHAR(128) DEFAULT 'NEW',
error_msg TEXT,
created_at BIGINT NOT NULL,
updated_at BIGINT NOT NULL,
CONSTRAINT bulk_import_users_pkey PRIMARY KEY(app_id, id),
CONSTRAINT bulk_import_users__app_id_fkey FOREIGN KEY(app_id) REFERENCES apps(app_id) ON DELETE CASCADE
);
CREATE INDEX IF NOT EXISTS bulk_import_users_status_updated_at_index ON bulk_import_users (app_id, status, updated_at);
CREATE INDEX IF NOT EXISTS bulk_import_users_pagination_index1 ON bulk_import_users (app_id, status, created_at DESC, id DESC);
CREATE INDEX IF NOT EXISTS bulk_import_users_pagination_index2 ON bulk_import_users (app_id, created_at DESC, id DESC);
CREATE INDEX IF NOT EXISTS session_info_user_id_app_id_index ON session_info (user_id, app_id);
```
For MySQL, run the following SQL script:
```sql
CREATE TABLE IF NOT EXISTS bulk_import_users (
id CHAR(36),
app_id VARCHAR(64) NOT NULL DEFAULT 'public',
primary_user_id VARCHAR(36),
raw_data TEXT NOT NULL,
status VARCHAR(128) DEFAULT 'NEW',
error_msg TEXT,
created_at BIGINT UNSIGNED NOT NULL,
updated_at BIGINT UNSIGNED NOT NULL,
PRIMARY KEY (app_id, id),
FOREIGN KEY(app_id) REFERENCES apps(app_id) ON DELETE CASCADE
);
CREATE INDEX bulk_import_users_status_updated_at_index ON bulk_import_users (app_id, status, updated_at);
CREATE INDEX bulk_import_users_pagination_index1 ON bulk_import_users (app_id, status, created_at DESC, id DESC);
CREATE INDEX bulk_import_users_pagination_index2 ON bulk_import_users (app_id, created_at DESC, id DESC);
CREATE INDEX session_info_user_id_app_id_index ON session_info (user_id, app_id);
```
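As a rough way to watch the `ProcessBulkImportUsers` cron job work through the queue, the sketch below groups rows of the new table by status. The table and column names come from the DDL above; the JDBC URL and credentials are placeholders, and it assumes the PostgreSQL JDBC driver is on the classpath:
```java
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;

public class BulkImportProgressSketch {
    public static void main(String[] args) throws Exception {
        // Placeholder connection details; point this at the database the core is configured to use.
        String url = "jdbc:postgresql://localhost:5432/supertokens";
        try (Connection conn = DriverManager.getConnection(url, "postgres", "password");
             PreparedStatement stmt = conn.prepareStatement(
                     "SELECT status, COUNT(*) FROM bulk_import_users WHERE app_id = ? GROUP BY status")) {
            stmt.setString(1, "public");
            try (ResultSet rs = stmt.executeQuery()) {
                while (rs.next()) {
                    // 'NEW' is the default status from the DDL; other values are assumed to be set by the cron job.
                    System.out.println(rs.getString(1) + ": " + rs.getLong(2));
                }
            }
        }
    }
}
```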
## [9.3.1]
- Includes exception class name in 500 error message
## [9.3.0]
### Changes
- Adds support for OAuth2
- Adds a new license key feature: `OAUTH`
- Adds new core configs:
- `oauth_provider_public_service_url`
- `oauth_provider_admin_service_url`
- `oauth_provider_consent_login_base_url`
- `oauth_provider_url_configured_in_oauth_provider`
- Adds the following APIs (see the call sketch after this list):
- POST `/recipe/oauth/clients`
- PUT `/recipe/oauth/clients`
- GET `/recipe/oauth/clients`
- GET `/recipe/oauth/clients/list`
- POST `/recipe/oauth/clients/remove`
- GET `/recipe/oauth/auth/requests/consent`
- PUT `/recipe/oauth/auth/requests/consent/accept`
- PUT `/recipe/oauth/auth/requests/consent/reject`
- GET `/recipe/oauth/auth/requests/login`
- PUT `/recipe/oauth/auth/requests/login/accept`
- PUT `/recipe/oauth/auth/requests/login/reject`
- GET `/recipe/oauth/auth/requests/logout`
- PUT `/recipe/oauth/auth/requests/logout/accept`
- PUT `/recipe/oauth/auth/requests/logout/reject`
- POST `/recipe/oauth/auth`
- POST `/recipe/oauth/token`
- POST `/recipe/oauth/introspect`
- POST `/recipe/oauth/session/revoke`
- POST `/recipe/oauth/token/revoke`
- POST `/recipe/oauth/tokens/revoke`
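
A minimal call sketch for one of the new endpoints, listing the OAuth clients of an app; the path comes from the list above, while the default local address (port 3567) and the environment variable used to pass an API key are assumptions for this example:
```java
import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class ListOAuthClientsSketch {
    public static void main(String[] args) throws Exception {
        HttpRequest.Builder builder = HttpRequest.newBuilder()
                .uri(URI.create("http://localhost:3567/recipe/oauth/clients/list"))
                .GET();
        // Hypothetical env var for this sketch; the "api-key" header is only needed when API keys are configured.
        String apiKey = System.getenv("SUPERTOKENS_API_KEY");
        if (apiKey != null && !apiKey.isEmpty()) {
            builder.header("api-key", apiKey);
        }
        HttpResponse<String> response = HttpClient.newHttpClient()
                .send(builder.build(), HttpResponse.BodyHandlers.ofString());
        // Expected: a JSON response listing the OAuth clients registered for the app.
        System.out.println(response.statusCode() + " " + response.body());
    }
}
```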
### Migration
If using PostgreSQL, run the following SQL script:
```sql
CREATE TABLE IF NOT EXISTS oauth_clients (
app_id VARCHAR(64),
client_id VARCHAR(255) NOT NULL,
is_client_credentials_only BOOLEAN NOT NULL,
PRIMARY KEY (app_id, client_id),
FOREIGN KEY(app_id) REFERENCES apps(app_id) ON DELETE CASCADE
);
CREATE TABLE IF NOT EXISTS oauth_sessions (
gid VARCHAR(255),
app_id VARCHAR(64) DEFAULT 'public',
client_id VARCHAR(255) NOT NULL,
session_handle VARCHAR(128),
external_refresh_token VARCHAR(255) UNIQUE,
internal_refresh_token VARCHAR(255) UNIQUE,
jti TEXT NOT NULL,
exp BIGINT NOT NULL,
PRIMARY KEY (gid),
FOREIGN KEY(app_id, client_id) REFERENCES oauth_clients(app_id, client_id) ON DELETE CASCADE
);
CREATE INDEX IF NOT EXISTS oauth_session_exp_index ON oauth_sessions(exp DESC);
CREATE INDEX IF NOT EXISTS oauth_session_external_refresh_token_index ON oauth_sessions(app_id, external_refresh_token DESC);
CREATE TABLE IF NOT EXISTS oauth_m2m_tokens (
app_id VARCHAR(64) DEFAULT 'public',
client_id VARCHAR(255) NOT NULL,
iat BIGINT NOT NULL,
exp BIGINT NOT NULL,
PRIMARY KEY (app_id, client_id, iat),
FOREIGN KEY(app_id, client_id) REFERENCES oauth_clients(app_id, client_id) ON DELETE CASCADE
);
CREATE INDEX IF NOT EXISTS oauth_m2m_token_iat_index ON oauth_m2m_tokens(iat DESC, app_id DESC);
CREATE INDEX IF NOT EXISTS oauth_m2m_token_exp_index ON oauth_m2m_tokens(exp DESC);
CREATE TABLE IF NOT EXISTS oauth_logout_challenges (
app_id VARCHAR(64) DEFAULT 'public',
challenge VARCHAR(128) NOT NULL,
client_id VARCHAR(255) NOT NULL,
post_logout_redirect_uri VARCHAR(1024),
session_handle VARCHAR(128),
state VARCHAR(128),
time_created BIGINT NOT NULL,
PRIMARY KEY (app_id, challenge),
FOREIGN KEY(app_id, client_id) REFERENCES oauth_clients(app_id, client_id) ON DELETE CASCADE
);
CREATE INDEX IF NOT EXISTS oauth_logout_challenges_time_created_index ON oauth_logout_challenges(time_created DESC);
```
If using MySQL, run the following SQL script:
```sql
CREATE TABLE IF NOT EXISTS oauth_clients (
app_id VARCHAR(64),
client_id VARCHAR(255) NOT NULL,
is_client_credentials_only BOOLEAN NOT NULL,
PRIMARY KEY (app_id, client_id),
FOREIGN KEY(app_id) REFERENCES apps(app_id) ON DELETE CASCADE
);
CREATE TABLE IF NOT EXISTS oauth_sessions (
gid VARCHAR(255),
app_id VARCHAR(64) DEFAULT 'public',
client_id VARCHAR(255) NOT NULL,
session_handle VARCHAR(128),
external_refresh_token VARCHAR(255) UNIQUE,
internal_refresh_token VARCHAR(255) UNIQUE,
jti TEXT NOT NULL,
exp BIGINT NOT NULL,
PRIMARY KEY (gid),
FOREIGN KEY(app_id, client_id) REFERENCES oauth_clients(app_id, client_id) ON DELETE CASCADE
);
CREATE INDEX oauth_session_exp_index ON oauth_sessions(exp DESC);
CREATE INDEX oauth_session_external_refresh_token_index ON oauth_sessions(app_id, external_refresh_token DESC);
CREATE TABLE oauth_m2m_tokens (
app_id VARCHAR(64) DEFAULT 'public',
client_id VARCHAR(255) NOT NULL,
iat BIGINT UNSIGNED NOT NULL,
exp BIGINT UNSIGNED NOT NULL,
PRIMARY KEY (app_id, client_id, iat),
FOREIGN KEY(app_id, client_id) REFERENCES oauth_clients(app_id, client_id) ON DELETE CASCADE
);
CREATE INDEX oauth_m2m_token_iat_index ON oauth_m2m_tokens(iat DESC, app_id DESC);
CREATE INDEX oauth_m2m_token_exp_index ON oauth_m2m_tokens(exp DESC);
CREATE TABLE IF NOT EXISTS oauth_logout_challenges (
app_id VARCHAR(64) DEFAULT 'public',
challenge VARCHAR(128) NOT NULL,
client_id VARCHAR(255) NOT NULL,
post_logout_redirect_uri VARCHAR(1024),
session_handle VARCHAR(128),
state VARCHAR(128),
time_created BIGINT UNSIGNED NOT NULL,
PRIMARY KEY (app_id, challenge),
FOREIGN KEY(app_id, client_id) REFERENCES oauth_clients(app_id, client_id) ON DELETE CASCADE
);
CREATE INDEX oauth_logout_challenges_time_created_index ON oauth_logout_challenges(time_created ASC, app_id ASC);
```
## [9.2.3] - 2024-10-09
- Adds support for `--with-temp-dir` in the CLI and `tempDirLocation=` in the core
- Adds validation of `firstFactors` and `requiredSecondaryFactors` names while creating tenants/apps/etc. so that
  special characters are not allowed.
## [9.2.2] - 2024-09-04
- Adds an index on `last_active_time` in the `user_last_active` table to improve the performance of MAU computation.
### Migration
If using PostgreSQL, run the following SQL script:
```sql
CREATE INDEX IF NOT EXISTS user_last_active_last_active_time_index ON user_last_active (last_active_time DESC, app_id DESC);
```
If using MySQL, run the following SQL script:
```sql
CREATE INDEX user_last_active_last_active_time_index ON user_last_active (last_active_time DESC, app_id DESC);
```
## [9.2.1] - 2024-09-02
- Removes the stats that were resulting in high CPU consumption
## [9.2.0] - 2024-08-20
- Adds `SECURITY` feature in `EE_FEATURES`.
## [9.1.2] - 2024-07-24
- Fixes path routing that rejected tenantIds containing stop words even when there was no exact stop-word match. For example, `/hellotenant` is a valid tenantId prefix, but it was being rejected because of the stop word `hello`. - https://github.com/supertokens/supertokens-core/issues/1021
- 500 errors from the core now return the actual exception; since these APIs are developer facing, this makes such errors easier to debug.
## [9.1.1] - 2024-07-24
### Fixes
@ -772,19 +138,17 @@ Make sure the core is already upgraded to version 8.0.0 before migrating
If using PostgreSQL
```sql
ALTER TABLE totp_user_devices ADD COLUMN IF NOT EXISTS created_at BIGINT default 0;
ALTER TABLE totp_user_devices ALTER COLUMN created_at DROP DEFAULT;
```
If using MySQL
```sql
ALTER TABLE totp_user_devices ADD COLUMN created_at BIGINT UNSIGNED default 0;
ALTER TABLE totp_user_devices ALTER COLUMN created_at DROP DEFAULT;
DROP INDEX all_auth_recipe_users_pagination_index2 ON all_auth_recipe_users;
DROP INDEX all_auth_recipe_users_pagination_index4 ON all_auth_recipe_users;
```
@ -836,8 +200,8 @@ For MySQL:
ALTER TABLE user_roles DROP FOREIGN KEY user_roles_ibfk_1;
ALTER TABLE user_roles DROP FOREIGN KEY user_roles_ibfk_2;
ALTER TABLE user_roles
ADD FOREIGN KEY (app_id, tenant_id)
REFERENCES tenants (app_id, tenant_id) ON DELETE CASCADE;
ADD FOREIGN KEY (app_id, tenant_id)
REFERENCES tenants (app_id, tenant_id) ON DELETE CASCADE;
```
## [7.0.18] - 2024-02-19
View File
@ -45,7 +45,7 @@ We're happy to help!:raised_hands:
### Local Setup Prerequisites
- OS: Linux or macOS. Or if using Windows, you need to use [wsl2](https://docs.microsoft.com/en-us/windows/wsl/about).
- JDK: openjdk 21.0.7. Installation instructions for Mac and Linux can be found
- JDK: openjdk 15.0.1. Installation instructions for Mac and Linux can be found
in [our wiki](https://github.com/supertokens/supertokens-core/wiki/Installing-OpenJDK-for-Mac-and-Linux)
- IDE: [IntelliJ](https://www.jetbrains.com/idea/download/)(recommended) or equivalent IDE
View File
@ -81,7 +81,7 @@ We also believe in the principle of least vendor lock-in. Your having full contr
can switch away from SuperTokens without forcing your existing users to logout, reset their passwords, or in the worst
case, sign up again.
### [Click here](https://thirdpartyemailpassword.demo.supertokens.com/) to see the demo app.
### [Click here](https://thirdpartyemailpassword.demo.supertokens.io/) to see the demo app.
- Please visit [our website](https://supertokens.io/pricing) to see the list of features.
- We want to make features as decoupled as possible. This means you can use SuperTokens for just login, or just session
@ -256,7 +256,7 @@ If you think this is a project you could use in the future, please :star2: this
</tr>
<tr>
<td align="center"><a href="https://github.com/Lehoczky"><img src="https://avatars.githubusercontent.com/u/31937175?v=4" width="100px;" alt=""/><br /><sub><b>Lehoczky Zoltán</b></sub></a></td>
<td align="center"><a href="https://github.com/mavwolverine"><img src="https://avatars.githubusercontent.com/u/316111?v=4" width="100px;" alt=""/><br /><sub><b>Viraj Kanwade</b></sub></a></td>
<td align="center"><a href="https://github.com/virajkanwade"><img src="https://avatars.githubusercontent.com/u/316111?v=4" width="100px;" alt=""/><br /><sub><b>Viraj Kanwade</b></sub></a></td>
<td align="center"><a href="https://github.com/anuragmerndev"><img src="https://avatars.githubusercontent.com/u/144275260?v=4" width="100px;" alt=""/><br /><sub><b>Anurag Srivastava</b></sub></a></td>
</tr>
</table>
View File
@ -8,8 +8,6 @@
plugins {
id 'application'
id 'java-library'
id "io.freefair.aspectj" version "8.13" //same as gradle version!
}
compileJava { options.encoding = "UTF-8" }
compileTestJava { options.encoding = "UTF-8" }
@ -21,37 +19,26 @@ compileTestJava { options.encoding = "UTF-8" }
// }
//}
java {
toolchain {
languageVersion.set(JavaLanguageVersion.of(21))
}
}
version = "11.3.0"
version = "9.1.3"
repositories {
mavenCentral()
maven { url 'https://build.shibboleth.net/nexus/content/repositories/releases/' }
}
dependencies {
// https://mvnrepository.com/artifact/com.google.code.gson/gson
// if this changes, remember to also change in the ee folder's build.gradle
implementation group: 'com.google.code.gson', name: 'gson', version: '2.13.1'
implementation group: 'com.google.code.gson', name: 'gson', version: '2.3.1'
// https://mvnrepository.com/artifact/com.fasterxml.jackson.dataformat/jackson-dataformat-yaml
implementation group: 'com.fasterxml.jackson.dataformat', name: 'jackson-dataformat-yaml', version: '2.18.2'
// https://mvnrepository.com/artifact/com.fasterxml.jackson.dataformat/jackson-dataformat-cbor
implementation group: 'com.fasterxml.jackson.dataformat', name: 'jackson-dataformat-cbor', version: '2.18.2'
implementation group: 'com.fasterxml.jackson.dataformat', name: 'jackson-dataformat-yaml', version: '2.16.1'
// https://mvnrepository.com/artifact/com.fasterxml.jackson.core/jackson-core
implementation group: 'com.fasterxml.jackson.core', name: 'jackson-databind', version: '2.18.2'
implementation group: 'com.fasterxml.jackson.core', name: 'jackson-databind', version: '2.16.1'
// https://mvnrepository.com/artifact/org.apache.tomcat.embed/tomcat-embed-core
api group: 'org.apache.tomcat.embed', name: 'tomcat-embed-core', version: '11.0.12'
implementation group: 'org.apache.tomcat.embed', name: 'tomcat-embed-core', version: '10.1.18'
// https://mvnrepository.com/artifact/com.google.code.findbugs/jsr305
implementation group: 'com.google.code.findbugs', name: 'jsr305', version: '3.0.2'
@ -83,18 +70,9 @@ dependencies {
// https://mvnrepository.com/artifact/com.googlecode.libphonenumber/libphonenumber/
implementation group: 'com.googlecode.libphonenumber', name: 'libphonenumber', version: '8.13.25'
// https://mvnrepository.com/artifact/com.webauthn4j/webauthn4j-core
implementation group: 'com.webauthn4j', name: 'webauthn4j-core', version: '0.28.6.RELEASE'
implementation platform("io.opentelemetry.instrumentation:opentelemetry-instrumentation-bom-alpha:2.17.0-alpha")
// Open SAML
implementation group: 'org.opensaml', name: 'opensaml-core', version: '4.3.1'
implementation group: 'org.opensaml', name: 'opensaml-saml-impl', version: '4.3.1'
implementation group: 'org.opensaml', name: 'opensaml-security-impl', version: '4.3.1'
implementation group: 'org.opensaml', name: 'opensaml-profile-impl', version: '4.3.1'
implementation group: 'org.opensaml', name: 'opensaml-xmlsec-impl', version: '4.3.1'
implementation("ch.qos.logback:logback-core:1.5.18")
implementation("ch.qos.logback:logback-classic:1.5.18")
@ -106,13 +84,10 @@ dependencies {
implementation("io.opentelemetry.semconv:opentelemetry-semconv")
implementation('org.aspectj:aspectjrt:1.9.24')
compileOnly project(":supertokens-plugin-interface")
testImplementation project(":supertokens-plugin-interface")
// this is so that we can find plugin-interface jar while testing
testImplementation project(":supertokens-plugin-interface")
testImplementation 'junit:junit:4.12'
// https://mvnrepository.com/artifact/org.mockito/mockito-core
@ -123,9 +98,8 @@ dependencies {
testImplementation 'com.tngtech.archunit:archunit-junit4:0.22.0'
// https://mvnrepository.com/artifact/com.webauthn4j/webauthn4j-test
testImplementation group: 'com.webauthn4j', name: 'webauthn4j-test', version: '0.28.6.RELEASE'
}
application {
mainClass.set("io.supertokens.Main")
}
@ -135,47 +109,43 @@ jar {
}
tasks.register('copyJars', Copy) {
task copyJars(type: Copy) {
into "$buildDir/dependencies"
from configurations.runtimeClasspath
into layout.buildDirectory.dir("dependencies")
}
test {
jvmArgs = ['-Djava.security.egd=file:/dev/urandom',
"--add-opens=java.base/java.lang=ALL-UNNAMED",
"--add-opens=java.base/java.util=ALL-UNNAMED",
"--add-opens=java.base/java.util.concurrent=ALL-UNNAMED"]
jvmArgs '-Djava.security.egd=file:/dev/urandom'
testLogging {
outputs.upToDateWhen { false }
showStandardStreams = true
}
maxParallelForks = Runtime.runtime.availableProcessors()
}
import org.gradle.api.tasks.testing.logging.TestExceptionFormat
import org.gradle.api.tasks.testing.logging.TestLogEvent
tasks.withType(Test).configureEach {
tasks.withType(Test) {
testLogging {
// set options for log level LIFECYCLE
events = [TestLogEvent.FAILED,
events TestLogEvent.FAILED,
TestLogEvent.PASSED,
TestLogEvent.SKIPPED,
TestLogEvent.STANDARD_OUT]
exceptionFormat = TestExceptionFormat.FULL
showExceptions = true
showCauses = true
showStackTraces = true
TestLogEvent.STANDARD_OUT
exceptionFormat TestExceptionFormat.FULL
showExceptions true
showCauses true
showStackTraces true
// set options for log level DEBUG and INFO
debug {
events = [TestLogEvent.STARTED,
events TestLogEvent.STARTED,
TestLogEvent.FAILED,
TestLogEvent.PASSED,
TestLogEvent.SKIPPED,
TestLogEvent.STANDARD_ERROR,
TestLogEvent.STANDARD_OUT]
exceptionFormat = TestExceptionFormat.FULL
TestLogEvent.STANDARD_OUT
exceptionFormat TestExceptionFormat.FULL
}
info.events = debug.events
info.exceptionFormat = debug.exceptionFormat
View File
@ -4,8 +4,6 @@ plugins {
repositories {
mavenCentral()
maven { url 'https://build.shibboleth.net/nexus/content/repositories/releases/' }
}
application {
@ -18,13 +16,13 @@ jar {
dependencies {
// https://mvnrepository.com/artifact/com.google.code.gson/gson
implementation group: 'com.google.code.gson', name: 'gson', version: '2.13.1'
implementation group: 'com.google.code.gson', name: 'gson', version: '2.3.1'
// https://mvnrepository.com/artifact/com.fasterxml.jackson.dataformat/jackson-dataformat-yaml
implementation group: 'com.fasterxml.jackson.dataformat', name: 'jackson-dataformat-yaml', version: '2.18.2'
implementation group: 'com.fasterxml.jackson.dataformat', name: 'jackson-dataformat-yaml', version: '2.16.1'
// https://mvnrepository.com/artifact/com.fasterxml.jackson.core/jackson-core
implementation group: 'com.fasterxml.jackson.core', name: 'jackson-databind', version: '2.18.2'
implementation group: 'com.fasterxml.jackson.core', name: 'jackson-databind', version: '2.16.1'
// https://mvnrepository.com/artifact/de.mkammerer/argon2-jvm
implementation group: 'de.mkammerer', name: 'argon2-jvm', version: '2.11'
@ -35,9 +33,9 @@ dependencies {
testImplementation group: 'junit', name: 'junit', version: '4.12'
}
tasks.register('copyJars', Copy) {
task copyJars(type: Copy) {
into "$buildDir/dependencies"
from configurations.runtimeClasspath
into layout.buildDirectory.dir("dependencies")
}
test {
@ -57,10 +55,10 @@ tasks.withType(Test) {
TestLogEvent.PASSED,
TestLogEvent.SKIPPED,
TestLogEvent.STANDARD_OUT
exceptionFormat = TestExceptionFormat.FULL
showExceptions = true
showCauses = true
showStackTraces = true
exceptionFormat TestExceptionFormat.FULL
showExceptions true
showCauses true
showStackTraces true
// set options for log level DEBUG and INFO
debug {
@ -70,7 +68,7 @@ tasks.withType(Test) {
TestLogEvent.SKIPPED,
TestLogEvent.STANDARD_ERROR,
TestLogEvent.STANDARD_OUT
exceptionFormat = TestExceptionFormat.FULL
exceptionFormat TestExceptionFormat.FULL
}
info.events = debug.events
info.exceptionFormat = debug.exceptionFormat
View File
@ -1,40 +1,55 @@
{
"_comment": "Contains list of implementation dependencies URL for this project. This is a generated file, don't modify the contents by hand.",
"list": [
{
"jar":"https://repo.maven.apache.org/maven2/com/google/code/gson/gson/2.13.1/gson-2.13.1.jar",
"name":"gson 2.13.1",
"src":"https://repo.maven.apache.org/maven2/com/google/code/gson/gson/2.13.1/gson-2.13.1-sources.jar"
},
{
"jar":"https://repo.maven.apache.org/maven2/com/google/errorprone/error_prone_annotations/2.38.0/error_prone_annotations-2.38.0.jar",
"name":"error_prone_annotations 2.38.0",
"src":"https://repo.maven.apache.org/maven2/com/google/errorprone/error_prone_annotations/2.38.0/error_prone_annotations-2.38.0-sources.jar"
},
{
"jar":"https://repo.maven.apache.org/maven2/com/fasterxml/jackson/dataformat/jackson-dataformat-yaml/2.18.2/jackson-dataformat-yaml-2.18.2.jar",
"name":"jackson-dataformat-yaml 2.18.2",
"src":"https://repo.maven.apache.org/maven2/com/fasterxml/jackson/dataformat/jackson-dataformat-yaml/2.18.2/jackson-dataformat-yaml-2.18.2-sources.jar"
},
{
"jar":"https://repo.maven.apache.org/maven2/org/yaml/snakeyaml/2.3/snakeyaml-2.3.jar",
"name":"snakeyaml 2.3",
"src":"https://repo.maven.apache.org/maven2/org/yaml/snakeyaml/2.3/snakeyaml-2.3-sources.jar"
},
{
"jar":"https://repo.maven.apache.org/maven2/com/fasterxml/jackson/core/jackson-databind/2.18.2/jackson-databind-2.18.2.jar",
"name":"jackson-databind 2.18.2",
"src":"https://repo.maven.apache.org/maven2/com/fasterxml/jackson/core/jackson-databind/2.18.2/jackson-databind-2.18.2-sources.jar"
},
{
"jar":"https://repo.maven.apache.org/maven2/de/mkammerer/argon2-jvm/2.11/argon2-jvm-2.11.jar",
"name":"argon2-jvm 2.11",
"src":"https://repo.maven.apache.org/maven2/de/mkammerer/argon2-jvm/2.11/argon2-jvm-2.11-sources.jar"
},
{
"jar":"https://repo.maven.apache.org/maven2/org/mindrot/jbcrypt/0.4/jbcrypt-0.4.jar",
"name":"jbcrypt 0.4",
"src":"https://repo.maven.apache.org/maven2/org/mindrot/jbcrypt/0.4/jbcrypt-0.4-sources.jar"
}
]
"_comment": "Contains list of implementation dependencies URL for this project",
"list": [
{
"jar": "https://repo1.maven.org/maven2/com/google/code/gson/gson/2.3.1/gson-2.3.1.jar",
"name": "Gson 2.3.1",
"src": "https://repo1.maven.org/maven2/com/google/code/gson/gson/2.3.1/gson-2.3.1-sources.jar"
},
{
"jar": "https://repo1.maven.org/maven2/com/fasterxml/jackson/dataformat/jackson-dataformat-yaml/2.16.1/jackson-dataformat-yaml-2.16.1.jar",
"name": "Jackson Dataformat 2.16.1",
"src": "https://repo1.maven.org/maven2/com/fasterxml/jackson/dataformat/jackson-dataformat-yaml/2.16.1/jackson-dataformat-yaml-2.16.1-sources.jar"
},
{
"jar": "https://repo1.maven.org/maven2/org/yaml/snakeyaml/2.2/snakeyaml-2.2.jar",
"name": "SnakeYAML 2.2",
"src": "https://repo1.maven.org/maven2/org/yaml/snakeyaml/2.2/snakeyaml-2.2-sources.jar"
},
{
"jar": "https://repo1.maven.org/maven2/com/fasterxml/jackson/core/jackson-core/2.16.1/jackson-core-2.16.1.jar",
"name": "Jackson core 2.16.1",
"src": "https://repo1.maven.org/maven2/com/fasterxml/jackson/core/jackson-core/2.16.1/jackson-core-2.16.1-sources.jar"
},
{
"jar": "https://repo1.maven.org/maven2/com/fasterxml/jackson/core/jackson-databind/2.16.1/jackson-databind-2.16.1.jar",
"name": "Jackson databind 2.16.1",
"src": "https://repo1.maven.org/maven2/com/fasterxml/jackson/core/jackson-databind/2.16.1/jackson-databind-2.16.1-sources.jar"
},
{
"jar": "https://repo1.maven.org/maven2/com/fasterxml/jackson/core/jackson-annotations/2.16.1/jackson-annotations-2.16.1.jar",
"name": "Jackson annotation 2.16.1",
"src": "https://repo1.maven.org/maven2/com/fasterxml/jackson/core/jackson-annotations/2.16.1/jackson-annotations-2.16.1-sources.jar"
},
{
"jar": "https://repo1.maven.org/maven2/de/mkammerer/argon2-jvm/2.11/argon2-jvm-2.11.jar",
"name": "Argon2-jvm 2.11",
"src": "https://repo1.maven.org/maven2/de/mkammerer/argon2-jvm/2.11/argon2-jvm-2.11-sources.jar"
},
{
"jar": "https://repo1.maven.org/maven2/de/mkammerer/argon2-jvm-nolibs/2.11/argon2-jvm-nolibs-2.11.jar",
"name": "Argon2-jvm no libs 2.11",
"src": "https://repo1.maven.org/maven2/de/mkammerer/argon2-jvm-nolibs/2.11/argon2-jvm-nolibs-2.11-sources.jar"
},
{
"jar": "https://repo1.maven.org/maven2/org/mindrot/jbcrypt/0.4/jbcrypt-0.4.jar",
"name": "SQLite JDBC Driver 3.30.1",
"src": "https://repo1.maven.org/maven2/org/mindrot/jbcrypt/0.4/jbcrypt-0.4-sources.jar"
},
{
"jar": "https://repo1.maven.org/maven2/net/java/dev/jna/jna/5.8.0/jna-5.8.0.jar",
"name": "JNA 5.8.0",
"src": "https://repo1.maven.org/maven2/net/java/dev/jna/jna/5.8.0/jna-5.8.0-sources.jar"
}
]
}
Binary file not shown.
View File
@ -35,7 +35,6 @@ public class StartHandler extends CommandHandler {
public void doCommand(String installationDir, boolean viaInstaller, String[] args) {
String space = CLIOptionsParser.parseOption("--with-space", args);
String configPath = CLIOptionsParser.parseOption("--with-config", args);
String tempDirLocation = CLIOptionsParser.parseOption("--with-temp-dir", args);
if (configPath != null) {
configPath = new File(configPath).getAbsolutePath();
}
@ -43,38 +42,12 @@ public class StartHandler extends CommandHandler {
String host = CLIOptionsParser.parseOption("--host", args);
boolean foreground = CLIOptionsParser.hasKey("--foreground", args);
boolean forceNoInMemDB = CLIOptionsParser.hasKey("--no-in-mem-db", args);
boolean javaagentEnabled = CLIOptionsParser.hasKey("--javaagent", args);
boolean jmxEnabled = CLIOptionsParser.hasKey("--jmx", args);
String jmxPort = CLIOptionsParser.parseOption("--jmx-port", args);
String jmxAuthenticate = CLIOptionsParser.parseOption("--jmx-authenticate", args);
String jmxSSL = CLIOptionsParser.parseOption("--jmx-ssl", args);
List<String> commands = new ArrayList<>();
if (OperatingSystem.getOS() == OperatingSystem.OS.WINDOWS) {
commands.add(installationDir + "jre\\bin\\java.exe");
commands.add("-classpath");
commands.add("\"" + installationDir + "core\\*\";\"" + installationDir + "plugin-interface\\*\"");
if (javaagentEnabled) {
commands.add("-javaagent:\"" + installationDir + "agent\\opentelemetry-javaagent.jar\"");
}
if (jmxEnabled) {
commands.add("-Dcom.sun.management.jmxremote");
if (jmxPort != null) {
commands.add("-Dcom.sun.management.jmxremote.port=" + jmxPort);
} else {
commands.add("-Dcom.sun.management.jmxremote.port=9010");
}
if (jmxAuthenticate != null) {
commands.add("-Dcom.sun.management.jmxremote.authenticate=" + jmxAuthenticate);
} else {
commands.add("-Dcom.sun.management.jmxremote.authenticate=false");
}
if (jmxSSL != null) {
commands.add("-Dcom.sun.management.jmxremote.ssl=" + jmxSSL);
} else {
commands.add("-Dcom.sun.management.jmxremote.ssl=false");
}
}
if (space != null) {
commands.add("-Xmx" + space + "M");
}
@ -94,36 +67,12 @@ public class StartHandler extends CommandHandler {
if (forceNoInMemDB) {
commands.add("forceNoInMemDB=true");
}
if(tempDirLocation != null && !tempDirLocation.isEmpty()) {
commands.add("tempDirLocation=" + tempDirLocation);
}
} else {
commands.add(installationDir + "jre/bin/java");
commands.add("-Djava.security.egd=file:/dev/urandom");
commands.add("-classpath");
commands.add(
installationDir + "core/*:" + installationDir + "plugin-interface/*:" + installationDir + "ee/*");
if (javaagentEnabled) {
commands.add("-javaagent:" + installationDir + "agent/opentelemetry-javaagent.jar");
}
if (jmxEnabled) {
commands.add("-Dcom.sun.management.jmxremote");
if (jmxPort != null) {
commands.add("-Dcom.sun.management.jmxremote.port=" + jmxPort);
} else {
commands.add("-Dcom.sun.management.jmxremote.port=9010");
}
if (jmxAuthenticate != null) {
commands.add("-Dcom.sun.management.jmxremote.authenticate=" + jmxAuthenticate);
} else {
commands.add("-Dcom.sun.management.jmxremote.authenticate=false");
}
if (jmxSSL != null) {
commands.add("-Dcom.sun.management.jmxremote.ssl=" + jmxSSL);
} else {
commands.add("-Dcom.sun.management.jmxremote.ssl=false");
}
}
if (space != null) {
commands.add("-Xmx" + space + "M");
}
@ -141,14 +90,10 @@ public class StartHandler extends CommandHandler {
if (forceNoInMemDB) {
commands.add("forceNoInMemDB=true");
}
if(tempDirLocation != null && !tempDirLocation.isEmpty()) {
commands.add("tempDirLocation=" + tempDirLocation);
}
}
if (!foreground) {
try {
ProcessBuilder pb = new ProcessBuilder(commands);
Logging.info("Command to be run: " + String.join(" ", pb.command()));
pb.redirectErrorStream(true);
Process process = pb.start();
try (InputStreamReader in = new InputStreamReader(process.getInputStream());
@ -227,15 +172,6 @@ public class StartHandler extends CommandHandler {
"Sets the host on which this instance of SuperTokens should run. Example: \"--host=192.168.0.1\""));
options.add(
new Option("--foreground", "Runs this instance of SuperTokens in the foreground (not as a daemon)"));
options.add(
new Option("--with-temp-dir", "Uses the passed dir as temp dir, instead of the internal default."));
options.add(new Option("--javaagent", "Enables the OpenTelemetry Javaagent for tracing and metrics."));
options.add(new Option("--jmx", "Enables JMX management and monitoring."));
options.add(new Option("--jmx-port", "Sets the port for JMX. Defaults to 9010 if --jmx is passed."));
options.add(new Option("--jmx-authenticate",
"Sets whether JMX authentication is enabled or not. Defaults to false if --jmx is passed."));
options.add(new Option("--jmx-ssl",
"Sets whether JMX SSL is enabled or not. Defaults to false if --jmx is passed."));
return options;
}
View File
@ -152,52 +152,6 @@ core_config_version: 0
# if there are more CUDs in the database and block all other CUDs from being used from this instance.
# supertokens_saas_load_only_cud:
# (OPTIONAL | Default: null) string value. If specified, the core uses this URL to connect to the OAuth provider
# public service.
# oauth_provider_public_service_url:
# (OPTIONAL | Default: null) string value. If specified, the core uses this URL to connect to the OAuth provider admin
# service.
# oauth_provider_admin_service_url:
# (OPTIONAL | Default: null) string value. If specified, the core uses this URL to replace the default
# consent and login URLs to {apiDomain}.
# oauth_provider_consent_login_base_url:
# (OPTIONAL | Default: oauth_provider_public_service_url) If specified, the core uses this URL to parse responses from
# the oauth provider when the oauth provider's internal address differs from the known public provider address.
# oauth_provider_url_configured_in_oauth_provider:
# (Optional | Default: null) string value. The encryption key used for saving OAuth client secret on the database.
# oauth_client_secret_encryption_key:
# (DIFFERENT_ACROSS_APPS | OPTIONAL | Default: number of available processor cores) int value. If specified,
# the supertokens core will use the specified number of threads to complete the migration of users.
# bulk_migration_parallelism:
# (DIFFERENT_ACROSS_APPS | OPTIONAL | Default: 8000) int value. If specified, the supertokens core will load the
# specified number of users for migrating in one single batch.
# bulk_migration_batch_size:
# (DIFFERENT_ACROSS_APPS | OPTIONAL | Default: 3600000) long value. Time in milliseconds for how long a webauthn
# account recovery token is valid for.
# webauthn_recover_account_token_lifetime:
# (OPTIONAL | Default: null) string value. The URL of the OpenTelemetry collector to which the core
# (OPTIONAL | Default: http://localhost:4317) string value. The URL of the OpenTelemetry collector to which the core
# will send telemetry data. This should be in the format http://<host>:<port> or https://<host>:<port>.
# otel_collector_connection_uri:
# (OPTIONAL | Default: false) boolean value. Enables or disables the deadlock logger.
# deadlock_logger_enable:
# (OPTIONAL | Default: null) string value. If specified, uses this URL as ACS URL for handling legacy SAML clients
# saml_legacy_acs_url:
# (OPTIONAL | Default: https://saml.supertokens.com) string value. Service provider's entity ID.
# saml_sp_entity_id:
# (OPTIONAL | Default: 300000) long value. Duration for which SAML claims will be valid before they are consumed
# saml_claims_validity:
# (OPTIONAL | Default: 300000) long value. Duration for which SAML relay state will be valid before it is consumed
# saml_relay_state_validity:
View File
@ -20,9 +20,6 @@
"3.1",
"4.0",
"5.0",
"5.1",
"5.2",
"5.3",
"5.4"
"5.1"
]
}
View File
@ -152,52 +152,6 @@ disable_telemetry: true
# if there are more CUDs in the database and block all other CUDs from being used from this instance.
# supertokens_saas_load_only_cud:
# (OPTIONAL | Default: null) string value. If specified, the core uses this URL to connect to the OAuth provider
# public service.
# oauth_provider_public_service_url:
# (OPTIONAL | Default: null) string value. If specified, the core uses this URL to connect to the OAuth provider admin
# service.
# oauth_provider_admin_service_url:
# (OPTIONAL | Default: null) string value. If specified, the core uses this URL to replace the default
# consent and login URLs to {apiDomain}.
# oauth_provider_consent_login_base_url:
# (OPTIONAL | Default: oauth_provider_public_service_url) If specified, the core uses this URL to parse responses from
# the oauth provider when the oauth provider's internal address differs from the known public provider address.
# oauth_provider_url_configured_in_oauth_provider:
# (Optional | Default: null) string value. The encryption key used for saving OAuth client secret on the database.
# oauth_client_secret_encryption_key:
# (DIFFERENT_ACROSS_APPS | OPTIONAL | Default: number of available processor cores) int value. If specified,
# the supertokens core will use the specified number of threads to complete the migration of users.
# bulk_migration_parallelism:
# (DIFFERENT_ACROSS_APPS | OPTIONAL | Default: 8000) int value. If specified, the supertokens core will load the
# specified number of users for migrating in one single batch.
# bulk_migration_batch_size:
# (DIFFERENT_ACROSS_APPS | OPTIONAL | Default: 3600000) long value. Time in milliseconds for how long a webauthn
# account recovery token is valid for.
# webauthn_recover_account_token_lifetime:
# (OPTIONAL | Default: null) string value. The URL of the OpenTelemetry collector to which the core
# (OPTIONAL | Default: http://localhost:4317) string value. The URL of the OpenTelemetry collector to which the core
# will send telemetry data. This should be in the format http://<host>:<port> or https://<host>:<port>.
# otel_collector_connection_uri:
# (OPTIONAL | Default: false) boolean value. Enables or disables the deadlock logger.
# deadlock_logger_enable:
# (OPTIONAL | Default: null) string value. If specified, uses this URL as ACS URL for handling legacy SAML clients
saml_legacy_acs_url: "http://localhost:5225/api/oauth/saml"
# (OPTIONAL | Default: https://saml.supertokens.com) string value. Service provider's entity ID.
# saml_sp_entity_id:
# (OPTIONAL | Default: 300000) long value. Duration for which SAML claims will be valid before they are consumed
# saml_claims_validity:
# (OPTIONAL | Default: 300000) long value. Duration for which SAML relay state will be valid before it is consumed
# saml_relay_state_validity:
View File
@ -18,9 +18,9 @@ dependencies {
testImplementation group: 'junit', name: 'junit', version: '4.12'
}
tasks.register('copyJars', Copy) {
task copyJars(type: Copy) {
into "$buildDir/dependencies"
from configurations.runtimeClasspath
into layout.buildDirectory.dir("dependencies")
}
test {
@ -56,10 +56,10 @@ tasks.withType(Test) {
TestLogEvent.PASSED,
TestLogEvent.SKIPPED,
TestLogEvent.STANDARD_OUT
exceptionFormat = TestExceptionFormat.FULL
showExceptions = true
showCauses = true
showStackTraces = true
exceptionFormat TestExceptionFormat.FULL
showExceptions true
showCauses true
showStackTraces true
// set options for log level DEBUG and INFO
debug {
@ -69,7 +69,7 @@ tasks.withType(Test) {
TestLogEvent.SKIPPED,
TestLogEvent.STANDARD_ERROR,
TestLogEvent.STANDARD_OUT
exceptionFormat = TestExceptionFormat.FULL
exceptionFormat TestExceptionFormat.FULL
}
info.events = debug.events
info.exceptionFormat = debug.exceptionFormat
Binary file not shown.
View File
@ -14,7 +14,6 @@ exitIfNeeded
exitIfNeeded
(cd ../../ && ./gradlew :$prefix-core:downloader:copyJars < /dev/null)
exitIfNeeded
View File
@ -2,12 +2,10 @@ plugins {
id 'java-library'
}
version = 'unspecified'
version 'unspecified'
repositories {
mavenCentral()
maven { url 'https://build.shibboleth.net/nexus/content/repositories/releases/' }
}
jar {
@ -15,7 +13,7 @@ jar {
}
dependencies {
compileOnly group: 'com.google.code.gson', name: 'gson', version: '2.13.1'
compileOnly group: 'com.google.code.gson', name: 'gson', version: '2.3.1'
compileOnly project(":supertokens-plugin-interface")
testImplementation project(":supertokens-plugin-interface")
@ -37,13 +35,13 @@ dependencies {
testImplementation group: 'org.mockito', name: 'mockito-core', version: '3.1.0'
// https://mvnrepository.com/artifact/org.apache.tomcat.embed/tomcat-embed-core
testImplementation group: 'org.apache.tomcat.embed', name: 'tomcat-embed-core', version: '11.0.5'
testImplementation group: 'org.apache.tomcat.embed', name: 'tomcat-embed-core', version: '10.1.18'
// https://mvnrepository.com/artifact/ch.qos.logback/logback-classic
testImplementation group: 'ch.qos.logback', name: 'logback-classic', version: '1.5.13'
testImplementation group: 'ch.qos.logback', name: 'logback-classic', version: '1.4.14'
// https://mvnrepository.com/artifact/com.google.code.gson/gson
testImplementation group: 'com.google.code.gson', name: 'gson', version: '2.13.1'
testImplementation group: 'com.google.code.gson', name: 'gson', version: '2.3.1'
testImplementation 'com.tngtech.archunit:archunit-junit4:0.22.0'
@ -54,18 +52,17 @@ dependencies {
testImplementation group: 'com.fasterxml.jackson.core', name: 'jackson-databind', version: '2.16.1'
testImplementation group: 'org.jetbrains', name: 'annotations', version: '13.0'
}
tasks.register('copyJars', Copy) {
task copyJars(type: Copy) {
into "$buildDir/dependencies"
from configurations.runtimeClasspath
into layout.buildDirectory.dir("dependencies")
}
def interfaceName = "io.supertokens.featureflag.EEFeatureFlagInterface"
def className = "io.supertokens.ee.EEFeatureFlag"
tasks.register('generateMetaInf') {
task generateMetaInf {
doFirst {
mkdir "src/main/resources/META-INF/services"
file("src/main/resources/META-INF/services/${interfaceName}").text = "${className}"
@ -92,10 +89,10 @@ tasks.withType(Test) {
TestLogEvent.PASSED,
TestLogEvent.SKIPPED,
TestLogEvent.STANDARD_OUT
exceptionFormat = TestExceptionFormat.FULL
showExceptions = true
showCauses = true
showStackTraces = true
exceptionFormat TestExceptionFormat.FULL
showExceptions true
showCauses true
showStackTraces true
// set options for log level DEBUG and INFO
debug {
@ -105,7 +102,7 @@ tasks.withType(Test) {
TestLogEvent.SKIPPED,
TestLogEvent.STANDARD_ERROR,
TestLogEvent.STANDARD_OUT
exceptionFormat = TestExceptionFormat.FULL
exceptionFormat TestExceptionFormat.FULL
}
info.events = debug.events
info.exceptionFormat = debug.exceptionFormat
Binary file not shown.
View File
@ -14,7 +14,6 @@ exitIfNeeded
exitIfNeeded
(cd ../../ && ./gradlew :$prefix-core:ee:copyJars < /dev/null)
exitIfNeeded
View File
@ -24,7 +24,6 @@ import io.supertokens.pluginInterface.ActiveUsersStorage;
import io.supertokens.pluginInterface.KeyValueInfo;
import io.supertokens.pluginInterface.STORAGE_TYPE;
import io.supertokens.pluginInterface.Storage;
import io.supertokens.pluginInterface.StorageUtils;
import io.supertokens.pluginInterface.authRecipe.AuthRecipeStorage;
import io.supertokens.pluginInterface.dashboard.sqlStorage.DashboardSQLStorage;
import io.supertokens.pluginInterface.exceptions.StorageQueryException;
@ -33,8 +32,6 @@ import io.supertokens.pluginInterface.multitenancy.TenantConfig;
import io.supertokens.pluginInterface.multitenancy.TenantIdentifier;
import io.supertokens.pluginInterface.multitenancy.ThirdPartyConfig;
import io.supertokens.pluginInterface.multitenancy.exceptions.TenantOrAppNotFoundException;
import io.supertokens.pluginInterface.oauth.OAuthStorage;
import io.supertokens.pluginInterface.saml.SAMLStorage;
import io.supertokens.pluginInterface.session.sqlStorage.SessionSQLStorage;
import io.supertokens.storageLayer.StorageLayer;
import io.supertokens.utils.Utils;
@ -203,34 +200,29 @@ public class EEFeatureFlag implements io.supertokens.featureflag.EEFeatureFlagIn
// TODO: Active users are present only on public tenant and MFA users may be
// present on different storages
JsonObject result = new JsonObject();
Storage[] storages = StorageLayer.getStoragesForApp(main, this.appIdentifier);
// Commenting out these stats for now as they are very CPU intensive and reduces the performance
// of other API calls while this is running.
// Also, we are not currently using these stats.
int totalUserCountWithMoreThanOneLoginMethod = 0;
int[] maus = new int[31];
// Storage[] storages = StorageLayer.getStoragesForApp(main, this.appIdentifier);
long now = System.currentTimeMillis();
// int totalUserCountWithMoreThanOneLoginMethod = 0;
// int[] maus = new int[31];
for (Storage storage : storages) {
totalUserCountWithMoreThanOneLoginMethod += ((AuthRecipeStorage) storage)
.getUsersCountWithMoreThanOneLoginMethodOrTOTPEnabled(this.appIdentifier);
// long now = System.currentTimeMillis();
for (int i = 1; i <= 31; i++) {
long timestamp = now - (i * 24 * 60 * 60 * 1000L);
// for (Storage storage : storages) {
// totalUserCountWithMoreThanOneLoginMethod += ((AuthRecipeStorage) storage)
// .getUsersCountWithMoreThanOneLoginMethodOrTOTPEnabled(this.appIdentifier);
// `maus[i-1]` since i starts from 1
maus[i - 1] += ((ActiveUsersStorage) storage)
.countUsersThatHaveMoreThanOneLoginMethodOrTOTPEnabledAndActiveSince(appIdentifier, timestamp);
}
}
// for (int i = 1; i <= 31; i++) {
// long timestamp = now - (i * 24 * 60 * 60 * 1000L);
// // `maus[i-1]` since i starts from 1
// maus[i - 1] += ((ActiveUsersStorage) storage)
// .countUsersThatHaveMoreThanOneLoginMethodOrTOTPEnabledAndActiveSince(appIdentifier, timestamp);
// }
// }
// result.addProperty("totalUserCountWithMoreThanOneLoginMethodOrTOTPEnabled",
// totalUserCountWithMoreThanOneLoginMethod);
// result.add("mauWithMoreThanOneLoginMethodOrTOTPEnabled", new Gson().toJsonTree(maus));
result.addProperty("totalUserCountWithMoreThanOneLoginMethodOrTOTPEnabled",
totalUserCountWithMoreThanOneLoginMethod);
result.add("mauWithMoreThanOneLoginMethodOrTOTPEnabled", new Gson().toJsonTree(maus));
return result;
}
@ -313,63 +305,36 @@ public class EEFeatureFlag implements io.supertokens.featureflag.EEFeatureFlagIn
}
result.addProperty("usesAccountLinking", usesAccountLinking);
if (!usesAccountLinking) {
result.addProperty("totalUserCountWithMoreThanOneLoginMethod", 0);
JsonArray mauArray = new JsonArray();
for (int i = 0; i < 31; i++) {
mauArray.add(new JsonPrimitive(0));
}
result.add("mauWithMoreThanOneLoginMethod", mauArray);
return result;
}
// Commenting out these stats for now as they are very CPU intensive and reduces the performance
// of other API calls while this is running.
// Also, we are not currently using these stats.
// if (!usesAccountLinking) {
// result.addProperty("totalUserCountWithMoreThanOneLoginMethod", 0);
// JsonArray mauArray = new JsonArray();
// for (int i = 0; i < 31; i++) {
// mauArray.add(new JsonPrimitive(0));
// }
// result.add("mauWithMoreThanOneLoginMethod", mauArray);
// return result;
// }
// int totalUserCountWithMoreThanOneLoginMethod = 0;
// int[] maus = new int[31];
// long now = System.currentTimeMillis();
// for (Storage storage : storages) {
// totalUserCountWithMoreThanOneLoginMethod += ((AuthRecipeStorage) storage).getUsersCountWithMoreThanOneLoginMethod(
// this.appIdentifier);
// for (int i = 1; i <= 31; i++) {
// long timestamp = now - (i * 24 * 60 * 60 * 1000L);
// // `maus[i-1]` because i starts from 1
// maus[i - 1] += ((ActiveUsersStorage) storage).countUsersThatHaveMoreThanOneLoginMethodAndActiveSince(
// appIdentifier, timestamp);
// }
// }
// result.addProperty("totalUserCountWithMoreThanOneLoginMethod", totalUserCountWithMoreThanOneLoginMethod);
// result.add("mauWithMoreThanOneLoginMethod", new Gson().toJsonTree(maus));
return result;
}
private JsonObject getOAuthStats() throws StorageQueryException, TenantOrAppNotFoundException {
JsonObject result = new JsonObject();
OAuthStorage oAuthStorage = StorageUtils.getOAuthStorage(StorageLayer.getStorage(
this.appIdentifier.getAsPublicTenantIdentifier(), main));
result.addProperty("totalNumberOfClients", oAuthStorage.countTotalNumberOfOAuthClients(appIdentifier));
result.addProperty("numberOfClientCredentialsOnlyClients", oAuthStorage.countTotalNumberOfClientCredentialsOnlyOAuthClients(appIdentifier));
result.addProperty("numberOfM2MTokensAlive", oAuthStorage.countTotalNumberOfOAuthM2MTokensAlive(appIdentifier));
int totalUserCountWithMoreThanOneLoginMethod = 0;
int[] maus = new int[31];
long now = System.currentTimeMillis();
JsonArray tokensCreatedArray = new JsonArray();
for (int i = 1; i <= 31; i++) {
long timestamp = now - (i * 24 * 60 * 60 * 1000L);
int numberOfTokensCreated = oAuthStorage.countTotalNumberOfOAuthM2MTokensCreatedSince(this.appIdentifier, timestamp);
tokensCreatedArray.add(new JsonPrimitive(numberOfTokensCreated));
}
result.add("numberOfM2MTokensCreated", tokensCreatedArray);
for (Storage storage : storages) {
totalUserCountWithMoreThanOneLoginMethod += ((AuthRecipeStorage) storage).getUsersCountWithMoreThanOneLoginMethod(
this.appIdentifier);
for (int i = 1; i <= 31; i++) {
long timestamp = now - (i * 24 * 60 * 60 * 1000L);
// `maus[i-1]` because i starts from 1
maus[i - 1] += ((ActiveUsersStorage) storage).countUsersThatHaveMoreThanOneLoginMethodAndActiveSince(
appIdentifier, timestamp);
}
}
result.addProperty("totalUserCountWithMoreThanOneLoginMethod", totalUserCountWithMoreThanOneLoginMethod);
result.add("mauWithMoreThanOneLoginMethod", new Gson().toJsonTree(maus));
return result;
}
@ -387,34 +352,6 @@ public class EEFeatureFlag implements io.supertokens.featureflag.EEFeatureFlagIn
return mauArr;
}
private JsonObject getSAMLStats() throws TenantOrAppNotFoundException, StorageQueryException {
JsonObject stats = new JsonObject();
stats.addProperty("connectionUriDomain", this.appIdentifier.getConnectionUriDomain());
stats.addProperty("appId", this.appIdentifier.getAppId());
JsonArray tenantStats = new JsonArray();
TenantConfig[] tenantConfigs = Multitenancy.getAllTenantsForApp(this.appIdentifier, main);
for (TenantConfig tenantConfig : tenantConfigs) {
JsonObject tenantStat = new JsonObject();
tenantStat.addProperty("tenantId", tenantConfig.tenantIdentifier.getTenantId());
{
Storage storage = StorageLayer.getStorage(tenantConfig.tenantIdentifier, main);
SAMLStorage samlStorage = StorageUtils.getSAMLStorage(storage);
JsonObject stat = new JsonObject();
stat.addProperty("numberOfSAMLClients", samlStorage.countSAMLClients(tenantConfig.tenantIdentifier));
stat.add(tenantConfig.tenantIdentifier.getTenantId(), stat);
}
}
stats.add("tenants", tenantStats);
return stats;
}
@Override
public JsonObject getPaidFeatureStats() throws StorageQueryException, TenantOrAppNotFoundException {
JsonObject usageStats = new JsonObject();
@ -454,18 +391,6 @@ public class EEFeatureFlag implements io.supertokens.featureflag.EEFeatureFlagIn
if (feature == EE_FEATURES.ACCOUNT_LINKING) {
usageStats.add(EE_FEATURES.ACCOUNT_LINKING.toString(), getAccountLinkingStats());
}
if (feature == EE_FEATURES.SECURITY) {
usageStats.add(EE_FEATURES.SECURITY.toString(), new JsonObject());
}
if (feature == EE_FEATURES.OAUTH) {
usageStats.add(EE_FEATURES.OAUTH.toString(), getOAuthStats());
}
if (feature == EE_FEATURES.SAML) {
usageStats.add(EE_FEATURES.SAML.toString(), getSAMLStats());
}
}
usageStats.add("maus", getMAUs());
@ -556,7 +481,7 @@ public class EEFeatureFlag implements io.supertokens.featureflag.EEFeatureFlagIn
ProcessState.getInstance(main)
.addState(ProcessState.PROCESS_STATE.LICENSE_KEY_CHECK_NETWORK_CALL, null, json);
JsonObject licenseCheckResponse = HttpRequest.sendJsonPOSTRequest(this.main, REQUEST_ID,
"https://api.supertokens.com/0/st/license/check",
"https://api.supertokens.io/0/st/license/check",
json, 10000, 10000, 0);
if (licenseCheckResponse.get("status").getAsString().equalsIgnoreCase("OK")) {
Logging.debug(main, appIdentifier.getAsPublicTenantIdentifier(), "API returned OK");
View File
@ -44,14 +44,14 @@ public class TestMultitenancyStats {
String[] args = {"../../"};
TestingProcessManager.TestingProcess process = TestingProcessManager.start(args);
CronTaskTest.getInstance(process.getProcess()).setIntervalInSeconds(EELicenseCheck.RESOURCE_KEY, 1);
CronTaskTest.getInstance(process.main).setIntervalInSeconds(EELicenseCheck.RESOURCE_KEY, 1);
Assert.assertNotNull(process.checkOrWaitForEvent(ProcessState.PROCESS_STATE.STARTED));
if (StorageLayer.getStorage(process.getProcess()).getType() != STORAGE_TYPE.SQL) {
return;
}
if (StorageLayer.isInMemDb(process.getProcess())) {
if (StorageLayer.isInMemDb(process.main)) {
// cause we keep all features enabled in memdb anyway
return;
}
View File
@ -16,7 +16,7 @@ public class TestingProcessManager {
String[] args = {"../../"};
TestingProcess process = TestingProcessManager.start(args);
process.checkOrWaitForEvent(PROCESS_STATE.STARTED);
process.getProcess().deleteAllInformationForTesting();
process.main.deleteAllInformationForTesting();
process.kill();
System.out.println("----------DELETE ALL INFORMATION----------");
}
View File
@ -24,8 +24,7 @@ public abstract class Utils extends Mockito {
try {
// remove config.yaml file
String workerId = System.getProperty("org.gradle.test.worker", "");
ProcessBuilder pb = new ProcessBuilder("rm", "config" + workerId + ".yaml");
ProcessBuilder pb = new ProcessBuilder("rm", "config.yaml");
pb.directory(new File(installDir));
Process process = pb.start();
process.waitFor();
@ -59,8 +58,7 @@ public abstract class Utils extends Mockito {
// if the default config is not the same as the current config, we must reset the storage layer
File ogConfig = new File("../../temp/config.yaml");
String workerId = System.getProperty("org.gradle.test.worker", "");
File currentConfig = new File("../../config" + workerId + ".yaml");
File currentConfig = new File("../../config.yaml");
if (currentConfig.isFile()) {
byte[] ogConfigContent = Files.readAllBytes(ogConfig.toPath());
byte[] currentConfigContent = Files.readAllBytes(currentConfig.toPath());
@ -69,7 +67,7 @@ public abstract class Utils extends Mockito {
}
}
ProcessBuilder pb = new ProcessBuilder("cp", "temp/config.yaml", "./config" + workerId + ".yaml");
ProcessBuilder pb = new ProcessBuilder("cp", "temp/config.yaml", "./config.yaml");
pb.directory(new File(installDir));
Process process = pb.start();
process.waitFor();
@ -98,15 +96,14 @@ public abstract class Utils extends Mockito {
String newStr = "\n# " + key + ":";
StringBuilder originalFileContent = new StringBuilder();
String workerId = System.getProperty("org.gradle.test.worker", "");
try (BufferedReader reader = new BufferedReader(new FileReader("../../config" + workerId + ".yaml"))) {
try (BufferedReader reader = new BufferedReader(new FileReader("../../config.yaml"))) {
String currentReadingLine = reader.readLine();
while (currentReadingLine != null) {
originalFileContent.append(currentReadingLine).append(System.lineSeparator());
currentReadingLine = reader.readLine();
}
String modifiedFileContent = originalFileContent.toString().replaceAll(oldStr, newStr);
try (BufferedWriter writer = new BufferedWriter(new FileWriter("../../config" + workerId + ".yaml"))) {
try (BufferedWriter writer = new BufferedWriter(new FileWriter("../../config.yaml"))) {
writer.write(modifiedFileContent);
}
}
@ -120,15 +117,14 @@ public abstract class Utils extends Mockito {
String oldStr = "\n((#\\s)?)" + key + "(:|((:\\s).+))\n";
String newStr = "\n" + key + ": " + value + "\n";
StringBuilder originalFileContent = new StringBuilder();
String workerId = System.getProperty("org.gradle.test.worker", "");
try (BufferedReader reader = new BufferedReader(new FileReader("../../config" + workerId + ".yaml"))) {
try (BufferedReader reader = new BufferedReader(new FileReader("../../config.yaml"))) {
String currentReadingLine = reader.readLine();
while (currentReadingLine != null) {
originalFileContent.append(currentReadingLine).append(System.lineSeparator());
currentReadingLine = reader.readLine();
}
String modifiedFileContent = originalFileContent.toString().replaceAll(oldStr, newStr);
try (BufferedWriter writer = new BufferedWriter(new FileWriter("../../config" + workerId + ".yaml"))) {
try (BufferedWriter writer = new BufferedWriter(new FileWriter("../../config.yaml"))) {
writer.write(modifiedFileContent);
}
}
View File
@ -45,7 +45,7 @@ public class DeleteLicenseKeyAPITest {
// check that no LicenseKey exits
try {
FeatureFlag.getInstance(process.getProcess()).getLicenseKey();
FeatureFlag.getInstance(process.main).getLicenseKey();
fail();
} catch (NoLicenseKeyFoundException ignored) {
}
@ -58,7 +58,7 @@ public class DeleteLicenseKeyAPITest {
// check that no LicenseKey exits
try {
FeatureFlag.getInstance(process.getProcess()).getLicenseKey();
FeatureFlag.getInstance(process.main).getLicenseKey();
fail();
} catch (NoLicenseKeyFoundException ignored) {
}
@ -90,7 +90,7 @@ public class DeleteLicenseKeyAPITest {
// check that no LicenseKey exits
try {
FeatureFlag.getInstance(process.getProcess()).getLicenseKey();
FeatureFlag.getInstance(process.main).getLicenseKey();
fail();
} catch (NoLicenseKeyFoundException ignored) {
}
View File
@ -38,7 +38,7 @@ public class GetFeatureFlagAPITest {
TestingProcessManager.TestingProcess process = TestingProcessManager.start(args);
Assert.assertNotNull(process.checkOrWaitForEvent(PROCESS_STATE.STARTED));
if (StorageLayer.isInMemDb(process.getProcess())) {
if (StorageLayer.isInMemDb(process.main)) {
// cause we keep all features enabled in memdb anyway
return;
}
@ -72,7 +72,7 @@ public class GetFeatureFlagAPITest {
TestingProcessManager.TestingProcess process = TestingProcessManager.start(args);
Assert.assertNotNull(process.checkOrWaitForEvent(PROCESS_STATE.STARTED));
if (StorageLayer.isInMemDb(process.getProcess())) {
if (StorageLayer.isInMemDb(process.main)) {
// cause we keep all features enabled in memdb anyway
return;
}
View File
@ -85,7 +85,7 @@ public class GetLicenseKeyAPITest {
assertNotNull(process.checkOrWaitForEvent(PROCESS_STATE.STARTED));
Assert.assertNull(FeatureFlag.getInstance(process.getProcess()).getEeFeatureFlagInstance());
Assert.assertNull(FeatureFlag.getInstance(process.main).getEeFeatureFlagInstance());
Assert.assertEquals(FeatureFlag.getInstance(process.getProcess()).getEnabledFeatures().length, 0);
View File
@ -74,9 +74,9 @@ public class SetLicenseKeyAPITest {
assertNotNull(process.checkOrWaitForEvent(PROCESS_STATE.STARTED));
Assert.assertNull(FeatureFlag.getInstance(process.getProcess()).getEeFeatureFlagInstance());
Assert.assertNull(FeatureFlag.getInstance(process.main).getEeFeatureFlagInstance());
Assert.assertEquals(0, FeatureFlag.getInstance(process.getProcess()).getEnabledFeatures().length);
Assert.assertEquals(FeatureFlag.getInstance(process.getProcess()).getEnabledFeatures().length, 0);
// set license key when ee folder does not exist
JsonObject requestBody = new JsonObject();
View File
@ -2,44 +2,34 @@
"_comment": "Contains list of implementation dependencies URL for this project. This is a generated file, don't modify the contents by hand.",
"list": [
{
"jar":"https://repo.maven.apache.org/maven2/org/apache/tomcat/embed/tomcat-embed-core/11.0.12/tomcat-embed-core-11.0.12.jar",
"name":"tomcat-embed-core 11.0.12",
"src":"https://repo.maven.apache.org/maven2/org/apache/tomcat/embed/tomcat-embed-core/11.0.12/tomcat-embed-core-11.0.12-sources.jar"
"jar":"https://repo.maven.apache.org/maven2/com/google/code/gson/gson/2.3.1/gson-2.3.1.jar",
"name":"gson 2.3.1",
"src":"https://repo.maven.apache.org/maven2/com/google/code/gson/gson/2.3.1/gson-2.3.1-sources.jar"
},
{
"jar":"https://repo.maven.apache.org/maven2/org/apache/tomcat/tomcat-annotations-api/11.0.12/tomcat-annotations-api-11.0.12.jar",
"name":"tomcat-annotations-api 11.0.12",
"src":"https://repo.maven.apache.org/maven2/org/apache/tomcat/tomcat-annotations-api/11.0.12/tomcat-annotations-api-11.0.12-sources.jar"
"jar":"https://repo.maven.apache.org/maven2/com/fasterxml/jackson/dataformat/jackson-dataformat-yaml/2.16.1/jackson-dataformat-yaml-2.16.1.jar",
"name":"jackson-dataformat-yaml 2.16.1",
"src":"https://repo.maven.apache.org/maven2/com/fasterxml/jackson/dataformat/jackson-dataformat-yaml/2.16.1/jackson-dataformat-yaml-2.16.1-sources.jar"
},
{
"jar":"https://repo.maven.apache.org/maven2/com/google/code/gson/gson/2.13.1/gson-2.13.1.jar",
"name":"gson 2.13.1",
"src":"https://repo.maven.apache.org/maven2/com/google/code/gson/gson/2.13.1/gson-2.13.1-sources.jar"
"jar":"https://repo.maven.apache.org/maven2/org/yaml/snakeyaml/2.2/snakeyaml-2.2.jar",
"name":"snakeyaml 2.2",
"src":"https://repo.maven.apache.org/maven2/org/yaml/snakeyaml/2.2/snakeyaml-2.2-sources.jar"
},
{
"jar":"https://repo.maven.apache.org/maven2/com/google/errorprone/error_prone_annotations/2.38.0/error_prone_annotations-2.38.0.jar",
"name":"error_prone_annotations 2.38.0",
"src":"https://repo.maven.apache.org/maven2/com/google/errorprone/error_prone_annotations/2.38.0/error_prone_annotations-2.38.0-sources.jar"
"jar":"https://repo.maven.apache.org/maven2/com/fasterxml/jackson/core/jackson-databind/2.16.1/jackson-databind-2.16.1.jar",
"name":"jackson-databind 2.16.1",
"src":"https://repo.maven.apache.org/maven2/com/fasterxml/jackson/core/jackson-databind/2.16.1/jackson-databind-2.16.1-sources.jar"
},
{
"jar":"https://repo.maven.apache.org/maven2/com/fasterxml/jackson/dataformat/jackson-dataformat-yaml/2.18.2/jackson-dataformat-yaml-2.18.2.jar",
"name":"jackson-dataformat-yaml 2.18.2",
"src":"https://repo.maven.apache.org/maven2/com/fasterxml/jackson/dataformat/jackson-dataformat-yaml/2.18.2/jackson-dataformat-yaml-2.18.2-sources.jar"
"jar":"https://repo.maven.apache.org/maven2/org/apache/tomcat/embed/tomcat-embed-core/10.1.18/tomcat-embed-core-10.1.18.jar",
"name":"tomcat-embed-core 10.1.18",
"src":"https://repo.maven.apache.org/maven2/org/apache/tomcat/embed/tomcat-embed-core/10.1.18/tomcat-embed-core-10.1.18-sources.jar"
},
{
"jar":"https://repo.maven.apache.org/maven2/org/yaml/snakeyaml/2.3/snakeyaml-2.3.jar",
"name":"snakeyaml 2.3",
"src":"https://repo.maven.apache.org/maven2/org/yaml/snakeyaml/2.3/snakeyaml-2.3-sources.jar"
},
{
"jar":"https://repo.maven.apache.org/maven2/com/fasterxml/jackson/dataformat/jackson-dataformat-cbor/2.18.2/jackson-dataformat-cbor-2.18.2.jar",
"name":"jackson-dataformat-cbor 2.18.2",
"src":"https://repo.maven.apache.org/maven2/com/fasterxml/jackson/dataformat/jackson-dataformat-cbor/2.18.2/jackson-dataformat-cbor-2.18.2-sources.jar"
},
{
"jar":"https://repo.maven.apache.org/maven2/com/fasterxml/jackson/core/jackson-databind/2.18.2/jackson-databind-2.18.2.jar",
"name":"jackson-databind 2.18.2",
"src":"https://repo.maven.apache.org/maven2/com/fasterxml/jackson/core/jackson-databind/2.18.2/jackson-databind-2.18.2-sources.jar"
"jar":"https://repo.maven.apache.org/maven2/org/apache/tomcat/tomcat-annotations-api/10.1.18/tomcat-annotations-api-10.1.18.jar",
"name":"tomcat-annotations-api 10.1.18",
"src":"https://repo.maven.apache.org/maven2/org/apache/tomcat/tomcat-annotations-api/10.1.18/tomcat-annotations-api-10.1.18-sources.jar"
},
{
"jar":"https://repo.maven.apache.org/maven2/com/google/code/findbugs/jsr305/3.0.2/jsr305-3.0.2.jar",
@@ -96,151 +86,6 @@
"name":"libphonenumber 8.13.25",
"src":"https://repo.maven.apache.org/maven2/com/googlecode/libphonenumber/libphonenumber/8.13.25/libphonenumber-8.13.25-sources.jar"
},
{
"jar":"https://repo.maven.apache.org/maven2/com/webauthn4j/webauthn4j-core/0.28.6.RELEASE/webauthn4j-core-0.28.6.RELEASE.jar",
"name":"webauthn4j-core 0.28.6.RELEASE",
"src":"https://repo.maven.apache.org/maven2/com/webauthn4j/webauthn4j-core/0.28.6.RELEASE/webauthn4j-core-0.28.6.RELEASE-sources.jar"
},
{
"jar":"https://build.shibboleth.net/nexus/content/repositories/releases/org/opensaml/opensaml-core/4.3.1/opensaml-core-4.3.1.jar",
"name":"opensaml-core 4.3.1",
"src":"https://build.shibboleth.net/nexus/content/repositories/releases/org/opensaml/opensaml-core/4.3.1/opensaml-core-4.3.1-sources.jar"
},
{
"jar":"https://build.shibboleth.net/nexus/content/repositories/releases/net/shibboleth/utilities/java-support/8.4.1/java-support-8.4.1.jar",
"name":"java-support 8.4.1",
"src":"https://build.shibboleth.net/nexus/content/repositories/releases/net/shibboleth/utilities/java-support/8.4.1/java-support-8.4.1-sources.jar"
},
{
"jar":"https://repo.maven.apache.org/maven2/com/google/guava/guava/31.1-jre/guava-31.1-jre.jar",
"name":"guava 31.1-jre",
"src":"https://repo.maven.apache.org/maven2/com/google/guava/guava/31.1-jre/guava-31.1-jre-sources.jar"
},
{
"jar":"https://repo.maven.apache.org/maven2/com/google/guava/failureaccess/1.0.1/failureaccess-1.0.1.jar",
"name":"failureaccess 1.0.1",
"src":"https://repo.maven.apache.org/maven2/com/google/guava/failureaccess/1.0.1/failureaccess-1.0.1-sources.jar"
},
{
"jar":"https://repo.maven.apache.org/maven2/com/google/guava/listenablefuture/9999.0-empty-to-avoid-conflict-with-guava/listenablefuture-9999.0-empty-to-avoid-conflict-with-guava.jar",
"name":"listenablefuture 9999.0-empty-to-avoid-conflict-with-guava",
"src":"https://repo.maven.apache.org/maven2/com/google/guava/listenablefuture/9999.0-empty-to-avoid-conflict-with-guava/listenablefuture-9999.0-empty-to-avoid-conflict-with-guava-sources.jar"
},
{
"jar":"https://repo.maven.apache.org/maven2/com/google/j2objc/j2objc-annotations/1.3/j2objc-annotations-1.3.jar",
"name":"j2objc-annotations 1.3",
"src":"https://repo.maven.apache.org/maven2/com/google/j2objc/j2objc-annotations/1.3/j2objc-annotations-1.3-sources.jar"
},
{
"jar":"https://repo.maven.apache.org/maven2/io/dropwizard/metrics/metrics-core/4.2.25/metrics-core-4.2.25.jar",
"name":"metrics-core 4.2.25",
"src":"https://repo.maven.apache.org/maven2/io/dropwizard/metrics/metrics-core/4.2.25/metrics-core-4.2.25-sources.jar"
},
{
"jar":"https://build.shibboleth.net/nexus/content/repositories/releases/org/opensaml/opensaml-saml-impl/4.3.1/opensaml-saml-impl-4.3.1.jar",
"name":"opensaml-saml-impl 4.3.1",
"src":"https://build.shibboleth.net/nexus/content/repositories/releases/org/opensaml/opensaml-saml-impl/4.3.1/opensaml-saml-impl-4.3.1-sources.jar"
},
{
"jar":"https://build.shibboleth.net/nexus/content/repositories/releases/org/opensaml/opensaml-xmlsec-impl/4.3.1/opensaml-xmlsec-impl-4.3.1.jar",
"name":"opensaml-xmlsec-impl 4.3.1",
"src":"https://build.shibboleth.net/nexus/content/repositories/releases/org/opensaml/opensaml-xmlsec-impl/4.3.1/opensaml-xmlsec-impl-4.3.1-sources.jar"
},
{
"jar":"https://build.shibboleth.net/nexus/content/repositories/releases/org/opensaml/opensaml-security-impl/4.3.1/opensaml-security-impl-4.3.1.jar",
"name":"opensaml-security-impl 4.3.1",
"src":"https://build.shibboleth.net/nexus/content/repositories/releases/org/opensaml/opensaml-security-impl/4.3.1/opensaml-security-impl-4.3.1-sources.jar"
},
{
"jar":"https://build.shibboleth.net/nexus/content/repositories/releases/org/opensaml/opensaml-security-api/4.3.1/opensaml-security-api-4.3.1.jar",
"name":"opensaml-security-api 4.3.1",
"src":"https://build.shibboleth.net/nexus/content/repositories/releases/org/opensaml/opensaml-security-api/4.3.1/opensaml-security-api-4.3.1-sources.jar"
},
{
"jar":"https://build.shibboleth.net/nexus/content/repositories/releases/org/opensaml/opensaml-messaging-api/4.3.1/opensaml-messaging-api-4.3.1.jar",
"name":"opensaml-messaging-api 4.3.1",
"src":"https://build.shibboleth.net/nexus/content/repositories/releases/org/opensaml/opensaml-messaging-api/4.3.1/opensaml-messaging-api-4.3.1-sources.jar"
},
{
"jar":"https://repo.maven.apache.org/maven2/org/apache/httpcomponents/httpclient/4.5.14/httpclient-4.5.14.jar",
"name":"httpclient 4.5.14",
"src":"https://repo.maven.apache.org/maven2/org/apache/httpcomponents/httpclient/4.5.14/httpclient-4.5.14-sources.jar"
},
{
"jar":"https://repo.maven.apache.org/maven2/org/apache/httpcomponents/httpcore/4.4.16/httpcore-4.4.16.jar",
"name":"httpcore 4.4.16",
"src":"https://repo.maven.apache.org/maven2/org/apache/httpcomponents/httpcore/4.4.16/httpcore-4.4.16-sources.jar"
},
{
"jar":"https://repo.maven.apache.org/maven2/org/cryptacular/cryptacular/1.2.5/cryptacular-1.2.5.jar",
"name":"cryptacular 1.2.5",
"src":"https://repo.maven.apache.org/maven2/org/cryptacular/cryptacular/1.2.5/cryptacular-1.2.5-sources.jar"
},
{
"jar":"https://repo.maven.apache.org/maven2/org/bouncycastle/bcprov-jdk18on/1.72/bcprov-jdk18on-1.72.jar",
"name":"bcprov-jdk18on 1.72",
"src":"https://repo.maven.apache.org/maven2/org/bouncycastle/bcprov-jdk18on/1.72/bcprov-jdk18on-1.72-sources.jar"
},
{
"jar":"https://repo.maven.apache.org/maven2/org/bouncycastle/bcpkix-jdk18on/1.72/bcpkix-jdk18on-1.72.jar",
"name":"bcpkix-jdk18on 1.72",
"src":"https://repo.maven.apache.org/maven2/org/bouncycastle/bcpkix-jdk18on/1.72/bcpkix-jdk18on-1.72-sources.jar"
},
{
"jar":"https://repo.maven.apache.org/maven2/org/bouncycastle/bcutil-jdk18on/1.72/bcutil-jdk18on-1.72.jar",
"name":"bcutil-jdk18on 1.72",
"src":"https://repo.maven.apache.org/maven2/org/bouncycastle/bcutil-jdk18on/1.72/bcutil-jdk18on-1.72-sources.jar"
},
{
"jar":"https://build.shibboleth.net/nexus/content/repositories/releases/org/opensaml/opensaml-xmlsec-api/4.3.1/opensaml-xmlsec-api-4.3.1.jar",
"name":"opensaml-xmlsec-api 4.3.1",
"src":"https://build.shibboleth.net/nexus/content/repositories/releases/org/opensaml/opensaml-xmlsec-api/4.3.1/opensaml-xmlsec-api-4.3.1-sources.jar"
},
{
"jar":"https://repo.maven.apache.org/maven2/org/apache/santuario/xmlsec/2.3.4/xmlsec-2.3.4.jar",
"name":"xmlsec 2.3.4",
"src":"https://repo.maven.apache.org/maven2/org/apache/santuario/xmlsec/2.3.4/xmlsec-2.3.4-sources.jar"
},
{
"jar":"https://build.shibboleth.net/nexus/content/repositories/releases/org/opensaml/opensaml-saml-api/4.3.1/opensaml-saml-api-4.3.1.jar",
"name":"opensaml-saml-api 4.3.1",
"src":"https://build.shibboleth.net/nexus/content/repositories/releases/org/opensaml/opensaml-saml-api/4.3.1/opensaml-saml-api-4.3.1-sources.jar"
},
{
"jar":"https://build.shibboleth.net/nexus/content/repositories/releases/org/opensaml/opensaml-profile-api/4.3.1/opensaml-profile-api-4.3.1.jar",
"name":"opensaml-profile-api 4.3.1",
"src":"https://build.shibboleth.net/nexus/content/repositories/releases/org/opensaml/opensaml-profile-api/4.3.1/opensaml-profile-api-4.3.1-sources.jar"
},
{
"jar":"https://build.shibboleth.net/nexus/content/repositories/releases/org/opensaml/opensaml-soap-api/4.3.1/opensaml-soap-api-4.3.1.jar",
"name":"opensaml-soap-api 4.3.1",
"src":"https://build.shibboleth.net/nexus/content/repositories/releases/org/opensaml/opensaml-soap-api/4.3.1/opensaml-soap-api-4.3.1-sources.jar"
},
{
"jar":"https://build.shibboleth.net/nexus/content/repositories/releases/org/opensaml/opensaml-soap-impl/4.3.1/opensaml-soap-impl-4.3.1.jar",
"name":"opensaml-soap-impl 4.3.1",
"src":"https://build.shibboleth.net/nexus/content/repositories/releases/org/opensaml/opensaml-soap-impl/4.3.1/opensaml-soap-impl-4.3.1-sources.jar"
},
{
"jar":"https://build.shibboleth.net/nexus/content/repositories/releases/org/opensaml/opensaml-storage-api/4.3.1/opensaml-storage-api-4.3.1.jar",
"name":"opensaml-storage-api 4.3.1",
"src":"https://build.shibboleth.net/nexus/content/repositories/releases/org/opensaml/opensaml-storage-api/4.3.1/opensaml-storage-api-4.3.1-sources.jar"
},
{
"jar":"https://repo.maven.apache.org/maven2/org/apache/velocity/velocity-engine-core/2.3/velocity-engine-core-2.3.jar",
"name":"velocity-engine-core 2.3",
"src":"https://repo.maven.apache.org/maven2/org/apache/velocity/velocity-engine-core/2.3/velocity-engine-core-2.3-sources.jar"
},
{
"jar":"https://repo.maven.apache.org/maven2/org/apache/commons/commons-lang3/3.11/commons-lang3-3.11.jar",
"name":"commons-lang3 3.11",
"src":"https://repo.maven.apache.org/maven2/org/apache/commons/commons-lang3/3.11/commons-lang3-3.11-sources.jar"
},
{
"jar":"https://build.shibboleth.net/nexus/content/repositories/releases/org/opensaml/opensaml-profile-impl/4.3.1/opensaml-profile-impl-4.3.1.jar",
"name":"opensaml-profile-impl 4.3.1",
"src":"https://build.shibboleth.net/nexus/content/repositories/releases/org/opensaml/opensaml-profile-impl/4.3.1/opensaml-profile-impl-4.3.1-sources.jar"
},
{
"jar":"https://repo.maven.apache.org/maven2/ch/qos/logback/logback-core/1.5.18/logback-core-1.5.18.jar",
"name":"logback-core 1.5.18",
@@ -251,11 +96,6 @@
"name":"logback-classic 1.5.18",
"src":"https://repo.maven.apache.org/maven2/ch/qos/logback/logback-classic/1.5.18/logback-classic-1.5.18-sources.jar"
},
{
"jar":"https://repo.maven.apache.org/maven2/org/aspectj/aspectjrt/1.9.24/aspectjrt-1.9.24.jar",
"name":"aspectjrt 1.9.24",
"src":"https://repo.maven.apache.org/maven2/org/aspectj/aspectjrt/1.9.24/aspectjrt-1.9.24-sources.jar"
},
{
"jar":"https://repo.maven.apache.org/maven2/io/opentelemetry/opentelemetry-api/1.51.0/opentelemetry-api-1.51.0.jar",
"name":"opentelemetry-api 1.51.0",

(binary file changed; contents not shown)

jar/core-9.1.2.jar: new binary file (contents not shown)


@@ -1,6 +1,6 @@
{
"_comment": "contains a list of plugin interfaces branch names that this core supports",
"versions": [
"8.3"
"6.2"
]
}


@@ -24,7 +24,7 @@ public class ActiveUsers {
@TestOnly
public static void updateLastActive(Main main, String userId) {
try {
ActiveUsers.updateLastActive(ResourceDistributor.getAppForTesting().toAppIdentifier(),
ActiveUsers.updateLastActive(new AppIdentifier(null, null),
main, userId);
} catch (TenantOrAppNotFoundException e) {
throw new IllegalStateException(e);
@@ -55,6 +55,6 @@ public class ActiveUsers {
@TestOnly
public static int countUsersActiveSince(Main main, long time)
throws StorageQueryException, TenantOrAppNotFoundException {
return countUsersActiveSince(main, ResourceDistributor.getAppForTesting().toAppIdentifier(), time);
return countUsersActiveSince(main, new AppIdentifier(null, null), time);
}
}


@@ -20,11 +20,6 @@ import io.supertokens.cliOptions.CLIOptions;
import io.supertokens.config.Config;
import io.supertokens.config.CoreConfig;
import io.supertokens.cronjobs.Cronjobs;
import io.supertokens.cronjobs.bulkimport.ProcessBulkImportUsers;
import io.supertokens.cronjobs.cleanupOAuthSessionsAndChallenges.CleanupOAuthSessionsAndChallenges;
import io.supertokens.cronjobs.deleteExpiredSAMLData.DeleteExpiredSAMLData;
import io.supertokens.cronjobs.cleanupWebauthnExpiredData.CleanUpWebauthNExpiredDataCron;
import io.supertokens.cronjobs.deadlocklogger.DeadlockLogger;
import io.supertokens.cronjobs.deleteExpiredAccessTokenSigningKeys.DeleteExpiredAccessTokenSigningKeys;
import io.supertokens.cronjobs.deleteExpiredDashboardSessions.DeleteExpiredDashboardSessions;
import io.supertokens.cronjobs.deleteExpiredEmailVerificationTokens.DeleteExpiredEmailVerificationTokens;
@@ -44,7 +39,6 @@ import io.supertokens.pluginInterface.exceptions.DbInitException;
import io.supertokens.pluginInterface.exceptions.InvalidConfigException;
import io.supertokens.pluginInterface.exceptions.StorageQueryException;
import io.supertokens.pluginInterface.multitenancy.TenantIdentifier;
import io.supertokens.saml.SAMLBootstrap;
import io.supertokens.storageLayer.StorageLayer;
import io.supertokens.telemetry.TelemetryProvider;
import io.supertokens.version.Version;
@@ -67,8 +61,6 @@ public class Main {
// this is a special variable that will be set to true by TestingProcessManager
public static boolean isTesting = false;
// this flag is used in ProcessBulkImportUsersCronJobTest to skip the user validation
public static boolean isTesting_skipBulkImportUserValidationInCronJob = false;
// this is a special variable that will be set to true by TestingProcessManager
public static boolean makeConsolePrintSilent = false;
@@ -95,9 +87,6 @@ public class Main {
private boolean waitToEnableFeatureFlag = false;
private final Object waitToEnableFeatureFlagLock = new Object();
//setting to true by default
private final Boolean bulkMigrationCronEnabled = System.getenv("BULK_MIGRATION_CRON_ENABLED") == null || Boolean.parseBoolean(System.getenv("BULK_MIGRATION_CRON_ENABLED"));
private boolean forceInMemoryDB = false;
@@ -124,8 +113,6 @@ public class Main {
CLIOptions.load(this, args);
init();
} catch (Exception e) {
Logging.error(this, TenantIdentifier.BASE_TENANT, "What caused the crash: " + e.getMessage(), true,
e);
ProcessState.getInstance(this).addState(ProcessState.PROCESS_STATE.INIT_FAILURE, e);
throw e;
}
@@ -160,12 +147,9 @@
// Handle kill signal gracefully
handleKillSignalForWhenItHappens();
StorageLayer.loadStorageUCL(CLIOptions.get(this).getInstallationPath() + "plugin/");
// loading configs for core from config.yaml file.
try {
Config.loadBaseConfig(this);
Logging.info(this, TenantIdentifier.BASE_TENANT, "Completed config.yaml loading.", true);
} catch (InvalidConfigException e) {
throw new QuitProgramException(e);
}
@@ -173,11 +157,14 @@
// loading version file
Version.loadVersion(this, CLIOptions.get(this).getInstallationPath() + "version.yaml");
Logging.info(this, TenantIdentifier.BASE_TENANT, "Completed config.yaml loading.", true);
TelemetryProvider.initialize(this);
// loading storage layer
try {
StorageLayer.initPrimary(this, Config.getBaseConfigAsJsonObject(this));
StorageLayer.initPrimary(this, CLIOptions.get(this).getInstallationPath() + "plugin/",
Config.getBaseConfigAsJsonObject(this));
} catch (InvalidConfigException e) {
throw new QuitProgramException(e);
}
@@ -185,9 +172,6 @@
// init file logging
Logging.initFileLogging(this);
// Required for SAML related stuff
SAMLBootstrap.initialize();
// initialise cron job handler
Cronjobs.init(this);
@@ -275,22 +259,6 @@
// starts DeleteExpiredAccessTokenSigningKeys cronjob if the access token signing keys can change
Cronjobs.addCronjob(this, DeleteExpiredAccessTokenSigningKeys.init(this, uniqueUserPoolIdsTenants));
// initializes ProcessBulkImportUsers cronjob to process bulk import users
if(bulkMigrationCronEnabled) {
Cronjobs.addCronjob(this, ProcessBulkImportUsers.init(this, uniqueUserPoolIdsTenants));
}
Cronjobs.addCronjob(this, CleanupOAuthSessionsAndChallenges.init(this, uniqueUserPoolIdsTenants));
Cronjobs.addCronjob(this, CleanUpWebauthNExpiredDataCron.init(this, uniqueUserPoolIdsTenants));
// starts the DeadlockLogger if
if (Config.getBaseConfig(this).isDeadlockLoggerEnabled()) {
DeadlockLogger.getInstance().start();
}
Cronjobs.addCronjob(this, DeleteExpiredSAMLData.init(this, uniqueUserPoolIdsTenants));
// this is to ensure tenantInfos are in sync for the new cron job as well
MultitenancyHelper.getInstance(this).refreshCronjobs();
@@ -301,7 +269,6 @@
Webserver.getInstance(this).start();
// this is a sign to the controlling script that this process has started.
createDotStartedFileForThisProcess();
// NOTE: If the message below is changed, make sure to also change the corresponding check in the CLI program
@@ -380,16 +347,11 @@
}
private void createDotStartedFileForThisProcess() throws IOException {
String startedDir = ".started";
if (isTesting) {
startedDir = ".started" + System.getProperty("org.gradle.test.worker", "");
}
CoreConfig config = Config.getBaseConfig(this);
String fileLocation = CLIOptions.get(this).getTempDirLocation() == null ? CLIOptions.get(this).getInstallationPath() : CLIOptions.get(this).getTempDirLocation();
String fileName = OperatingSystem.getOS() == OperatingSystem.OS.WINDOWS
? fileLocation + startedDir + "\\" + config.getHost(this) + "-"
? CLIOptions.get(this).getInstallationPath() + ".started\\" + config.getHost(this) + "-"
+ config.getPort(this)
: fileLocation + startedDir + "/" + config.getHost(this) + "-"
: CLIOptions.get(this).getInstallationPath() + ".started/" + config.getHost(this) + "-"
+ config.getPort(this);
File dotStarted = new File(fileName);
if (!dotStarted.exists()) {
@@ -432,10 +394,9 @@
@TestOnly
public void killForTestingAndWaitForShutdown() throws InterruptedException {
// Do not kill for now
assertIsTesting();
wakeUpMainThreadToShutdown();
mainThread.join();
assertIsTesting();
wakeUpMainThreadToShutdown();
mainThread.join();
}
// must not throw any error
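
Earlier in this file's diff, createDotStartedFileForThisProcess changes only how the ".started" marker-file path is assembled: the master side honours a temp-dir override and gives each Gradle test worker its own ".started<worker-id>" directory, while the 9.1 side always writes under the installation path. The following is a standalone sketch of the master-side naming, with placeholder values standing in for what CLIOptions and CoreConfig supply in the real code:

import java.io.File;

public class DotStartedPathSketch {
    public static void main(String[] args) {
        // Placeholder values; the real ones come from CLIOptions and CoreConfig.
        String fileLocation = "/usr/lib/supertokens/";
        String host = "localhost";
        int port = 3567;

        // Test runs append the Gradle worker id so parallel workers do not clash, e.g. ".started1".
        String startedDir = ".started" + System.getProperty("org.gradle.test.worker", "");

        // os.name is used here as a stand-in for the OperatingSystem helper in the real code.
        boolean isWindows = System.getProperty("os.name").toLowerCase().contains("win");
        String fileName = isWindows
                ? fileLocation + startedDir + "\\" + host + "-" + port
                : fileLocation + startedDir + "/" + host + "-" + port;
        System.out.println(new File(fileName).getAbsolutePath());
    }
}
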


@@ -104,7 +104,7 @@ public class ProcessState extends ResourceDistributor.SingletonResource {
public static class EventAndException {
public Exception exception;
public JsonObject data;
public PROCESS_STATE state;
PROCESS_STATE state;
public EventAndException(PROCESS_STATE state, Exception e) {
this.state = state;


@@ -35,28 +35,16 @@ public class ResourceDistributor {
private final Map<KeyClass, SingletonResource> resources = new HashMap<>(1);
private final Main main;
private static TenantIdentifier appUsedForTesting = TenantIdentifier.BASE_TENANT;
public ResourceDistributor(Main main) {
this.main = main;
}
@TestOnly
public static void setAppForTesting(TenantIdentifier app) {
appUsedForTesting = app;
}
@TestOnly
public static TenantIdentifier getAppForTesting() {
return appUsedForTesting;
}
public SingletonResource getResource(AppIdentifier appIdentifier, @Nonnull String key)
public synchronized SingletonResource getResource(AppIdentifier appIdentifier, @Nonnull String key)
throws TenantOrAppNotFoundException {
return getResource(appIdentifier.getAsPublicTenantIdentifier(), key);
}
public SingletonResource getResource(TenantIdentifier tenantIdentifier, @Nonnull String key)
public synchronized SingletonResource getResource(TenantIdentifier tenantIdentifier, @Nonnull String key)
throws TenantOrAppNotFoundException {
// first we do exact match
SingletonResource resource = resources.get(new KeyClass(tenantIdentifier, key));
@@ -70,6 +58,14 @@ public class ResourceDistributor {
throw new TenantOrAppNotFoundException(tenantIdentifier);
}
MultitenancyHelper.getInstance(main).refreshTenantsInCoreBasedOnChangesInCoreConfigOrIfTenantListChanged(true);
// we try again..
resource = resources.get(new KeyClass(tenantIdentifier, key));
if (resource != null) {
return resource;
}
// then we see if the user has configured anything to do with connectionUriDomain, and if they have,
// then we must return null cause the user has not specifically added tenantId to it
for (KeyClass currKey : resources.keySet()) {
@@ -93,11 +89,11 @@
}
@TestOnly
public SingletonResource getResource(@Nonnull String key) {
return resources.get(new KeyClass(appUsedForTesting, key));
public synchronized SingletonResource getResource(@Nonnull String key) {
return resources.get(new KeyClass(new TenantIdentifier(null, null, null), key));
}
public SingletonResource setResource(TenantIdentifier tenantIdentifier,
public synchronized SingletonResource setResource(TenantIdentifier tenantIdentifier,
@Nonnull String key,
SingletonResource resource) {
SingletonResource alreadyExists = resources.get(new KeyClass(tenantIdentifier, key));
@@ -108,7 +104,7 @@
return resource;
}
public SingletonResource removeResource(TenantIdentifier tenantIdentifier,
public synchronized SingletonResource removeResource(TenantIdentifier tenantIdentifier,
@Nonnull String key) {
SingletonResource singletonResource = resources.get(new KeyClass(tenantIdentifier, key));
if (singletonResource == null) {
@@ -118,18 +114,18 @@
return singletonResource;
}
public SingletonResource setResource(AppIdentifier appIdentifier,
public synchronized SingletonResource setResource(AppIdentifier appIdentifier,
@Nonnull String key,
SingletonResource resource) {
return setResource(appIdentifier.getAsPublicTenantIdentifier(), key, resource);
}
public SingletonResource removeResource(AppIdentifier appIdentifier,
public synchronized SingletonResource removeResource(AppIdentifier appIdentifier,
@Nonnull String key) {
return removeResource(appIdentifier.getAsPublicTenantIdentifier(), key);
}
public void clearAllResourcesWithResourceKey(String inputKey) {
public synchronized void clearAllResourcesWithResourceKey(String inputKey) {
List<KeyClass> toRemove = new ArrayList<>();
resources.forEach((key, value) -> {
if (key.key.equals(inputKey)) {
@@ -141,7 +137,7 @@
}
}
public Map<KeyClass, SingletonResource> getAllResourcesWithResourceKey(String inputKey) {
public synchronized Map<KeyClass, SingletonResource> getAllResourcesWithResourceKey(String inputKey) {
Map<KeyClass, SingletonResource> result = new HashMap<>();
resources.forEach((key, value) -> {
if (key.key.equals(inputKey)) {
@@ -152,9 +148,9 @@
}
@TestOnly
public SingletonResource setResource(@Nonnull String key,
public synchronized SingletonResource setResource(@Nonnull String key,
SingletonResource resource) {
return setResource(appUsedForTesting, key, resource);
return setResource(new TenantIdentifier(null, null, null), key, resource);
}
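
The ResourceDistributor hunks above amount to a map of singletons keyed by (tenant identifier, resource name): the 9.1 branch marks every accessor synchronized, while the master branch drops the method-level synchronization and routes test-only lookups through a configurable "app used for testing". A simplified, self-contained sketch of that keyed-registry shape, using stand-in types rather than the real KeyClass/SingletonResource, and following the 9.1-style coarse locking:

import java.util.HashMap;
import java.util.Map;

public class KeyedRegistrySketch {
    public interface SingletonResource {}

    // Record gives value-based equals/hashCode, so it works as a map key.
    private record Key(String tenantId, String name) {}

    private final Map<Key, SingletonResource> resources = new HashMap<>(1);

    public synchronized SingletonResource setResource(String tenantId, String name, SingletonResource resource) {
        // The real setResource first looks up an existing registration; this sketch simply stores.
        resources.put(new Key(tenantId, name), resource);
        return resource;
    }

    public synchronized SingletonResource getResource(String tenantId, String name) {
        return resources.get(new Key(tenantId, name));
    }

    public synchronized void clearAllResourcesWithResourceKey(String name) {
        // Drop every tenant's resource registered under this name, as in the real method.
        resources.keySet().removeIf(k -> k.name().equals(name));
    }
}
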
public interface Func<T> {


@@ -1,31 +0,0 @@
/*
* Copyright (c) 2024, VRAI Labs and/or its affiliates. All rights reserved.
*
* This software is licensed under the Apache License, Version 2.0 (the
* "License") as published by the Apache Software Foundation.
*
* You may not use this file except in compliance with the License. You may
* obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package io.supertokens;
import io.supertokens.pluginInterface.Storage;
import io.supertokens.pluginInterface.useridmapping.UserIdMapping;
public class StorageAndUserIdMappingForBulkImport extends StorageAndUserIdMapping {
public String userIdInQuestion;
public StorageAndUserIdMappingForBulkImport(Storage storage,
UserIdMapping userIdMapping, String userIdInQuestion) {
super(storage, userIdMapping);
this.userIdInQuestion = userIdInQuestion;
}
}


@@ -17,19 +17,18 @@
package io.supertokens.authRecipe;
import io.supertokens.Main;
import io.supertokens.ResourceDistributor;
import io.supertokens.authRecipe.exception.*;
import io.supertokens.bulkimport.BulkImportUserUtils;
import io.supertokens.authRecipe.exception.AccountInfoAlreadyAssociatedWithAnotherPrimaryUserIdException;
import io.supertokens.authRecipe.exception.InputUserIdIsNotAPrimaryUserException;
import io.supertokens.authRecipe.exception.RecipeUserIdAlreadyLinkedWithAnotherPrimaryUserIdException;
import io.supertokens.authRecipe.exception.RecipeUserIdAlreadyLinkedWithPrimaryUserIdException;
import io.supertokens.featureflag.EE_FEATURES;
import io.supertokens.featureflag.FeatureFlag;
import io.supertokens.featureflag.exceptions.FeatureNotEnabledException;
import io.supertokens.multitenancy.exception.BadPermissionException;
import io.supertokens.pluginInterface.RECIPE_ID;
import io.supertokens.pluginInterface.Storage;
import io.supertokens.pluginInterface.StorageUtils;
import io.supertokens.pluginInterface.*;
import io.supertokens.pluginInterface.authRecipe.AuthRecipeUserInfo;
import io.supertokens.pluginInterface.authRecipe.LoginMethod;
import io.supertokens.pluginInterface.authRecipe.sqlStorage.AuthRecipeSQLStorage;
import io.supertokens.pluginInterface.bulkimport.BulkImportUser;
import io.supertokens.pluginInterface.bulkimport.exceptions.BulkImportBatchInsertException;
import io.supertokens.pluginInterface.dashboard.DashboardSearchTags;
import io.supertokens.pluginInterface.emailpassword.exceptions.UnknownUserIdException;
import io.supertokens.pluginInterface.exceptions.StorageQueryException;
@@ -43,13 +42,10 @@ import io.supertokens.pluginInterface.useridmapping.UserIdMapping;
import io.supertokens.session.Session;
import io.supertokens.storageLayer.StorageLayer;
import io.supertokens.useridmapping.UserIdType;
import io.supertokens.utils.Utils;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.TestOnly;
import javax.annotation.Nullable;
import java.util.*;
import java.util.stream.Collectors;
/*This files contains functions that are common for all auth recipes*/
@@ -60,7 +56,7 @@ public class AuthRecipe {
@TestOnly
public static boolean unlinkAccounts(Main main, String recipeUserId)
throws StorageQueryException, UnknownUserIdException, InputUserIdIsNotAPrimaryUserException {
return unlinkAccounts(main, ResourceDistributor.getAppForTesting().toAppIdentifier(), StorageLayer.getStorage(main), recipeUserId);
return unlinkAccounts(main, new AppIdentifier(null, null), StorageLayer.getStorage(main), recipeUserId);
}
@@ -125,7 +121,7 @@
@TestOnly
public static AuthRecipeUserInfo getUserById(Main main, String userId)
throws StorageQueryException {
return getUserById(ResourceDistributor.getAppForTesting().toAppIdentifier(), StorageLayer.getStorage(main), userId);
return getUserById(new AppIdentifier(null, null), StorageLayer.getStorage(main), userId);
}
public static AuthRecipeUserInfo getUserById(AppIdentifier appIdentifier, Storage storage, String userId)
@@ -133,18 +129,6 @@
return StorageUtils.getAuthRecipeStorage(storage).getPrimaryUserById(appIdentifier, userId);
}
public static List<AuthRecipeUserInfo> getUsersById(AppIdentifier appIdentifier, Storage storage, List<String> userIds)
throws StorageQueryException {
AuthRecipeSQLStorage authStorage = StorageUtils.getAuthRecipeStorage(storage);
try {
return authStorage.startTransaction(con -> {
return authStorage.getPrimaryUsersByIds_Transaction(appIdentifier, con, userIds);
});
} catch (StorageTransactionLogicException e) {
throw new StorageQueryException(e);
}
}
public static class CreatePrimaryUserResult {
public AuthRecipeUserInfo user;
public boolean wasAlreadyAPrimaryUser;
@@ -155,24 +139,10 @@
}
}
public static class CreatePrimaryUserBulkResult {
public BulkImportUser user;
public BulkImportUser.LoginMethod primaryLoginMethod;
public boolean wasAlreadyAPrimaryUser;
public Exception error;
public CreatePrimaryUserBulkResult(BulkImportUser user, BulkImportUser.LoginMethod primaryLoginMethod,
boolean wasAlreadyAPrimaryUser, Exception error) {
this.user = user;
this.primaryLoginMethod = primaryLoginMethod;
this.wasAlreadyAPrimaryUser = wasAlreadyAPrimaryUser;
this.error = error;
}
}
public static class CanLinkAccountsResult {
public String recipeUserId;
public String primaryUserId;
public boolean alreadyLinked;
public CanLinkAccountsResult(String recipeUserId, String primaryUserId, boolean alreadyLinked) {
@@ -182,29 +152,12 @@
}
}
public static class CanLinkAccountsBulkResult {
public String recipeUserId;
public String primaryUserId;
public Exception error;
public BulkImportUser bulkImportUser;
public boolean alreadyLinked;
public CanLinkAccountsBulkResult(String recipeUserId, String primaryUserId, boolean alreadyLinked, Exception error,
BulkImportUser bulkImportUser) {
this.recipeUserId = recipeUserId;
this.primaryUserId = primaryUserId;
this.alreadyLinked = alreadyLinked;
this.error = error;
this.bulkImportUser = bulkImportUser;
}
}
@TestOnly
public static CanLinkAccountsResult canLinkAccounts(Main main, String recipeUserId, String primaryUserId)
throws StorageQueryException, UnknownUserIdException, InputUserIdIsNotAPrimaryUserException,
RecipeUserIdAlreadyLinkedWithAnotherPrimaryUserIdException,
AccountInfoAlreadyAssociatedWithAnotherPrimaryUserIdException {
return canLinkAccounts(ResourceDistributor.getAppForTesting().toAppIdentifier(), StorageLayer.getStorage(main), recipeUserId,
return canLinkAccounts(new AppIdentifier(null, null), StorageLayer.getStorage(main), recipeUserId,
primaryUserId);
}
@@ -295,65 +248,10 @@
return new CanLinkAccountsResult(recipeUser.getSupertokensUserId(), primaryUser.getSupertokensUserId(), false);
}
private static List<CanLinkAccountsBulkResult> canLinkMultipleAccountsHelperForBulkImport(TransactionConnection con,
AppIdentifier appIdentifier,
Storage storage,
List<BulkImportUser> users,
List<AuthRecipeUserInfo> allUsersWithExtraData)
throws StorageQueryException {
AuthRecipeSQLStorage authRecipeStorage = StorageUtils.getAuthRecipeStorage(storage);
List<CanLinkAccountsBulkResult> results = new ArrayList<>();
Map<String, String> recipeUserIdByPrimaryUserId = BulkImportUserUtils.collectRecipeIdsToPrimaryIds(users);
if(recipeUserIdByPrimaryUserId != null && !recipeUserIdByPrimaryUserId.isEmpty()) {
for(Map.Entry<String, String> recipeUserByPrimaryUser : recipeUserIdByPrimaryUserId.entrySet()) {
String recipeUserId = recipeUserByPrimaryUser.getKey();
String primaryUserId = recipeUserByPrimaryUser.getValue();
BulkImportUser.LoginMethod primaryUser = BulkImportUserUtils.findLoginMethodByRecipeUserId(users, primaryUserId);
BulkImportUser.LoginMethod recipeUser = BulkImportUserUtils.findLoginMethodByRecipeUserId(users, recipeUserId);
if(primaryUser == null || recipeUser == null) {
results.add(new CanLinkAccountsBulkResult(recipeUserId, primaryUserId, false, new UnknownUserIdException(), null));
} else if(recipeUser.isPrimary) {
if (recipeUser.superTokensUserId.equals(primaryUser.superTokensUserId)) {
results.add(new CanLinkAccountsBulkResult(recipeUserId, primaryUserId, true, null, null));
} else {
results.add(new CanLinkAccountsBulkResult(recipeUserId, primaryUserId, false,
new BulkImportRecipeUserIdAlreadyLinkedWithAnotherPrimaryUserIdException(recipeUserId), null));
}
} else {
Set<String> tenantIds = new HashSet<>();
tenantIds.addAll(recipeUser.tenantIds);
tenantIds.addAll(primaryUser.tenantIds);
try {
bulkCheckIfLoginMethodCanBeLinkedOnTenant(con, appIdentifier, authRecipeStorage, tenantIds,
recipeUser, primaryUserId, allUsersWithExtraData);
BulkImportUser currentPrimaryUser = BulkImportUserUtils.findUserByPrimaryId(users, primaryUserId);
for (BulkImportUser.LoginMethod currLoginMethod : currentPrimaryUser.loginMethods) {
bulkCheckIfLoginMethodCanBeLinkedOnTenant(con, appIdentifier, authRecipeStorage, tenantIds,
currLoginMethod, primaryUserId, allUsersWithExtraData);
}
results.add(new CanLinkAccountsBulkResult(recipeUserId, primaryUserId, false, null, currentPrimaryUser));
} catch (AccountInfoAlreadyAssociatedWithAnotherPrimaryUserIdException exception) {
results.add(new CanLinkAccountsBulkResult(recipeUserId, primaryUserId, false, exception, null));
}
}
}
}
return results;
}
private static void checkIfLoginMethodCanBeLinkedOnTenant(TransactionConnection con, AppIdentifier appIdentifier,
AuthRecipeSQLStorage authRecipeStorage,
Set<String> tenantIds, LoginMethod currLoginMethod,
AuthRecipeUserInfo primaryUser)
AuthRecipeSQLStorage authRecipeStorage,
Set<String> tenantIds, LoginMethod currLoginMethod,
AuthRecipeUserInfo primaryUser)
throws StorageQueryException, AccountInfoAlreadyAssociatedWithAnotherPrimaryUserIdException {
// we loop through the union of both the user's tenantIds and check that the criteria for
// linking accounts is not violated in any of them. We do a union and not an intersection
@@ -371,8 +269,9 @@
// tenants of the same storage - therefore, the storage will be the same.
if (currLoginMethod.email != null) {
AuthRecipeUserInfo[] usersWithSameEmail =
authRecipeStorage.listPrimaryUsersByEmail_Transaction(appIdentifier, con, currLoginMethod.email);
AuthRecipeUserInfo[] usersWithSameEmail = authRecipeStorage
.listPrimaryUsersByEmail_Transaction(appIdentifier, con,
currLoginMethod.email);
for (AuthRecipeUserInfo user : usersWithSameEmail) {
if (!user.tenantIds.contains(tenantId)) {
continue;
@@ -386,8 +285,8 @@
}
if (currLoginMethod.phoneNumber != null) {
AuthRecipeUserInfo[] usersWithSamePhoneNumber =
authRecipeStorage.listPrimaryUsersByPhoneNumber_Transaction(appIdentifier, con,
AuthRecipeUserInfo[] usersWithSamePhoneNumber = authRecipeStorage
.listPrimaryUsersByPhoneNumber_Transaction(appIdentifier, con,
currLoginMethod.phoneNumber);
for (AuthRecipeUserInfo user : usersWithSamePhoneNumber) {
if (!user.tenantIds.contains(tenantId)) {
@@ -416,88 +315,9 @@
userWithSameThirdParty.getSupertokensUserId(),
"This user's third party login is already associated with another" +
" user ID");
}
}
}
}
}
private static void bulkCheckIfLoginMethodCanBeLinkedOnTenant(TransactionConnection con, AppIdentifier appIdentifier,
AuthRecipeSQLStorage authRecipeStorage,
Set<String> tenantIds, BulkImportUser.LoginMethod currLoginMethod,
String primaryUserId,
List<AuthRecipeUserInfo> allUsersWithExtraData)
throws StorageQueryException, AccountInfoAlreadyAssociatedWithAnotherPrimaryUserIdException {
// we loop through the union of both the user's tenantIds and check that the criteria for
// linking accounts is not violated in any of them. We do a union and not an intersection
// cause if we did an intersection, and that yields that account linking is allowed, it could
// result in one tenant having two primary users with the same email. For example:
// - tenant1 has u1 with email e, and u2 with email e, primary user (one is ep, one is tp)
// - tenant2 has u3 with email e, primary user (passwordless)
// now if we want to link u3 with u1, we have to deny it cause if we don't, it will result in
// u1 and u2 to be primary users with the same email in the same tenant. If we do an
// intersection, we will get an empty set, but if we do a union, we will get both the tenants and
// do the checks in both.
for (String tenantId : tenantIds) {
// we do not bother with getting the storage for each tenant here because
// we get the tenants from the user itself, and the user can only be shared across
// tenants of the same storage - therefore, the storage will be the same.
if (currLoginMethod.email != null) {
List<AuthRecipeUserInfo> usersWithSameEmail =
allUsersWithExtraData.stream().filter(authRecipeUserInfo -> Arrays.stream(
authRecipeUserInfo.loginMethods).map(loginMethod -> loginMethod.email).collect(
Collectors.toList()).contains(currLoginMethod.email)).collect(Collectors.toList());
for (AuthRecipeUserInfo user : usersWithSameEmail) {
if (!user.tenantIds.contains(tenantId)) {
continue;
}
if (user.isPrimaryUser && !user.getSupertokensUserId().equals(primaryUserId)) {
throw new AccountInfoAlreadyAssociatedWithAnotherPrimaryUserIdException(
user.getSupertokensUserId(),
"This user's email is already associated with another user ID");
}
}
}
if (currLoginMethod.phoneNumber != null) {
List<AuthRecipeUserInfo> usersWithSamePhoneNumber =
allUsersWithExtraData.stream().filter(authRecipeUserInfo -> Arrays.stream(
authRecipeUserInfo.loginMethods).map(loginMethod -> loginMethod.phoneNumber).collect(
Collectors.toList()).contains(currLoginMethod.phoneNumber)).collect(Collectors.toList());
for (AuthRecipeUserInfo user : usersWithSamePhoneNumber) {
if (!user.tenantIds.contains(tenantId)) {
continue;
}
if (user.isPrimaryUser && !user.getSupertokensUserId().equals(primaryUserId)) {
throw new AccountInfoAlreadyAssociatedWithAnotherPrimaryUserIdException(
user.getSupertokensUserId(),
"This user's phone number is already associated with another user" +
" ID");
}
}
}
if (currLoginMethod.thirdPartyId != null) {
List<AuthRecipeUserInfo> extraUsersWithThirdParty = allUsersWithExtraData.stream().filter(authRecipeUserInfo -> Arrays.stream(
authRecipeUserInfo.loginMethods).anyMatch(loginMethod1 -> loginMethod1.thirdParty != null)).collect(Collectors.toList());
for(AuthRecipeUserInfo extraUser : extraUsersWithThirdParty) {
if(extraUser.isPrimaryUser && extraUser.tenantIds.contains(tenantId)
&& !extraUser.getSupertokensUserId().equals(primaryUserId)) {
for (LoginMethod loginMethodExtra : extraUser.loginMethods) {
if (loginMethodExtra.thirdParty != null &&
loginMethodExtra.thirdParty.userId.equals(currLoginMethod.thirdPartyUserId)
&& loginMethodExtra.thirdParty.id.equals(currLoginMethod.thirdPartyId)) {
throw new AccountInfoAlreadyAssociatedWithAnotherPrimaryUserIdException(
extraUser.getSupertokensUserId(),
"This user's third party login is already associated with another" +
" user ID");
}
}
}
}
}
}
}
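
The comment inside bulkCheckIfLoginMethodCanBeLinkedOnTenant spells out why the check runs over the union of the two users' tenant sets rather than their intersection. Below is a toy, self-contained model of exactly that scenario; the record type and fields are illustrative stand-ins, not the real core classes:

import java.util.HashSet;
import java.util.List;
import java.util.Set;

public class TenantUnionLinkCheckSketch {
    record User(String id, String email, Set<String> tenants, boolean primary) {}

    // Returns false if linking recipeUser into primaryUser would leave some tenant with
    // two distinct primary users sharing the same email.
    static boolean canLink(User recipeUser, User primaryUser, List<User> allUsers) {
        Set<String> tenantUnion = new HashSet<>(recipeUser.tenants());
        tenantUnion.addAll(primaryUser.tenants());
        for (String tenant : tenantUnion) {
            for (User other : allUsers) {
                if (other.primary()
                        && other.email().equals(recipeUser.email())
                        && !other.id().equals(primaryUser.id())
                        && other.tenants().contains(tenant)) {
                    return false;
                }
            }
        }
        return true;
    }

    public static void main(String[] args) {
        // The scenario from the comment: tenant1 has u1 and u2 (both primary, same email e),
        // tenant2 has u3 (primary, same email e).
        User u1 = new User("u1", "e", Set.of("tenant1"), true);
        User u2 = new User("u2", "e", Set.of("tenant1"), true);
        User u3 = new User("u3", "e", Set.of("tenant2"), true);
        // Linking u3 into u1 must be denied: the union {tenant1, tenant2} surfaces u2, another
        // primary user with email e on tenant1. An intersection (empty here) would miss it.
        System.out.println(canLink(u3, u1, List.of(u1, u2, u3))); // prints false
    }
}
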
@@ -509,7 +329,7 @@
FeatureNotEnabledException, InputUserIdIsNotAPrimaryUserException,
RecipeUserIdAlreadyLinkedWithAnotherPrimaryUserIdException {
try {
return linkAccounts(main, ResourceDistributor.getAppForTesting().toAppIdentifier(),
return linkAccounts(main, new AppIdentifier(null, null),
StorageLayer.getStorage(main), recipeUserId, primaryUserId);
} catch (TenantOrAppNotFoundException e) {
throw new RuntimeException(e);
@@ -523,7 +343,8 @@
RecipeUserIdAlreadyLinkedWithAnotherPrimaryUserIdException, InputUserIdIsNotAPrimaryUserException,
UnknownUserIdException, TenantOrAppNotFoundException, FeatureNotEnabledException {
if (!Utils.isAccountLinkingEnabled(main, appIdentifier)) {
if (Arrays.stream(FeatureFlag.getInstance(main, appIdentifier).getEnabledFeatures())
.noneMatch(t -> t == EE_FEATURES.ACCOUNT_LINKING || t == EE_FEATURES.MFA)) {
throw new FeatureNotEnabledException(
"Account linking feature is not enabled for this app. Please contact support to enable it.");
}
@@ -580,48 +401,6 @@
}
}
public static void linkMultipleAccountsForBulkImport(Main main, AppIdentifier appIdentifier,
Storage storage,
List<BulkImportUser> users,
List<AuthRecipeUserInfo> usersWithSameExtraData)
throws StorageQueryException, TenantOrAppNotFoundException, FeatureNotEnabledException {
if (!Utils.isAccountLinkingEnabled(main, appIdentifier)) {
throw new FeatureNotEnabledException(
"Account linking feature is not enabled for this app. Please contact support to enable it.");
}
AuthRecipeSQLStorage authRecipeStorage = StorageUtils.getAuthRecipeStorage(storage);
Map<String, Exception> errorByUserId = new HashMap<>();
try {
authRecipeStorage.startTransaction(con -> {
List<CanLinkAccountsBulkResult> canLinkAccounts = canLinkMultipleAccountsHelperForBulkImport(con, appIdentifier,
authRecipeStorage, users, usersWithSameExtraData);
Map<String, String> recipeUserByPrimaryUserNeedsLinking = new HashMap<>();
if(!canLinkAccounts.isEmpty()){
for(CanLinkAccountsBulkResult canLinkAccountsBulkResult : canLinkAccounts) {
if(!canLinkAccountsBulkResult.alreadyLinked && canLinkAccountsBulkResult.error != null) {
errorByUserId.put(canLinkAccountsBulkResult.recipeUserId, canLinkAccountsBulkResult.error);
} else {
recipeUserByPrimaryUserNeedsLinking.put(canLinkAccountsBulkResult.recipeUserId, canLinkAccountsBulkResult.primaryUserId);
}
}
// link the remaining
authRecipeStorage.linkMultipleAccounts_Transaction(appIdentifier, con, recipeUserByPrimaryUserNeedsLinking);
authRecipeStorage.commitTransaction(con);
}
if(!errorByUserId.isEmpty()) {
throw new StorageQueryException(new BulkImportBatchInsertException("link accounts errors", errorByUserId));
}
return null;
});
} catch (StorageTransactionLogicException e) {
throw new StorageQueryException(e);
}
}
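
linkMultipleAccountsForBulkImport above uses a batch pattern that recurs throughout these bulk-import helpers: do all the per-user work inside one transaction, collect failures into a map keyed by user id, and only afterwards fail the whole batch with a single aggregate exception (BulkImportBatchInsertException in the real code). A stripped-down, self-contained sketch of that pattern with illustrative names:

import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.Map;

public class BatchErrorSketch {
    static class BatchInsertException extends Exception {
        final Map<String, Exception> errorsById;

        BatchInsertException(String message, Map<String, Exception> errorsById) {
            super(message);
            this.errorsById = errorsById;
        }
    }

    // Runs every task, remembers which ids failed, and reports them all at once.
    static void processAll(Map<String, Runnable> workById) throws BatchInsertException {
        Map<String, Exception> errorsById = new HashMap<>();
        for (Map.Entry<String, Runnable> entry : workById.entrySet()) {
            try {
                entry.getValue().run();
            } catch (RuntimeException e) {
                errorsById.put(entry.getKey(), e); // keep going; surface everything together
            }
        }
        if (!errorsById.isEmpty()) {
            throw new BatchInsertException("bulk import errors", errorsById);
        }
    }

    public static void main(String[] args) {
        Map<String, Runnable> work = new LinkedHashMap<>();
        work.put("user-1", () -> {});
        work.put("user-2", () -> { throw new IllegalStateException("email already linked"); });
        try {
            processAll(work);
        } catch (BatchInsertException e) {
            System.out.println(e.getMessage() + ": " + e.errorsById.keySet()); // bulk import errors: [user-2]
        }
    }
}
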
public static class LinkAccountsResult {
public final AuthRecipeUserInfo user;
public final boolean wasAlreadyLinked;
@@ -632,24 +411,12 @@
}
}
public static class LinkAccountsBulkResult {
public final BulkImportUser user;
public final boolean wasAlreadyLinked;
public final Exception error;
public LinkAccountsBulkResult(BulkImportUser user, boolean wasAlreadyLinked, Exception error) {
this.user = user;
this.wasAlreadyLinked = wasAlreadyLinked;
this.error = error;
}
}
@TestOnly
public static CreatePrimaryUserResult canCreatePrimaryUser(Main main,
String recipeUserId)
throws StorageQueryException, AccountInfoAlreadyAssociatedWithAnotherPrimaryUserIdException,
RecipeUserIdAlreadyLinkedWithPrimaryUserIdException, UnknownUserIdException {
return canCreatePrimaryUser(ResourceDistributor.getAppForTesting().toAppIdentifier(), StorageLayer.getStorage(main), recipeUserId);
return canCreatePrimaryUser(new AppIdentifier(null, null), StorageLayer.getStorage(main), recipeUserId);
}
public static CreatePrimaryUserResult canCreatePrimaryUser(AppIdentifier appIdentifier,
@@ -761,159 +528,9 @@
}
}
return new CreatePrimaryUserResult(targetUser, false);
}
private static CreatePrimaryUsersResultHolder canCreatePrimaryUsersHelperForBulkImport(TransactionConnection con,
AppIdentifier appIdentifier,
Storage storage,
List<BulkImportUser> bulkImportUsers)
throws StorageQueryException, UnknownUserIdException{
AuthRecipeSQLStorage authRecipeStorage = StorageUtils.getAuthRecipeStorage(storage);
if (bulkImportUsers == null || bulkImportUsers.isEmpty()) {
throw new UnknownUserIdException();
}
DistinctAuthIdentifiers mailPhoneThirdParty = getDistinctAuthIdentifiers(bulkImportUsers);
List<CreatePrimaryUserBulkResult> results = new ArrayList<>();
List<AuthRecipeUserInfo> allUsersWithProvidedExtraData =
List.of(authRecipeStorage.
listPrimaryUsersByMultipleEmailsOrPhoneNumbersOrThirdparty_Transaction(appIdentifier, con,
new ArrayList<>(mailPhoneThirdParty.allEmails), new ArrayList<>(mailPhoneThirdParty.allPhoneNumber),
mailPhoneThirdParty.allThirdParty)); // this is multiple - not so cheap DB query, but we need to do it
for (BulkImportUser targetUser : bulkImportUsers) {
BulkImportUser.LoginMethod primaryLoginMethod = BulkImportUserUtils.getPrimaryLoginMethod(targetUser);
for (BulkImportUser.LoginMethod loginMethod : targetUser.loginMethods) {
// note here: account takeover risk checks are done in the sdk. The situation in which someone registers
// for example with a thirparty which also verifies email address and later someone else tries to register
// with the same email address but with emailpassword is not handled here. This is because the sdk
// will handle this. In the bulk import we have no means to check this.
boolean errorFound = false;
for (String tenantId : loginMethod.tenantIds) {
if (loginMethod.email != null) {
List<AuthRecipeUserInfo> usersWithSameEmail = allUsersWithProvidedExtraData.stream()
.filter(authRecipeUserInfo -> Arrays.stream(
authRecipeUserInfo.loginMethods).map(loginMethod1 -> loginMethod1.email)
.collect(Collectors.toList()).contains(loginMethod.email)).collect(
Collectors.toList());
for (AuthRecipeUserInfo user : usersWithSameEmail) {
if (!user.tenantIds.contains(tenantId)) {
continue;
}
if (user.isPrimaryUser) {
results.add(new CreatePrimaryUserBulkResult(targetUser, primaryLoginMethod, false,
new AccountInfoAlreadyAssociatedWithAnotherPrimaryUserIdException(
user.getSupertokensUserId(),
"This user's email is already associated with another user ID")));
errorFound = true;
break;
}
}
}
if (loginMethod.phoneNumber != null) {
List<AuthRecipeUserInfo> usersWithSamePhoneNumber = allUsersWithProvidedExtraData.stream()
.filter(authRecipeUserInfo -> Arrays.stream(
authRecipeUserInfo.loginMethods).map(loginMethod1 -> loginMethod1.phoneNumber)
.collect(Collectors.toList()).contains(loginMethod.phoneNumber)).collect(
Collectors.toList());
for (AuthRecipeUserInfo user : usersWithSamePhoneNumber) {
if (!user.tenantIds.contains(tenantId)) {
continue;
}
if (user.isPrimaryUser) {
results.add(new CreatePrimaryUserBulkResult(targetUser, primaryLoginMethod, false,
new AccountInfoAlreadyAssociatedWithAnotherPrimaryUserIdException(
user.getSupertokensUserId(),
"This user's phone number is already associated with another user" +
" ID")));
errorFound = true;
break;
}
}
}
if (loginMethod.thirdPartyId != null && loginMethod.thirdPartyUserId != null) {
List<AuthRecipeUserInfo> extraUsersWithThirdParty = allUsersWithProvidedExtraData.stream()
.filter(authRecipeUserInfo -> Arrays.stream(
authRecipeUserInfo.loginMethods)
.anyMatch(loginMethod1 -> loginMethod1.thirdParty != null))
.collect(Collectors.toList());
for (AuthRecipeUserInfo extraUser : extraUsersWithThirdParty) {
if (extraUser.isPrimaryUser && extraUser.tenantIds.contains(tenantId)) {
for (LoginMethod loginMethodExtra : extraUser.loginMethods) {
if (loginMethodExtra.thirdParty != null &&
loginMethodExtra.thirdParty.userId.equals(loginMethod.thirdPartyUserId)
&& loginMethodExtra.thirdParty.id.equals(loginMethod.thirdPartyId)) {
results.add(
new CreatePrimaryUserBulkResult(targetUser, primaryLoginMethod, false,
new AccountInfoAlreadyAssociatedWithAnotherPrimaryUserIdException(
extraUser.getSupertokensUserId(),
"This user's third party login is already associated with another" +
" user ID")));
errorFound = true;
break;
}
}
}
}
}
if (!errorFound) {
results.add(new CreatePrimaryUserBulkResult(targetUser, primaryLoginMethod, false, null));
}
}
}
}
CreatePrimaryUsersResultHolder resultHolder = new CreatePrimaryUsersResultHolder();
resultHolder.createPrimaryUserBulkResults = results;
resultHolder.usersWithSameExtraData = allUsersWithProvidedExtraData;
return resultHolder;
}
@NotNull
private static DistinctAuthIdentifiers getDistinctAuthIdentifiers(List<BulkImportUser> bulkImportUsers) {
Set<String> allEmails = new HashSet<>();
Set<String> allPhoneNumber = new HashSet<>();
Map<String, String> allThirdParty = new HashMap<>();
for (BulkImportUser user : bulkImportUsers) {
for (BulkImportUser.LoginMethod loginMethod : user.loginMethods) {
if (loginMethod.email != null) {
allEmails.add(loginMethod.email);
}
if (loginMethod.phoneNumber != null) {
allPhoneNumber.add(loginMethod.phoneNumber);
}
if (loginMethod.thirdPartyId != null && loginMethod.thirdPartyUserId != null) {
allThirdParty.put(loginMethod.thirdPartyUserId, loginMethod.thirdPartyId);
}
}
}
DistinctAuthIdentifiers mailPhoneThirdparty = new DistinctAuthIdentifiers(allEmails, allPhoneNumber, allThirdParty);
return mailPhoneThirdparty;
}
private static class DistinctAuthIdentifiers {
public final Set<String> allEmails;
public final Set<String> allPhoneNumber;
public final Map<String, String> allThirdParty;
public DistinctAuthIdentifiers(Set<String> allEmails, Set<String> allPhoneNumber, Map<String, String> allThirdParty) {
this.allEmails = allEmails;
this.allPhoneNumber = allPhoneNumber;
this.allThirdParty = allThirdParty;
}
}
@TestOnly
public static CreatePrimaryUserResult createPrimaryUser(Main main,
String recipeUserId)
@@ -921,7 +538,7 @@
RecipeUserIdAlreadyLinkedWithPrimaryUserIdException, UnknownUserIdException,
FeatureNotEnabledException {
try {
return createPrimaryUser(main, ResourceDistributor.getAppForTesting().toAppIdentifier(), StorageLayer.getStorage(main), recipeUserId);
return createPrimaryUser(main, new AppIdentifier(null, null), StorageLayer.getStorage(main), recipeUserId);
} catch (TenantOrAppNotFoundException e) {
throw new RuntimeException(e);
}
@@ -935,7 +552,8 @@
RecipeUserIdAlreadyLinkedWithPrimaryUserIdException, UnknownUserIdException, TenantOrAppNotFoundException,
FeatureNotEnabledException {
if (!Utils.isAccountLinkingEnabled(main, appIdentifier)) {
if (Arrays.stream(FeatureFlag.getInstance(main, appIdentifier).getEnabledFeatures())
.noneMatch(t -> t == EE_FEATURES.ACCOUNT_LINKING || t == EE_FEATURES.MFA)) {
throw new FeatureNotEnabledException(
"Account linking feature is not enabled for this app. Please contact support to enable it.");
}
@@ -945,7 +563,7 @@
return authRecipeStorage.startTransaction(con -> {
try {
CreatePrimaryUserResult result = canCreatePrimaryUserHelper(con, appIdentifier, authRecipeStorage,
CreatePrimaryUserResult result = canCreatePrimaryUserHelper(con, appIdentifier, authRecipeStorage,
recipeUserId);
if (result.wasAlreadyAPrimaryUser) {
return result;
@@ -975,182 +593,75 @@
}
}
//helper class to return together the results of primary user creation and the users with the same extradata (email, phone, etc)
public static class CreatePrimaryUsersResultHolder {
public List<CreatePrimaryUserBulkResult> createPrimaryUserBulkResults;
public List<AuthRecipeUserInfo> usersWithSameExtraData;
}
public static CreatePrimaryUsersResultHolder createPrimaryUsersForBulkImport(Main main,
AppIdentifier appIdentifier,
Storage storage,
List<BulkImportUser> bulkImportUsers)
throws StorageQueryException, TenantOrAppNotFoundException,
FeatureNotEnabledException {
if (!Utils.isAccountLinkingEnabled(main, appIdentifier)) {
throw new FeatureNotEnabledException(
"Account linking feature is not enabled for this app. Please contact support to enable it.");
}
AuthRecipeSQLStorage authRecipeStorage = StorageUtils.getAuthRecipeStorage(storage);
Map<String, Exception> errorsByUserId = new HashMap<>();
try {
return authRecipeStorage.startTransaction(con -> {
try {
CreatePrimaryUsersResultHolder resultHolder = canCreatePrimaryUsersHelperForBulkImport(con, appIdentifier, authRecipeStorage,
bulkImportUsers);
List<CreatePrimaryUserBulkResult> results = resultHolder.createPrimaryUserBulkResults;
List<CreatePrimaryUserBulkResult> canMakePrimaryUsers = new ArrayList<>();
for(CreatePrimaryUserBulkResult result : results) {
if (result.wasAlreadyAPrimaryUser) {
continue;
}
if(result.error != null) {
errorsByUserId.put(result.user.id, result.error);
continue;
}
canMakePrimaryUsers.add(result);
}
authRecipeStorage.makePrimaryUsers_Transaction(appIdentifier, con,
canMakePrimaryUsers.stream().map(canMakePrimaryUser -> canMakePrimaryUser.user.id).collect(
Collectors.toList()));
authRecipeStorage.commitTransaction(con);
for(CreatePrimaryUserBulkResult result : results) {
if (result.wasAlreadyAPrimaryUser) {
continue;
}
if(result.error != null) {
errorsByUserId.put(result.user.id, result.error);
continue;
}
result.primaryLoginMethod.isPrimary = true;
result.user.primaryUserId = result.primaryLoginMethod.superTokensUserId;
}
if(!errorsByUserId.isEmpty()) {
throw new StorageTransactionLogicException(new BulkImportBatchInsertException("create primary users errors", errorsByUserId));
}
return resultHolder;
} catch (UnknownUserIdException e) {
throw new StorageTransactionLogicException(e);
}
});
} catch (StorageTransactionLogicException e) {
throw new StorageQueryException(e.actualException);
}
}
public static AuthRecipeUserInfo[] getUsersByAccountInfo(TenantIdentifier tenantIdentifier,
Storage storage,
boolean doUnionOfAccountInfo, String email,
String phoneNumber, String thirdPartyId,
String thirdPartyUserId,
String webauthnCredentialId)
throws StorageQueryException {
Set<AuthRecipeUserInfo> result = loadAuthRecipeUserInfosByVariousIds(
tenantIdentifier, storage, email, phoneNumber, thirdPartyId, thirdPartyUserId, webauthnCredentialId);
if (doUnionOfAccountInfo) {
return mergeAuthRecipeUserInfosResultWithORMatch(result); // matches any of the provided: email, thirdparty, phone number, webauthnCredential
} else {
return mergeAuthRecipeUserInfosResultWithANDMatch(email, phoneNumber, thirdPartyId, thirdPartyUserId, webauthnCredentialId,
result); // matches all the provided: email, thirdparty, phone number, webauthnCredential
}
}
private static AuthRecipeUserInfo[] mergeAuthRecipeUserInfosResultWithANDMatch(String email, String phoneNumber,
String thirdPartyId, String thirdPartyUserId,
String webauthnCredentialId,
Set<AuthRecipeUserInfo> result) {
List<AuthRecipeUserInfo> finalList = new ArrayList<>();
for (AuthRecipeUserInfo user : result) {
boolean emailMatch = email == null;
boolean phoneNumberMatch = phoneNumber == null;
boolean thirdPartyMatch = thirdPartyId == null;
boolean webauthnCredentialIdMatch = webauthnCredentialId == null;
for (LoginMethod lM : user.loginMethods) {
if (email != null && email.equals(lM.email)) {
emailMatch = true;
}
if (phoneNumber != null && phoneNumber.equals(lM.phoneNumber)) {
phoneNumberMatch = true;
}
if (thirdPartyId != null &&
(new LoginMethod.ThirdParty(thirdPartyId, thirdPartyUserId)).equals(lM.thirdParty)) {
thirdPartyMatch = true;
}
if(webauthnCredentialId != null
&& lM.webauthN != null
&& lM.webauthN.credentialIds.contains(webauthnCredentialId)){
webauthnCredentialIdMatch = true;
}
}
if (emailMatch && phoneNumberMatch && thirdPartyMatch && webauthnCredentialIdMatch) {
finalList.add(user);
}
}
finalList.sort((o1, o2) -> {
if (o1.timeJoined < o2.timeJoined) {
return -1;
} else if (o1.timeJoined > o2.timeJoined) {
return 1;
}
return 0;
});
return finalList.toArray(new AuthRecipeUserInfo[0]);
}
private static AuthRecipeUserInfo[] mergeAuthRecipeUserInfosResultWithORMatch(Set<AuthRecipeUserInfo> result) {
AuthRecipeUserInfo[] finalResult = result.toArray(new AuthRecipeUserInfo[0]);
return Arrays.stream(finalResult).sorted((o1, o2) -> {
if (o1.timeJoined < o2.timeJoined) {
return -1;
} else if (o1.timeJoined > o2.timeJoined) {
return 1;
}
return 0;
}).toArray(AuthRecipeUserInfo[]::new);
}
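
On the master side above, getUsersByAccountInfo dispatches on doUnionOfAccountInfo: the OR helper returns every user matching any of the supplied identifiers, while the AND helper keeps only users matching all identifiers that were actually provided, both sorted by timeJoined ascending. Here is a toy, runnable illustration of those two semantics; the User record and its fields are stand-ins for AuthRecipeUserInfo, not the real type:

import java.util.Comparator;
import java.util.List;
import java.util.stream.Collectors;

public class AccountInfoMatchSketch {
    record User(String email, String phone, long timeJoined) {}

    static List<User> match(List<User> users, String email, String phone, boolean union) {
        return users.stream()
                .filter(u -> {
                    boolean emailMatch = email != null && email.equals(u.email());
                    boolean phoneMatch = phone != null && phone.equals(u.phone());
                    if (union) {
                        return emailMatch || phoneMatch;            // any provided identifier matches
                    }
                    // AND semantics: every identifier that was provided must match;
                    // identifiers that were not provided are treated as matching.
                    return (email == null || emailMatch) && (phone == null || phoneMatch);
                })
                .sorted(Comparator.comparingLong(User::timeJoined)) // oldest first, as in the diff
                .collect(Collectors.toList());
    }

    public static void main(String[] args) {
        List<User> users = List.of(
                new User("a@example.com", "+111", 10),
                new User("a@example.com", "+222", 20),
                new User("b@example.com", "+111", 30));
        System.out.println(match(users, "a@example.com", "+111", true));  // union: all three users
        System.out.println(match(users, "a@example.com", "+111", false)); // both must match: only the first
    }
}
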
@NotNull
private static Set<AuthRecipeUserInfo> loadAuthRecipeUserInfosByVariousIds(TenantIdentifier tenantIdentifier, Storage storage,
String email, String phoneNumber, String thirdPartyId,
String thirdPartyUserId, String webauthnCredentialId)
String thirdPartyUserId)
throws StorageQueryException {
Set<AuthRecipeUserInfo> result = new HashSet<>();
AuthRecipeSQLStorage authRecipeStorage = StorageUtils.getAuthRecipeStorage(storage);
if (email != null) {
AuthRecipeUserInfo[] users = authRecipeStorage
AuthRecipeUserInfo[] users = StorageUtils.getAuthRecipeStorage(storage)
.listPrimaryUsersByEmail(tenantIdentifier, email);
result.addAll(List.of(users));
}
if (phoneNumber != null) {
AuthRecipeUserInfo[] users = authRecipeStorage
AuthRecipeUserInfo[] users = StorageUtils.getAuthRecipeStorage(storage)
.listPrimaryUsersByPhoneNumber(tenantIdentifier, phoneNumber);
result.addAll(List.of(users));
}
if (thirdPartyId != null && thirdPartyUserId != null) {
AuthRecipeUserInfo user = authRecipeStorage
AuthRecipeUserInfo user = StorageUtils.getAuthRecipeStorage(storage)
.getPrimaryUserByThirdPartyInfo(tenantIdentifier, thirdPartyId, thirdPartyUserId);
if (user != null) {
result.add(user);
}
}
if (webauthnCredentialId != null) {
AuthRecipeUserInfo user = authRecipeStorage
.getPrimaryUserByWebauthNCredentialId(tenantIdentifier, webauthnCredentialId);
if (user != null) {
result.add(user);
}
}
return result;
}
public static long getUsersCountForTenant(TenantIdentifier tenantIdentifier,
@ -1182,7 +693,7 @@ public class AuthRecipe {
RECIPE_ID[] includeRecipeIds) throws StorageQueryException {
try {
Storage storage = StorageLayer.getStorage(main);
return getUsersCountForTenant(ResourceDistributor.getAppForTesting(), storage, includeRecipeIds);
} catch (TenantOrAppNotFoundException | BadPermissionException e) {
throw new IllegalStateException(e);
}
@ -1232,7 +743,7 @@ public class AuthRecipe {
throws StorageQueryException, UserPaginationToken.InvalidTokenException {
try {
Storage storage = StorageLayer.getStorage(main);
return getUsers(ResourceDistributor.getAppForTesting(), storage,
limit, timeJoinedOrder, paginationToken, includeRecipeIds, dashboardSearchTags);
} catch (TenantOrAppNotFoundException e) {
throw new IllegalStateException(e);
@ -1390,7 +901,7 @@ public class AuthRecipe {
public static void deleteUser(Main main, String userId, boolean removeAllLinkedAccounts)
throws StorageQueryException, StorageTransactionLogicException {
Storage storage = StorageLayer.getStorage(main);
AppIdentifier appIdentifier = ResourceDistributor.getAppForTesting().toAppIdentifier();
UserIdMapping mapping = io.supertokens.useridmapping.UserIdMapping.getUserIdMapping(appIdentifier,
storage, userId, UserIdType.ANY);
@ -1401,7 +912,7 @@ public class AuthRecipe {
public static void deleteUser(Main main, String userId)
throws StorageQueryException, StorageTransactionLogicException {
Storage storage = StorageLayer.getStorage(main);
AppIdentifier appIdentifier = ResourceDistributor.getAppForTesting().toAppIdentifier();
UserIdMapping mapping = io.supertokens.useridmapping.UserIdMapping.getUserIdMapping(appIdentifier,
storage, userId, UserIdType.ANY);

View File

@ -1,27 +0,0 @@
/*
* Copyright (c) 2025, VRAI Labs and/or its affiliates. All rights reserved.
*
* This software is licensed under the Apache License, Version 2.0 (the
* "License") as published by the Apache Software Foundation.
*
* You may not use this file except in compliance with the License. You may
* obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package io.supertokens.authRecipe.exception;
public class BulkImportRecipeUserIdAlreadyLinkedWithAnotherPrimaryUserIdException extends Exception {
public final String recipeUserId;
public BulkImportRecipeUserIdAlreadyLinkedWithAnotherPrimaryUserIdException(String recipeUserId) {
super("The recipe user id '" + recipeUserId + "' is already linked with another primary user id");
this.recipeUserId = recipeUserId;
}
}

View File

@ -1,845 +0,0 @@
/*
* Copyright (c) 2024, VRAI Labs and/or its affiliates. All rights reserved.
*
* This software is licensed under the Apache License, Version 2.0 (the
* "License") as published by the Apache Software Foundation.
*
* You may not use this file except in compliance with the License. You may
* obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package io.supertokens.bulkimport;
import com.google.gson.JsonObject;
import io.supertokens.Main;
import io.supertokens.ResourceDistributor;
import io.supertokens.authRecipe.AuthRecipe;
import io.supertokens.authRecipe.exception.AccountInfoAlreadyAssociatedWithAnotherPrimaryUserIdException;
import io.supertokens.authRecipe.exception.InputUserIdIsNotAPrimaryUserException;
import io.supertokens.authRecipe.exception.RecipeUserIdAlreadyLinkedWithAnotherPrimaryUserIdException;
import io.supertokens.authRecipe.exception.RecipeUserIdAlreadyLinkedWithPrimaryUserIdException;
import io.supertokens.config.Config;
import io.supertokens.emailpassword.EmailPassword;
import io.supertokens.emailpassword.PasswordHashing;
import io.supertokens.featureflag.exceptions.FeatureNotEnabledException;
import io.supertokens.multitenancy.Multitenancy;
import io.supertokens.multitenancy.exception.AnotherPrimaryUserWithEmailAlreadyExistsException;
import io.supertokens.multitenancy.exception.AnotherPrimaryUserWithPhoneNumberAlreadyExistsException;
import io.supertokens.multitenancy.exception.AnotherPrimaryUserWithThirdPartyInfoAlreadyExistsException;
import io.supertokens.output.Logging;
import io.supertokens.passwordless.Passwordless;
import io.supertokens.pluginInterface.Storage;
import io.supertokens.pluginInterface.StorageUtils;
import io.supertokens.pluginInterface.authRecipe.AuthRecipeUserInfo;
import io.supertokens.pluginInterface.bulkimport.BulkImportStorage.BULK_IMPORT_USER_STATUS;
import io.supertokens.pluginInterface.bulkimport.BulkImportUser;
import io.supertokens.pluginInterface.bulkimport.BulkImportUser.LoginMethod;
import io.supertokens.pluginInterface.bulkimport.BulkImportUser.TotpDevice;
import io.supertokens.pluginInterface.bulkimport.BulkImportUser.UserRole;
import io.supertokens.pluginInterface.bulkimport.ImportUserBase;
import io.supertokens.pluginInterface.bulkimport.exceptions.BulkImportBatchInsertException;
import io.supertokens.pluginInterface.bulkimport.sqlStorage.BulkImportSQLStorage;
import io.supertokens.pluginInterface.emailpassword.EmailPasswordImportUser;
import io.supertokens.pluginInterface.emailpassword.exceptions.DuplicateEmailException;
import io.supertokens.pluginInterface.emailpassword.exceptions.UnknownUserIdException;
import io.supertokens.pluginInterface.emailverification.sqlStorage.EmailVerificationSQLStorage;
import io.supertokens.pluginInterface.exceptions.DbInitException;
import io.supertokens.pluginInterface.exceptions.InvalidConfigException;
import io.supertokens.pluginInterface.exceptions.StorageQueryException;
import io.supertokens.pluginInterface.exceptions.StorageTransactionLogicException;
import io.supertokens.pluginInterface.multitenancy.AppIdentifier;
import io.supertokens.pluginInterface.multitenancy.TenantConfig;
import io.supertokens.pluginInterface.multitenancy.TenantIdentifier;
import io.supertokens.pluginInterface.multitenancy.exceptions.TenantOrAppNotFoundException;
import io.supertokens.pluginInterface.passwordless.PasswordlessImportUser;
import io.supertokens.pluginInterface.passwordless.exception.DuplicatePhoneNumberException;
import io.supertokens.pluginInterface.sqlStorage.SQLStorage;
import io.supertokens.pluginInterface.thirdparty.ThirdPartyImportUser;
import io.supertokens.pluginInterface.thirdparty.exception.DuplicateThirdPartyUserException;
import io.supertokens.pluginInterface.totp.TOTPDevice;
import io.supertokens.pluginInterface.useridmapping.exception.UnknownSuperTokensUserIdException;
import io.supertokens.pluginInterface.useridmapping.exception.UserIdMappingAlreadyExistsException;
import io.supertokens.pluginInterface.userroles.exception.UnknownRoleException;
import io.supertokens.storageLayer.StorageLayer;
import io.supertokens.thirdparty.ThirdParty;
import io.supertokens.totp.Totp;
import io.supertokens.useridmapping.UserIdMapping;
import io.supertokens.usermetadata.UserMetadata;
import io.supertokens.userroles.UserRoles;
import io.supertokens.utils.Utils;
import jakarta.servlet.ServletException;
import org.jetbrains.annotations.NotNull;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.annotation.Nullable;
import java.io.IOException;
import java.util.*;
import java.util.stream.Collectors;
// Error codes ensure globally unique and identifiable errors in Bulk Import.
// Current range: E001 to E046.
public class BulkImport {
// Maximum number of users that can be added in a single /bulk-import/users POST request
public static final int MAX_USERS_TO_ADD = 10000;
// Maximum number of users to return in a single page when calling /bulk-import/users GET
public static final int GET_USERS_PAGINATION_MAX_LIMIT = 500;
// Default number of users to return when no specific limit is given in /bulk-import/users GET
public static final int GET_USERS_DEFAULT_LIMIT = 100;
// Maximum number of users that can be deleted in a single operation
public static final int DELETE_USERS_MAX_LIMIT = 500;
// Time interval in seconds between two consecutive runs of ProcessBulkImportUsers Cron Job
public static final int PROCESS_USERS_INTERVAL_SECONDS = 5*60; // 5 minutes
private static final Logger log = LoggerFactory.getLogger(BulkImport.class);
// This map allows reusing proxy storage for all tenants in the app and closing connections after import.
private static Map<String, SQLStorage> userPoolToStorageMap = new HashMap<>();
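// Inserts the given users in one batch; if a generated user id collides with an existing one,
// ids are regenerated for all users and the insert is retried (see the loop below).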
public static void addUsers(AppIdentifier appIdentifier, Storage storage, List<BulkImportUser> users)
throws StorageQueryException, TenantOrAppNotFoundException {
while (true) {
try {
StorageUtils.getBulkImportStorage(storage).addBulkImportUsers(appIdentifier, users);
break;
} catch (StorageQueryException sqe) {
if (sqe.getCause() instanceof io.supertokens.pluginInterface.bulkimport.exceptions.DuplicateUserIdException) {
// We re-generate the user id for every user and retry
for (BulkImportUser user : users) {
user.id = Utils.getUUID();
}
} else {
throw sqe;
}
}
}
}
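// Pagination works by fetching limit + 1 rows: if an extra row comes back, its (id, createdAt)
// pair is encoded into the next pagination token and only `limit` users are returned.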
public static BulkImportUserPaginationContainer getUsers(AppIdentifier appIdentifier, Storage storage,
int limit, @Nullable BULK_IMPORT_USER_STATUS status, @Nullable String paginationToken)
throws StorageQueryException, BulkImportUserPaginationToken.InvalidTokenException {
List<BulkImportUser> users;
BulkImportSQLStorage bulkImportStorage = StorageUtils.getBulkImportStorage(storage);
if (paginationToken == null) {
users = bulkImportStorage
.getBulkImportUsers(appIdentifier, limit + 1, status, null, null);
} else {
BulkImportUserPaginationToken tokenInfo = BulkImportUserPaginationToken.extractTokenInfo(paginationToken);
users = bulkImportStorage
.getBulkImportUsers(appIdentifier, limit + 1, status, tokenInfo.bulkImportUserId,
tokenInfo.createdAt);
}
String nextPaginationToken = null;
int maxLoop = users.size();
if (users.size() == limit + 1) {
maxLoop = limit;
BulkImportUser user = users.get(limit);
nextPaginationToken = new BulkImportUserPaginationToken(user.id, user.createdAt).generateToken();
}
List<BulkImportUser> resultUsers = users.subList(0, maxLoop);
return new BulkImportUserPaginationContainer(resultUsers, nextPaginationToken);
}
public static List<String> deleteUsers(AppIdentifier appIdentifier, Storage storage, String[] userIds)
throws StorageQueryException {
return StorageUtils.getBulkImportStorage(storage).deleteBulkImportUsers(appIdentifier, userIds);
}
public static long getBulkImportUsersCount(AppIdentifier appIdentifier, Storage storage,
@Nullable BULK_IMPORT_USER_STATUS status)
throws StorageQueryException {
return StorageUtils.getBulkImportStorage(storage).getBulkImportUsersCount(appIdentifier, status);
}
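// Imports a single user synchronously: all steps run inside one transaction on the proxy
// storage of the user's first tenant, the transaction is rolled back manually on failure,
// and all proxy storages are closed afterwards.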
public static synchronized AuthRecipeUserInfo importUser(Main main, AppIdentifier appIdentifier,
BulkImportUser user)
throws StorageQueryException, InvalidConfigException, IOException, TenantOrAppNotFoundException,
DbInitException, BulkImportBatchInsertException {
// Since all the tenants of a user must share the storage, we will just use the
// storage of the first tenantId of the first loginMethod
TenantIdentifier firstTenantIdentifier = new TenantIdentifier(appIdentifier.getConnectionUriDomain(),
appIdentifier.getAppId(), user.loginMethods.get(0).tenantIds.get(0));
SQLStorage bulkImportProxyStorage = (SQLStorage) getBulkImportProxyStorage(main, firstTenantIdentifier);
LoginMethod primaryLM = BulkImportUserUtils.getPrimaryLoginMethod(user);
try {
return bulkImportProxyStorage.startTransaction(con -> {
try {
Storage[] allStoragesForApp = getAllProxyStoragesForApp(main, appIdentifier);
processUsersImportSteps(main, appIdentifier, bulkImportProxyStorage, List.of(user), allStoragesForApp);
bulkImportProxyStorage.commitTransactionForBulkImportProxyStorage();
AuthRecipeUserInfo importedUser = AuthRecipe.getUserById(appIdentifier, bulkImportProxyStorage,
primaryLM.superTokensUserId);
io.supertokens.useridmapping.UserIdMapping.populateExternalUserIdForUsers(appIdentifier,
bulkImportProxyStorage, new AuthRecipeUserInfo[] { importedUser });
return importedUser;
} catch (StorageTransactionLogicException e) {
// We need to rollback the transaction manually because we have overridden that in the proxy storage
bulkImportProxyStorage.rollbackTransactionForBulkImportProxyStorage();
throw e;
} finally {
closeAllProxyStorages();
}
});
} catch (StorageTransactionLogicException e) {
if(e.actualException instanceof BulkImportBatchInsertException){
throw (BulkImportBatchInsertException) e.actualException;
}
throw new StorageQueryException(e.actualException);
}
}
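// Runs the import pipeline in order: login methods -> primary users / account linking ->
// user id mappings -> email verification -> TOTP devices -> user metadata -> user roles.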
public static void processUsersImportSteps(Main main, AppIdentifier appIdentifier,
Storage bulkImportProxyStorage, List<BulkImportUser> users, Storage[] allStoragesForApp)
throws StorageTransactionLogicException {
try {
Logging.debug(main, TenantIdentifier.BASE_TENANT, "Processing login methods..");
processUsersLoginMethods(main, appIdentifier, bulkImportProxyStorage, users);
Logging.debug(main, TenantIdentifier.BASE_TENANT, "Processing login methods DONE");
Logging.debug(main, TenantIdentifier.BASE_TENANT, "Creating Primary users and linking accounts..");
createPrimaryUsersAndLinkAccounts(main, appIdentifier, bulkImportProxyStorage, users);
Logging.debug(main, TenantIdentifier.BASE_TENANT, "Creating Primary users and linking accounts DONE");
Logging.debug(main, TenantIdentifier.BASE_TENANT, "Creating user id mappings..");
createMultipleUserIdMapping(appIdentifier, users, allStoragesForApp);
Logging.debug(main, TenantIdentifier.BASE_TENANT, "Creating user id mappings DONE");
Logging.debug(main, TenantIdentifier.BASE_TENANT, "Verifying email addresses..");
verifyMultipleEmailForAllLoginMethods(appIdentifier, bulkImportProxyStorage, users);
Logging.debug(main, TenantIdentifier.BASE_TENANT, "Verifying email addresses DONE");
Logging.debug(main, TenantIdentifier.BASE_TENANT, "Creating TOTP devices..");
createMultipleTotpDevices(main, appIdentifier, bulkImportProxyStorage, users);
Logging.debug(main, TenantIdentifier.BASE_TENANT, "Creating TOTP devices DONE");
Logging.debug(main, TenantIdentifier.BASE_TENANT, "Creating user metadata..");
createMultipleUserMetadata(appIdentifier, bulkImportProxyStorage, users);
Logging.debug(main, TenantIdentifier.BASE_TENANT, "Creating user metadata DONE");
Logging.debug(main, TenantIdentifier.BASE_TENANT, "Creating user roles..");
createMultipleUserRoles(main, appIdentifier, bulkImportProxyStorage, users);
Logging.debug(main, TenantIdentifier.BASE_TENANT, "Creating user roles DONE");
Logging.debug(main, TenantIdentifier.BASE_TENANT, "Effective processUsersImportSteps DONE");
} catch (StorageQueryException | FeatureNotEnabledException |
TenantOrAppNotFoundException e) {
throw new StorageTransactionLogicException(e);
}
}
public static void processUsersLoginMethods(Main main, AppIdentifier appIdentifier, Storage storage,
List<BulkImportUser> users) throws StorageTransactionLogicException {
// group login methods by recipeId so that each recipe type can be batch-imported in one call
Logging.debug(main, TenantIdentifier.BASE_TENANT, "Sorting login methods by recipeId..");
Map<String, List<LoginMethod>> sortedLoginMethods = new HashMap<>();
for (BulkImportUser user: users) {
for(LoginMethod loginMethod : user.loginMethods){
if(!sortedLoginMethods.containsKey(loginMethod.recipeId)) {
sortedLoginMethods.put(loginMethod.recipeId, new ArrayList<>());
}
sortedLoginMethods.get(loginMethod.recipeId).add(loginMethod);
}
}
List<ImportUserBase> importedUsers = new ArrayList<>();
if (sortedLoginMethods.containsKey("emailpassword")) {
Logging.debug(main, TenantIdentifier.BASE_TENANT, "Processing emailpassword login methods..");
importedUsers.addAll(
processEmailPasswordLoginMethods(main, storage, sortedLoginMethods.get("emailpassword"),
appIdentifier));
Logging.debug(main, TenantIdentifier.BASE_TENANT, "Processing emailpassword login methods DONE");
}
if (sortedLoginMethods.containsKey("thirdparty")) {
Logging.debug(main, TenantIdentifier.BASE_TENANT, "Processing thirdparty login methods..");
importedUsers.addAll(
processThirdpartyLoginMethods(main, storage, sortedLoginMethods.get("thirdparty"),
appIdentifier));
Logging.debug(main, TenantIdentifier.BASE_TENANT, "Processing thirdparty login methods DONE");
}
if (sortedLoginMethods.containsKey("passwordless")) {
Logging.debug(main, TenantIdentifier.BASE_TENANT, "Processing passwordless login methods..");
importedUsers.addAll(processPasswordlessLoginMethods(main, appIdentifier, storage,
sortedLoginMethods.get("passwordless")));
Logging.debug(main, TenantIdentifier.BASE_TENANT, "Processing passwordless login methods DONE");
}
Set<String> actualKeys = new HashSet<>(sortedLoginMethods.keySet());
List.of("emailpassword", "thirdparty", "passwordless").forEach(actualKeys::remove);
if (!actualKeys.isEmpty()) {
throw new StorageTransactionLogicException(
new IllegalArgumentException("E001: Unknown recipeId(s) ["
+ String.join(", ", actualKeys) + "] for loginMethod."));
}
}
Map<String, Exception> errorsById = new HashMap<>();
for (Map.Entry<String, List<LoginMethod>> loginMethodEntries : sortedLoginMethods.entrySet()) {
for (LoginMethod loginMethod : loginMethodEntries.getValue()) {
try {
associateUserToTenants(main, appIdentifier, storage, loginMethod, loginMethod.tenantIds.get(0));
} catch (StorageTransactionLogicException e){
errorsById.put(loginMethod.superTokensUserId, e.actualException);
}
}
}
if(!errorsById.isEmpty()){
throw new StorageTransactionLogicException(new BulkImportBatchInsertException("tenant association errors", errorsById));
}
}
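// The three process*LoginMethods helpers below follow the same pattern: build the per-recipe
// import list, call the batch create API, and translate any per-user failures into E00x error
// messages wrapped in a BulkImportBatchInsertException.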
private static List<? extends ImportUserBase> processPasswordlessLoginMethods(Main main, AppIdentifier appIdentifier, Storage storage,
List<LoginMethod> loginMethods)
throws StorageTransactionLogicException {
try {
List<PasswordlessImportUser> usersToImport = new ArrayList<>();
for (LoginMethod loginMethod : loginMethods) {
TenantIdentifier tenantIdentifierForLoginMethod = new TenantIdentifier(
appIdentifier.getConnectionUriDomain(),
appIdentifier.getAppId(), loginMethod.tenantIds.get(
0)); // the cron runs per app. The app stays the same, the tenant can change
usersToImport.add(new PasswordlessImportUser(loginMethod.superTokensUserId, loginMethod.phoneNumber,
loginMethod.email, tenantIdentifierForLoginMethod, loginMethod.timeJoinedInMSSinceEpoch));
}
Passwordless.createPasswordlessUsers(storage, usersToImport);
return usersToImport;
} catch (StorageQueryException | StorageTransactionLogicException e) {
Logging.debug(main, TenantIdentifier.BASE_TENANT, "exception: " + e.getMessage());
if (e.getCause() instanceof BulkImportBatchInsertException) {
Map<String, Exception> errorsByPosition = ((BulkImportBatchInsertException) e.getCause()).exceptionByUserId;
for (String userid : errorsByPosition.keySet()) {
Exception exception = errorsByPosition.get(userid);
if (exception instanceof DuplicateEmailException) {
String message = "E006: A user with email "
+ loginMethods.stream()
.filter(loginMethod -> loginMethod.superTokensUserId.equals(userid))
.findFirst().get().email + " already exists in passwordless loginMethod.";
errorsByPosition.put(userid, new Exception(message));
} else if (exception instanceof DuplicatePhoneNumberException) {
String message = "E007: A user with phoneNumber "
+ loginMethods.stream()
.filter(loginMethod -> loginMethod.superTokensUserId.equals(userid))
.findFirst().get().phoneNumber + " already exists in passwordless loginMethod.";
errorsByPosition.put(userid, new Exception(message));
}
}
throw new StorageTransactionLogicException(
new BulkImportBatchInsertException("translated", errorsByPosition));
}
throw new StorageTransactionLogicException(e);
} catch (TenantOrAppNotFoundException e) {
throw new StorageTransactionLogicException(new Exception("E008: " + e.getMessage()));
}
}
private static List<? extends ImportUserBase> processThirdpartyLoginMethods(Main main, Storage storage, List<LoginMethod> loginMethods,
AppIdentifier appIdentifier)
throws StorageTransactionLogicException {
try {
List<ThirdPartyImportUser> usersToImport = new ArrayList<>();
for (LoginMethod loginMethod: loginMethods){
TenantIdentifier tenantIdentifierForLoginMethod = new TenantIdentifier(appIdentifier.getConnectionUriDomain(),
appIdentifier.getAppId(), loginMethod.tenantIds.get(0)); // the cron runs per app. The app stays the same, the tenant can change
usersToImport.add(new ThirdPartyImportUser(loginMethod.email, loginMethod.superTokensUserId, loginMethod.thirdPartyId,
loginMethod.thirdPartyUserId, tenantIdentifierForLoginMethod, loginMethod.timeJoinedInMSSinceEpoch));
}
ThirdParty.createMultipleThirdPartyUsers(storage, usersToImport);
return usersToImport;
} catch (StorageQueryException | StorageTransactionLogicException e) {
if (e.getCause() instanceof BulkImportBatchInsertException) {
Map<String, Exception> errorsByPosition = ((BulkImportBatchInsertException) e.getCause()).exceptionByUserId;
for (String userid : errorsByPosition.keySet()) {
Exception exception = errorsByPosition.get(userid);
if (exception instanceof DuplicateThirdPartyUserException) {
LoginMethod loginMethodForError = loginMethods.stream()
.filter(loginMethod -> loginMethod.superTokensUserId.equals(userid))
.findFirst().get();
String message = "E005: A user with thirdPartyId " + loginMethodForError.thirdPartyId
+ " and thirdPartyUserId " + loginMethodForError.thirdPartyUserId
+ " already exists in thirdparty loginMethod.";
errorsByPosition.put(userid, new Exception(message));
}
}
throw new StorageTransactionLogicException(
new BulkImportBatchInsertException("translated", errorsByPosition));
}
throw new StorageTransactionLogicException(e);
} catch (TenantOrAppNotFoundException e) {
throw new StorageTransactionLogicException(new Exception("E004: " + e.getMessage()));
}
}
private static List<? extends ImportUserBase> processEmailPasswordLoginMethods(Main main, Storage storage, List<LoginMethod> loginMethods,
AppIdentifier appIdentifier)
throws StorageTransactionLogicException {
try {
//prepare data for batch import
List<EmailPasswordImportUser> usersToImport = new ArrayList<>();
for(LoginMethod emailPasswordLoginMethod : loginMethods) {
TenantIdentifier tenantIdentifierForLoginMethod = new TenantIdentifier(appIdentifier.getConnectionUriDomain(),
appIdentifier.getAppId(), emailPasswordLoginMethod.tenantIds.get(0)); // the cron runs per app. The app stays the same, the tenant can change
String passwordHash = emailPasswordLoginMethod.passwordHash;
if (passwordHash == null && emailPasswordLoginMethod.plainTextPassword != null) {
passwordHash = PasswordHashing.getInstance(main)
.createHashWithSalt(tenantIdentifierForLoginMethod.toAppIdentifier(), emailPasswordLoginMethod.plainTextPassword);
}
emailPasswordLoginMethod.passwordHash = passwordHash;
usersToImport.add(new EmailPasswordImportUser(emailPasswordLoginMethod.superTokensUserId, emailPasswordLoginMethod.email,
emailPasswordLoginMethod.passwordHash, tenantIdentifierForLoginMethod, emailPasswordLoginMethod.timeJoinedInMSSinceEpoch));
}
EmailPassword.createMultipleUsersWithPasswordHash(storage, usersToImport);
return usersToImport;
} catch (StorageQueryException | StorageTransactionLogicException e) {
if(e.getCause() instanceof BulkImportBatchInsertException){
Map<String, Exception> errorsByPosition = ((BulkImportBatchInsertException) e.getCause()).exceptionByUserId;
for(String userid : errorsByPosition.keySet()){
Exception exception = errorsByPosition.get(userid);
if(exception instanceof DuplicateEmailException){
String message = "E003: A user with email "
+ loginMethods.stream().filter(loginMethod -> loginMethod.superTokensUserId.equals(userid))
.findFirst().get().email + " already exists in emailpassword loginMethod.";
errorsByPosition.put(userid, new Exception(message));
}
}
throw new StorageTransactionLogicException(new BulkImportBatchInsertException("translated", errorsByPosition));
}
throw new StorageTransactionLogicException(e);
} catch (TenantOrAppNotFoundException e) {
throw new StorageTransactionLogicException(new Exception("E002: " + e.getMessage()));
}
}
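// Associates the login method's user with every tenant beyond the first one (the first tenant
// was already used when the user was created); failures are translated into E009 - E017 errors.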
private static void associateUserToTenants(Main main, AppIdentifier appIdentifier, Storage storage, LoginMethod lm,
String firstTenant) throws StorageTransactionLogicException {
for (String tenantId : lm.tenantIds) {
try {
if (tenantId.equals(firstTenant)) {
continue;
}
TenantIdentifier tenantIdentifier = new TenantIdentifier(appIdentifier.getConnectionUriDomain(),
appIdentifier.getAppId(), tenantId);
Multitenancy.addUserIdToTenant(main, tenantIdentifier, storage, lm.superTokensUserId);
} catch (TenantOrAppNotFoundException e) {
throw new StorageTransactionLogicException(new Exception("E009: " + e.getMessage()));
} catch (StorageQueryException e) {
throw new StorageTransactionLogicException(e);
} catch (UnknownUserIdException e) {
throw new StorageTransactionLogicException(new Exception("E010: " + "We tried to add the userId "
+ lm.getSuperTokenOrExternalUserId() + " to the tenantId " + tenantId
+ " but it doesn't exist. This should not happen. Please contact support."));
} catch (AnotherPrimaryUserWithEmailAlreadyExistsException e) {
throw new StorageTransactionLogicException(new Exception("E011: " + "We tried to add the userId "
+ lm.getSuperTokenOrExternalUserId() + " to the tenantId " + tenantId
+ " but another primary user with email " + lm.email + " already exists."));
} catch (AnotherPrimaryUserWithPhoneNumberAlreadyExistsException e) {
throw new StorageTransactionLogicException(new Exception("E012: " + "We tried to add the userId "
+ lm.getSuperTokenOrExternalUserId() + " to the tenantId " + tenantId
+ " but another primary user with phoneNumber " + lm.phoneNumber + " already exists."));
} catch (AnotherPrimaryUserWithThirdPartyInfoAlreadyExistsException e) {
throw new StorageTransactionLogicException(new Exception("E013: " + "We tried to add the userId "
+ lm.getSuperTokenOrExternalUserId() + " to the tenantId " + tenantId
+ " but another primary user with thirdPartyId " + lm.thirdPartyId + " and thirdPartyUserId "
+ lm.thirdPartyUserId + " already exists."));
} catch (DuplicateEmailException e) {
throw new StorageTransactionLogicException(new Exception("E014: " + "We tried to add the userId "
+ lm.getSuperTokenOrExternalUserId() + " to the tenantId " + tenantId
+ " but another user with email " + lm.email + " already exists."));
} catch (DuplicatePhoneNumberException e) {
throw new StorageTransactionLogicException(new Exception("E015: " + "We tried to add the userId "
+ lm.getSuperTokenOrExternalUserId() + " to the tenantId " + tenantId
+ " but another user with phoneNumber " + lm.phoneNumber + " already exists."));
} catch (DuplicateThirdPartyUserException e) {
throw new StorageTransactionLogicException(new Exception("E016: " + "We tried to add the userId "
+ lm.getSuperTokenOrExternalUserId() + " to the tenantId " + tenantId
+ " but another user with thirdPartyId " + lm.thirdPartyId + " and thirdPartyUserId "
+ lm.thirdPartyUserId + " already exists."));
} catch (FeatureNotEnabledException e) {
throw new StorageTransactionLogicException(new Exception("E017: " + e.getMessage()));
}
}
}
private static void createPrimaryUsersAndLinkAccounts(Main main,
AppIdentifier appIdentifier, Storage storage,
List<BulkImportUser> users)
throws StorageTransactionLogicException, StorageQueryException, FeatureNotEnabledException,
TenantOrAppNotFoundException {
List<BulkImportUser> usersForAccountLinking = filterUsersInNeedOfAccountLinking(users);
if(usersForAccountLinking.isEmpty()){
return;
}
AuthRecipe.CreatePrimaryUsersResultHolder resultHolder;
try {
resultHolder = AuthRecipe.createPrimaryUsersForBulkImport(main, appIdentifier, storage, usersForAccountLinking);
} catch (StorageQueryException e) {
if(e.getCause() instanceof BulkImportBatchInsertException){
Map<String, Exception> errorsByPosition = ((BulkImportBatchInsertException) e.getCause()).exceptionByUserId;
for (String userid : errorsByPosition.keySet()) {
Exception exception = errorsByPosition.get(userid);
if (exception instanceof UnknownUserIdException) {
String message = "E020: We tried to create the primary user for the userId "
+ userid
+ " but it doesn't exist. This should not happen. Please contact support.";
errorsByPosition.put(userid, new Exception(message));
} else if (exception instanceof RecipeUserIdAlreadyLinkedWithPrimaryUserIdException) {
String message = "E021: We tried to create the primary user for the userId "
+ userid
+ " but it is already linked with another primary user.";
errorsByPosition.put(userid, new Exception(message));
} else if (exception instanceof AccountInfoAlreadyAssociatedWithAnotherPrimaryUserIdException) {
String message = "E022: We tried to create the primary user for the userId "
+ userid
+ " but the account info is already associated with another primary user.";
errorsByPosition.put(userid, new Exception(message));
}
}
throw new StorageTransactionLogicException(
new BulkImportBatchInsertException("translated", errorsByPosition));
}
throw new StorageTransactionLogicException(e);
} catch (TenantOrAppNotFoundException e) {
throw new StorageTransactionLogicException(new Exception("E018: " + e.getMessage()));
} catch (FeatureNotEnabledException e) {
throw new StorageTransactionLogicException(new Exception("E019: " + e.getMessage()));
}
if(resultHolder != null && resultHolder.usersWithSameExtraData != null){
linkAccountsForMultipleUser(main, appIdentifier, storage, usersForAccountLinking, resultHolder.usersWithSameExtraData);
}
}
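// Only users that either have an explicit primary login method or more than one login method
// need primary-user creation and account linking.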
private static List<BulkImportUser> filterUsersInNeedOfAccountLinking(List<BulkImportUser> allUsers) {
if (allUsers == null || allUsers.isEmpty()) {
return Collections.emptyList();
}
return allUsers.stream().filter(bulkImportUser -> bulkImportUser.loginMethods.stream()
.anyMatch(loginMethod -> loginMethod.isPrimary) || bulkImportUser.loginMethods.size() > 1)
.collect(Collectors.toList());
}
private static void linkAccountsForMultipleUser(Main main, AppIdentifier appIdentifier, Storage storage,
List<BulkImportUser> users, List<AuthRecipeUserInfo> allUsersWithSameExtraData)
throws StorageTransactionLogicException {
try {
AuthRecipe.linkMultipleAccountsForBulkImport(main, appIdentifier, storage,
users, allUsersWithSameExtraData);
} catch (TenantOrAppNotFoundException e) {
throw new StorageTransactionLogicException(new Exception("E023: " + e.getMessage()));
} catch (FeatureNotEnabledException e) {
throw new StorageTransactionLogicException(new Exception("E024: " + e.getMessage()));
} catch (StorageQueryException e) {
if (e.getCause() instanceof BulkImportBatchInsertException) {
Map<String, String> recipeUserIdByPrimaryUserId = BulkImportUserUtils.collectRecipeIdsToPrimaryIds(users);
Map<String, Exception> errorByPosition = ((BulkImportBatchInsertException) e.getCause()).exceptionByUserId;
for (String userId : errorByPosition.keySet()) {
Exception currentException = errorByPosition.get(userId);
String recipeUID = recipeUserIdByPrimaryUserId.get(userId);
if (currentException instanceof UnknownUserIdException) {
String message = "E025: We tried to link the userId " + recipeUID
+ " to the primary userId " + userId
+ " but it doesn't exist.";
errorByPosition.put(userId, new Exception(message));
} else if (currentException instanceof InputUserIdIsNotAPrimaryUserException) {
String message = "E026: We tried to link the userId " + recipeUID
+ " to the primary userId " + userId
+ " but it is not a primary user.";
errorByPosition.put(userId, new Exception(message));
} else if (currentException instanceof AccountInfoAlreadyAssociatedWithAnotherPrimaryUserIdException) {
String message = "E027: We tried to link the userId " + userId
+ " to the primary userId " + recipeUID
+ " but the account info is already associated with another primary user.";
errorByPosition.put(userId, new Exception(message));
} else if (currentException instanceof RecipeUserIdAlreadyLinkedWithAnotherPrimaryUserIdException) {
String message = "E028: We tried to link the userId " + recipeUID
+ " to the primary userId " + userId
+ " but it is already linked with another primary user.";
errorByPosition.put(userId, new Exception(message));
}
}
throw new StorageTransactionLogicException(
new BulkImportBatchInsertException("link accounts translated", errorByPosition));
}
throw new StorageTransactionLogicException(e);
}
}
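// Creates SuperTokens userId -> externalUserId mappings for the primary login method of every
// user that supplied an externalUserId, across all storages of the app.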
public static void createMultipleUserIdMapping(AppIdentifier appIdentifier,
List<BulkImportUser> users, Storage[] storages) throws StorageTransactionLogicException {
Map<String, String> superTokensUserIdToExternalUserId = new HashMap<>();
for(BulkImportUser user: users) {
if(user.externalUserId != null) {
LoginMethod primaryLoginMethod = BulkImportUserUtils.getPrimaryLoginMethod(user);
superTokensUserIdToExternalUserId.put(primaryLoginMethod.superTokensUserId, user.externalUserId);
primaryLoginMethod.externalUserId = user.externalUserId;
}
}
try {
if(!superTokensUserIdToExternalUserId.isEmpty()) {
List<UserIdMapping.UserIdBulkMappingResult> mappingResults = UserIdMapping.createMultipleUserIdMappings(
appIdentifier, storages,
superTokensUserIdToExternalUserId,
false, true);
}
} catch (StorageQueryException e) {
if(e.getCause() instanceof BulkImportBatchInsertException) {
Map<String, Exception> errorsByPosition = ((BulkImportBatchInsertException) e.getCause()).exceptionByUserId;
for (String userid : errorsByPosition.keySet()) {
Exception exception = errorsByPosition.get(userid);
if (exception instanceof ServletException) {
String message = "E030: " + e.getMessage();
errorsByPosition.put(userid, new Exception(message));
} else if (exception instanceof UserIdMappingAlreadyExistsException) {
String message = "E031: A user with externalId " + superTokensUserIdToExternalUserId.get(userid) + " already exists";
errorsByPosition.put(userid, new Exception(message));
} else if (exception instanceof UnknownSuperTokensUserIdException) {
String message = "E032: We tried to create the externalUserId mapping for the superTokenUserId "
+ userid
+ " but it doesn't exist. This should not happen. Please contact support.";
errorsByPosition.put(userid, new Exception(message));
}
}
throw new StorageTransactionLogicException(
new BulkImportBatchInsertException("translated", errorsByPosition));
}
throw new StorageTransactionLogicException(e);
}
}
public static void createMultipleUserMetadata(AppIdentifier appIdentifier, Storage storage, List<BulkImportUser> users)
throws StorageTransactionLogicException {
Map<String, JsonObject> usersMetadata = new HashMap<>();
for(BulkImportUser user: users) {
if (user.userMetadata != null) {
usersMetadata.put(BulkImportUserUtils.getPrimaryLoginMethod(user).getSuperTokenOrExternalUserId(), user.userMetadata);
}
}
try {
if(!usersMetadata.isEmpty()) {
UserMetadata.updateMultipleUsersMetadata(appIdentifier, storage, usersMetadata);
}
} catch (TenantOrAppNotFoundException e) {
throw new StorageTransactionLogicException(new Exception("E040: " + e.getMessage()));
} catch (StorageQueryException e) {
throw new StorageTransactionLogicException(e);
}
}
public static void createMultipleUserRoles(Main main, AppIdentifier appIdentifier, Storage storage,
List<BulkImportUser> users) throws StorageTransactionLogicException {
Map<TenantIdentifier, Map<String, List<String>>> rolesToUserByTenant = gatherRolesForUsersByTenant(appIdentifier, users);
try {
if(!rolesToUserByTenant.isEmpty()){
UserRoles.addMultipleRolesToMultipleUsers(main, appIdentifier, storage, rolesToUserByTenant);
}
} catch (TenantOrAppNotFoundException e) {
throw new StorageTransactionLogicException(new Exception("E033: " + e.getMessage()));
} catch (StorageTransactionLogicException e) {
if(e.actualException instanceof BulkImportBatchInsertException){
Map<String, Exception> errorsByPosition = ((BulkImportBatchInsertException) e.actualException).exceptionByUserId;
for (String userid : errorsByPosition.keySet()) {
Exception exception = errorsByPosition.get(userid);
if (exception instanceof UnknownRoleException) {
String message = "E034: Role does not exist! You need to pre-create the role before " +
"assigning it to the user.";
errorsByPosition.put(userid, new Exception(message));
}
}
throw new StorageTransactionLogicException(new BulkImportBatchInsertException("roles errors translated", errorsByPosition));
} else {
throw new StorageTransactionLogicException(e);
}
}
}
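// Builds a tenant -> (userId -> roles) map from the users' role assignments, preferring the
// externalUserId over the SuperTokens id when one is present.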
private static Map<TenantIdentifier, Map<String, List<String>>> gatherRolesForUsersByTenant(AppIdentifier appIdentifier, List<BulkImportUser> users) {
Map<TenantIdentifier, Map<String, List<String>>> rolesToUserByTenant = new HashMap<>();
for (BulkImportUser user : users) {
if (user.userRoles != null) {
for (UserRole userRole : user.userRoles) {
for (String tenantId : userRole.tenantIds) {
TenantIdentifier tenantIdentifier = new TenantIdentifier(
appIdentifier.getConnectionUriDomain(), appIdentifier.getAppId(),
tenantId);
if(!rolesToUserByTenant.containsKey(tenantIdentifier)){
rolesToUserByTenant.put(tenantIdentifier, new HashMap<>());
}
String userIdToUse = user.externalUserId != null ?
user.externalUserId : user.id;
if(!rolesToUserByTenant.get(tenantIdentifier).containsKey(userIdToUse)){
rolesToUserByTenant.get(tenantIdentifier).put(userIdToUse, new ArrayList<>());
}
rolesToUserByTenant.get(tenantIdentifier).get(userIdToUse).add(userRole.role);
}
}
}
}
return rolesToUserByTenant;
}
public static void verifyMultipleEmailForAllLoginMethods(AppIdentifier appIdentifier, Storage storage,
List<BulkImportUser> users)
throws StorageTransactionLogicException {
Map<String, String> emailToUserId = collectVerifiedEmailAddressesByUserIds(users);
try {
verifyCollectedEmailAddressesForUsers(appIdentifier, storage, emailToUserId);
} catch (StorageQueryException | StorageTransactionLogicException e) {
if (e.getCause() instanceof BulkImportBatchInsertException) {
Map<String, Exception> errorsByPosition =
((BulkImportBatchInsertException) e.getCause()).exceptionByUserId;
for (String userid : errorsByPosition.keySet()) {
Exception exception = errorsByPosition.get(userid);
if (exception instanceof DuplicateEmailException) {
// note: emailToUserId maps userId -> verified email (see collectVerifiedEmailAddressesByUserIds)
String message = "E043: Email " + emailToUserId.get(userid)
+ " is already verified for the user";
errorsByPosition.put(userid, new Exception(message));
} else if (exception instanceof NullPointerException) {
String message = "E044: null email address was found for the userId " + userid +
" while verifying the email";
errorsByPosition.put(userid, new Exception(message));
}
}
throw new StorageTransactionLogicException(
new BulkImportBatchInsertException("translated", errorsByPosition));
}
throw new StorageTransactionLogicException(e);
}
}
private static void verifyCollectedEmailAddressesForUsers(AppIdentifier appIdentifier, Storage storage,
Map<String, String> emailToUserId)
throws StorageQueryException, StorageTransactionLogicException {
if(!emailToUserId.isEmpty()) {
EmailVerificationSQLStorage emailVerificationSQLStorage = StorageUtils
.getEmailVerificationStorage(storage);
emailVerificationSQLStorage.startTransaction(con -> {
emailVerificationSQLStorage
.updateMultipleIsEmailVerified_Transaction(appIdentifier, con,
emailToUserId, true); //only the verified email addresses are expected to be in the map
emailVerificationSQLStorage.commitTransaction(con);
return null;
});
}
}
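// Returns a map of userId -> verified email address, covering every login method flagged as
// verified (passwordless login methods without an email are skipped).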
@NotNull
private static Map<String, String> collectVerifiedEmailAddressesByUserIds(List<BulkImportUser> users) {
Map<String, String> emailToUserId = new LinkedHashMap<>();
for (BulkImportUser user : users) {
for (LoginMethod lm : user.loginMethods) {
// we skip passwordless 'null' email addresses
if (lm.isVerified && !(lm.recipeId.equals("passwordless") && lm.email == null)) {
//collect the verified email addresses for the userId
emailToUserId.put(lm.getSuperTokenOrExternalUserId(), lm.email);
}
}
}
return emailToUserId;
}
public static void createMultipleTotpDevices(Main main, AppIdentifier appIdentifier,
Storage storage, List<BulkImportUser> users)
throws StorageTransactionLogicException {
List<TOTPDevice> devices = new ArrayList<>();
for (BulkImportUser user : users) {
if (user.totpDevices != null) {
for(TotpDevice device : user.totpDevices){
TOTPDevice totpDevice = new TOTPDevice(BulkImportUserUtils.getPrimaryLoginMethod(user).getSuperTokenOrExternalUserId(),
device.deviceName, device.secretKey, device.period, device.skew, true,
System.currentTimeMillis());
devices.add(totpDevice);
}
}
}
try {
if(!devices.isEmpty()){
Totp.createDevices(main, appIdentifier, storage, devices);
}
} catch (StorageQueryException e) {
throw new StorageTransactionLogicException(new Exception("E036: " + e.getMessage()));
} catch (FeatureNotEnabledException e) {
throw new StorageTransactionLogicException(new Exception("E037: " + e.getMessage()));
}
}
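// Returns a cached proxy storage for the tenant's user pool, creating and initialising a new
// bulk-import proxy storage instance from the tenant's normalised config on first use.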
private static synchronized Storage getBulkImportProxyStorage(Main main, TenantIdentifier tenantIdentifier)
throws InvalidConfigException, IOException, TenantOrAppNotFoundException, DbInitException {
String userPoolId = StorageLayer.getStorage(tenantIdentifier, main).getUserPoolId();
if (userPoolToStorageMap.containsKey(userPoolId)) {
return userPoolToStorageMap.get(userPoolId);
}
TenantConfig[] allTenants = Multitenancy.getAllTenants(main);
Map<ResourceDistributor.KeyClass, JsonObject> normalisedConfigs = Config.getNormalisedConfigsForAllTenants(
allTenants,
Config.getBaseConfigAsJsonObject(main));
for (ResourceDistributor.KeyClass key : normalisedConfigs.keySet()) {
if (key.getTenantIdentifier().equals(tenantIdentifier)) {
SQLStorage bulkImportProxyStorage = (SQLStorage) StorageLayer.getNewBulkImportProxyStorageInstance(main,
normalisedConfigs.get(key), tenantIdentifier, true);
userPoolToStorageMap.put(userPoolId, bulkImportProxyStorage);
bulkImportProxyStorage.initStorage(false, new ArrayList<>());
return bulkImportProxyStorage;
}
}
throw new TenantOrAppNotFoundException(tenantIdentifier);
}
private static Storage[] getAllProxyStoragesForApp(Main main, AppIdentifier appIdentifier)
throws StorageTransactionLogicException {
try {
List<Storage> allProxyStorages = new ArrayList<>();
TenantConfig[] tenantConfigs = Multitenancy.getAllTenantsForApp(appIdentifier, main);
for (TenantConfig tenantConfig : tenantConfigs) {
allProxyStorages.add(getBulkImportProxyStorage(main, tenantConfig.tenantIdentifier));
}
return allProxyStorages.toArray(new Storage[0]);
} catch (TenantOrAppNotFoundException e) {
throw new StorageTransactionLogicException(new Exception("E039: " + e.getMessage()));
} catch (InvalidConfigException e) {
throw new StorageTransactionLogicException(new InvalidConfigException("E040: " + e.getMessage()));
} catch (DbInitException e) {
throw new StorageTransactionLogicException(new DbInitException("E041: " + e.getMessage()));
} catch (IOException e) {
throw new StorageTransactionLogicException(new IOException("E042: " + e.getMessage()));
}
}
private static void closeAllProxyStorages() throws StorageQueryException {
for (SQLStorage storage : userPoolToStorageMap.values()) {
storage.closeConnectionForBulkImportProxyStorage();
storage.close();
}
userPoolToStorageMap.clear();
}
}

View File

@ -1,34 +0,0 @@
/*
* Copyright (c) 2024, VRAI Labs and/or its affiliates. All rights reserved.
*
* This software is licensed under the Apache License, Version 2.0 (the
* "License") as published by the Apache Software Foundation.
*
* You may not use this file except in compliance with the License. You may
* obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package io.supertokens.bulkimport;
import java.util.List;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import io.supertokens.pluginInterface.bulkimport.BulkImportUser;
public class BulkImportUserPaginationContainer {
public final List<BulkImportUser> users;
public final String nextPaginationToken;
public BulkImportUserPaginationContainer(@Nonnull List<BulkImportUser> users, @Nullable String nextPaginationToken) {
this.users = users;
this.nextPaginationToken = nextPaginationToken;
}
}

View File

@ -1,53 +0,0 @@
/*
* Copyright (c) 2024, VRAI Labs and/or its affiliates. All rights reserved.
*
* This software is licensed under the Apache License, Version 2.0 (the
* "License") as published by the Apache Software Foundation.
*
* You may not use this file except in compliance with the License. You may
* obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package io.supertokens.bulkimport;
import java.util.Base64;
public class BulkImportUserPaginationToken {
public final String bulkImportUserId;
public final long createdAt;
public BulkImportUserPaginationToken(String bulkImportUserId, long createdAt) {
this.bulkImportUserId = bulkImportUserId;
this.createdAt = createdAt;
}
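// Token format: base64("<bulkImportUserId>;<createdAt>"), as produced by generateToken() below.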
public static BulkImportUserPaginationToken extractTokenInfo(String token) throws InvalidTokenException {
try {
String decodedPaginationToken = new String(Base64.getDecoder().decode(token));
String[] splitDecodedToken = decodedPaginationToken.split(";");
if (splitDecodedToken.length != 2) {
throw new InvalidTokenException();
}
String bulkImportUserId = splitDecodedToken[0];
long createdAt = Long.parseLong(splitDecodedToken[1]);
return new BulkImportUserPaginationToken(bulkImportUserId, createdAt);
} catch (Exception e) {
throw new InvalidTokenException();
}
}
public String generateToken() {
return new String(Base64.getEncoder().encode((this.bulkImportUserId + ";" + this.createdAt).getBytes()));
}
public static class InvalidTokenException extends Exception {
private static final long serialVersionUID = 6289026174830695478L;
}
}

View File

@ -1,654 +0,0 @@
/*
* Copyright (c) 2024, VRAI Labs and/or its affiliates. All rights reserved.
*
* This software is licensed under the Apache License, Version 2.0 (the
* "License") as published by the Apache Software Foundation.
*
* You may not use this file except in compliance with the License. You may
* obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package io.supertokens.bulkimport;
import com.google.gson.JsonArray;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import io.supertokens.Main;
import io.supertokens.bulkimport.exceptions.InvalidBulkImportDataException;
import io.supertokens.config.CoreConfig;
import io.supertokens.emailpassword.PasswordHashingUtils;
import io.supertokens.emailpassword.exceptions.UnsupportedPasswordHashingFormatException;
import io.supertokens.featureflag.EE_FEATURES;
import io.supertokens.featureflag.FeatureFlag;
import io.supertokens.multitenancy.Multitenancy;
import io.supertokens.pluginInterface.Storage;
import io.supertokens.pluginInterface.bulkimport.BulkImportUser;
import io.supertokens.pluginInterface.bulkimport.BulkImportUser.LoginMethod;
import io.supertokens.pluginInterface.bulkimport.BulkImportUser.TotpDevice;
import io.supertokens.pluginInterface.bulkimport.BulkImportUser.UserRole;
import io.supertokens.pluginInterface.exceptions.StorageQueryException;
import io.supertokens.pluginInterface.multitenancy.AppIdentifier;
import io.supertokens.pluginInterface.multitenancy.TenantConfig;
import io.supertokens.pluginInterface.multitenancy.TenantIdentifier;
import io.supertokens.pluginInterface.multitenancy.exceptions.TenantOrAppNotFoundException;
import io.supertokens.storageLayer.StorageLayer;
import io.supertokens.utils.JsonValidatorUtils.ValueType;
import io.supertokens.utils.Utils;
import java.util.*;
import static io.supertokens.utils.JsonValidatorUtils.parseAndValidateFieldType;
import static io.supertokens.utils.JsonValidatorUtils.validateJsonFieldType;
public class BulkImportUserUtils {
private String[] allUserRoles;
private Set<String> allExternalUserIds;
public BulkImportUserUtils(String[] allUserRoles) {
this.allUserRoles = allUserRoles;
this.allExternalUserIds = new HashSet<>();
}
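// Parses and validates the bulk import JSON for a single user, collecting every validation
// error before throwing a single InvalidBulkImportDataException.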
public BulkImportUser createBulkImportUserFromJSON(Main main, AppIdentifier appIdentifier, JsonObject userData, IDMode idMode)
throws InvalidBulkImportDataException, StorageQueryException, TenantOrAppNotFoundException {
List<String> errors = new ArrayList<>();
String externalUserId = parseAndValidateFieldType(userData, "externalUserId", ValueType.STRING, false,
String.class,
errors, ".");
JsonObject userMetadata = parseAndValidateFieldType(userData, "userMetadata", ValueType.OBJECT, false,
JsonObject.class, errors, ".");
List<UserRole> userRoles = getParsedUserRoles(main, appIdentifier, userData, errors);
List<TotpDevice> totpDevices = getParsedTotpDevices(main, appIdentifier, userData, errors);
List<LoginMethod> loginMethods = getParsedLoginMethods(main, appIdentifier, userData, errors, idMode);
externalUserId = validateAndNormaliseExternalUserId(externalUserId, errors);
validateTenantIdsForRoleAndLoginMethods(main, appIdentifier, userRoles, loginMethods, errors);
if (!errors.isEmpty()) {
throw new InvalidBulkImportDataException(errors);
}
String id = getPrimaryLoginMethod(loginMethods).superTokensUserId;
return new BulkImportUser(id, externalUserId, userMetadata, userRoles, totpDevices, loginMethods);
}
private List<UserRole> getParsedUserRoles(Main main, AppIdentifier appIdentifier, JsonObject userData,
List<String> errors) throws StorageQueryException, TenantOrAppNotFoundException {
JsonArray jsonUserRoles = parseAndValidateFieldType(userData, "userRoles", ValueType.ARRAY_OF_OBJECT, false,
JsonArray.class, errors, ".");
if (jsonUserRoles == null) {
return null;
}
List<UserRole> userRoles = new ArrayList<>();
for (JsonElement jsonUserRoleEl : jsonUserRoles) {
JsonObject jsonUserRole = jsonUserRoleEl.getAsJsonObject();
String role = parseAndValidateFieldType(jsonUserRole, "role", ValueType.STRING, true, String.class, errors,
" for a user role.");
JsonArray jsonTenantIds = parseAndValidateFieldType(jsonUserRole, "tenantIds", ValueType.ARRAY_OF_STRING,
true, JsonArray.class, errors, " for a user role.");
role = validateAndNormaliseUserRole(role, errors);
List<String> normalisedTenantIds = validateAndNormaliseTenantIds(main, appIdentifier, jsonTenantIds, errors,
" for a user role.");
if (role != null && normalisedTenantIds != null) {
userRoles.add(new UserRole(role, normalisedTenantIds));
}
}
return userRoles;
}
private List<TotpDevice> getParsedTotpDevices(Main main, AppIdentifier appIdentifier, JsonObject userData,
List<String> errors) throws StorageQueryException, TenantOrAppNotFoundException {
JsonArray jsonTotpDevices = parseAndValidateFieldType(userData, "totpDevices", ValueType.ARRAY_OF_OBJECT, false,
JsonArray.class, errors, ".");
if (jsonTotpDevices == null) {
return null;
}
if (Arrays.stream(FeatureFlag.getInstance(main, appIdentifier).getEnabledFeatures())
.noneMatch(t -> t == EE_FEATURES.MFA)) {
errors.add("MFA must be enabled to import totp devices.");
return null;
}
List<TotpDevice> totpDevices = new ArrayList<>();
for (JsonElement jsonTotpDeviceEl : jsonTotpDevices) {
JsonObject jsonTotpDevice = jsonTotpDeviceEl.getAsJsonObject();
String secretKey = parseAndValidateFieldType(jsonTotpDevice, "secretKey", ValueType.STRING, true,
String.class, errors, " for a totp device.");
Integer period = parseAndValidateFieldType(jsonTotpDevice, "period", ValueType.INTEGER, false,
Integer.class, errors, " for a totp device.");
Integer skew = parseAndValidateFieldType(jsonTotpDevice, "skew", ValueType.INTEGER, false, Integer.class,
errors, " for a totp device.");
String deviceName = parseAndValidateFieldType(jsonTotpDevice, "deviceName", ValueType.STRING, false,
String.class, errors, " for a totp device.");
secretKey = validateAndNormaliseTotpSecretKey(secretKey, errors);
period = validateAndNormaliseTotpPeriod(period, errors);
skew = validateAndNormaliseTotpSkew(skew, errors);
deviceName = validateAndNormaliseTotpDeviceName(deviceName, errors);
if (secretKey != null && period != null && skew != null) {
totpDevices.add(new TotpDevice(secretKey, period, skew, deviceName));
}
}
return totpDevices;
}
private List<LoginMethod> getParsedLoginMethods(Main main, AppIdentifier appIdentifier, JsonObject userData,
List<String> errors, IDMode idMode)
throws StorageQueryException, TenantOrAppNotFoundException {
JsonArray jsonLoginMethods = parseAndValidateFieldType(userData, "loginMethods", ValueType.ARRAY_OF_OBJECT,
true, JsonArray.class, errors, ".");
if (jsonLoginMethods == null) {
return new ArrayList<>();
}
if (jsonLoginMethods.size() == 0) {
errors.add("At least one loginMethod is required.");
return new ArrayList<>();
}
if (jsonLoginMethods.size() > 1) {
if (!Utils.isAccountLinkingEnabled(main, appIdentifier)) {
errors.add("Account linking must be enabled to import multiple loginMethods.");
}
}
validateAndNormaliseIsPrimaryField(jsonLoginMethods, errors);
List<LoginMethod> loginMethods = new ArrayList<>();
for (JsonElement jsonLoginMethod : jsonLoginMethods) {
JsonObject jsonLoginMethodObj = jsonLoginMethod.getAsJsonObject();
String recipeId = parseAndValidateFieldType(jsonLoginMethodObj, "recipeId", ValueType.STRING, true,
String.class, errors, " for a loginMethod.");
JsonArray tenantIds = parseAndValidateFieldType(jsonLoginMethodObj, "tenantIds", ValueType.ARRAY_OF_STRING,
false, JsonArray.class, errors, " for a loginMethod.");
Boolean isVerified = parseAndValidateFieldType(jsonLoginMethodObj, "isVerified", ValueType.BOOLEAN, false,
Boolean.class, errors, " for a loginMethod.");
Boolean isPrimary = parseAndValidateFieldType(jsonLoginMethodObj, "isPrimary", ValueType.BOOLEAN, false,
Boolean.class, errors, " for a loginMethod.");
Long timeJoined = parseAndValidateFieldType(jsonLoginMethodObj, "timeJoinedInMSSinceEpoch", ValueType.LONG,
false, Long.class, errors, " for a loginMethod");
recipeId = validateAndNormaliseRecipeId(recipeId, errors);
List<String> normalisedTenantIds = validateAndNormaliseTenantIds(main, appIdentifier, tenantIds, errors,
" for " + recipeId + " recipe.");
isPrimary = validateAndNormaliseIsPrimary(isPrimary);
isVerified = validateAndNormaliseIsVerified(isVerified);
long timeJoinedInMSSinceEpoch = validateAndNormaliseTimeJoined(timeJoined, errors);
String supertokensUserId = switch (idMode) {
case READ_STORED -> parseAndValidateFieldType(jsonLoginMethodObj, "superTokensUserId", ValueType.STRING,
true, String.class, errors, " for a loginMethod");
case GENERATE -> Utils.getUUID();
};
if ("emailpassword".equals(recipeId)) {
String email = parseAndValidateFieldType(jsonLoginMethodObj, "email", ValueType.STRING, true,
String.class, errors, " for an emailpassword recipe.");
String passwordHash = parseAndValidateFieldType(jsonLoginMethodObj, "passwordHash", ValueType.STRING,
false, String.class, errors, " for an emailpassword recipe.");
String hashingAlgorithm = parseAndValidateFieldType(jsonLoginMethodObj, "hashingAlgorithm",
ValueType.STRING, false, String.class, errors, " for an emailpassword recipe.");
String plainTextPassword = parseAndValidateFieldType(jsonLoginMethodObj, "plainTextPassword",
ValueType.STRING, false, String.class, errors, " for an emailpassword recipe.");
if ((passwordHash == null || hashingAlgorithm == null) && plainTextPassword == null) {
errors.add("Either (passwordHash, hashingAlgorithm) or plainTextPassword is required for an emailpassword recipe.");
}
email = validateAndNormaliseEmail(email, errors);
CoreConfig.PASSWORD_HASHING_ALG normalisedHashingAlgorithm = validateAndNormaliseHashingAlgorithm(
hashingAlgorithm, errors);
hashingAlgorithm = normalisedHashingAlgorithm != null ? normalisedHashingAlgorithm.toString()
: hashingAlgorithm;
passwordHash = validateAndNormalisePasswordHash(main, appIdentifier, normalisedHashingAlgorithm,
passwordHash, errors);
loginMethods.add(new LoginMethod(normalisedTenantIds, recipeId, isVerified, isPrimary,
timeJoinedInMSSinceEpoch, email, passwordHash, hashingAlgorithm, plainTextPassword,
null, null, null, supertokensUserId));
} else if ("thirdparty".equals(recipeId)) {
String email = parseAndValidateFieldType(jsonLoginMethodObj, "email", ValueType.STRING, true,
String.class, errors, " for a thirdparty recipe.");
String thirdPartyId = parseAndValidateFieldType(jsonLoginMethodObj, "thirdPartyId", ValueType.STRING,
true, String.class, errors, " for a thirdparty recipe.");
String thirdPartyUserId = parseAndValidateFieldType(jsonLoginMethodObj, "thirdPartyUserId",
ValueType.STRING, true, String.class, errors, " for a thirdparty recipe.");
email = validateAndNormaliseEmail(email, errors);
thirdPartyId = validateAndNormaliseThirdPartyId(thirdPartyId, errors);
thirdPartyUserId = validateAndNormaliseThirdPartyUserId(thirdPartyUserId, errors);
loginMethods.add(new LoginMethod(normalisedTenantIds, recipeId, isVerified, isPrimary,
timeJoinedInMSSinceEpoch, email, null, null, null,
thirdPartyId, thirdPartyUserId, null, supertokensUserId));
} else if ("passwordless".equals(recipeId)) {
String email = parseAndValidateFieldType(jsonLoginMethodObj, "email", ValueType.STRING, false,
String.class, errors, " for a passwordless recipe.");
String phoneNumber = parseAndValidateFieldType(jsonLoginMethodObj, "phoneNumber", ValueType.STRING,
false, String.class, errors, " for a passwordless recipe.");
email = validateAndNormaliseEmail(email, errors);
phoneNumber = validateAndNormalisePhoneNumber(phoneNumber, errors);
if (email == null && phoneNumber == null) {
errors.add("Either email or phoneNumber is required for a passwordless recipe.");
}
loginMethods.add(new LoginMethod(normalisedTenantIds, recipeId, isVerified, isPrimary,
timeJoinedInMSSinceEpoch, email, null, null, null,
null, null, phoneNumber, supertokensUserId));
}
}
return loginMethods;
}
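// For reference, a minimal sketch of one emailpassword entry in the loginMethods array parsed
// above. All concrete values are hypothetical. Either (passwordHash, hashingAlgorithm) or
// plainTextPassword must be present; isVerified, isPrimary and timeJoinedInMSSinceEpoch are
// optional, and tenantIds defaults to ["public"] when omitted.
private static JsonArray exampleLoginMethodsJson() {
    JsonObject emailPassword = new JsonObject();
    emailPassword.addProperty("recipeId", "emailpassword");
    emailPassword.addProperty("email", "user@example.com");
    emailPassword.addProperty("plainTextPassword", "example-password");
    emailPassword.addProperty("isVerified", true);
    emailPassword.addProperty("isPrimary", true);
    emailPassword.addProperty("timeJoinedInMSSinceEpoch", 1690000000000L);

    JsonArray loginMethods = new JsonArray();
    loginMethods.add(emailPassword);
    return loginMethods;
}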
private String validateAndNormaliseExternalUserId(String externalUserId, List<String> errors) {
if (externalUserId == null) {
return null;
}
if (externalUserId.length() > 128) {
errors.add("externalUserId " + externalUserId + " is too long. Max length is 128.");
}
if (!allExternalUserIds.add(externalUserId)) {
errors.add("externalUserId " + externalUserId + " is not unique. It is already used by another user.");
}
// We just trim the externalUserId as per the UpdateExternalUserIdInfoAPI.java
return externalUserId.trim();
}
private String validateAndNormaliseUserRole(String role, List<String> errors) {
if (role.length() > 255) {
errors.add("role " + role + " is too long. Max length is 255.");
}
// We just trim the role as per the CreateRoleAPI.java
String normalisedRole = role.trim();
if (!Arrays.asList(allUserRoles).contains(normalisedRole)) {
errors.add("Role " + normalisedRole + " does not exist.");
}
return normalisedRole;
}
private String validateAndNormaliseTotpSecretKey(String secretKey, List<String> errors) {
if (secretKey == null) {
return null;
}
if (secretKey.length() > 256) {
errors.add("TOTP secretKey " + secretKey + " is too long. Max length is 256.");
}
// We don't perform any normalisation on the secretKey in ImportTotpDeviceAPI.java
return secretKey;
}
private Integer validateAndNormaliseTotpPeriod(Integer period, List<String> errors) {
// We default to 30 if period is null
if (period == null) {
return 30;
}
if (period.intValue() < 1) {
errors.add("period should be > 0 for a totp device.");
return null;
}
return period;
}
private Integer validateAndNormaliseTotpSkew(Integer skew, List<String> errors) {
// We default to 1 if skew is null
if (skew == null) {
return 1;
}
if (skew.intValue() < 0) {
errors.add("skew should be >= 0 for a totp device.");
return null;
}
return skew;
}
private String validateAndNormaliseTotpDeviceName(String deviceName, List<String> errors) {
if (deviceName == null) {
return null;
}
if (deviceName.length() > 256) {
errors.add("TOTP deviceName " + deviceName + " is too long. Max length is 256.");
}
// We normalise the deviceName as per the ImportTotpDeviceAPI.java
return deviceName.trim();
}
private void validateAndNormaliseIsPrimaryField(JsonArray jsonLoginMethods, List<String> errors) {
// We are validating that only one loginMethod has isPrimary as true
boolean hasPrimaryLoginMethod = false;
for (JsonElement jsonLoginMethod : jsonLoginMethods) {
JsonObject jsonLoginMethodObj = jsonLoginMethod.getAsJsonObject();
if (validateJsonFieldType(jsonLoginMethodObj, "isPrimary", ValueType.BOOLEAN)) {
if (jsonLoginMethodObj.get("isPrimary").getAsBoolean()) {
if (hasPrimaryLoginMethod) {
errors.add("No two loginMethods can have isPrimary as true.");
}
hasPrimaryLoginMethod = true;
}
}
}
}
private String validateAndNormaliseRecipeId(String recipeId, List<String> errors) {
if (recipeId == null) {
return null;
}
// We don't perform any normalisation on the recipeId after reading it from request header.
// We will validate it as is.
if (!Arrays.asList("emailpassword", "thirdparty", "passwordless").contains(recipeId)) {
errors.add("Invalid recipeId for loginMethod. Pass one of emailpassword, thirdparty or, passwordless!");
}
return recipeId;
}
private List<String> validateAndNormaliseTenantIds(Main main, AppIdentifier appIdentifier,
JsonArray tenantIds, List<String> errors, String errorSuffix)
throws StorageQueryException, TenantOrAppNotFoundException {
if (tenantIds == null) {
return List.of(TenantIdentifier.DEFAULT_TENANT_ID); // Default to DEFAULT_TENANT_ID ("public")
}
List<String> normalisedTenantIds = new ArrayList<>();
for (JsonElement tenantIdEl : tenantIds) {
String tenantId = tenantIdEl.getAsString();
tenantId = validateAndNormaliseTenantId(main, appIdentifier, tenantId, errors, errorSuffix);
if (tenantId != null) {
normalisedTenantIds.add(tenantId);
}
}
return normalisedTenantIds;
}
private String validateAndNormaliseTenantId(Main main, AppIdentifier appIdentifier, String tenantId,
List<String> errors, String errorSuffix)
throws StorageQueryException, TenantOrAppNotFoundException {
if (tenantId == null || tenantId.equals(TenantIdentifier.DEFAULT_TENANT_ID)) {
return tenantId;
}
if (Arrays.stream(FeatureFlag.getInstance(main, appIdentifier).getEnabledFeatures())
.noneMatch(t -> t == EE_FEATURES.MULTI_TENANCY)) {
errors.add("Multitenancy must be enabled before importing users to a different tenant.");
return null;
}
// We make the tenantId lowercase while parsing from the request in WebserverAPI.java
String normalisedTenantId = tenantId.trim().toLowerCase();
TenantConfig[] allTenantConfigs = Multitenancy.getAllTenantsForApp(appIdentifier, main);
Set<String> validTenantIds = new HashSet<>();
Arrays.stream(allTenantConfigs)
.forEach(tenantConfig -> validTenantIds.add(tenantConfig.tenantIdentifier.getTenantId()));
if (!validTenantIds.contains(normalisedTenantId)) {
errors.add("Invalid tenantId: " + tenantId + errorSuffix);
return null;
}
return normalisedTenantId;
}
private Boolean validateAndNormaliseIsPrimary(Boolean isPrimary) {
// We set the default value as false
return isPrimary == null ? false : isPrimary;
}
private Boolean validateAndNormaliseIsVerified(Boolean isVerified) {
// We set the default value as false
return isVerified == null ? false : isVerified;
}
private long validateAndNormaliseTimeJoined(Long timeJoined, List<String> errors) {
// We default timeJoined to currentTime if it is null
if (timeJoined == null) {
return System.currentTimeMillis();
}
if (timeJoined > System.currentTimeMillis()) {
errors.add("timeJoined cannot be in future for a loginMethod.");
}
if (timeJoined < 0) {
errors.add("timeJoined cannot be < 0 for a loginMethod.");
}
return timeJoined.longValue();
}
private String validateAndNormaliseEmail(String email, List<String> errors) {
if (email == null) {
return null;
}
if (email.length() > 255) {
errors.add("email " + email + " is too long. Max length is 256.");
}
// We normalise the email as per the SignUpAPI.java
return Utils.normaliseEmail(email);
}
private CoreConfig.PASSWORD_HASHING_ALG validateAndNormaliseHashingAlgorithm(String hashingAlgorithm,
List<String> errors) {
if (hashingAlgorithm == null) {
return null;
}
try {
// We trim the hashingAlgorithm and make it uppercase as per the ImportUserWithPasswordHashAPI.java
return CoreConfig.PASSWORD_HASHING_ALG.valueOf(hashingAlgorithm.trim().toUpperCase());
} catch (IllegalArgumentException e) {
errors.add(
"Invalid hashingAlgorithm for emailpassword recipe. Pass one of bcrypt, argon2 or, firebase_scrypt!");
return null;
}
}
private String validateAndNormalisePasswordHash(Main main, AppIdentifier appIdentifier,
CoreConfig.PASSWORD_HASHING_ALG hashingAlgorithm, String passwordHash, List<String> errors)
throws TenantOrAppNotFoundException {
if (hashingAlgorithm == null || passwordHash == null) {
return passwordHash;
}
if (passwordHash.length() > 256) {
errors.add("passwordHash is too long. Max length is 256.");
}
// We trim the passwordHash and validate it as per ImportUserWithPasswordHashAPI.java
passwordHash = passwordHash.trim();
try {
PasswordHashingUtils.assertSuperTokensSupportInputPasswordHashFormat(appIdentifier, main, passwordHash,
hashingAlgorithm);
} catch (UnsupportedPasswordHashingFormatException e) {
errors.add(e.getMessage());
}
return passwordHash;
}
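// For orientation only: standard bcrypt hashes start with $2a$/$2b$/$2y$ and argon2 hashes with
// $argon2i$/$argon2d$/$argon2id$. The sketch below is a simplified, assumption-based prefix check
// for illustration; it is not the validation performed by
// PasswordHashingUtils.assertSuperTokensSupportInputPasswordHashFormat above.
private static boolean looksLikeSupportedHashSketch(String passwordHash, CoreConfig.PASSWORD_HASHING_ALG alg) {
    if (alg == CoreConfig.PASSWORD_HASHING_ALG.BCRYPT) {
        return passwordHash.startsWith("$2a$") || passwordHash.startsWith("$2b$") || passwordHash.startsWith("$2y$");
    }
    if (alg == CoreConfig.PASSWORD_HASHING_ALG.ARGON2) {
        return passwordHash.startsWith("$argon2");
    }
    return true; // other algorithms (e.g. firebase scrypt) use their own formats
}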
private String validateAndNormaliseThirdPartyId(String thirdPartyId, List<String> errors) {
if (thirdPartyId == null) {
return null;
}
if (thirdPartyId.length() > 28) {
errors.add("thirdPartyId " + thirdPartyId + " is too long. Max length is 28.");
}
// We don't perform any normalisation on the thirdPartyId in SignInUpAPI.java
return thirdPartyId;
}
private String validateAndNormaliseThirdPartyUserId(String thirdPartyUserId, List<String> errors) {
if (thirdPartyUserId == null) {
return null;
}
if (thirdPartyUserId.length() > 256) {
errors.add("thirdPartyUserId " + thirdPartyUserId + " is too long. Max length is 256.");
}
// We don't perform any normalisation on the thirdPartyUserId in SignInUpAPI.java
return thirdPartyUserId;
}
private String validateAndNormalisePhoneNumber(String phoneNumber, List<String> errors) {
if (phoneNumber == null) {
return null;
}
if (phoneNumber.length() > 256) {
errors.add("phoneNumber " + phoneNumber + " is too long. Max length is 256.");
}
// We normalise the phoneNumber as per the CreateCodeAPI.java
return Utils.normalizeIfPhoneNumber(phoneNumber);
}
private void validateTenantIdsForRoleAndLoginMethods(Main main, AppIdentifier appIdentifier,
List<UserRole> userRoles, List<LoginMethod> loginMethods, List<String> errors)
throws TenantOrAppNotFoundException {
if (loginMethods == null) {
return;
}
// First validate that tenantIds provided for userRoles also exist in the loginMethods
if (userRoles != null) {
for (UserRole userRole : userRoles) {
for (String tenantId : userRole.tenantIds) {
if (!tenantId.equals(TenantIdentifier.DEFAULT_TENANT_ID) && loginMethods.stream()
.noneMatch(loginMethod -> loginMethod.tenantIds.contains(tenantId))) {
errors.add("TenantId " + tenantId + " for a user role does not exist in loginMethods.");
}
}
}
}
// Now validate that all the tenants share the same storage
String commonTenantUserPoolId = null;
for (LoginMethod loginMethod : loginMethods) {
for (String tenantId : loginMethod.tenantIds) {
TenantIdentifier tenantIdentifier = new TenantIdentifier(appIdentifier.getConnectionUriDomain(),
appIdentifier.getAppId(), tenantId);
Storage storage = StorageLayer.getStorage(tenantIdentifier, main);
String tenantUserPoolId = storage.getUserPoolId();
if (commonTenantUserPoolId == null) {
commonTenantUserPoolId = tenantUserPoolId;
} else if (!commonTenantUserPoolId.equals(tenantUserPoolId)) {
errors.add("All tenants for a user must share the same database for " + loginMethod.recipeId
+ " recipe.");
break; // Break to avoid adding the same error multiple times for the same loginMethod
}
}
}
}
public static BulkImportUser.LoginMethod getPrimaryLoginMethod(BulkImportUser user) {
return getPrimaryLoginMethod(user.loginMethods);
}
// Returns the primary loginMethod of the user. If no loginMethod is marked as
// primary, then the oldest loginMethod is returned.
public static BulkImportUser.LoginMethod getPrimaryLoginMethod(List<LoginMethod> loginMethods) {
BulkImportUser.LoginMethod oldestLM = loginMethods.get(0);
for (BulkImportUser.LoginMethod lm : loginMethods) {
if (lm.isPrimary) {
return lm;
}
if (lm.timeJoinedInMSSinceEpoch < oldestLM.timeJoinedInMSSinceEpoch) {
oldestLM = lm;
}
}
return oldestLM;
}
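// A quick illustration of the selection rule above, using hypothetical values. The constructor
// arguments follow the same order used in getParsedLoginMethods.
private static void primaryLoginMethodSelectionSketch() {
    LoginMethod older = new LoginMethod(List.of("public"), "emailpassword", true, false,
            1000L, "a@example.com", null, null, "example-password", null, null, null, Utils.getUUID());
    LoginMethod newerButPrimary = new LoginMethod(List.of("public"), "thirdparty", true, true,
            2000L, "a@example.com", null, null, null, "google", "example-third-party-user-id", null,
            Utils.getUUID());
    // isPrimary wins over age; if neither were marked primary, the oldest (smallest
    // timeJoinedInMSSinceEpoch) loginMethod would be returned instead.
    assert getPrimaryLoginMethod(List.of(newerButPrimary, older)) == newerButPrimary;
}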
public enum IDMode {
GENERATE,
READ_STORED;
}
// Returns a map of recipe user ids -> primary user ids
public static Map<String, String> collectRecipeIdsToPrimaryIds(List<BulkImportUser> users) {
Map<String, String> primaryUserIdByRecipeUserId = new HashMap<>();
if (users == null) {
return primaryUserIdByRecipeUserId;
}
for (BulkImportUser user : users) {
LoginMethod primaryLM = BulkImportUserUtils.getPrimaryLoginMethod(user);
for (LoginMethod lm : user.loginMethods) {
if (lm.getSuperTokenOrExternalUserId().equals(primaryLM.getSuperTokenOrExternalUserId())) {
continue;
}
// Map each non-primary recipe user id to the primary user id it should be linked to
primaryUserIdByRecipeUserId.put(lm.getSuperTokenOrExternalUserId(),
primaryLM.getSuperTokenOrExternalUserId());
}
}
return primaryUserIdByRecipeUserId;
}
public static LoginMethod findLoginMethodByRecipeUserId(List<BulkImportUser> users, String recipeUserId) {
if (users == null || users.isEmpty() || recipeUserId == null) {
return null;
}
for (BulkImportUser user : users) {
for (LoginMethod loginMethod : user.loginMethods) {
if (recipeUserId.equals(loginMethod.superTokensUserId)) {
return loginMethod;
}
}
}
return null;
}
public static BulkImportUser findUserByPrimaryId(List<BulkImportUser> users, String primaryUserId) {
if (users == null || users.isEmpty() || primaryUserId == null) {
return null;
}
for (BulkImportUser user : users) {
if (primaryUserId.equals(user.primaryUserId)) {
return user;
}
}
return null;
}
}

View File

@ -1,33 +0,0 @@
/*
* Copyright (c) 2024, VRAI Labs and/or its affiliates. All rights reserved.
*
* This software is licensed under the Apache License, Version 2.0 (the
* "License") as published by the Apache Software Foundation.
*
* You may not use this file except in compliance with the License. You may
* obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package io.supertokens.bulkimport.exceptions;
import java.util.List;
public class InvalidBulkImportDataException extends Exception {
private static final long serialVersionUID = 1L;
public List<String> errors;
public InvalidBulkImportDataException(List<String> errors) {
super("Data has missing or invalid fields. Please check the errors field for more details.");
this.errors = errors;
}
public void addError(String error) {
this.errors.add(error);
}
}

View File

@ -32,13 +32,10 @@ public class CLIOptions extends ResourceDistributor.SingletonResource {
private static final String HOST_FILE_KEY = "host=";
private static final String TEST_MODE = "test_mode";
private static final String FORCE_NO_IN_MEM_DB = "forceNoInMemDB=true";
private static final String TEMP_DIR_LOCATION_KEY = "tempDirLocation=";
private final String installationPath;
private final String configFilePath;
private final Integer port;
private final String host;
private final String tempDirLocation;
// if this is true, then even in DEV mode, we will not use in memory db, even if there is an error in the plugin
private final boolean forceNoInMemoryDB;
@ -47,7 +44,6 @@ public class CLIOptions extends ResourceDistributor.SingletonResource {
checkIfArgsIsCorrect(args);
String installationPath = args[0];
String configFilePathTemp = null;
String tempDirLocationPath = null;
Integer portTemp = null;
String hostTemp = null;
boolean forceNoInMemoryDBTemp = false;
@ -58,16 +54,7 @@ public class CLIOptions extends ResourceDistributor.SingletonResource {
if (!new File(configFilePathTemp).isAbsolute()) {
throw new QuitProgramException("configPath option must be an absolute path only");
}
} else if (curr.startsWith(TEMP_DIR_LOCATION_KEY)) {
tempDirLocationPath = curr.split(TEMP_DIR_LOCATION_KEY)[1];
if (!new File(tempDirLocationPath).isAbsolute()) {
throw new QuitProgramException("tempDirLocation option must be an absolute path only");
}
if(!tempDirLocationPath.isEmpty() && !tempDirLocationPath.endsWith(File.separator)){
tempDirLocationPath = tempDirLocationPath + File.separator;
}
}
else if (curr.startsWith(PORT_FILE_KEY)) {
} else if (curr.startsWith(PORT_FILE_KEY)) {
portTemp = Integer.parseInt(curr.split(PORT_FILE_KEY)[1]);
} else if (curr.startsWith(HOST_FILE_KEY)) {
hostTemp = curr.split(HOST_FILE_KEY)[1];
@ -82,7 +69,6 @@ public class CLIOptions extends ResourceDistributor.SingletonResource {
this.port = portTemp;
this.host = hostTemp;
this.forceNoInMemoryDB = forceNoInMemoryDBTemp;
this.tempDirLocation = tempDirLocationPath;
}
private static CLIOptions getInstance(Main main) {
@ -137,8 +123,4 @@ public class CLIOptions extends ResourceDistributor.SingletonResource {
public boolean isForceNoInMemoryDB() {
return this.forceNoInMemoryDB;
}
public String getTempDirLocation() {
return tempDirLocation;
}
}

View File

@ -18,6 +18,7 @@ package io.supertokens.config;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.dataformat.yaml.YAMLFactory;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import com.google.gson.JsonObject;
import io.supertokens.Main;
@ -52,8 +53,6 @@ public class Config extends ResourceDistributor.SingletonResource {
final ObjectMapper mapper = new ObjectMapper(new YAMLFactory());
Object configObj = mapper.readValue(new File(configFilePath), Object.class);
JsonObject jsonConfig = new GsonBuilder().serializeNulls().create().toJsonTree(configObj).getAsJsonObject();
CoreConfig.updateConfigJsonFromEnv(jsonConfig);
StorageLayer.updateConfigJsonFromEnv(main, jsonConfig);
CoreConfig config = ConfigMapper.mapConfig(jsonConfig, CoreConfig.class);
config.normalizeAndValidate(main, true);
this.core = config;
@ -93,20 +92,12 @@ public class Config extends ResourceDistributor.SingletonResource {
// omit them from the output json.
ObjectMapper yamlReader = new ObjectMapper(new YAMLFactory());
Object obj = yamlReader.readValue(new File(getConfigFilePath(main)), Object.class);
JsonObject configJson = new GsonBuilder().serializeNulls().create().toJsonTree(obj).getAsJsonObject();
CoreConfig.updateConfigJsonFromEnv(configJson);
StorageLayer.updateConfigJsonFromEnv(main, configJson);
return configJson;
return new GsonBuilder().serializeNulls().create().toJsonTree(obj).getAsJsonObject();
}
private static String getConfigFilePath(Main main) {
String configFile = "config.yaml";
if (Main.isTesting) {
String workerId = System.getProperty("org.gradle.test.worker", "");
configFile = "config" + workerId + ".yaml";
}
return CLIOptions.get(main).getConfigFilePath() == null
? CLIOptions.get(main).getInstallationPath() + configFile
? CLIOptions.get(main).getInstallationPath() + "config.yaml"
: CLIOptions.get(main).getConfigFilePath();
}
@ -128,12 +119,12 @@ public class Config extends ResourceDistributor.SingletonResource {
// At this point, we know that all configs are valid.
try {
main.getResourceDistributor().withResourceDistributorLock(() -> {
Map<ResourceDistributor.KeyClass, ResourceDistributor.SingletonResource> existingResources =
main.getResourceDistributor()
.getAllResourcesWithResourceKey(RESOURCE_KEY);
main.getResourceDistributor().clearAllResourcesWithResourceKey(RESOURCE_KEY);
for (ResourceDistributor.KeyClass key : normalisedConfigs.keySet()) {
try {
try {
Map<ResourceDistributor.KeyClass, ResourceDistributor.SingletonResource> existingResources =
main.getResourceDistributor()
.getAllResourcesWithResourceKey(RESOURCE_KEY);
main.getResourceDistributor().clearAllResourcesWithResourceKey(RESOURCE_KEY);
for (ResourceDistributor.KeyClass key : normalisedConfigs.keySet()) {
ResourceDistributor.SingletonResource resource = existingResources.get(
new ResourceDistributor.KeyClass(
key.getTenantIdentifier(),
@ -147,16 +138,19 @@ public class Config extends ResourceDistributor.SingletonResource {
main.getResourceDistributor()
.setResource(key.getTenantIdentifier(), RESOURCE_KEY,
new Config(main, normalisedConfigs.get(key)));
}
} catch (Exception e) {
Logging.error(main, key.getTenantIdentifier(), e.getMessage(), false);
// continue loading other resources
}
} catch (InvalidConfigException | IOException e) {
throw new ResourceDistributor.FuncException(e);
}
return null;
});
} catch (ResourceDistributor.FuncException e) {
throw new IllegalStateException("should never happen", e);
if (e.getCause() instanceof InvalidConfigException) {
throw (InvalidConfigException) e.getCause();
}
throw new RuntimeException(e);
}
}
@ -315,7 +309,7 @@ public class Config extends ResourceDistributor.SingletonResource {
@TestOnly
public static CoreConfig getConfig(Main main) {
try {
return getConfig(ResourceDistributor.getAppForTesting(), main);
return getConfig(new TenantIdentifier(null, null, null), main);
} catch (TenantOrAppNotFoundException e) {
throw new IllegalStateException(e);
}

View File

@ -41,8 +41,6 @@ import org.jetbrains.annotations.TestOnly;
import java.io.File;
import java.io.IOException;
import java.lang.reflect.Field;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.*;
import java.util.regex.PatternSyntaxException;
@ -64,11 +62,6 @@ public class CoreConfig {
public static final String[] PROTECTED_CONFIGS = new String[]{
"ip_allow_regex",
"ip_deny_regex",
"oauth_provider_public_service_url",
"oauth_provider_admin_service_url",
"oauth_provider_consent_login_base_url",
"oauth_provider_url_configured_in_oauth_provider",
"saml_legacy_acs_url"
};
@IgnoreForAnnotationCheck
@ -76,13 +69,11 @@ public class CoreConfig {
@ConfigDescription("The version of the core config.")
private int core_config_version = -1;
@EnvName("ACCESS_TOKEN_VALIDITY")
@NotConflictingInApp
@JsonProperty
@ConfigDescription("Time in seconds for how long an access token is valid for. [Default: 3600 (1 hour)]")
private long access_token_validity = 3600; // in seconds
@EnvName("ACCESS_TOKEN_BLACKLISTING")
@NotConflictingInApp
@JsonProperty
@ConfigDescription(
@ -91,20 +82,17 @@ public class CoreConfig {
"call that requires authentication. (Default: false)")
private boolean access_token_blacklisting = false;
@EnvName("REFRESH_TOKEN_VALIDITY")
@NotConflictingInApp
@JsonProperty
@ConfigDescription("Time in mins for how long a refresh token is valid for. [Default: 60 * 2400 (100 days)]")
private double refresh_token_validity = 60 * 2400; // in mins
@EnvName("PASSWORD_RESET_TOKEN_LIFETIME")
@IgnoreForAnnotationCheck
@JsonProperty
@ConfigDescription(
"Time in milliseconds for how long a password reset token / link is valid for. [Default: 3600000 (1 hour)]")
private long password_reset_token_lifetime = 3600000; // in MS
@EnvName("EMAIL_VERIFICATION_TOKEN_LIFETIME")
@IgnoreForAnnotationCheck
@JsonProperty
@ConfigDescription(
@ -112,27 +100,23 @@ public class CoreConfig {
" 1000 (1 day)]")
private long email_verification_token_lifetime = 24 * 3600 * 1000; // in MS
@EnvName("PASSWORDLESS_MAX_CODE_INPUT_ATTEMPTS")
@IgnoreForAnnotationCheck
@JsonProperty
@ConfigDescription(
"The maximum number of code input attempts per login before the user needs to restart. (Default: 5)")
private int passwordless_max_code_input_attempts = 5;
@EnvName("PASSWORDLESS_CODE_LIFETIME")
@IgnoreForAnnotationCheck
@JsonProperty
@ConfigDescription(
"Time in milliseconds for how long a passwordless code is valid for. [Default: 900000 (15 mins)]")
private long passwordless_code_lifetime = 900000; // in MS
@EnvName("TOTP_MAX_ATTEMPTS")
@IgnoreForAnnotationCheck
@JsonProperty
@ConfigDescription("The maximum number of invalid TOTP attempts that will trigger rate limiting. (Default: 5)")
private int totp_max_attempts = 5;
@EnvName("TOTP_RATE_LIMIT_COOLDOWN_SEC")
@IgnoreForAnnotationCheck
@JsonProperty
@ConfigDescription(
@ -143,7 +127,6 @@ public class CoreConfig {
@IgnoreForAnnotationCheck
private final String logDefault = "asdkfahbdfk3kjHS";
@EnvName("INFO_LOG_PATH")
@ConfigYamlOnly
@JsonProperty
@ConfigDescription(
@ -152,7 +135,6 @@ public class CoreConfig {
"directory/logs/info.log)")
private String info_log_path = logDefault;
@EnvName("ERROR_LOG_PATH")
@ConfigYamlOnly
@JsonProperty
@ConfigDescription(
@ -161,7 +143,6 @@ public class CoreConfig {
"directory/logs/error.log)")
private String error_log_path = logDefault;
@EnvName("ACCESS_TOKEN_SIGNING_KEY_DYNAMIC")
@NotConflictingInApp
@JsonProperty
@ConfigDescription(
@ -169,20 +150,17 @@ public class CoreConfig {
" be signed using a static signing key. (Default: true)")
private boolean access_token_signing_key_dynamic = true;
@EnvName("ACCESS_TOKEN_DYNAMIC_SIGNING_KEY_UPDATE_INTERVAL")
@NotConflictingInApp
@JsonProperty("access_token_dynamic_signing_key_update_interval")
@JsonAlias({"access_token_dynamic_signing_key_update_interval", "access_token_signing_key_update_interval"})
@ConfigDescription("Time in hours for how frequently the dynamic signing key will change. [Default: 168 (1 week)]")
private double access_token_dynamic_signing_key_update_interval = 168; // in hours
@EnvName("SUPERTOKENS_PORT")
@ConfigYamlOnly
@JsonProperty
@ConfigDescription("The port at which SuperTokens service runs. (Default: 3567)")
private int port = 3567;
@EnvName("SUPERTOKENS_HOST")
@ConfigYamlOnly
@JsonProperty
@ConfigDescription(
@ -190,13 +168,11 @@ public class CoreConfig {
" address associated with your machine. (Default: localhost)")
private String host = "localhost";
@EnvName("MAX_SERVER_POOL_SIZE")
@ConfigYamlOnly
@JsonProperty
@ConfigDescription("Sets the max thread pool size for incoming http server requests. (Default: 10)")
private int max_server_pool_size = 10;
@EnvName("API_KEYS")
@NotConflictingInApp
@JsonProperty
@HideFromDashboard
@ -206,7 +182,6 @@ public class CoreConfig {
"length of 20 chars. (Default: null)")
private String api_keys = null;
@EnvName("DISABLE_TELEMETRY")
@NotConflictingInApp
@JsonProperty
@ConfigDescription(
@ -214,32 +189,27 @@ public class CoreConfig {
"(Default: false)")
private boolean disable_telemetry = false;
@EnvName("PASSWORD_HASHING_ALG")
@NotConflictingInApp
@JsonProperty
@ConfigDescription("The password hashing algorithm to use. Values are \"ARGON2\" | \"BCRYPT\". (Default: BCRYPT)")
@EnumProperty({"ARGON2", "BCRYPT"})
private String password_hashing_alg = "BCRYPT";
@EnvName("ARGON2_ITERATIONS")
@ConfigYamlOnly
@JsonProperty
@ConfigDescription("Number of iterations for argon2 password hashing. (Default: 1)")
private int argon2_iterations = 1;
@EnvName("ARGON2_MEMORY_KB")
@ConfigYamlOnly
@JsonProperty
@ConfigDescription("Amount of memory in kb for argon2 password hashing. [Default: 87795 (85 mb)]")
private int argon2_memory_kb = 87795; // 85 mb
@EnvName("ARGON2_PARALLELISM")
@ConfigYamlOnly
@JsonProperty
@ConfigDescription("Amount of parallelism for argon2 password hashing. (Default: 2)")
private int argon2_parallelism = 2;
@EnvName("ARGON2_HASHING_POOL_SIZE")
@ConfigYamlOnly
@JsonProperty
@ConfigDescription(
@ -247,7 +217,6 @@ public class CoreConfig {
"(Default: 1)")
private int argon2_hashing_pool_size = 1;
@EnvName("FIREBASE_PASSWORD_HASHING_POOL_SIZE")
@ConfigYamlOnly
@JsonProperty
@ConfigDescription(
@ -255,7 +224,6 @@ public class CoreConfig {
"(Default: 1)")
private int firebase_password_hashing_pool_size = 1;
@EnvName("BCRYPT_LOG_ROUNDS")
@ConfigYamlOnly
@JsonProperty
@ConfigDescription("Number of rounds to set for bcrypt password hashing. (Default: 11)")
@ -271,16 +239,13 @@ public class CoreConfig {
// # webserver_https_enabled:
@ConfigYamlOnly
@JsonProperty
@IgnoreForAnnotationCheck
private boolean webserver_https_enabled = false;
@EnvName("BASE_PATH")
@ConfigYamlOnly
@JsonProperty
@ConfigDescription("Used to prepend a base path to all APIs when querying the core.")
private String base_path = "";
@EnvName("LOG_LEVEL")
@ConfigYamlOnly
@JsonProperty
@ConfigDescription(
@ -289,13 +254,11 @@ public class CoreConfig {
@EnumProperty({"DEBUG", "INFO", "WARN", "ERROR", "NONE"})
private String log_level = "INFO";
@EnvName("FIREBASE_PASSWORD_HASHING_SIGNER_KEY")
@NotConflictingInApp
@JsonProperty
@ConfigDescription("The signer key used for firebase scrypt password hashing. (Default: null)")
private String firebase_password_hashing_signer_key = null;
@EnvName("IP_ALLOW_REGEX")
@IgnoreForAnnotationCheck
@JsonProperty
@ConfigDescription(
@ -303,7 +266,6 @@ public class CoreConfig {
"127\\.\\d+\\.\\d+\\.\\d+|::1|0:0:0:0:0:0:0:1 to allow only localhost to query the core")
private String ip_allow_regex = null;
@EnvName("IP_DENY_REGEX")
@IgnoreForAnnotationCheck
@JsonProperty
@ConfigDescription(
@ -311,48 +273,9 @@ public class CoreConfig {
" address.")
private String ip_deny_regex = null;
@EnvName("OAUTH_PROVIDER_PUBLIC_SERVICE_URL")
@NotConflictingInApp
@JsonProperty
@HideFromDashboard
@ConfigDescription(
"If specified, the core uses this URL to connect to the OAuth provider public service.")
private String oauth_provider_public_service_url = null;
@EnvName("OAUTH_PROVIDER_ADMIN_SERVICE_URL")
@NotConflictingInApp
@JsonProperty
@HideFromDashboard
@ConfigDescription(
"If specified, the core uses this URL to connect to the OAuth provider admin service.")
private String oauth_provider_admin_service_url = null;
@EnvName("OAUTH_PROVIDER_CONSENT_LOGIN_BASE_URL")
@NotConflictingInApp
@JsonProperty
@HideFromDashboard
@ConfigDescription(
"If specified, the core uses this URL to replace the default consent and login URLs to {apiDomain}.")
private String oauth_provider_consent_login_base_url = null;
@EnvName("OAUTH_PROVIDER_URL_CONFIGURED_IN_OAUTH_PROVIDER")
@NotConflictingInApp
@JsonProperty
@HideFromDashboard
@ConfigDescription(
"If specified, the core uses this URL to parse responses from the oauth provider when the oauth provider's internal address differs from the known public provider address.")
private String oauth_provider_url_configured_in_oauth_provider = null;
@EnvName("OAUTH_CLIENT_SECRET_ENCRYPTION_KEY")
@ConfigYamlOnly
@JsonProperty
@HideFromDashboard
@ConfigDescription("The encryption key used for saving OAuth client secret on the database.")
private String oauth_client_secret_encryption_key = null;
@EnvName("SUPERTOKENS_SAAS_SECRET")
@ConfigYamlOnly
@JsonProperty
@ConfigDescription(
"This is used when deploying the core in SuperTokens SaaS infrastructure. If set, limits what database " +
"information is shown to / modifiable by the dev when they query the core to get the information " +
@ -360,7 +283,6 @@ public class CoreConfig {
"regular api_keys config.")
private String supertokens_saas_secret = null;
@EnvName("SUPERTOKENS_MAX_CDI_VERSION")
@NotConflictingInApp
@JsonProperty
@HideFromDashboard
@ -370,7 +292,6 @@ public class CoreConfig {
"null)")
private String supertokens_max_cdi_version = null;
@EnvName("SUPERTOKENS_SAAS_LOAD_ONLY_CUD")
@ConfigYamlOnly
@JsonProperty
@ConfigDescription(
@ -378,83 +299,19 @@ public class CoreConfig {
"the database and block all other CUDs from being used from this instance.")
private String supertokens_saas_load_only_cud = null;
@EnvName("SAML_LEGACY_ACS_URL")
@NotConflictingInApp
@JsonProperty
@ConfigDescription("If specified, uses this URL as ACS URL for handling legacy SAML clients")
@HideFromDashboard
private String saml_legacy_acs_url = null;
@EnvName("SAML_SP_ENTITY_ID")
@JsonProperty
@IgnoreForAnnotationCheck
@ConfigDescription("Service provider's entity ID")
private String saml_sp_entity_id = null;
@EnvName("SAML_CLAIMS_VALIDITY")
@JsonProperty
@IgnoreForAnnotationCheck
@ConfigDescription("Duration for which SAML claims will be valid before it is consumed")
private long saml_claims_validity = 300000;
@EnvName("SAML_RELAY_STATE_VALIDITY")
@JsonProperty
@IgnoreForAnnotationCheck
@ConfigDescription("Duration for which SAML relay state will be valid before it is consumed")
private long saml_relay_state_validity = 300000;
@IgnoreForAnnotationCheck
private Set<LOG_LEVEL> allowedLogLevels = null;
@IgnoreForAnnotationCheck
private boolean isNormalizedAndValid = false;
@EnvName("BULK_MIGRATION_PARALLELISM")
@NotConflictingInApp
@JsonProperty
@ConfigDescription("If specified, the supertokens core will use the specified number of threads to complete the " +
"migration of users. (Default: number of available processor cores).")
private int bulk_migration_parallelism = Runtime.getRuntime().availableProcessors();
@EnvName("BULK_MIGRATION_BATCH_SIZE")
@NotConflictingInApp
@JsonProperty
@ConfigDescription("If specified, the supertokens core will load the specified number of users for migrating in " +
"one single batch. (Default: 8000)")
private int bulk_migration_batch_size = 8000;
@EnvName("WEBAUTHN_RECOVER_ACCOUNT_TOKEN_LIFETIME")
@NotConflictingInApp
@JsonProperty
@ConfigDescription("Time in milliseconds for how long a webauthn account recovery token is valid for. [Default: 3600000 (1 hour)]")
private long webauthn_recover_account_token_lifetime = 3600000; // in MS;
@EnvName("OTEL_COLLECTOR_CONNECTION_URI")
@ConfigYamlOnly
@JsonProperty
@HideFromDashboard
@ConfigDescription(
"The URL of the OpenTelemetry collector to which the core will send telemetry data. " +
"This should be in the format http://<host>:<port> or https://<host>:<port>. (Default: " +
"null)")
private String otel_collector_connection_uri = null;
@EnvName("DEADLOCK_LOGGER_ENABLE")
@ConfigYamlOnly
@JsonProperty
@ConfigDescription(
"Enables or disables the deadlock logger. (Default: false)")
private boolean deadlock_logger_enable = false;
@IgnoreForAnnotationCheck
private static boolean disableOAuthValidationForTest = false;
@TestOnly
public static void setDisableOAuthValidationForTest(boolean val) {
if (!Main.isTesting) {
throw new IllegalStateException("This method can only be called during testing");
}
disableOAuthValidationForTest = val;
}
"The URL of the OpenTelemetry collector to which the core " +
"will send telemetry data. This should be in the format http://<host>:<port> or https://<host>:<port>.")
private String otel_collector_connection_uri = "http://localhost:4317";
public static Set<String> getValidFields() {
CoreConfig coreConfig = new CoreConfig();
@ -470,41 +327,6 @@ public class CoreConfig {
return validFields;
}
public String getOAuthProviderPublicServiceUrl() throws InvalidConfigException {
if (oauth_provider_public_service_url == null) {
throw new InvalidConfigException("oauth_provider_public_service_url is not set");
}
return oauth_provider_public_service_url;
}
public String getOAuthProviderAdminServiceUrl() throws InvalidConfigException {
if (oauth_provider_admin_service_url == null) {
throw new InvalidConfigException("oauth_provider_public_service_url is not set");
}
return oauth_provider_admin_service_url;
}
public String getOauthProviderConsentLoginBaseUrl() throws InvalidConfigException {
if(oauth_provider_consent_login_base_url == null){
throw new InvalidConfigException("oauth_provider_consent_login_base_url is not set");
}
return oauth_provider_consent_login_base_url;
}
public String getOAuthProviderUrlConfiguredInOAuthProvider() throws InvalidConfigException {
if(oauth_provider_url_configured_in_oauth_provider == null) {
throw new InvalidConfigException("oauth_provider_url_configured_in_oauth_provider is not set");
}
return oauth_provider_url_configured_in_oauth_provider;
}
public String getOAuthClientSecretEncryptionKey() throws InvalidConfigException {
if(oauth_client_secret_encryption_key == null) {
throw new InvalidConfigException("oauth_client_secret_encryption_key is not set");
}
return oauth_client_secret_encryption_key;
}
public String getIpAllowRegex() {
return ip_allow_regex;
}
@ -513,10 +335,6 @@ public class CoreConfig {
return ip_deny_regex;
}
public String getLogLevel() {
return log_level;
}
public Set<LOG_LEVEL> getLogLevels(Main main) {
if (allowedLogLevels != null) {
return allowedLogLevels;
@ -684,88 +502,16 @@ public class CoreConfig {
return webserver_https_enabled;
}
public int getBulkMigrationParallelism() {
return bulk_migration_parallelism;
}
public long getWebauthnRecoverAccountTokenLifetime() {
return webauthn_recover_account_token_lifetime;
}
public int getBulkMigrationBatchSize() {
return bulk_migration_batch_size;
}
public String getOtelCollectorConnectionURI() {
return otel_collector_connection_uri;
}
public boolean isDeadlockLoggerEnabled() {
return deadlock_logger_enable;
}
public String getSAMLLegacyACSURL() {
return saml_legacy_acs_url;
}
public String getSAMLSPEntityID() {
return saml_sp_entity_id;
}
public long getSAMLClaimsValidity() {
return saml_claims_validity;
}
public long getSAMLRelayStateValidity() {
return saml_relay_state_validity;
}
private String getConfigFileLocation(Main main) {
return new File(CLIOptions.get(main).getConfigFilePath() == null
? CLIOptions.get(main).getInstallationPath() + "config.yaml"
: CLIOptions.get(main).getConfigFilePath()).getAbsolutePath();
}
public static void updateConfigJsonFromEnv(JsonObject configJson) {
Map<String, String> env = System.getenv();
for (Field field : CoreConfig.class.getDeclaredFields()) {
if (field.isAnnotationPresent(EnvName.class)) {
String envName = field.getAnnotation(EnvName.class).value();
String stringValue = env.get(envName);
if (stringValue == null || stringValue.isEmpty()) {
continue;
}
if (stringValue.startsWith("\"") && stringValue.endsWith("\"")) {
stringValue = stringValue.substring(1, stringValue.length() - 1);
stringValue = stringValue
.replace("\\n", "\n")
.replace("\\t", "\t")
.replace("\\r", "\r")
.replace("\\\"", "\"")
.replace("\\'", "'")
.replace("\\\\", "\\");
}
if (field.getType().equals(String.class)) {
configJson.addProperty(field.getName(), stringValue);
} else if (field.getType().equals(int.class)) {
configJson.addProperty(field.getName(), Integer.parseInt(stringValue));
} else if (field.getType().equals(long.class)) {
configJson.addProperty(field.getName(), Long.parseLong(stringValue));
} else if (field.getType().equals(boolean.class)) {
configJson.addProperty(field.getName(), Boolean.parseBoolean(stringValue));
} else if (field.getType().equals(float.class)) {
configJson.addProperty(field.getName(), Float.parseFloat(stringValue));
} else if (field.getType().equals(double.class)) {
configJson.addProperty(field.getName(), Double.parseDouble(stringValue));
}
}
}
}
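// Minimal sketch of the mapping performed above, assuming the environment exposes
// ACCESS_TOKEN_VALIDITY (the @EnvName of the long field access_token_validity). The string read
// from the environment is parsed according to the field's type before being written into the
// config JSON.
private static JsonObject envOverrideSketch() {
    JsonObject configJson = new JsonObject();
    String value = System.getenv("ACCESS_TOKEN_VALIDITY"); // e.g. "7200"
    if (value != null && !value.isEmpty()) {
        configJson.addProperty("access_token_validity", Long.parseLong(value));
    }
    return configJson;
}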
void normalizeAndValidate(Main main, boolean includeConfigFilePath) throws InvalidConfigException {
if (isNormalizedAndValid) {
return;
@ -953,18 +699,6 @@ public class CoreConfig {
}
}
if (bulk_migration_parallelism < 1) {
throw new InvalidConfigException("Provided bulk_migration_parallelism must be >= 1");
}
if (bulk_migration_batch_size < 1) {
throw new InvalidConfigException("Provided bulk_migration_batch_size must be >= 1");
}
if (webauthn_recover_account_token_lifetime <= 0) {
throw new InvalidConfigException("Provided webauthn_recover_account_token_lifetime must be > 0");
}
for (String fieldId : CoreConfig.getValidFields()) {
try {
Field field = CoreConfig.class.getDeclaredField(fieldId);
@ -988,10 +722,6 @@ public class CoreConfig {
}
// Normalize
if (saml_sp_entity_id == null) {
saml_sp_entity_id = "https://saml.supertokens.com";
}
if (ip_allow_regex != null) {
ip_allow_regex = ip_allow_regex.trim();
if (ip_allow_regex.equals("")) {
@ -1087,66 +817,6 @@ public class CoreConfig {
}
}
if(oauth_provider_public_service_url != null) {
try {
URL url = new URL(oauth_provider_public_service_url);
} catch (MalformedURLException malformedURLException){
throw new InvalidConfigException("oauth_provider_public_service_url is not a valid URL");
}
}
if(oauth_provider_admin_service_url != null) {
try {
URL url = new URL(oauth_provider_admin_service_url);
} catch (MalformedURLException malformedURLException){
throw new InvalidConfigException("oauth_provider_admin_service_url is not a valid URL");
}
}
if(oauth_provider_consent_login_base_url != null) {
try {
URL url = new URL(oauth_provider_consent_login_base_url);
} catch (MalformedURLException malformedURLException){
throw new InvalidConfigException("oauth_provider_consent_login_base_url is not a valid URL");
}
}
if(oauth_provider_url_configured_in_oauth_provider == null) {
oauth_provider_url_configured_in_oauth_provider = oauth_provider_public_service_url;
} else {
try {
URL url = new URL(oauth_provider_url_configured_in_oauth_provider);
} catch (MalformedURLException malformedURLException){
throw new InvalidConfigException("oauth_provider_url_configured_in_oauth_provider is not a valid URL");
}
}
if (!disableOAuthValidationForTest) {
List<String> configsTogetherSet = Arrays.asList(oauth_provider_public_service_url, oauth_provider_admin_service_url, oauth_provider_consent_login_base_url);
if(isAnySet(configsTogetherSet) && !isAllSet(configsTogetherSet)) {
throw new InvalidConfigException("If any of the following is set, all of them has to be set: oauth_provider_public_service_url, oauth_provider_admin_service_url, oauth_provider_consent_login_base_url");
}
}
if (Main.isTesting) {
if (oauth_provider_public_service_url == null) {
oauth_provider_public_service_url = "http://localhost:" + System.getProperty("ST_OAUTH_PROVIDER_SERVICE_PORT");
}
if (oauth_provider_admin_service_url == null) {
oauth_provider_admin_service_url = "http://localhost:" + System.getProperty("ST_OAUTH_PROVIDER_ADMIN_PORT");
}
if (oauth_provider_url_configured_in_oauth_provider == null) {
oauth_provider_url_configured_in_oauth_provider = "http://localhost:4444";
}
if (oauth_client_secret_encryption_key == null) {
oauth_client_secret_encryption_key = "clientsecretencryptionkey";
}
if (oauth_provider_consent_login_base_url == null) {
oauth_provider_consent_login_base_url = "http://localhost:3001/auth";
}
}
isNormalizedAndValid = true;
}
@ -1277,24 +947,4 @@ public class CoreConfig {
public String getMaxCDIVersion() {
return this.supertokens_max_cdi_version;
}
private boolean isAnySet(List<String> configs){
for (String config : configs){
if (config != null) {
return true;
}
}
return false;
}
private boolean isAllSet(List<String> configs) {
boolean foundNotSet = false;
for(String config: configs){
if(config == null){
foundNotSet = true;
break;
}
}
return !foundNotSet;
}
}

View File

@ -1,29 +0,0 @@
/*
* Copyright (c) 2025, VRAI Labs and/or its affiliates. All rights reserved.
*
* This software is licensed under the Apache License, Version 2.0 (the
* "License") as published by the Apache Software Foundation.
*
* You may not use this file except in compliance with the License. You may
* obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package io.supertokens.config.annotations;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
@Retention(RetentionPolicy.RUNTIME)
// Make annotation accessible at runtime so that config can be read from env
@Target(ElementType.FIELD) // Annotation can only be applied to fields
public @interface EnvName {
String value(); // String value that provides an env var name for the field
}

View File

@ -28,7 +28,6 @@ import java.util.Map;
public class CronTaskTest extends SingletonResource {
private static final String RESOURCE_ID = "io.supertokens.cronjobs.CronTaskTest";
private Map<String, Integer> cronTaskToInterval = new HashMap<String, Integer>();
private Map<String, Integer> cronTaskToWaitTime = new HashMap<String, Integer>();
private CronTaskTest() {
@ -52,13 +51,4 @@ public class CronTaskTest extends SingletonResource {
public Integer getIntervalInSeconds(String resourceId) {
return cronTaskToInterval.get(resourceId);
}
@TestOnly
public void setInitialWaitTimeInSeconds(String resourceId, int interval) {
cronTaskToWaitTime.put(resourceId, interval);
}
public Integer getInitialWaitTimeInSeconds(String resourceId) {
return cronTaskToWaitTime.get(resourceId);
}
}

View File

@ -18,6 +18,7 @@ package io.supertokens.cronjobs;
import io.supertokens.Main;
import io.supertokens.ResourceDistributor;
import io.supertokens.multitenancy.MultitenancyHelper;
import io.supertokens.pluginInterface.multitenancy.TenantIdentifier;
import io.supertokens.pluginInterface.multitenancy.exceptions.TenantOrAppNotFoundException;
import org.jetbrains.annotations.TestOnly;
@ -99,16 +100,6 @@ public class Cronjobs extends ResourceDistributor.SingletonResource {
}
}
public static boolean isCronjobLoaded(Main main, CronTask task) {
if (getInstance(main) == null) {
init(main);
}
Cronjobs instance = getInstance(main);
synchronized (instance.lock) {
return instance.tasks.contains(task);
}
}
@TestOnly
public List<CronTask> getTasks() {
return this.tasks;

View File

@ -1,194 +0,0 @@
/*
* Copyright (c) 2024. VRAI Labs and/or its affiliates. All rights reserved.
*
* This software is licensed under the Apache License, Version 2.0 (the
* "License") as published by the Apache Software Foundation.
*
* You may not use this file except in compliance with the License. You may
* obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package io.supertokens.cronjobs.bulkimport;
import io.supertokens.Main;
import io.supertokens.bulkimport.BulkImport;
import io.supertokens.bulkimport.BulkImportUserUtils;
import io.supertokens.config.Config;
import io.supertokens.cronjobs.CronTask;
import io.supertokens.cronjobs.CronTaskTest;
import io.supertokens.output.Logging;
import io.supertokens.pluginInterface.STORAGE_TYPE;
import io.supertokens.pluginInterface.StorageUtils;
import io.supertokens.pluginInterface.bulkimport.BulkImportStorage;
import io.supertokens.pluginInterface.bulkimport.BulkImportUser;
import io.supertokens.pluginInterface.bulkimport.sqlStorage.BulkImportSQLStorage;
import io.supertokens.pluginInterface.exceptions.StorageQueryException;
import io.supertokens.pluginInterface.multitenancy.AppIdentifier;
import io.supertokens.pluginInterface.multitenancy.TenantIdentifier;
import io.supertokens.pluginInterface.multitenancy.exceptions.TenantOrAppNotFoundException;
import io.supertokens.storageLayer.StorageLayer;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.stream.Collectors;
import java.util.stream.Stream;
public class ProcessBulkImportUsers extends CronTask {
public static final String RESOURCE_KEY = "io.supertokens.cronjobs.ProcessBulkImportUsers";
private ExecutorService executorService;
private ProcessBulkImportUsers(Main main, List<List<TenantIdentifier>> tenantsInfo) {
super("ProcessBulkImportUsers", main, tenantsInfo, true);
}
public static ProcessBulkImportUsers init(Main main, List<List<TenantIdentifier>> tenantsInfo) {
return (ProcessBulkImportUsers) main.getResourceDistributor()
.setResource(new TenantIdentifier(null, null, null), RESOURCE_KEY,
new ProcessBulkImportUsers(main, tenantsInfo));
}
@Override
protected void doTaskPerApp(AppIdentifier app)
throws TenantOrAppNotFoundException, StorageQueryException {
if (StorageLayer.getBaseStorage(main).getType() != STORAGE_TYPE.SQL || StorageLayer.isInMemDb(main)) {
return;
}
BulkImportSQLStorage bulkImportSQLStorage = (BulkImportSQLStorage) StorageLayer
.getStorage(app.getAsPublicTenantIdentifier(), main);
//split the loaded users list into smaller chunks
int numberOfBatchChunks = Config.getConfig(app.getAsPublicTenantIdentifier(), main)
.getBulkMigrationParallelism();
int bulkMigrationBatchSize = Config.getConfig(app.getAsPublicTenantIdentifier(), main)
.getBulkMigrationBatchSize();
Logging.debug(main, app.getAsPublicTenantIdentifier(), "CronTask starts. Instance: " + this);
Logging.debug(main, app.getAsPublicTenantIdentifier(), "CronTask starts. Processing bulk import users with " + bulkMigrationBatchSize
+ " batch size, one batch split into " + numberOfBatchChunks + " chunks");
executorService = Executors.newFixedThreadPool(numberOfBatchChunks);
String[] allUserRoles = StorageUtils.getUserRolesStorage(bulkImportSQLStorage).getRoles(app);
BulkImportUserUtils bulkImportUserUtils = new BulkImportUserUtils(allUserRoles);
long newUsers = bulkImportSQLStorage.getBulkImportUsersCount(app, BulkImportStorage.BULK_IMPORT_USER_STATUS.NEW);
long processingUsers = bulkImportSQLStorage.getBulkImportUsersCount(app, BulkImportStorage.BULK_IMPORT_USER_STATUS.PROCESSING);
long failedUsers = 0;
//taking a "snapshot" here and processing in this round as many users as there are uploaded now. After this the processing will go on
//with another app and gets back here when all the apps had a chance.
long usersProcessed = 0;
Logging.debug(main, app.getAsPublicTenantIdentifier(), "Found " + (newUsers + processingUsers) + " waiting for processing"
+ " (" + newUsers + " new, " + processingUsers + " processing)");
while (usersProcessed < (newUsers + processingUsers)) {
List<BulkImportUser> users = bulkImportSQLStorage.getBulkImportUsersAndChangeStatusToProcessing(app,
bulkMigrationBatchSize);
if (users == null || users.isEmpty()) {
// No more users to process
break;
}
Logging.debug(main, app.getAsPublicTenantIdentifier(), "Loaded " + users.size() + " users to process");
List<List<BulkImportUser>> loadedUsersChunks = makeChunksOf(users, numberOfBatchChunks);
for (List<BulkImportUser> chunk : loadedUsersChunks) {
Logging.debug(main, app.getAsPublicTenantIdentifier(), "Chunk size: " + chunk.size());
}
try {
List<Future<?>> tasks = new ArrayList<>();
for (int i = 0; i < numberOfBatchChunks && i < loadedUsersChunks.size(); i++) {
tasks.add(
executorService.submit(new ProcessBulkUsersImportWorker(main, app, loadedUsersChunks.get(i),
bulkImportSQLStorage, bulkImportUserUtils)));
}
for (Future<?> task : tasks) {
while (!task.isDone()) {
Logging.debug(main, app.getAsPublicTenantIdentifier(), "Waiting for task " + task + " to finish");
Thread.sleep(1000);
}
Logging.debug(main, app.getAsPublicTenantIdentifier(), "Task " + task + " finished");
try {
Void result = (Void) task.get(); //to know if there were any errors while executing and for
// waiting in this thread for all the other threads to finish up
Logging.debug(main, app.getAsPublicTenantIdentifier(),
"Task " + task + " finished with result: " + result);
} catch (ExecutionException executionException) {
Logging.error(main, app.getAsPublicTenantIdentifier(),
"Error while processing bulk import users", true,
executionException);
throw new RuntimeException(executionException);
}
usersProcessed += loadedUsersChunks.get(tasks.indexOf(task)).size();
failedUsers = bulkImportSQLStorage.getBulkImportUsersCount(app, BulkImportStorage.BULK_IMPORT_USER_STATUS.FAILED);
Logging.debug(main, app.getAsPublicTenantIdentifier(), "Chunk " + tasks.indexOf(task) + " finished processing; total processed so far: "
+ usersProcessed + " users (" + failedUsers + " failed)");
}
Logging.debug(main, app.getAsPublicTenantIdentifier(), "Processing round finished");
} catch (InterruptedException e) {
Logging.error(main, app.getAsPublicTenantIdentifier(), "Error while processing bulk import users", true,
e);
throw new RuntimeException(e);
}
}
executorService.shutdownNow();
}
@Override
public int getIntervalTimeSeconds() {
if (Main.isTesting) {
Integer interval = CronTaskTest.getInstance(main).getIntervalInSeconds(RESOURCE_KEY);
if (interval != null) {
return interval;
}
}
return BulkImport.PROCESS_USERS_INTERVAL_SECONDS;
}
@Override
public int getInitialWaitTimeSeconds() {
if (Main.isTesting) {
Integer waitTime = CronTaskTest.getInstance(main).getInitialWaitTimeInSeconds(RESOURCE_KEY);
if (waitTime != null) {
return waitTime;
}
}
return 0;
}
private List<List<BulkImportUser>> makeChunksOf(List<BulkImportUser> users, int numberOfChunks) {
List<List<BulkImportUser>> chunks = new ArrayList<>();
if (users != null && !users.isEmpty() && numberOfChunks > 0) {
AtomicInteger index = new AtomicInteger(0);
int chunkSize = users.size() / numberOfChunks + 1;
Stream<List<BulkImportUser>> listStream = users.stream()
.collect(Collectors.groupingBy(x -> index.getAndIncrement() / chunkSize))
.entrySet().stream()
.sorted(Map.Entry.comparingByKey()).map(Map.Entry::getValue);
listStream.forEach(chunks::add);
}
return chunks;
}
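// Rough illustration of the chunk sizes produced above, using hypothetical numbers: 10 users
// split into 3 chunks gives a chunk size of 10 / 3 + 1 = 4, i.e. chunks of 4, 4 and 2 users.
private static void chunkSizeSketch() {
    List<Integer> ids = new ArrayList<>();
    for (int i = 0; i < 10; i++) {
        ids.add(i);
    }
    int numberOfChunks = 3;
    int chunkSize = ids.size() / numberOfChunks + 1; // 4
    AtomicInteger index = new AtomicInteger(0);
    Map<Integer, List<Integer>> grouped = ids.stream()
            .collect(Collectors.groupingBy(x -> index.getAndIncrement() / chunkSize));
    System.out.println(grouped.values()); // three chunks: [0..3], [4..7], [8..9]
}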
}

View File

@ -1,327 +0,0 @@
/*
* Copyright (c) 2024, VRAI Labs and/or its affiliates. All rights reserved.
*
* This software is licensed under the Apache License, Version 2.0 (the
* "License") as published by the Apache Software Foundation.
*
* You may not use this file except in compliance with the License. You may
* obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package io.supertokens.cronjobs.bulkimport;
import com.google.gson.JsonObject;
import io.supertokens.Main;
import io.supertokens.ResourceDistributor;
import io.supertokens.bulkimport.BulkImport;
import io.supertokens.bulkimport.BulkImportUserUtils;
import io.supertokens.bulkimport.exceptions.InvalidBulkImportDataException;
import io.supertokens.config.Config;
import io.supertokens.multitenancy.Multitenancy;
import io.supertokens.output.Logging;
import io.supertokens.pluginInterface.Storage;
import io.supertokens.pluginInterface.bulkimport.BulkImportUser;
import io.supertokens.pluginInterface.bulkimport.exceptions.BulkImportBatchInsertException;
import io.supertokens.pluginInterface.bulkimport.exceptions.BulkImportTransactionRolledBackException;
import io.supertokens.pluginInterface.bulkimport.sqlStorage.BulkImportSQLStorage;
import io.supertokens.pluginInterface.exceptions.DbInitException;
import io.supertokens.pluginInterface.exceptions.InvalidConfigException;
import io.supertokens.pluginInterface.exceptions.StorageQueryException;
import io.supertokens.pluginInterface.exceptions.StorageTransactionLogicException;
import io.supertokens.pluginInterface.multitenancy.AppIdentifier;
import io.supertokens.pluginInterface.multitenancy.TenantConfig;
import io.supertokens.pluginInterface.multitenancy.TenantIdentifier;
import io.supertokens.pluginInterface.multitenancy.exceptions.TenantOrAppNotFoundException;
import io.supertokens.pluginInterface.sqlStorage.SQLStorage;
import io.supertokens.storageLayer.StorageLayer;
import java.io.IOException;
import java.util.*;
public class ProcessBulkUsersImportWorker implements Runnable {
private final Map<String, SQLStorage> userPoolToStorageMap = new HashMap<>();
private final Main main;
private final AppIdentifier app;
private final BulkImportSQLStorage bulkImportSQLStorage;
private final BulkImportUserUtils bulkImportUserUtils;
private final List<BulkImportUser> usersToProcess;
ProcessBulkUsersImportWorker(Main main, AppIdentifier app, List<BulkImportUser> usersToProcess, BulkImportSQLStorage bulkImportSQLStorage, BulkImportUserUtils bulkImportUserUtils){
this.main = main;
this.app = app;
this.usersToProcess = usersToProcess;
this.bulkImportSQLStorage = bulkImportSQLStorage;
this.bulkImportUserUtils = bulkImportUserUtils;
}
@Override
public void run() {
try {
processMultipleUsers(app, usersToProcess, bulkImportUserUtils, bulkImportSQLStorage);
} catch (TenantOrAppNotFoundException | DbInitException | IOException | StorageQueryException e) {
throw new RuntimeException(e);
}
}
private void processMultipleUsers(AppIdentifier appIdentifier, List<BulkImportUser> users,
BulkImportUserUtils bulkImportUserUtils,
BulkImportSQLStorage baseTenantStorage)
throws TenantOrAppNotFoundException, StorageQueryException, IOException,
DbInitException {
BulkImportUser user = null;
try {
Logging.debug(main, appIdentifier.getAsPublicTenantIdentifier(),
"Processing bulk import users: " + users.size());
final Storage[] allStoragesForApp = getAllProxyStoragesForApp(main, appIdentifier);
int userIndexPointer = 0;
List<BulkImportUser> validUsers = new ArrayList<>();
Map<String, Exception> validationErrorsBeforeActualProcessing = new HashMap<>();
while(userIndexPointer < users.size()) {
user = users.get(userIndexPointer);
if (Main.isTesting && Main.isTesting_skipBulkImportUserValidationInCronJob) {
// Skip validation when the flag is enabled during testing.
// Also skip validation on a retry run: the entry already passed validation once, and
// revalidating it would incorrectly report that the external user id already exists.
validUsers.add(user);
} else {
// Validate the user
try {
validUsers.add(bulkImportUserUtils.createBulkImportUserFromJSON(main, appIdentifier,
user.toJsonObject(), BulkImportUserUtils.IDMode.READ_STORED));
} catch (InvalidBulkImportDataException exception) {
validationErrorsBeforeActualProcessing.put(user.id, new Exception(
String.valueOf(exception.errors)));
}
}
userIndexPointer+=1;
}
if(!validationErrorsBeforeActualProcessing.isEmpty()) {
throw new BulkImportBatchInsertException("Invalid input data", validationErrorsBeforeActualProcessing);
}
// Since all the tenants of a user must share the storage, we will just use the
// storage of the first tenantId of the first loginMethod
Map<SQLStorage, List<BulkImportUser>> partitionedUsers = partitionUsersByStorage(appIdentifier, validUsers);
for(SQLStorage bulkImportProxyStorage : partitionedUsers.keySet()) {
boolean shouldRetryImmediately = true;
while (shouldRetryImmediately) {
shouldRetryImmediately = bulkImportProxyStorage.startTransaction(con -> {
try {
BulkImport.processUsersImportSteps(main, appIdentifier, bulkImportProxyStorage,
partitionedUsers.get(bulkImportProxyStorage),
allStoragesForApp);
bulkImportProxyStorage.commitTransactionForBulkImportProxyStorage();
String[] toDelete = new String[validUsers.size()];
for (int i = 0; i < validUsers.size(); i++) {
toDelete[i] = validUsers.get(i).id;
}
while (true){
try {
List<String> deletedIds = baseTenantStorage.deleteBulkImportUsers(appIdentifier,
toDelete);
break;
} catch (Exception e) {
// ignore and retry the delete. The import transaction is already committed, so the delete must happen no matter what
Logging.debug(main, app.getAsPublicTenantIdentifier(),
"Exception while deleting bulk import users: " + e.getMessage());
}
}
} catch (StorageTransactionLogicException | StorageQueryException e) {
// We need to rollback the transaction manually because we have overridden that in the proxy
// storage
bulkImportProxyStorage.rollbackTransactionForBulkImportProxyStorage();
if (isBulkImportTransactionRolledBackIsTheRealCause(e)) {
return true;
//@see BulkImportTransactionRolledBackException for explanation
}
handleProcessUserExceptions(app, validUsers, e, baseTenantStorage);
}
return false;
});
}
}
} catch (StorageTransactionLogicException | InvalidConfigException e) {
Logging.error(main, app.getAsPublicTenantIdentifier(),
"Error while processing bulk import users: " + e.getMessage(), true, e);
throw new RuntimeException(e);
} catch (BulkImportBatchInsertException insertException) {
handleProcessUserExceptions(app, users, insertException, baseTenantStorage);
} catch (Exception e) {
Logging.error(main, app.getAsPublicTenantIdentifier(),
"Error while processing bulk import users: " + e.getMessage(), true, e);
throw e;
} finally {
closeAllProxyStorages(); //closing it here to reuse the existing connection with all the users
}
}
private boolean isBulkImportTransactionRolledBackIsTheRealCause(Throwable exception) {
if(exception instanceof BulkImportTransactionRolledBackException){
return true;
} else if(exception.getCause()!=null){
return isBulkImportTransactionRolledBackIsTheRealCause(exception.getCause());
}
return false;
}
private void handleProcessUserExceptions(AppIdentifier appIdentifier, List<BulkImportUser> usersBatch, Exception e,
BulkImportSQLStorage baseTenantStorage)
throws StorageQueryException {
// Java doesn't allow us to reassign local variables inside a lambda expression
// so we have to use an array.
String[] errorMessage = { e.getMessage() };
Map<String, String> bulkImportUserIdToErrorMessage = new HashMap<>();
switch (e) {
case StorageTransactionLogicException exception -> {
// If the exception is due to a StorageQueryException, we want to retry the entry after some time instead
// of marking it as FAILED. We will return early in that case.
if (exception.actualException instanceof StorageQueryException) {
Logging.error(main, null,
"We got an StorageQueryException while processing a bulk import user entry. It will be " +
"retried again. Error Message: " +
e.getMessage(), true);
return;
}
if (exception.actualException instanceof BulkImportBatchInsertException) {
handleBulkImportException(usersBatch, (BulkImportBatchInsertException) exception.actualException,
bulkImportUserIdToErrorMessage);
} else {
//fail the whole batch
errorMessage[0] = exception.actualException.getMessage();
for (BulkImportUser user : usersBatch) {
bulkImportUserIdToErrorMessage.put(user.id, errorMessage[0]);
}
}
}
case InvalidBulkImportDataException invalidBulkImportDataException ->
errorMessage[0] = invalidBulkImportDataException.errors.toString();
case InvalidConfigException invalidConfigException -> errorMessage[0] = e.getMessage();
case BulkImportBatchInsertException bulkImportBatchInsertException ->
handleBulkImportException(usersBatch, bulkImportBatchInsertException,
bulkImportUserIdToErrorMessage);
default -> {
Logging.error(main, null,
"We got an error while processing a bulk import user entry. It will be " +
"retried again. Error Message: " +
e.getMessage(), true);
}
}
try {
baseTenantStorage.startTransaction(con -> {
baseTenantStorage.updateMultipleBulkImportUsersStatusToError_Transaction(appIdentifier, con,
bulkImportUserIdToErrorMessage);
return null;
});
} catch (StorageTransactionLogicException e1) {
throw new StorageQueryException(e1.actualException);
}
}
private static void handleBulkImportException(List<BulkImportUser> usersBatch, BulkImportBatchInsertException exception,
Map<String, String> bulkImportUserIdToErrorMessage) {
Map<String, Exception> userIndexToError = exception.exceptionByUserId;
for(String userid : userIndexToError.keySet()){
Optional<BulkImportUser> userWithId = usersBatch.stream()
.filter(bulkImportUser -> userid.equals(bulkImportUser.id) || userid.equals(bulkImportUser.externalUserId)).findFirst();
String id = null;
if(userWithId.isPresent()){
id = userWithId.get().id;
}
if(id == null) {
userWithId = usersBatch.stream()
.filter(bulkImportUser ->
bulkImportUser.loginMethods.stream()
.map(loginMethod -> loginMethod.superTokensUserId)
.anyMatch(s -> s!= null && s.equals(userid))).findFirst();
if(userWithId.isPresent()){
id = userWithId.get().id;
}
}
bulkImportUserIdToErrorMessage.put(id, userIndexToError.get(userid).getMessage());
}
}
private synchronized Storage getBulkImportProxyStorage(TenantIdentifier tenantIdentifier)
throws InvalidConfigException, IOException, TenantOrAppNotFoundException, DbInitException {
String userPoolId = StorageLayer.getStorage(tenantIdentifier, main).getUserPoolId();
if (userPoolToStorageMap.containsKey(userPoolId)) {
return userPoolToStorageMap.get(userPoolId);
}
TenantConfig[] allTenants = Multitenancy.getAllTenants(main);
Map<ResourceDistributor.KeyClass, JsonObject> normalisedConfigs = Config.getNormalisedConfigsForAllTenants(
allTenants,
Config.getBaseConfigAsJsonObject(main));
for (ResourceDistributor.KeyClass key : normalisedConfigs.keySet()) {
if (key.getTenantIdentifier().equals(tenantIdentifier)) {
SQLStorage bulkImportProxyStorage = (SQLStorage) StorageLayer.getNewBulkImportProxyStorageInstance(main,
normalisedConfigs.get(key), tenantIdentifier, true);
userPoolToStorageMap.put(userPoolId, bulkImportProxyStorage);
bulkImportProxyStorage.initStorage(false, new ArrayList<>());
return bulkImportProxyStorage;
}
}
throw new TenantOrAppNotFoundException(tenantIdentifier);
}
private synchronized Storage[] getAllProxyStoragesForApp(Main main, AppIdentifier appIdentifier)
throws StorageTransactionLogicException {
try {
List<Storage> allProxyStorages = new ArrayList<>();
TenantConfig[] tenantConfigs = Multitenancy.getAllTenantsForApp(appIdentifier, main);
for (TenantConfig tenantConfig : tenantConfigs) {
allProxyStorages.add(getBulkImportProxyStorage(tenantConfig.tenantIdentifier));
}
return allProxyStorages.toArray(new Storage[0]);
} catch (TenantOrAppNotFoundException e) {
throw new StorageTransactionLogicException(new Exception("E043: " + e.getMessage()));
} catch (InvalidConfigException e) {
throw new StorageTransactionLogicException(new InvalidConfigException("E044: " + e.getMessage()));
} catch (DbInitException e) {
throw new StorageTransactionLogicException(new DbInitException("E045: " + e.getMessage()));
} catch (IOException e) {
throw new StorageTransactionLogicException(new IOException("E046: " + e.getMessage()));
}
}
private void closeAllProxyStorages() throws StorageQueryException {
for (SQLStorage storage : userPoolToStorageMap.values()) {
storage.closeConnectionForBulkImportProxyStorage();
}
userPoolToStorageMap.clear();
}
private Map<SQLStorage, List<BulkImportUser>> partitionUsersByStorage(AppIdentifier appIdentifier, List<BulkImportUser> users)
throws DbInitException, TenantOrAppNotFoundException, InvalidConfigException, IOException {
Map<SQLStorage, List<BulkImportUser>> result = new HashMap<>();
for(BulkImportUser user: users) {
TenantIdentifier firstTenantIdentifier = new TenantIdentifier(appIdentifier.getConnectionUriDomain(),
appIdentifier.getAppId(), user.loginMethods.getFirst().tenantIds.getFirst());
SQLStorage bulkImportProxyStorage = (SQLStorage) getBulkImportProxyStorage(firstTenantIdentifier);
if(!result.containsKey(bulkImportProxyStorage)){
result.put(bulkImportProxyStorage, new ArrayList<>());
}
result.get(bulkImportProxyStorage).add(user);
}
return result;
}
}

View File

@ -1,63 +0,0 @@
package io.supertokens.cronjobs.cleanupOAuthSessionsAndChallenges;
import io.supertokens.Main;
import io.supertokens.cronjobs.CronTask;
import io.supertokens.cronjobs.CronTaskTest;
import io.supertokens.pluginInterface.STORAGE_TYPE;
import io.supertokens.pluginInterface.Storage;
import io.supertokens.pluginInterface.StorageUtils;
import io.supertokens.pluginInterface.multitenancy.TenantIdentifier;
import io.supertokens.pluginInterface.oauth.OAuthStorage;
import java.util.List;
public class CleanupOAuthSessionsAndChallenges extends CronTask {
public static final String RESOURCE_KEY = "io.supertokens.cronjobs.cleanupOAuthSessionsAndChallenges" +
".CleanupOAuthSessionsAndChallenges";
private CleanupOAuthSessionsAndChallenges(Main main, List<List<TenantIdentifier>> tenantsInfo) {
super("CleanupOAuthSessionsAndChallenges", main, tenantsInfo, true);
}
public static CleanupOAuthSessionsAndChallenges init(Main main, List<List<TenantIdentifier>> tenantsInfo) {
return (CleanupOAuthSessionsAndChallenges) main.getResourceDistributor()
.setResource(new TenantIdentifier(null, null, null), RESOURCE_KEY,
new CleanupOAuthSessionsAndChallenges(main, tenantsInfo));
}
@Override
protected void doTaskPerStorage(Storage storage) throws Exception {
if (storage.getType() != STORAGE_TYPE.SQL) {
return;
}
OAuthStorage oauthStorage = StorageUtils.getOAuthStorage(storage);
long monthAgo = System.currentTimeMillis() / 1000 - 31 * 24 * 3600; // ~31 days ago, as a unix timestamp in seconds
oauthStorage.deleteExpiredOAuthSessions(monthAgo);
oauthStorage.deleteExpiredOAuthM2MTokens(monthAgo);
oauthStorage.deleteOAuthLogoutChallengesBefore(System.currentTimeMillis() - 1000 * 60 * 60 * 48); // 48 hours ago, in epoch milliseconds
}
@Override
public int getIntervalTimeSeconds() {
if (Main.isTesting) {
Integer interval = CronTaskTest.getInstance(main).getIntervalInSeconds(RESOURCE_KEY);
if (interval != null) {
return interval;
}
}
// Every 24 hours.
return 24 * 3600;
}
@Override
public int getInitialWaitTimeSeconds() {
if (!Main.isTesting) {
return getIntervalTimeSeconds();
} else {
return 0;
}
}
}
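
For illustration only (an assumed standalone snippet, not part of this diff): the two cutoffs computed in doTaskPerStorage above, expressed with java.time so the seconds-versus-milliseconds distinction is explicit.

// Standalone sketch of the cleanup cutoffs (assumed example, mirrors doTaskPerStorage above).
import java.time.Duration;
import java.time.Instant;

public class OAuthCleanupCutoffSketch {
    public static void main(String[] args) {
        Instant now = Instant.now();
        // Sessions and M2M tokens older than ~31 days: cutoff as a unix timestamp in seconds.
        long monthAgoSeconds = now.minus(Duration.ofDays(31)).getEpochSecond();
        // Logout challenges created more than 48 hours ago: cutoff in epoch milliseconds.
        long challengeCutoffMillis = now.minus(Duration.ofHours(48)).toEpochMilli();
        System.out.println("delete sessions/M2M tokens expired before (s): " + monthAgoSeconds);
        System.out.println("delete logout challenges created before (ms): " + challengeCutoffMillis);
    }
}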

View File

@ -1,76 +0,0 @@
/*
* Copyright (c) 2025, VRAI Labs and/or its affiliates. All rights reserved.
*
* This software is licensed under the Apache License, Version 2.0 (the
* "License") as published by the Apache Software Foundation.
*
* You may not use this file except in compliance with the License. You may
* obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package io.supertokens.cronjobs.cleanupWebauthnExpiredData;
import io.supertokens.Main;
import io.supertokens.cronjobs.CronTask;
import io.supertokens.cronjobs.CronTaskTest;
import io.supertokens.pluginInterface.STORAGE_TYPE;
import io.supertokens.pluginInterface.Storage;
import io.supertokens.pluginInterface.StorageUtils;
import io.supertokens.pluginInterface.multitenancy.TenantIdentifier;
import io.supertokens.pluginInterface.webauthn.WebAuthNStorage;
import java.util.List;
public class CleanUpWebauthNExpiredDataCron extends CronTask {
public static final String RESOURCE_KEY = "io.supertokens.cronjobs.cleanupWebauthnExpiredData" +
".CleanUpWebauthnExpiredDataCron";
private CleanUpWebauthNExpiredDataCron(Main main, List<List<TenantIdentifier>> tenantsInfo) {
super("CleanUpWebauthnExpiredDataCron", main, tenantsInfo, true);
}
public static CleanUpWebauthNExpiredDataCron init(Main main, List<List<TenantIdentifier>> tenantsInfo) {
return (CleanUpWebauthNExpiredDataCron) main.getResourceDistributor()
.setResource(new TenantIdentifier(null, null, null), RESOURCE_KEY,
new CleanUpWebauthNExpiredDataCron(main, tenantsInfo));
}
@Override
protected void doTaskPerStorage(Storage storage) throws Exception {
if (storage.getType() != STORAGE_TYPE.SQL) {
return;
}
WebAuthNStorage webAuthNStorage = StorageUtils.getWebAuthNStorage(storage);
webAuthNStorage.deleteExpiredAccountRecoveryTokens();
webAuthNStorage.deleteExpiredGeneratedOptions();
}
@Override
public int getIntervalTimeSeconds() {
if (Main.isTesting) {
Integer interval = CronTaskTest.getInstance(main).getIntervalInSeconds(RESOURCE_KEY);
if (interval != null) {
return interval;
}
}
// Every 24 hours.
return 24 * 3600;
}
@Override
public int getInitialWaitTimeSeconds() {
if (!Main.isTesting) {
return getIntervalTimeSeconds();
} else {
return 0;
}
}
}

View File

@ -1,84 +0,0 @@
/*
* Copyright (c) 2025, VRAI Labs and/or its affiliates. All rights reserved.
*
* This software is licensed under the Apache License, Version 2.0 (the
* "License") as published by the Apache Software Foundation.
*
* You may not use this file except in compliance with the License. You may
* obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package io.supertokens.cronjobs.deadlocklogger;
import java.lang.management.ManagementFactory;
import java.lang.management.ThreadInfo;
import java.lang.management.ThreadMXBean;
import java.util.Arrays;
public class DeadlockLogger {
private static final DeadlockLogger INSTANCE = new DeadlockLogger();
private DeadlockLogger() {
}
public static DeadlockLogger getInstance() {
return INSTANCE;
}
public void start(){
Thread deadlockLoggerThread = new Thread(deadlockDetector, "DeadlockLoggerThread");
deadlockLoggerThread.setDaemon(true);
deadlockLoggerThread.start();
}
private final Runnable deadlockDetector = new Runnable() {
@Override
public void run() {
System.out.println("DeadlockLogger started!");
while (true) {
System.out.println("DeadlockLogger - checking");
ThreadMXBean bean = ManagementFactory.getThreadMXBean();
long[] threadIds = bean.findDeadlockedThreads(); // Returns null if no threads are deadlocked.
System.out.println("DeadlockLogger - DeadlockedThreads: " + Arrays.toString(threadIds));
if (threadIds != null) {
ThreadInfo[] infos = bean.getThreadInfo(threadIds);
boolean deadlockFound = false;
System.out.println("DEADLOCK found!");
for (ThreadInfo info : infos) {
System.out.println("ThreadName: " + info.getThreadName());
System.out.println("Thread ID: " + info.getThreadId());
System.out.println("LockName: " + info.getLockName());
System.out.println("LockOwnerName: " + info.getLockOwnerName());
System.out.println("LockedMonitors: " + Arrays.toString(info.getLockedMonitors()));
System.out.println("LockInfo: " + info.getLockInfo());
System.out.println("Stack: " + Arrays.toString(info.getStackTrace()));
System.out.println();
deadlockFound = true;
}
System.out.println("*******************************");
if(deadlockFound) {
System.out.println(" ==== ALL THREAD INFO ===");
ThreadInfo[] allThreads = bean.dumpAllThreads(true, true, 100);
for (ThreadInfo threadInfo : allThreads) {
System.out.println("THREAD: " + threadInfo.getThreadName());
System.out.println("StackTrace: " + Arrays.toString(threadInfo.getStackTrace()));
}
break;
}
}
try {
Thread.sleep(10000);
} catch (InterruptedException e) {
throw new RuntimeException(e);
}
}
}
};
}
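
A minimal usage sketch (assumed, not shown in this diff): DeadlockLogger is a singleton that runs its detector on a daemon thread, so a caller only needs to start it once during bootstrap.

// Hypothetical bootstrap snippet; assumes the DeadlockLogger class above is on the classpath.
import io.supertokens.cronjobs.deadlocklogger.DeadlockLogger;

public class DeadlockLoggerBootstrapSketch {
    public static void main(String[] args) throws InterruptedException {
        DeadlockLogger.getInstance().start(); // spawns the daemon "DeadlockLoggerThread"
        Thread.sleep(15_000); // keep the JVM alive long enough for at least one 10-second detection pass
    }
}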

View File

@ -1,53 +0,0 @@
package io.supertokens.cronjobs.deleteExpiredSAMLData;
import java.util.List;
import io.supertokens.Main;
import io.supertokens.cronjobs.CronTask;
import io.supertokens.cronjobs.CronTaskTest;
import io.supertokens.pluginInterface.Storage;
import io.supertokens.pluginInterface.StorageUtils;
import io.supertokens.pluginInterface.multitenancy.TenantIdentifier;
import io.supertokens.pluginInterface.saml.SAMLStorage;
public class DeleteExpiredSAMLData extends CronTask {
public static final String RESOURCE_KEY = "io.supertokens.cronjobs.deleteExpiredSAMLData" +
".DeleteExpiredSAMLData";
private DeleteExpiredSAMLData(Main main, List<List<TenantIdentifier>> tenantsInfo) {
super("DeleteExpiredSAMLData", main, tenantsInfo, false);
}
public static DeleteExpiredSAMLData init(Main main, List<List<TenantIdentifier>> tenantsInfo) {
return (DeleteExpiredSAMLData) main.getResourceDistributor()
.setResource(new TenantIdentifier(null, null, null), RESOURCE_KEY,
new DeleteExpiredSAMLData(main, tenantsInfo));
}
@Override
protected void doTaskPerStorage(Storage storage) throws Exception {
SAMLStorage samlStorage = StorageUtils.getSAMLStorage(storage);
samlStorage.removeExpiredSAMLCodesAndRelayStates();
}
@Override
public int getIntervalTimeSeconds() {
if (Main.isTesting) {
Integer interval = CronTaskTest.getInstance(main).getIntervalInSeconds(RESOURCE_KEY);
if (interval != null) {
return interval;
}
}
// Every hour
return 3600;
}
@Override
public int getInitialWaitTimeSeconds() {
if (!Main.isTesting) {
return getIntervalTimeSeconds();
} else {
return 0;
}
}
}

View File

@ -7,7 +7,6 @@ import io.supertokens.cronjobs.CronTaskTest;
import io.supertokens.output.Logging;
import io.supertokens.pluginInterface.STORAGE_TYPE;
import io.supertokens.pluginInterface.multitenancy.TenantIdentifier;
import io.supertokens.pluginInterface.multitenancy.exceptions.TenantOrAppNotFoundException;
import io.supertokens.pluginInterface.totp.sqlStorage.TOTPSQLStorage;
import io.supertokens.storageLayer.StorageLayer;
import org.jetbrains.annotations.TestOnly;
@ -31,11 +30,7 @@ public class DeleteExpiredTotpTokens extends CronTask {
@TestOnly
public static DeleteExpiredTotpTokens getInstance(Main main) {
try {
return (DeleteExpiredTotpTokens) main.getResourceDistributor().getResource(TenantIdentifier.BASE_TENANT, RESOURCE_KEY);
} catch (TenantOrAppNotFoundException e) {
throw new IllegalStateException(e);
}
return (DeleteExpiredTotpTokens) main.getResourceDistributor().getResource(RESOURCE_KEY);
}
@Override

View File

@ -19,9 +19,12 @@ package io.supertokens.cronjobs.syncCoreConfigWithDb;
import io.supertokens.Main;
import io.supertokens.cronjobs.CronTask;
import io.supertokens.cronjobs.CronTaskTest;
import io.supertokens.cronjobs.deleteExpiredSessions.DeleteExpiredSessions;
import io.supertokens.multitenancy.Multitenancy;
import io.supertokens.multitenancy.MultitenancyHelper;
import io.supertokens.pluginInterface.multitenancy.TenantIdentifier;
import io.supertokens.pluginInterface.opentelemetry.WithinOtelSpan;
import java.util.List;
public class SyncCoreConfigWithDb extends CronTask {
@ -59,7 +62,6 @@ public class SyncCoreConfigWithDb extends CronTask {
return 60;
}
@WithinOtelSpan
@Override
protected void doTaskForTargetTenant(TenantIdentifier targetTenant) throws Exception {
MultitenancyHelper.getInstance(main).refreshTenantsInCoreBasedOnChangesInCoreConfigOrIfTenantListChanged(true);

View File

@ -157,7 +157,7 @@ public class Telemetry extends CronTask {
json.add("maus", new JsonArray());
}
String url = "https://api.supertokens.com/0/st/telemetry";
String url = "https://api.supertokens.io/0/st/telemetry";
// we call the API only if we are not testing the core, or if the request can be mocked (in case a test
// wants

View File

@ -17,7 +17,6 @@
package io.supertokens.dashboard;
import io.supertokens.Main;
import io.supertokens.ResourceDistributor;
import io.supertokens.dashboard.exceptions.UserSuspendedException;
import io.supertokens.emailpassword.PasswordHashing;
import io.supertokens.featureflag.EE_FEATURES;
@ -56,7 +55,7 @@ public class Dashboard {
throws StorageQueryException, DuplicateEmailException, FeatureNotEnabledException {
try {
Storage storage = StorageLayer.getStorage(main);
return signUpDashboardUser(ResourceDistributor.getAppForTesting().toAppIdentifier(), storage,
return signUpDashboardUser(new AppIdentifier(null, null), storage,
main, email, password);
} catch (TenantOrAppNotFoundException e) {
throw new IllegalStateException(e);
@ -104,7 +103,7 @@ public class Dashboard {
public static DashboardUser[] getAllDashboardUsers(Main main)
throws StorageQueryException {
Storage storage = StorageLayer.getStorage(main);
return getAllDashboardUsers(ResourceDistributor.getAppForTesting().toAppIdentifier(), storage, main);
return getAllDashboardUsers(new AppIdentifier(null, null), storage, main);
}
public static DashboardUser[] getAllDashboardUsers(AppIdentifier appIdentifier, Storage storage, Main main)
@ -128,7 +127,7 @@ public class Dashboard {
throws StorageQueryException, UserSuspendedException {
try {
Storage storage = StorageLayer.getStorage(main);
return signInDashboardUser(ResourceDistributor.getAppForTesting().toAppIdentifier(), storage,
return signInDashboardUser(new AppIdentifier(null, null), storage,
main, email, password);
} catch (TenantOrAppNotFoundException e) {
throw new IllegalStateException(e);
@ -160,7 +159,7 @@ public class Dashboard {
public static boolean deleteUserWithUserId(Main main, String userId)
throws StorageQueryException {
Storage storage = StorageLayer.getStorage(main);
return deleteUserWithUserId(ResourceDistributor.getAppForTesting().toAppIdentifier(), storage, userId);
return deleteUserWithUserId(new AppIdentifier(null, null), storage, userId);
}
public static boolean deleteUserWithUserId(AppIdentifier appIdentifier, Storage storage, String userId)
@ -202,7 +201,7 @@ public class Dashboard {
public static boolean deleteUserWithEmail(Main main, String email)
throws StorageQueryException {
Storage storage = StorageLayer.getStorage(main);
return deleteUserWithEmail(ResourceDistributor.getAppForTesting().toAppIdentifier(), storage, email);
return deleteUserWithEmail(new AppIdentifier(null, null), storage, email);
}
public static boolean deleteUserWithEmail(AppIdentifier appIdentifier, Storage storage, String email)
@ -224,7 +223,7 @@ public class Dashboard {
try {
Storage storage = StorageLayer.getStorage(main);
return updateUsersCredentialsWithUserId(
ResourceDistributor.getAppForTesting().toAppIdentifier(), storage, main, userId,
new AppIdentifier(null, null), storage, main, userId,
newEmail, newPassword);
} catch (TenantOrAppNotFoundException e) {
throw new IllegalStateException(e);
@ -292,7 +291,7 @@ public class Dashboard {
public static DashboardUser getDashboardUserByEmail(Main main, String email)
throws StorageQueryException {
Storage storage = StorageLayer.getStorage(main);
return getDashboardUserByEmail(ResourceDistributor.getAppForTesting().toAppIdentifier(), storage, email);
return getDashboardUserByEmail(new AppIdentifier(null, null), storage, email);
}
public static DashboardUser getDashboardUserByEmail(AppIdentifier appIdentifier, Storage storage, String email)
@ -306,7 +305,7 @@ public class Dashboard {
public static boolean revokeSessionWithSessionId(Main main, String sessionId)
throws StorageQueryException {
Storage storage = StorageLayer.getStorage(main);
return revokeSessionWithSessionId(ResourceDistributor.getAppForTesting().toAppIdentifier(), storage, sessionId);
return revokeSessionWithSessionId(new AppIdentifier(null, null), storage, sessionId);
}
public static boolean revokeSessionWithSessionId(AppIdentifier appIdentifier, Storage storage, String sessionId)
@ -321,7 +320,7 @@ public class Dashboard {
throws StorageQueryException {
Storage storage = StorageLayer.getStorage(main);
return getAllDashboardSessionsForUser(
ResourceDistributor.getAppForTesting().toAppIdentifier(), storage, userId);
new AppIdentifier(null, null), storage, userId);
}
public static DashboardSessionInfo[] getAllDashboardSessionsForUser(AppIdentifier appIdentifier, Storage storage,
@ -391,7 +390,7 @@ public class Dashboard {
public static boolean isValidUserSession(Main main, String sessionId)
throws StorageQueryException, UserSuspendedException {
Storage storage = StorageLayer.getStorage(main);
return isValidUserSession(ResourceDistributor.getAppForTesting().toAppIdentifier(), storage, main, sessionId);
return isValidUserSession(new AppIdentifier(null, null), storage, main, sessionId);
}
public static boolean isValidUserSession(AppIdentifier appIdentifier, Storage storage, Main main, String sessionId)

View File

@ -16,18 +16,7 @@
package io.supertokens.emailpassword;
import java.security.NoSuchAlgorithmException;
import java.security.SecureRandom;
import java.security.spec.InvalidKeySpecException;
import java.util.List;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import org.jetbrains.annotations.TestOnly;
import io.supertokens.Main;
import io.supertokens.ResourceDistributor;
import io.supertokens.authRecipe.AuthRecipe;
import io.supertokens.config.Config;
import io.supertokens.config.CoreConfig;
@ -43,8 +32,6 @@ import io.supertokens.pluginInterface.StorageUtils;
import io.supertokens.pluginInterface.authRecipe.AuthRecipeUserInfo;
import io.supertokens.pluginInterface.authRecipe.LoginMethod;
import io.supertokens.pluginInterface.authRecipe.sqlStorage.AuthRecipeSQLStorage;
import io.supertokens.pluginInterface.bulkimport.BulkImportStorage;
import io.supertokens.pluginInterface.emailpassword.EmailPasswordImportUser;
import io.supertokens.pluginInterface.emailpassword.PasswordResetTokenInfo;
import io.supertokens.pluginInterface.emailpassword.exceptions.DuplicateEmailException;
import io.supertokens.pluginInterface.emailpassword.exceptions.DuplicatePasswordResetTokenException;
@ -61,6 +48,13 @@ import io.supertokens.pluginInterface.multitenancy.exceptions.TenantOrAppNotFoun
import io.supertokens.storageLayer.StorageLayer;
import io.supertokens.utils.Utils;
import io.supertokens.webserver.WebserverAPI;
import org.jetbrains.annotations.TestOnly;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import java.security.NoSuchAlgorithmException;
import java.security.SecureRandom;
import java.security.spec.InvalidKeySpecException;
public class EmailPassword {
@ -77,7 +71,7 @@ public class EmailPassword {
@TestOnly
public static long getPasswordResetTokenLifetimeForTests(Main main) {
try {
return getPasswordResetTokenLifetime(ResourceDistributor.getAppForTesting(), main);
return getPasswordResetTokenLifetime(new TenantIdentifier(null, null, null), main);
} catch (TenantOrAppNotFoundException e) {
throw new IllegalStateException(e);
}
@ -93,7 +87,7 @@ public class EmailPassword {
throws DuplicateEmailException, StorageQueryException {
try {
Storage storage = StorageLayer.getStorage(main);
return signUp(ResourceDistributor.getAppForTesting(), storage,
return signUp(new TenantIdentifier(null, null, null), storage,
main, email, password);
} catch (TenantOrAppNotFoundException | BadPermissionException e) {
throw new IllegalStateException(e);
@ -160,7 +154,7 @@ public class EmailPassword {
Storage storage = StorageLayer.getStorage(main);
return importUserWithPasswordHash(
ResourceDistributor.getAppForTesting(), storage, main, email,
new TenantIdentifier(null, null, null), storage, main, email,
passwordHash, hashingAlgorithm);
} catch (TenantOrAppNotFoundException | BadPermissionException e) {
throw new IllegalStateException(e);
@ -183,57 +177,19 @@ public class EmailPassword {
tenantIdentifier.toAppIdentifier(), main,
passwordHash, hashingAlgorithm);
EmailPasswordSQLStorage epStorage = StorageUtils.getEmailPasswordStorage(storage);
ImportUserResponse response = null;
try {
long timeJoined = System.currentTimeMillis();
response = createUserWithPasswordHash(tenantIdentifier, storage, email, passwordHash, timeJoined);
} catch (DuplicateEmailException e) {
AuthRecipeUserInfo[] allUsers = epStorage.listPrimaryUsersByEmail(tenantIdentifier, email);
AuthRecipeUserInfo userInfoToBeUpdated = null;
LoginMethod loginMethod = null;
for (AuthRecipeUserInfo currUser : allUsers) {
for (LoginMethod currLM : currUser.loginMethods) {
if (currLM.email.equals(email) && currLM.recipeId == RECIPE_ID.EMAIL_PASSWORD && currLM.tenantIds.contains(tenantIdentifier.getTenantId())) {
userInfoToBeUpdated = currUser;
loginMethod = currLM;
break;
}
}
}
if (userInfoToBeUpdated != null) {
LoginMethod finalLoginMethod = loginMethod;
epStorage.startTransaction(con -> {
epStorage.updateUsersPassword_Transaction(tenantIdentifier.toAppIdentifier(), con,
finalLoginMethod.getSupertokensUserId(), passwordHash);
return null;
});
response = new ImportUserResponse(true, userInfoToBeUpdated);
}
}
return response;
}
public static ImportUserResponse createUserWithPasswordHash(TenantIdentifier tenantIdentifier, Storage storage,
@Nonnull String email,
@Nonnull String passwordHash, long timeJoined)
throws StorageQueryException, DuplicateEmailException, TenantOrAppNotFoundException,
StorageTransactionLogicException {
EmailPasswordSQLStorage epStorage = StorageUtils.getEmailPasswordStorage(storage);
while (true) {
String userId = Utils.getUUID();
long timeJoined = System.currentTimeMillis();
EmailPasswordSQLStorage epStorage = StorageUtils.getEmailPasswordStorage(storage);
try {
AuthRecipeUserInfo userInfo = null;
userInfo = epStorage.signUp(tenantIdentifier, userId, email, passwordHash, timeJoined);
AuthRecipeUserInfo userInfo = epStorage.signUp(tenantIdentifier, userId, email, passwordHash,
timeJoined);
return new ImportUserResponse(false, userInfo);
} catch (DuplicateUserIdException e) {
// we retry with a new userId
} catch (DuplicateEmailException e) {
if(epStorage instanceof BulkImportStorage){
throw e;
}
AuthRecipeUserInfo[] allUsers = epStorage.listPrimaryUsersByEmail(tenantIdentifier, email);
AuthRecipeUserInfo userInfoToBeUpdated = null;
LoginMethod loginMethod = null;
@ -261,17 +217,6 @@ public class EmailPassword {
}
}
public static void createMultipleUsersWithPasswordHash(Storage storage,
List<EmailPasswordImportUser> usersToImport)
throws StorageQueryException, TenantOrAppNotFoundException, StorageTransactionLogicException {
EmailPasswordSQLStorage epStorage = StorageUtils.getEmailPasswordStorage(storage);
epStorage.startTransaction(con -> {
epStorage.signUpMultipleViaBulkImport_Transaction(con, usersToImport);
return null;
});
}
@TestOnly
public static ImportUserResponse importUserWithPasswordHash(Main main, @Nonnull String email,
@Nonnull String passwordHash)
@ -279,7 +224,7 @@ public class EmailPassword {
try {
Storage storage = StorageLayer.getStorage(main);
return importUserWithPasswordHash(
ResourceDistributor.getAppForTesting(), storage,
new TenantIdentifier(null, null, null), storage,
main, email, passwordHash, null);
} catch (TenantOrAppNotFoundException | BadPermissionException e) {
throw new IllegalStateException(e);
@ -292,7 +237,7 @@ public class EmailPassword {
throws StorageQueryException, WrongCredentialsException {
try {
Storage storage = StorageLayer.getStorage(main);
return signIn(ResourceDistributor.getAppForTesting(), storage,
return signIn(new TenantIdentifier(null, null, null), storage,
main, email, password);
} catch (TenantOrAppNotFoundException | BadPermissionException e) {
throw new IllegalStateException(e);
@ -356,7 +301,7 @@ public class EmailPassword {
try {
Storage storage = StorageLayer.getStorage(main);
return generatePasswordResetTokenBeforeCdi4_0(
ResourceDistributor.getAppForTesting(), storage,
new TenantIdentifier(null, null, null), storage,
main, userId);
} catch (TenantOrAppNotFoundException | BadPermissionException | WebserverAPI.BadRequestException e) {
throw new IllegalStateException(e);
@ -369,7 +314,7 @@ public class EmailPassword {
try {
Storage storage = StorageLayer.getStorage(main);
return generatePasswordResetToken(
ResourceDistributor.getAppForTesting(), storage,
new TenantIdentifier(null, null, null), storage,
main, userId, null);
} catch (TenantOrAppNotFoundException | BadPermissionException e) {
throw new IllegalStateException(e);
@ -382,7 +327,7 @@ public class EmailPassword {
try {
Storage storage = StorageLayer.getStorage(main);
return generatePasswordResetToken(
ResourceDistributor.getAppForTesting(), storage,
new TenantIdentifier(null, null, null), storage,
main, userId, email);
} catch (TenantOrAppNotFoundException | BadPermissionException e) {
throw new IllegalStateException(e);
@ -459,7 +404,7 @@ public class EmailPassword {
StorageTransactionLogicException {
try {
Storage storage = StorageLayer.getStorage(main);
return resetPassword(ResourceDistributor.getAppForTesting(), storage,
return resetPassword(new TenantIdentifier(null, null, null), storage,
main, token, password);
} catch (TenantOrAppNotFoundException e) {
throw new IllegalStateException(e);
@ -533,7 +478,7 @@ public class EmailPassword {
StorageTransactionLogicException {
try {
Storage storage = StorageLayer.getStorage(main);
return consumeResetPasswordToken(ResourceDistributor.getAppForTesting(), storage,
return consumeResetPasswordToken(new TenantIdentifier(null, null, null), storage,
token);
} catch (TenantOrAppNotFoundException e) {
throw new IllegalStateException(e);
@ -631,7 +576,7 @@ public class EmailPassword {
UnknownUserIdException, DuplicateEmailException, EmailChangeNotAllowedException {
try {
Storage storage = StorageLayer.getStorage(main);
updateUsersEmailOrPassword(ResourceDistributor.getAppForTesting().toAppIdentifier(), storage,
updateUsersEmailOrPassword(new AppIdentifier(null, null), storage,
main, userId, email, password);
} catch (TenantOrAppNotFoundException e) {
throw new IllegalStateException(e);
@ -728,7 +673,7 @@ public class EmailPassword {
throws StorageQueryException {
try {
Storage storage = StorageLayer.getStorage(main);
return getUserUsingId(ResourceDistributor.getAppForTesting().toAppIdentifier(), storage, userId);
return getUserUsingId(new AppIdentifier(null, null), storage, userId);
} catch (TenantOrAppNotFoundException e) {
throw new IllegalStateException(e);
}

View File

@ -30,8 +30,6 @@ import io.supertokens.pluginInterface.multitenancy.exceptions.TenantOrAppNotFoun
import org.jetbrains.annotations.TestOnly;
import org.mindrot.jbcrypt.BCrypt;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;
@ -44,9 +42,6 @@ public class PasswordHashing extends ResourceDistributor.SingletonResource {
final BlockingQueue<Object> firebaseSCryptBoundedQueue;
final Main main;
private final Map<String, String> cachedPasswordHashForTesting = new HashMap<>();
public static boolean bypassHashCachingInTesting = false;
private PasswordHashing(Main main) {
this.argon2BoundedQueue = new LinkedBlockingQueue<>(
Config.getBaseConfig(main).getArgon2HashingPoolSize());
@ -80,7 +75,7 @@ public class PasswordHashing extends ResourceDistributor.SingletonResource {
@TestOnly
public String createHashWithSalt(String password) {
try {
return createHashWithSalt(ResourceDistributor.getAppForTesting().toAppIdentifier(), password);
return createHashWithSalt(new AppIdentifier(null, null), password);
} catch (TenantOrAppNotFoundException e) {
throw new IllegalStateException(e);
}
@ -89,10 +84,6 @@ public class PasswordHashing extends ResourceDistributor.SingletonResource {
public String createHashWithSalt(AppIdentifier appIdentifier, String password)
throws TenantOrAppNotFoundException {
if (Main.isTesting && !bypassHashCachingInTesting && cachedPasswordHashForTesting.containsKey(password)) {
return cachedPasswordHashForTesting.get(password);
}
String passwordHash = "";
TenantIdentifier tenantIdentifier = appIdentifier.getAsPublicTenantIdentifier();
@ -117,10 +108,6 @@ public class PasswordHashing extends ResourceDistributor.SingletonResource {
} catch (UnsupportedPasswordHashingFormatException e) {
throw new IllegalStateException(e);
}
if (Main.isTesting) {
cachedPasswordHashForTesting.put(password, passwordHash);
}
return passwordHash;
}

View File

@ -23,7 +23,7 @@ import io.supertokens.config.CoreConfig;
import io.supertokens.emailpassword.exceptions.UnsupportedPasswordHashingFormatException;
import io.supertokens.pluginInterface.multitenancy.AppIdentifier;
import io.supertokens.pluginInterface.multitenancy.exceptions.TenantOrAppNotFoundException;
import org.apache.commons.codec.binary.Base64;
import org.apache.tomcat.util.codec.binary.Base64;
import javax.annotation.Nullable;
import javax.crypto.Cipher;
@ -118,9 +118,9 @@ public class PasswordHashingUtils {
// concatenating decoded salt + separator
byte[] byteArrTemp = response.salt.getBytes(StandardCharsets.US_ASCII);
byte[] decodedSaltBytes = Base64.decodeBase64(byteArrTemp);
byte[] decodedSaltBytes = Base64.decodeBase64(byteArrTemp, 0, byteArrTemp.length);
byteArrTemp = response.saltSeparator.getBytes(StandardCharsets.US_ASCII);
byte[] decodedSaltSepBytes = Base64.decodeBase64(byteArrTemp);
byte[] decodedSaltSepBytes = Base64.decodeBase64(byteArrTemp, 0, byteArrTemp.length);
byte[] saltConcat = new byte[decodedSaltBytes.length + decodedSaltSepBytes.length];
System.arraycopy(decodedSaltBytes, 0, saltConcat, 0, decodedSaltBytes.length);
@ -136,7 +136,7 @@ public class PasswordHashingUtils {
}
// encrypting with aes
byteArrTemp = base64_signer_key.getBytes(StandardCharsets.US_ASCII);
byte[] signerBytes = Base64.decodeBase64(byteArrTemp);
byte[] signerBytes = Base64.decodeBase64(byteArrTemp, 0, byteArrTemp.length);
try {
String CIPHER = "AES/CTR/NoPadding";

View File

@ -17,7 +17,6 @@
package io.supertokens.emailverification;
import io.supertokens.Main;
import io.supertokens.ResourceDistributor;
import io.supertokens.config.Config;
import io.supertokens.emailverification.exception.EmailAlreadyVerifiedException;
import io.supertokens.emailverification.exception.EmailVerificationInvalidTokenException;
@ -45,7 +44,7 @@ public class EmailVerification {
public static long getEmailVerificationTokenLifetimeForTests(Main main) {
try {
return getEmailVerificationTokenLifetime(
ResourceDistributor.getAppForTesting(), main);
new TenantIdentifier(null, null, null), main);
} catch (TenantOrAppNotFoundException e) {
throw new IllegalStateException(e);
}
@ -63,7 +62,7 @@ public class EmailVerification {
try {
Storage storage = StorageLayer.getStorage(main);
return generateEmailVerificationToken(
ResourceDistributor.getAppForTesting(), storage,
new TenantIdentifier(null, null, null), storage,
main, userId, email);
} catch (TenantOrAppNotFoundException e) {
throw new IllegalStateException(e);
@ -108,7 +107,7 @@ public class EmailVerification {
EmailVerificationInvalidTokenException, NoSuchAlgorithmException, StorageTransactionLogicException {
try {
Storage storage = StorageLayer.getStorage(main);
return verifyEmail(ResourceDistributor.getAppForTesting(), storage, token);
return verifyEmail(new TenantIdentifier(null, null, null), storage, token);
} catch (TenantOrAppNotFoundException e) {
throw new IllegalStateException(e);
}
@ -183,7 +182,7 @@ public class EmailVerification {
public static boolean isEmailVerified(Main main, String userId,
String email) throws StorageQueryException {
Storage storage = StorageLayer.getStorage(main);
return isEmailVerified(ResourceDistributor.getAppForTesting().toAppIdentifier(), storage,
return isEmailVerified(new AppIdentifier(null, null), storage,
userId, email);
}
@ -197,7 +196,7 @@ public class EmailVerification {
public static void revokeAllTokens(Main main, String userId,
String email) throws StorageQueryException {
Storage storage = StorageLayer.getStorage(main);
revokeAllTokens(ResourceDistributor.getAppForTesting(), storage,
revokeAllTokens(new TenantIdentifier(null, null, null), storage,
userId, email);
}
@ -212,7 +211,7 @@ public class EmailVerification {
String email) throws StorageQueryException {
try {
Storage storage = StorageLayer.getStorage(main);
unverifyEmail(ResourceDistributor.getAppForTesting().toAppIdentifier(), storage, userId, email);
unverifyEmail(new AppIdentifier(null, null), storage, userId, email);
} catch (TenantOrAppNotFoundException e) {
throw new IllegalStateException(e);
}
@ -250,7 +249,7 @@ public class EmailVerification {
try {
StorageUtils.getEmailVerificationStorage(StorageLayer.getStorage(main))
.addEmailVerificationToken(ResourceDistributor.getAppForTesting(),
.addEmailVerificationToken(new TenantIdentifier(null, null, null),
new EmailVerificationTokenInfo(userId, hashedToken,
System.currentTimeMillis() +
EmailVerification.getEmailVerificationTokenLifetimeForTests(main), email));

View File

@ -18,7 +18,7 @@ package io.supertokens.featureflag;
public enum EE_FEATURES {
ACCOUNT_LINKING("account_linking"), MULTI_TENANCY("multi_tenancy"), TEST("test"),
DASHBOARD_LOGIN("dashboard_login"), MFA("mfa"), SECURITY("security"), OAUTH("oauth"), SAML("saml");
DASHBOARD_LOGIN("dashboard_login"), MFA("mfa");
private final String name;

View File

@ -22,7 +22,6 @@ import io.supertokens.ResourceDistributor;
import io.supertokens.featureflag.exceptions.InvalidLicenseKeyException;
import io.supertokens.featureflag.exceptions.NoLicenseKeyFoundException;
import io.supertokens.httpRequest.HttpResponseException;
import io.supertokens.output.Logging;
import io.supertokens.pluginInterface.exceptions.StorageQueryException;
import io.supertokens.pluginInterface.multitenancy.AppIdentifier;
import io.supertokens.pluginInterface.multitenancy.TenantIdentifier;
@ -108,7 +107,7 @@ public class FeatureFlag extends ResourceDistributor.SingletonResource {
public static FeatureFlag getInstance(Main main) {
try {
return (FeatureFlag) main.getResourceDistributor()
.getResource(ResourceDistributor.getAppForTesting(), RESOURCE_KEY);
.getResource(new AppIdentifier(null, null), RESOURCE_KEY);
} catch (TenantOrAppNotFoundException e) {
throw new IllegalStateException(e);
}
@ -133,32 +132,27 @@ public class FeatureFlag extends ResourceDistributor.SingletonResource {
.getAllResourcesWithResourceKey(RESOURCE_KEY);
main.getResourceDistributor().clearAllResourcesWithResourceKey(RESOURCE_KEY);
for (AppIdentifier app : apps) {
try {
ResourceDistributor.SingletonResource resource = existingResources.get(
new ResourceDistributor.KeyClass(
ResourceDistributor.SingletonResource resource = existingResources.get(
new ResourceDistributor.KeyClass(
app,
RESOURCE_KEY));
if (resource != null && !tenantsThatChanged.contains(app.getAsPublicTenantIdentifier())) {
main.getResourceDistributor()
.setResource(app,
RESOURCE_KEY,
resource);
} else {
main.getResourceDistributor()
.setResource(
app,
RESOURCE_KEY));
if (resource != null && !tenantsThatChanged.contains(app.getAsPublicTenantIdentifier())) {
main.getResourceDistributor()
.setResource(app,
RESOURCE_KEY,
resource);
} else {
main.getResourceDistributor()
.setResource(
app,
RESOURCE_KEY,
new FeatureFlag(main, app));
}
} catch (Exception e) {
Logging.error(main, app.getAsPublicTenantIdentifier(), e.getMessage(), false);
// continue loading other resources
RESOURCE_KEY,
new FeatureFlag(main, app));
}
}
return null;
});
} catch (ResourceDistributor.FuncException e) {
throw new IllegalStateException("should never happen", e);
throw new RuntimeException(e);
}
}

View File

@ -22,7 +22,6 @@ import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.List;
public interface QueryExecutorTemplate {
@ -45,26 +44,6 @@ public interface QueryExecutorTemplate {
}
}
static void executeBatch(Connection connection, String QUERY, List<PreparedStatementValueSetter> setters)
throws SQLException, StorageQueryException {
if(setters == null || setters.isEmpty()) {
return;
}
try (PreparedStatement pst = connection.prepareStatement(QUERY)) {
int counter = 0;
for(PreparedStatementValueSetter setter: setters) {
setter.setValues(pst);
pst.addBatch();
counter++;
if(counter % 100 == 0) {
pst.executeBatch();
}
}
pst.executeBatch(); //for the possible remaining ones
}
}
public static int update(Start start, String QUERY, PreparedStatementValueSetter setter)
throws SQLException, StorageQueryException {
try (Connection con = ConnectionPool.getConnection(start)) {

File diff suppressed because it is too large

View File

@ -164,40 +164,4 @@ public class SQLiteConfig {
public String getDashboardSessionsTable() {
return "dashboard_user_sessions";
}
public String getOAuthClientsTable() {
return "oauth_clients";
}
public String getOAuthRefreshTokenMappingTable() {
return "oauth_refresh_token_mapping";
}
public String getOAuthM2MTokensTable() {
return "oauth_m2m_tokens";
}
public String getOAuthSessionsTable() {
return "oauth_sessions";
}
public String getOAuthLogoutChallengesTable() {
return "oauth_logout_challenges";
}
public String getWebAuthNUsersTable(){ return "webauthn_users";}
public String getWebAuthNUserToTenantTable(){ return "webauthn_user_to_tenant"; }
public String getWebAuthNGeneratedOptionsTable() { return "webauthn_generated_options"; }
public String getWebAuthNCredentialsTable() { return "webauthn_credentials"; }
public String getWebAuthNAccountRecoveryTokenTable() { return "webauthn_account_recovery_tokens"; }
public String getSAMLClientsTable() { return "saml_clients"; }
public String getSAMLRelayStateTable() { return "saml_relay_state"; }
public String getSAMLClaimsTable() { return "saml_claims"; }
}

View File

@ -23,11 +23,6 @@ public class ActiveUsersQueries {
+ " );";
}
static String getQueryToCreateLastActiveTimeIndexForUserLastActiveTable(Start start) {
return "CREATE INDEX user_last_active_last_active_time_index ON "
+ Config.getConfig(start).getUserLastActiveTable() + "(last_active_time DESC, app_id DESC);";
}
public static int countUsersActiveSince(Start start, AppIdentifier appIdentifier, long sinceTime)
throws SQLException, StorageQueryException {
String QUERY = "SELECT COUNT(*) as total FROM " + Config.getConfig(start).getUserLastActiveTable()

View File

@ -58,11 +58,6 @@ public class EmailPasswordQueries {
+ ");";
}
static String getQueryToCreateEmailPasswordUsersEmailIndex(Start start) {
return "CREATE INDEX emailpassword_users_email_index ON "
+ Config.getConfig(start).getEmailPasswordUsersTable() + "(app_id, email);";
}
static String getQueryToCreateEmailPasswordUserToTenantTable(Start start) {
String emailPasswordUserToTenantTable = Config.getConfig(start).getEmailPasswordUserToTenantTable();
// @formatter:off
@ -80,11 +75,6 @@ public class EmailPasswordQueries {
// @formatter:on
}
static String getQueryToCreateEmailPasswordUserToTenantEmailIndex(Start start) {
return "CREATE INDEX emailpassword_user_to_tenant_email_index ON "
+ Config.getConfig(start).getEmailPasswordUserToTenantTable() + "(app_id, tenant_id, email);";
}
static String getQueryToCreatePasswordResetTokensTable(Start start) {
return "CREATE TABLE IF NOT EXISTS " + Config.getConfig(start).getPasswordResetTokensTable() + " ("
+ "app_id VARCHAR(64) DEFAULT 'public',"

View File

@ -17,7 +17,6 @@
package io.supertokens.inmemorydb.queries;
import io.supertokens.inmemorydb.ConnectionWithLocks;
import io.supertokens.inmemorydb.PreparedStatementValueSetter;
import io.supertokens.inmemorydb.Start;
import io.supertokens.inmemorydb.Utils;
import io.supertokens.inmemorydb.config.Config;
@ -34,7 +33,8 @@ import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.*;
import static io.supertokens.inmemorydb.QueryExecutorTemplate.*;
import static io.supertokens.inmemorydb.QueryExecutorTemplate.execute;
import static io.supertokens.inmemorydb.QueryExecutorTemplate.update;
import static io.supertokens.inmemorydb.config.Config.getConfig;
import static java.lang.System.currentTimeMillis;
@ -51,11 +51,6 @@ public class EmailVerificationQueries {
+ ");";
}
static String getQueryToCreateEmailVerificationVerifiedEmailsAppIdIndex(Start start) {
return "CREATE INDEX emailverification_verified_emails_verified_appid_emails_index ON "
+ Config.getConfig(start).getEmailVerificationTable() + "(app_id, email);";
}
static String getQueryToCreateEmailVerificationTokensTable(Start start) {
return "CREATE TABLE IF NOT EXISTS " + Config.getConfig(start).getEmailVerificationTokensTable() + " ("
+ "app_id VARCHAR(64) DEFAULT 'public',"
@ -108,32 +103,6 @@ public class EmailVerificationQueries {
}
}
public static void updateMultipleUsersIsEmailVerified_Transaction(Start start, Connection con, AppIdentifier appIdentifier,
Map<String, String> emailToUserIds,
boolean isEmailVerified)
throws SQLException, StorageQueryException {
String QUERY;
if (isEmailVerified) {
QUERY = "INSERT INTO " + getConfig(start).getEmailVerificationTable()
+ "(app_id, user_id, email) VALUES(?, ?, ?)";
} else {
QUERY = "DELETE FROM " + getConfig(start).getEmailVerificationTable()
+ " WHERE app_id = ? AND user_id = ? AND email = ?";
}
List<PreparedStatementValueSetter> setters = new ArrayList<>();
for (Map.Entry<String, String> emailToUser : emailToUserIds.entrySet()) {
setters.add(pst -> {
pst.setString(1, appIdentifier.getAppId());
pst.setString(2, emailToUser.getValue());
pst.setString(3, emailToUser.getKey());
});
}
executeBatch(con, QUERY, setters);
}
public static void deleteAllEmailVerificationTokensForUser_Transaction(Start start, Connection con,
TenantIdentifier tenantIdentifier,
String userId,

View File

@ -17,7 +17,10 @@
package io.supertokens.inmemorydb.queries;
import io.supertokens.Main;
import io.supertokens.inmemorydb.*;
import io.supertokens.inmemorydb.ConnectionPool;
import io.supertokens.inmemorydb.ConnectionWithLocks;
import io.supertokens.inmemorydb.Start;
import io.supertokens.inmemorydb.Utils;
import io.supertokens.inmemorydb.config.Config;
import io.supertokens.pluginInterface.KeyValueInfo;
import io.supertokens.pluginInterface.RECIPE_ID;
@ -26,7 +29,6 @@ import io.supertokens.pluginInterface.authRecipe.AuthRecipeUserInfo;
import io.supertokens.pluginInterface.authRecipe.LoginMethod;
import io.supertokens.pluginInterface.dashboard.DashboardSearchTags;
import io.supertokens.pluginInterface.exceptions.StorageQueryException;
import io.supertokens.pluginInterface.exceptions.StorageTransactionLogicException;
import io.supertokens.pluginInterface.multitenancy.AppIdentifier;
import io.supertokens.pluginInterface.multitenancy.TenantIdentifier;
import org.jetbrains.annotations.NotNull;
@ -43,7 +45,8 @@ import java.util.stream.Collectors;
import static io.supertokens.ProcessState.PROCESS_STATE.CREATING_NEW_TABLE;
import static io.supertokens.ProcessState.getInstance;
import static io.supertokens.inmemorydb.PreparedStatementValueSetter.NO_OP_SETTER;
import static io.supertokens.inmemorydb.QueryExecutorTemplate.*;
import static io.supertokens.inmemorydb.QueryExecutorTemplate.execute;
import static io.supertokens.inmemorydb.QueryExecutorTemplate.update;
import static io.supertokens.inmemorydb.config.Config.getConfig;
import static io.supertokens.inmemorydb.queries.EmailPasswordQueries.getQueryToCreatePasswordResetTokenExpiryIndex;
import static io.supertokens.inmemorydb.queries.EmailPasswordQueries.getQueryToCreatePasswordResetTokensTable;
@ -242,10 +245,6 @@ public class GeneralQueries {
if (!doesTableExists(start, Config.getConfig(start).getUserLastActiveTable())) {
getInstance(main).addState(CREATING_NEW_TABLE, null);
update(start, ActiveUsersQueries.getQueryToCreateUserLastActiveTable(start), NO_OP_SETTER);
// index
update(start, ActiveUsersQueries.getQueryToCreateLastActiveTimeIndexForUserLastActiveTable(start),
NO_OP_SETTER);
}
if (!doesTableExists(start, Config.getConfig(start).getAccessTokenSigningKeysTable())) {
@ -259,7 +258,6 @@ public class GeneralQueries {
// index
update(start, getQueryToCreateSessionExpiryIndex(start), NO_OP_SETTER);
update(start, getQueryToCreateSessionAppIdUserIdIndex(start), NO_OP_SETTER);
}
if (!doesTableExists(start, Config.getConfig(start).getTenantConfigsTable())) {
@ -293,19 +291,12 @@ public class GeneralQueries {
if (!doesTableExists(start, Config.getConfig(start).getEmailPasswordUsersTable())) {
getInstance(main).addState(CREATING_NEW_TABLE, null);
update(start, EmailPasswordQueries.getQueryToCreateUsersTable(start), NO_OP_SETTER);
// index
update(start, EmailPasswordQueries.getQueryToCreateEmailPasswordUsersEmailIndex(start), NO_OP_SETTER);
}
if (!doesTableExists(start, Config.getConfig(start).getEmailPasswordUserToTenantTable())) {
getInstance(main).addState(CREATING_NEW_TABLE, null);
update(start, EmailPasswordQueries.getQueryToCreateEmailPasswordUserToTenantTable(start),
NO_OP_SETTER);
// index
update(start, EmailPasswordQueries.getQueryToCreateEmailPasswordUserToTenantEmailIndex(start),
NO_OP_SETTER);
}
if (!doesTableExists(start, Config.getConfig(start).getPasswordResetTokensTable())) {
@ -318,9 +309,6 @@ public class GeneralQueries {
if (!doesTableExists(start, Config.getConfig(start).getEmailVerificationTable())) {
getInstance(main).addState(CREATING_NEW_TABLE, null);
update(start, getQueryToCreateEmailVerificationTable(start), NO_OP_SETTER);
//index
update(start, getQueryToCreateEmailVerificationVerifiedEmailsAppIdIndex(start), NO_OP_SETTER);
}
if (!doesTableExists(start, Config.getConfig(start).getEmailVerificationTokensTable())) {
@ -333,7 +321,6 @@ public class GeneralQueries {
if (!doesTableExists(start, Config.getConfig(start).getThirdPartyUsersTable())) {
getInstance(main).addState(CREATING_NEW_TABLE, null);
update(start, ThirdPartyQueries.getQueryToCreateUsersTable(start), NO_OP_SETTER);
// index
update(start, ThirdPartyQueries.getQueryToThirdPartyUserEmailIndex(start), NO_OP_SETTER);
update(start, ThirdPartyQueries.getQueryToThirdPartyUserIdIndex(start), NO_OP_SETTER);
@ -342,9 +329,6 @@ public class GeneralQueries {
if (!doesTableExists(start, Config.getConfig(start).getThirdPartyUserToTenantTable())) {
getInstance(main).addState(CREATING_NEW_TABLE, null);
update(start, ThirdPartyQueries.getQueryToCreateThirdPartyUserToTenantTable(start), NO_OP_SETTER);
// index
update(start, ThirdPartyQueries.getQueryToCreateThirdPartyUserToTenantThirdPartyUserIdIndex(start), NO_OP_SETTER);
}
if (!doesTableExists(start, Config.getConfig(start).getJWTSigningKeysTable())) {
@ -355,20 +339,12 @@ public class GeneralQueries {
if (!doesTableExists(start, Config.getConfig(start).getPasswordlessUsersTable())) {
getInstance(main).addState(CREATING_NEW_TABLE, null);
update(start, PasswordlessQueries.getQueryToCreateUsersTable(start), NO_OP_SETTER);
// index
update(start, PasswordlessQueries.getQueryToCreatePasswordlessUsersEmailIndex(start), NO_OP_SETTER);
update(start, PasswordlessQueries.getQueryToCreatePasswordlessUsersPhoneNumberIndex(start), NO_OP_SETTER);
}
if (!doesTableExists(start, Config.getConfig(start).getPasswordlessUserToTenantTable())) {
getInstance(main).addState(CREATING_NEW_TABLE, null);
update(start, PasswordlessQueries.getQueryToCreatePasswordlessUserToTenantTable(start),
NO_OP_SETTER);
// index
update(start, PasswordlessQueries.getQueryToCreatePasswordlessUserToTenantEmailIndex(start), NO_OP_SETTER);
update(start, PasswordlessQueries.getQueryToCreatePasswordlessUserToTenantPhoneNumberIndex(start), NO_OP_SETTER);
}
if (!doesTableExists(start, Config.getConfig(start).getPasswordlessDevicesTable())) {
@@ -410,7 +386,6 @@ public class GeneralQueries {
// index
update(start, UserRolesQueries.getQueryToCreateUserRolesRoleIndex(start), NO_OP_SETTER);
update(start, UserRolesQueries.getQueryToCreateUserRolesUserIdAppIdIndex(start), NO_OP_SETTER);
}
if (!doesTableExists(start, Config.getConfig(start).getUserIdMappingTable())) {
@@ -448,103 +423,9 @@ public class GeneralQueries {
update(start, TOTPQueries.getQueryToCreateUsedCodesExpiryTimeIndex(start), NO_OP_SETTER);
}
if (!doesTableExists(start, Config.getConfig(start).getOAuthClientsTable())) {
getInstance(main).addState(CREATING_NEW_TABLE, null);
update(start, OAuthQueries.getQueryToCreateOAuthClientTable(start), NO_OP_SETTER);
}
if (!doesTableExists(start, Config.getConfig(start).getOAuthSessionsTable())) {
getInstance(main).addState(CREATING_NEW_TABLE, null);
update(start, OAuthQueries.getQueryToCreateOAuthSessionsTable(start), NO_OP_SETTER);
// index
update(start, OAuthQueries.getQueryToCreateOAuthSessionsExpIndex(start), NO_OP_SETTER);
update(start, OAuthQueries.getQueryToCreateOAuthSessionsExternalRefreshTokenIndex(start), NO_OP_SETTER);
}
if (!doesTableExists(start, Config.getConfig(start).getOAuthM2MTokensTable())) {
getInstance(main).addState(CREATING_NEW_TABLE, null);
update(start, OAuthQueries.getQueryToCreateOAuthM2MTokensTable(start), NO_OP_SETTER);
// index
update(start, OAuthQueries.getQueryToCreateOAuthM2MTokenIatIndex(start), NO_OP_SETTER);
update(start, OAuthQueries.getQueryToCreateOAuthM2MTokenExpIndex(start), NO_OP_SETTER);
}
if (!doesTableExists(start, Config.getConfig(start).getOAuthLogoutChallengesTable())) {
getInstance(main).addState(CREATING_NEW_TABLE, null);
update(start, OAuthQueries.getQueryToCreateOAuthLogoutChallengesTable(start), NO_OP_SETTER);
// index
update(start, OAuthQueries.getQueryToCreateOAuthLogoutChallengesTimeCreatedIndex(start), NO_OP_SETTER);
}
if(!doesTableExists(start, Config.getConfig(start).getWebAuthNUsersTable())){
getInstance(main).addState(CREATING_NEW_TABLE, null);
update(start, WebAuthNQueries.getQueryToCreateWebAuthNUsersTable(start), NO_OP_SETTER);
}
if(!doesTableExists(start, Config.getConfig(start).getWebAuthNUserToTenantTable())){
getInstance(main).addState(CREATING_NEW_TABLE, null);
update(start, WebAuthNQueries.getQueryToCreateWebAuthNUsersToTenantTable(start), NO_OP_SETTER);
//index
update(start, WebAuthNQueries.getQueryToCreateWebAuthNUserToTenantEmailIndex(start), NO_OP_SETTER);
}
if(!doesTableExists(start, Config.getConfig(start).getWebAuthNGeneratedOptionsTable())){
getInstance(main).addState(CREATING_NEW_TABLE, null);
update(start, WebAuthNQueries.getQueryToCreateWebAuthNGeneratedOptionsTable(start), NO_OP_SETTER);
//index
update(start, WebAuthNQueries.getQueryToCreateWebAuthNChallengeExpiresIndex(start), NO_OP_SETTER);
}
if(!doesTableExists(start, Config.getConfig(start).getWebAuthNAccountRecoveryTokenTable())){
getInstance(main).addState(CREATING_NEW_TABLE, null);
update(start, WebAuthNQueries.getQueryToCreateWebAuthNAccountRecoveryTokenTable(start), NO_OP_SETTER);
//index
update(start, WebAuthNQueries.getQueryToCreateWebAuthNAccountRecoveryTokenTokenIndex(start), NO_OP_SETTER);
update(start, WebAuthNQueries.getQueryToCreateWebAuthNAccountRecoveryTokenEmailIndex(start), NO_OP_SETTER);
update(start, WebAuthNQueries.getQueryToCreateWebAuthNAccountRecoveryTokenExpiresAtIndex(start), NO_OP_SETTER);
}
if(!doesTableExists(start, Config.getConfig(start).getWebAuthNCredentialsTable())){
getInstance(main).addState(CREATING_NEW_TABLE, null);
update(start, WebAuthNQueries.getQueryToCreateWebAuthNCredentialsTable(start), NO_OP_SETTER);
//index
update(start, WebAuthNQueries.getQueryToCreateWebAuthNCredentialsUserIdIndex(start), NO_OP_SETTER);
}
// SAML tables
if (!doesTableExists(start, Config.getConfig(start).getSAMLClientsTable())) {
getInstance(main).addState(CREATING_NEW_TABLE, null);
update(start, SAMLQueries.getQueryToCreateSAMLClientsTable(start), NO_OP_SETTER);
// indexes
update(start, SAMLQueries.getQueryToCreateSAMLClientsAppIdTenantIdIndex(start), NO_OP_SETTER);
}
if (!doesTableExists(start, Config.getConfig(start).getSAMLRelayStateTable())) {
getInstance(main).addState(CREATING_NEW_TABLE, null);
update(start, SAMLQueries.getQueryToCreateSAMLRelayStateTable(start), NO_OP_SETTER);
// indexes
update(start, SAMLQueries.getQueryToCreateSAMLRelayStateAppIdTenantIdIndex(start), NO_OP_SETTER);
update(start, SAMLQueries.getQueryToCreateSAMLRelayStateExpiresAtIndex(start), NO_OP_SETTER);
}
if (!doesTableExists(start, Config.getConfig(start).getSAMLClaimsTable())) {
getInstance(main).addState(CREATING_NEW_TABLE, null);
update(start, SAMLQueries.getQueryToCreateSAMLClaimsTable(start), NO_OP_SETTER);
// indexes
update(start, SAMLQueries.getQueryToCreateSAMLClaimsAppIdTenantIdIndex(start), NO_OP_SETTER);
update(start, SAMLQueries.getQueryToCreateSAMLClaimsExpiresAtIndex(start), NO_OP_SETTER);
}
}
public static void setKeyValue_Transaction(Start start, Connection con, TenantIdentifier tenantIdentifier,
String key, KeyValueInfo info)
throws SQLException, StorageQueryException {
@@ -749,7 +630,7 @@ public class GeneralQueries {
// attach email tags to queries
QUERY = QUERY +
" WHERE (emailpasswordTable.app_id = ? AND emailpasswordTable.tenant_id = ?) AND"
+ " ( emailpasswordTable.email LIKE ? OR emailpasswordTable.email LIKE ? ";
+ " (emailpasswordTable.email LIKE ? OR emailpasswordTable.email LIKE ?)";
queryList.add(tenantIdentifier.getAppId());
queryList.add(tenantIdentifier.getTenantId());
queryList.add(dashboardSearchTags.emails.get(0) + "%");
@@ -760,8 +641,6 @@ public class GeneralQueries {
queryList.add("%@" + dashboardSearchTags.emails.get(i) + "%");
}
QUERY += " )";
USER_SEARCH_TAG_CONDITION.append("SELECT * FROM ( ").append(QUERY)
.append(" LIMIT 1000) AS emailpasswordResultTable");
}
@@ -901,45 +780,6 @@ public class GeneralQueries {
}
}
{
// check if we should search through the webauthn table
if (dashboardSearchTags.shouldWebauthnTableBeSearched()) {
String QUERY = "SELECT allAuthUsersTable.*" + " FROM " + getConfig(start).getUsersTable()
+ " AS allAuthUsersTable" +
" JOIN " + getConfig(start).getWebAuthNUserToTenantTable()
+ " AS webauthnTable ON allAuthUsersTable.app_id = webauthnTable.app_id AND "
+ "allAuthUsersTable.tenant_id = webauthnTable.tenant_id AND "
+ "allAuthUsersTable.user_id = webauthnTable.user_id";
// attach email tags to queries
QUERY = QUERY +
" WHERE (webauthnTable.app_id = ? AND webauthnTable.tenant_id = ?) AND"
+ " ( webauthnTable.email LIKE ? OR webauthnTable.email LIKE ? ";
queryList.add(tenantIdentifier.getAppId());
queryList.add(tenantIdentifier.getTenantId());
queryList.add(dashboardSearchTags.emails.get(0) + "%");
queryList.add("%@" + dashboardSearchTags.emails.get(0) + "%");
for (int i = 1; i < dashboardSearchTags.emails.size(); i++) {
QUERY += " OR webauthnTable.email LIKE ? OR webauthnTable.email LIKE ?";
queryList.add(dashboardSearchTags.emails.get(i) + "%");
queryList.add("%@" + dashboardSearchTags.emails.get(i) + "%");
}
QUERY += " )";
// check if we need to append this to an existing search query
if (USER_SEARCH_TAG_CONDITION.length() != 0) {
USER_SEARCH_TAG_CONDITION.append(" UNION ").append("SELECT * FROM ( ").append(QUERY)
.append(" LIMIT 1000) AS webauthnResultTable");
} else {
USER_SEARCH_TAG_CONDITION.append("SELECT * FROM ( ").append(QUERY)
.append(" LIMIT 1000) AS webauthnResultTable");
}
}
}
if (USER_SEARCH_TAG_CONDITION.toString().length() == 0) {
usersFromQuery = new ArrayList<>();
} else {
@@ -1087,32 +927,6 @@ public class GeneralQueries {
}
}
public static void makePrimaryUsers_Transaction(Start start, Connection sqlCon, AppIdentifier appIdentifier,
List<String> userIds)
throws SQLException, StorageQueryException {
String users_update_QUERY = "UPDATE " + getConfig(start).getUsersTable() +
" SET is_linked_or_is_a_primary_user = true WHERE app_id = ? AND user_id = ?";
String appid_to_userid_update_QUERY = "UPDATE " + getConfig(start).getAppIdToUserIdTable() +
" SET is_linked_or_is_a_primary_user = true WHERE app_id = ? AND user_id = ?";
List<PreparedStatementValueSetter> usersSetter = new ArrayList<>();
List<PreparedStatementValueSetter> appIdToUserIdSetter = new ArrayList<>();
for(String userId: userIds) {
usersSetter.add(pst -> {
pst.setString(1, appIdentifier.getAppId());
pst.setString(2, userId);
});
appIdToUserIdSetter.add(pst -> {
pst.setString(1, appIdentifier.getAppId());
pst.setString(2, userId);
});
}
executeBatch(sqlCon, users_update_QUERY, usersSetter);
executeBatch(sqlCon, appid_to_userid_update_QUERY, appIdToUserIdSetter);
}
public static void linkAccounts_Transaction(Start start, Connection sqlCon, AppIdentifier appIdentifier,
String recipeUserId, String primaryUserId)
throws SQLException, StorageQueryException {
@@ -1143,47 +957,6 @@ public class GeneralQueries {
}
}
public static void linkMultipleAccounts_Transaction(Start start, Connection sqlCon, AppIdentifier appIdentifier,
Map<String, String> recipeUserIdToPrimaryUserId)
throws SQLException, StorageQueryException {
if(recipeUserIdToPrimaryUserId == null || recipeUserIdToPrimaryUserId.isEmpty()){
return;
}
String update_users_QUERY = "UPDATE " + getConfig(start).getUsersTable() +
" SET is_linked_or_is_a_primary_user = true, primary_or_recipe_user_id = ? WHERE app_id = ? AND " +
"user_id = ?";
String update_appid_to_userid_QUERY = "UPDATE " + getConfig(start).getAppIdToUserIdTable() +
" SET is_linked_or_is_a_primary_user = true, primary_or_recipe_user_id = ? WHERE app_id = ? AND " +
"user_id = ?";
List<PreparedStatementValueSetter> updateUsersSetter = new ArrayList<>();
List<PreparedStatementValueSetter> updateAppIdToUserIdSetter = new ArrayList<>();
for(Map.Entry<String, String> linkEntry : recipeUserIdToPrimaryUserId.entrySet()) {
String primaryUserId = linkEntry.getValue();
String recipeUserId = linkEntry.getKey();
updateUsersSetter.add(pst -> {
pst.setString(1, primaryUserId);
pst.setString(2, appIdentifier.getAppId());
pst.setString(3, recipeUserId);
});
updateAppIdToUserIdSetter.add(pst -> {
pst.setString(1, primaryUserId);
pst.setString(2, appIdentifier.getAppId());
pst.setString(3, recipeUserId);
});
}
executeBatch(sqlCon, update_users_QUERY, updateUsersSetter);
executeBatch(sqlCon, update_appid_to_userid_QUERY, updateAppIdToUserIdSetter);
updateTimeJoinedForPrimaryUsers_Transaction(start, sqlCon, appIdentifier,
new ArrayList<>(recipeUserIdToPrimaryUserId.values()));
}
public static void unlinkAccounts_Transaction(Start start, Connection sqlCon, AppIdentifier appIdentifier,
String primaryUserId, String recipeUserId)
throws SQLException, StorageQueryException {
@@ -1302,13 +1075,6 @@ public class GeneralQueries {
userIds.addAll(ThirdPartyQueries.getPrimaryUserIdUsingEmail_Transaction(start, sqlCon, appIdentifier, email));
String webauthnUserId = WebAuthNQueries.getPrimaryUserIdForAppUsingEmail_Transaction(start, sqlCon,
appIdentifier, email);
if(webauthnUserId != null) {
userIds.add(webauthnUserId);
}
// remove duplicates from userIds
Set<String> userIdsSet = new HashSet<>(userIds);
userIds = new ArrayList<>(userIdsSet);
@@ -1340,11 +1106,6 @@ public class GeneralQueries {
userIds.addAll(ThirdPartyQueries.getPrimaryUserIdUsingEmail(start, tenantIdentifier, email));
String webauthnUserId = WebAuthNQueries.getPrimaryUserIdForTenantUsingEmail(start, tenantIdentifier, email);
if(webauthnUserId != null) {
userIds.add(webauthnUserId);
}
// remove duplicates from userIds
Set<String> userIdsSet = new HashSet<>(userIds);
userIds = new ArrayList<>(userIdsSet);
@@ -1389,34 +1150,6 @@ public class GeneralQueries {
return getPrimaryUserInfoForUserId(start, tenantIdentifier.toAppIdentifier(), userId);
}
public static AuthRecipeUserInfo getPrimaryUserByWebauthNCredentialId(Start start,
TenantIdentifier tenantIdentifier,
String credentialId)
throws StorageQueryException, SQLException, StorageTransactionLogicException {
AuthRecipeUserInfo webauthnUser = start.startTransaction(con -> {
try {
Connection sqlCon = (Connection) con.getConnection();
return getPrimaryUserByWebauthNCredentialId_Transaction(start, sqlCon, tenantIdentifier,
credentialId);
} catch (SQLException e) {
throw new StorageQueryException(e);
}
});
return webauthnUser;
}
public static AuthRecipeUserInfo getPrimaryUserByWebauthNCredentialId_Transaction(Start start,
Connection connection,
TenantIdentifier tenantIdentifier,
String credentialId)
throws StorageQueryException, SQLException, StorageTransactionLogicException {
AuthRecipeUserInfo webauthnUser = WebAuthNQueries.getUserInfoByCredentialId_Transaction(start, connection,
tenantIdentifier, credentialId);
return getPrimaryUserInfoForUserId_Transaction(start, connection, tenantIdentifier.toAppIdentifier(),
webauthnUser.getSupertokensUserId());
}
public static String getPrimaryUserIdStrForUserId(Start start, AppIdentifier appIdentifier, String id)
throws SQLException, StorageQueryException {
String QUERY = "SELECT primary_or_recipe_user_id FROM " + getConfig(start).getUsersTable() +
@@ -1455,17 +1188,6 @@ public class GeneralQueries {
return result.get(0);
}
public static List<AuthRecipeUserInfo> getPrimaryUsersInfoForUserIds_Transaction(Start start, Connection con,
AppIdentifier appIdentifier, List<String> ids)
throws SQLException, StorageQueryException {
List<AuthRecipeUserInfo> result = getPrimaryUserInfoForUserIds_Transaction(start, con, appIdentifier, ids);
if (result.isEmpty()) {
return null;
}
return result;
}
private static List<AuthRecipeUserInfo> getPrimaryUserInfoForUserIds(Start start,
AppIdentifier appIdentifier,
List<String> userIds)
@@ -1530,7 +1252,6 @@ public class GeneralQueries {
loginMethods.addAll(ThirdPartyQueries.getUsersInfoUsingIdList(start, recipeUserIdsToFetch, appIdentifier));
loginMethods.addAll(
PasswordlessQueries.getUsersInfoUsingIdList(start, recipeUserIdsToFetch, appIdentifier));
loginMethods.addAll(WebAuthNQueries.getUsersInfoUsingIdList(start, recipeUserIdsToFetch, appIdentifier));
Map<String, LoginMethod> recipeUserIdToLoginMethodMap = new HashMap<>();
for (LoginMethod loginMethod : loginMethods) {
@@ -1630,8 +1351,6 @@ public class GeneralQueries {
loginMethods.addAll(
PasswordlessQueries.getUsersInfoUsingIdList_Transaction(start, sqlCon, recipeUserIdsToFetch,
appIdentifier));
loginMethods.addAll(WebAuthNQueries.getUsersInfoUsingIdList_Transaction(start, sqlCon, recipeUserIdsToFetch,
appIdentifier));
Map<String, LoginMethod> recipeUserIdToLoginMethodMap = new HashMap<>();
for (LoginMethod loginMethod : loginMethods) {
@@ -1918,27 +1637,6 @@ public class GeneralQueries {
});
}
public static void updateTimeJoinedForPrimaryUsers_Transaction(Start start, Connection sqlCon,
AppIdentifier appIdentifier, List<String> primaryUserIds)
throws SQLException, StorageQueryException {
String QUERY = "UPDATE " + getConfig(start).getUsersTable() +
" SET primary_or_recipe_user_time_joined = (SELECT MIN(time_joined) FROM " +
getConfig(start).getUsersTable() + " WHERE app_id = ? AND primary_or_recipe_user_id = ?) WHERE " +
" app_id = ? AND primary_or_recipe_user_id = ?";
List<PreparedStatementValueSetter> setters = new ArrayList<>();
for(String primaryUserId : primaryUserIds) {
setters.add(pst -> {
pst.setString(1, appIdentifier.getAppId());
pst.setString(2, primaryUserId);
pst.setString(3, appIdentifier.getAppId());
pst.setString(4, primaryUserId);
});
}
executeBatch(sqlCon, QUERY, setters);
}
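The batch UPDATE above relies on a correlated subquery: every row that shares a primary_or_recipe_user_id gets its primary_or_recipe_user_time_joined set to the earliest time_joined in that group. A minimal stand-alone sketch of that aggregation, using hypothetical user ids and timestamps (not taken from the diff):

// Sketch only: MIN(time_joined) per primary user group, written back to all rows.
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

class EarliestTimeJoinedSketch {
    public static void main(String[] args) {
        // three login methods linked under one primary user
        Map<String, Long> timeJoined = new HashMap<>();
        timeJoined.put("recipeUser1", 2000L);
        timeJoined.put("recipeUser2", 1500L);
        timeJoined.put("primaryUser", 3000L);

        long earliest = Collections.min(timeJoined.values());
        // after the batch UPDATE, primary_or_recipe_user_time_joined == 1500 on all three rows
        System.out.println(earliest); // 1500
    }
}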
private static class AllAuthRecipeUsersResultHolder {
String userId;
String tenantId;

View File

@@ -1,464 +0,0 @@
/*
* Copyright (c) 2024, VRAI Labs and/or its affiliates. All rights reserved.
*
* This software is licensed under the Apache License, Version 2.0 (the
* "License") as published by the Apache Software Foundation.
*
* You may not use this file except in compliance with the License. You may
* obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package io.supertokens.inmemorydb.queries;
import io.supertokens.inmemorydb.Start;
import io.supertokens.inmemorydb.Utils;
import io.supertokens.inmemorydb.config.Config;
import io.supertokens.pluginInterface.exceptions.StorageQueryException;
import io.supertokens.pluginInterface.multitenancy.AppIdentifier;
import io.supertokens.pluginInterface.oauth.OAuthClient;
import io.supertokens.pluginInterface.oauth.OAuthLogoutChallenge;
import org.jetbrains.annotations.NotNull;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;
import static io.supertokens.inmemorydb.QueryExecutorTemplate.execute;
import static io.supertokens.inmemorydb.QueryExecutorTemplate.update;
public class OAuthQueries {
public static String getQueryToCreateOAuthClientTable(Start start) {
String oAuth2ClientTable = Config.getConfig(start).getOAuthClientsTable();
// @formatter:off
return "CREATE TABLE IF NOT EXISTS " + oAuth2ClientTable + " ("
+ "app_id VARCHAR(64),"
+ "client_id VARCHAR(255) NOT NULL,"
+ "client_secret TEXT,"
+ "enable_refresh_token_rotation BOOLEAN NOT NULL,"
+ "is_client_credentials_only BOOLEAN NOT NULL,"
+ " PRIMARY KEY (app_id, client_id),"
+ " FOREIGN KEY(app_id) REFERENCES " + Config.getConfig(start).getAppsTable() + "(app_id) ON DELETE CASCADE);";
// @formatter:on
}
public static String getQueryToCreateOAuthSessionsTable(Start start) {
String oAuthSessionsTable = Config.getConfig(start).getOAuthSessionsTable();
// @formatter:off
return "CREATE TABLE IF NOT EXISTS " + oAuthSessionsTable + " ("
+ "gid VARCHAR(255)," // needed for instrospect. It's much easier to find these records if we have a gid
+ "app_id VARCHAR(64) DEFAULT 'public',"
+ "client_id VARCHAR(255) NOT NULL,"
+ "session_handle VARCHAR(128),"
+ "external_refresh_token VARCHAR(255) UNIQUE,"
+ "internal_refresh_token VARCHAR(255) UNIQUE,"
+ "jti TEXT NOT NULL," // comma separated jti list
+ "exp BIGINT NOT NULL,"
+ "PRIMARY KEY (gid),"
+ "FOREIGN KEY(app_id, client_id) REFERENCES " + Config.getConfig(start).getOAuthClientsTable() + "(app_id, client_id) ON DELETE CASCADE);";
// @formatter:on
}
public static String getQueryToCreateOAuthSessionsExpIndex(Start start) {
String oAuth2SessionTable = Config.getConfig(start).getOAuthSessionsTable();
return "CREATE INDEX IF NOT EXISTS oauth_session_exp_index ON "
+ oAuth2SessionTable + "(exp DESC);";
}
public static String getQueryToCreateOAuthSessionsExternalRefreshTokenIndex(Start start) {
String oAuth2SessionTable = Config.getConfig(start).getOAuthSessionsTable();
return "CREATE INDEX IF NOT EXISTS oauth_session_external_refresh_token_index ON "
+ oAuth2SessionTable + "(app_id, external_refresh_token DESC);";
}
public static String getQueryToCreateOAuthM2MTokensTable(Start start) {
String oAuth2M2MTokensTable = Config.getConfig(start).getOAuthM2MTokensTable();
// @formatter:off
return "CREATE TABLE IF NOT EXISTS " + oAuth2M2MTokensTable + " ("
+ "app_id VARCHAR(64) DEFAULT 'public',"
+ "client_id VARCHAR(255) NOT NULL,"
+ "iat BIGINT NOT NULL,"
+ "exp BIGINT NOT NULL,"
+ "PRIMARY KEY (app_id, client_id, iat),"
+ "FOREIGN KEY(app_id, client_id)"
+ " REFERENCES " + Config.getConfig(start).getOAuthClientsTable() + "(app_id, client_id) ON DELETE CASCADE"
+ ");";
// @formatter:on
}
public static String getQueryToCreateOAuthM2MTokenIatIndex(Start start) {
String oAuth2M2MTokensTable = Config.getConfig(start).getOAuthM2MTokensTable();
return "CREATE INDEX IF NOT EXISTS oauth_m2m_token_iat_index ON "
+ oAuth2M2MTokensTable + "(iat DESC, app_id DESC);";
}
public static String getQueryToCreateOAuthM2MTokenExpIndex(Start start) {
String oAuth2M2MTokensTable = Config.getConfig(start).getOAuthM2MTokensTable();
return "CREATE INDEX IF NOT EXISTS oauth_m2m_token_exp_index ON "
+ oAuth2M2MTokensTable + "(exp DESC);";
}
public static String getQueryToCreateOAuthLogoutChallengesTable(Start start) {
String oAuth2LogoutChallengesTable = Config.getConfig(start).getOAuthLogoutChallengesTable();
// @formatter:off
return "CREATE TABLE IF NOT EXISTS " + oAuth2LogoutChallengesTable + " ("
+ "app_id VARCHAR(64) DEFAULT 'public',"
+ "challenge VARCHAR(128) NOT NULL,"
+ "client_id VARCHAR(255) NOT NULL,"
+ "post_logout_redirect_uri VARCHAR(1024),"
+ "session_handle VARCHAR(128),"
+ "state VARCHAR(128),"
+ "time_created BIGINT NOT NULL,"
+ "PRIMARY KEY (app_id, challenge),"
+ "FOREIGN KEY(app_id, client_id)"
+ " REFERENCES " + Config.getConfig(start).getOAuthClientsTable() + "(app_id, client_id) ON DELETE CASCADE"
+ ");";
// @formatter:on
}
public static String getQueryToCreateOAuthLogoutChallengesTimeCreatedIndex(Start start) {
String oAuth2LogoutChallengesTable = Config.getConfig(start).getOAuthLogoutChallengesTable();
return "CREATE INDEX IF NOT EXISTS oauth_logout_challenges_time_created_index ON "
+ oAuth2LogoutChallengesTable + "(time_created DESC);";
}
public static OAuthClient getOAuthClientById(Start start, String clientId, AppIdentifier appIdentifier)
throws SQLException, StorageQueryException {
String QUERY = "SELECT client_secret, is_client_credentials_only, enable_refresh_token_rotation FROM " + Config.getConfig(start).getOAuthClientsTable() +
" WHERE client_id = ? AND app_id = ?";
return execute(start, QUERY, pst -> {
pst.setString(1, clientId);
pst.setString(2, appIdentifier.getAppId());
}, (result) -> {
if (result.next()) {
return new OAuthClient(clientId, result.getString("client_secret"), result.getBoolean("is_client_credentials_only"), result.getBoolean("enable_refresh_token_rotation"));
}
return null;
});
}
public static void createOrUpdateOAuthSession(Start start, AppIdentifier appIdentifier, @NotNull String gid, @NotNull String clientId,
String externalRefreshToken, String internalRefreshToken, String sessionHandle,
String jti, long exp)
throws SQLException, StorageQueryException {
String QUERY = "INSERT INTO " + Config.getConfig(start).getOAuthSessionsTable() +
" (gid, client_id, app_id, external_refresh_token, internal_refresh_token, session_handle, jti, exp) VALUES (?, ?, ?, ?, ?, ?, ?, ?) " +
"ON CONFLICT (gid) DO UPDATE SET external_refresh_token = ?, internal_refresh_token = ?, " +
"session_handle = ? , jti = CONCAT(jti, ?), exp = ?";
update(start, QUERY, pst -> {
String jtiToInsert = jti + ","; //every jti value ends with ','
pst.setString(1, gid);
pst.setString(2, clientId);
pst.setString(3, appIdentifier.getAppId());
pst.setString(4, externalRefreshToken);
pst.setString(5, internalRefreshToken);
pst.setString(6, sessionHandle);
pst.setString(7, jtiToInsert);
pst.setLong(8, exp);
pst.setString(9, externalRefreshToken);
pst.setString(10, internalRefreshToken);
pst.setString(11, sessionHandle);
pst.setString(12, jtiToInsert);
pst.setLong(13, exp);
});
}
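The jti column is kept as a comma-separated string in which every entry ends with ','. The upsert above appends entries via CONCAT(jti, ?); later in this file, isOAuthSessionExistsByJTI splits the string to test membership and deleteJTIFromOAuthSession strips an entry with REPLACE. A small stand-alone sketch of that string bookkeeping, with made-up jti values:

// Sketch only: the comma-terminated jti list as plain string manipulation.
import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;

class JtiListSketch {
    public static void main(String[] args) {
        String stored = "";                 // jti column for a fresh gid
        stored += "jti-1" + ",";            // mirrors CONCAT(jti, ?) with a trailing comma
        stored += "jti-2" + ",";

        // membership check mirrors isOAuthSessionExistsByJTI: split and drop empties
        List<String> jtis = Arrays.stream(stored.split(","))
                .filter(s -> !s.isEmpty())
                .collect(Collectors.toList());
        System.out.println(jtis.contains("jti-1")); // true

        // deletion mirrors deleteJTIFromOAuthSession: REPLACE(jti, 'jti-1,', '')
        stored = stored.replace("jti-1" + ",", "");
        System.out.println(stored);                 // "jti-2,"
    }
}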
public static List<OAuthClient> getOAuthClients(Start start, AppIdentifier appIdentifier, List<String> clientIds)
throws SQLException, StorageQueryException {
String QUERY = "SELECT * FROM " + Config.getConfig(start).getOAuthClientsTable()
+ " WHERE app_id = ? AND client_id IN ("
+ Utils.generateCommaSeperatedQuestionMarks(clientIds.size())
+ ")";
return execute(start, QUERY, pst -> {
pst.setString(1, appIdentifier.getAppId());
for (int i = 0; i < clientIds.size(); i++) {
pst.setString(i + 2, clientIds.get(i));
}
}, (result) -> {
List<OAuthClient> res = new ArrayList<>();
while (result.next()) {
res.add(new OAuthClient(result.getString("client_id"), result.getString("client_secret"), result.getBoolean("is_client_credentials_only"), result.getBoolean("enable_refresh_token_rotation")));
}
return res;
});
}
public static void addOrUpdateOauthClient(Start start, AppIdentifier appIdentifier, String clientId, String clientSecret,
boolean isClientCredentialsOnly, boolean enableRefreshTokenRotation)
throws SQLException, StorageQueryException {
String INSERT = "INSERT INTO " + Config.getConfig(start).getOAuthClientsTable()
+ "(app_id, client_id, client_secret, is_client_credentials_only, enable_refresh_token_rotation) VALUES(?, ?, ?, ?, ?) "
+ "ON CONFLICT (app_id, client_id) DO UPDATE SET client_secret = ?, is_client_credentials_only = ?, enable_refresh_token_rotation = ?";
update(start, INSERT, pst -> {
pst.setString(1, appIdentifier.getAppId());
pst.setString(2, clientId);
pst.setString(3, clientSecret);
pst.setBoolean(4, isClientCredentialsOnly);
pst.setBoolean(5, enableRefreshTokenRotation);
pst.setString(6, clientSecret);
pst.setBoolean(7, isClientCredentialsOnly);
pst.setBoolean(8, enableRefreshTokenRotation);
});
}
public static boolean deleteOAuthClient(Start start, String clientId, AppIdentifier appIdentifier)
throws SQLException, StorageQueryException {
String DELETE = "DELETE FROM " + Config.getConfig(start).getOAuthClientsTable()
+ " WHERE app_id = ? AND client_id = ?";
int numberOfRow = update(start, DELETE, pst -> {
pst.setString(1, appIdentifier.getAppId());
pst.setString(2, clientId);
});
return numberOfRow > 0;
}
public static boolean deleteOAuthSessionByGID(Start start, AppIdentifier appIdentifier, String gid)
throws SQLException, StorageQueryException {
String DELETE = "DELETE FROM " + Config.getConfig(start).getOAuthSessionsTable()
+ " WHERE gid = ? and app_id = ?;";
int numberOfRows = update(start, DELETE, pst -> {
pst.setString(1, gid);
pst.setString(2, appIdentifier.getAppId());
});
return numberOfRows > 0;
}
public static boolean deleteOAuthSessionByClientId(Start start, AppIdentifier appIdentifier, String clientId)
throws SQLException, StorageQueryException {
String DELETE = "DELETE FROM " + Config.getConfig(start).getOAuthSessionsTable()
+ " WHERE app_id = ? and client_id = ?;";
int numberOfRows = update(start, DELETE, pst -> {
pst.setString(1, appIdentifier.getAppId());
pst.setString(2, clientId);
});
return numberOfRows > 0;
}
public static boolean deleteOAuthSessionBySessionHandle(Start start, AppIdentifier appIdentifier, String sessionHandle)
throws SQLException, StorageQueryException {
String DELETE = "DELETE FROM " + Config.getConfig(start).getOAuthSessionsTable()
+ " WHERE app_id = ? and session_handle = ?";
int numberOfRows = update(start, DELETE, pst -> {
pst.setString(1, appIdentifier.getAppId());
pst.setString(2, sessionHandle);
});
return numberOfRows > 0;
}
public static boolean deleteJTIFromOAuthSession(Start start, AppIdentifier appIdentifier, String gid, String jti)
throws SQLException, StorageQueryException {
// jti is a comma-separated list; deleting a jti means removing its entry from the list
String DELETE = "UPDATE " + Config.getConfig(start).getOAuthSessionsTable()
+ " SET jti = REPLACE(jti, ?, '')" // deletion means replacing the jti with empty char
+ " WHERE app_id = ? and gid = ?";
int numberOfRows = update(start, DELETE, pst -> {
pst.setString(1, jti + ",");
pst.setString(2, appIdentifier.getAppId());
pst.setString(3, gid);
});
return numberOfRows > 0;
}
public static int countTotalNumberOfClients(Start start, AppIdentifier appIdentifier,
boolean filterByClientCredentialsOnly) throws SQLException, StorageQueryException {
if (filterByClientCredentialsOnly) {
String QUERY = "SELECT COUNT(*) as c FROM " + Config.getConfig(start).getOAuthClientsTable() +
" WHERE app_id = ? AND is_client_credentials_only = ?";
return execute(start, QUERY, pst -> {
pst.setString(1, appIdentifier.getAppId());
pst.setBoolean(2, true);
}, result -> {
if (result.next()) {
return result.getInt("c");
}
return 0;
});
} else {
String QUERY = "SELECT COUNT(*) as c FROM " + Config.getConfig(start).getOAuthClientsTable() +
" WHERE app_id = ?";
return execute(start, QUERY, pst -> {
pst.setString(1, appIdentifier.getAppId());
}, result -> {
if (result.next()) {
return result.getInt("c");
}
return 0;
});
}
}
public static int countTotalNumberOfOAuthM2MTokensAlive(Start start, AppIdentifier appIdentifier)
throws SQLException, StorageQueryException {
String QUERY = "SELECT COUNT(*) as c FROM " + Config.getConfig(start).getOAuthM2MTokensTable() +
" WHERE app_id = ? AND exp > ?";
return execute(start, QUERY, pst -> {
pst.setString(1, appIdentifier.getAppId());
pst.setLong(2, System.currentTimeMillis()/1000);
}, result -> {
if (result.next()) {
return result.getInt("c");
}
return 0;
});
}
public static int countTotalNumberOfOAuthM2MTokensCreatedSince(Start start, AppIdentifier appIdentifier, long since)
throws SQLException, StorageQueryException {
String QUERY = "SELECT COUNT(*) as c FROM " + Config.getConfig(start).getOAuthM2MTokensTable() +
" WHERE app_id = ? AND iat >= ?";
return execute(start, QUERY, pst -> {
pst.setString(1, appIdentifier.getAppId());
pst.setLong(2, since / 1000);
}, result -> {
if (result.next()) {
return result.getInt("c");
}
return 0;
});
}
public static void addOAuthM2MTokenForStats(Start start, AppIdentifier appIdentifier, String clientId, long iat, long exp)
throws SQLException, StorageQueryException {
String QUERY = "INSERT INTO " + Config.getConfig(start).getOAuthM2MTokensTable() +
" (app_id, client_id, iat, exp) VALUES (?, ?, ?, ?) ON CONFLICT DO NOTHING";
update(start, QUERY, pst -> {
pst.setString(1, appIdentifier.getAppId());
pst.setString(2, clientId);
pst.setLong(3, iat);
pst.setLong(4, exp);
});
}
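The M2M stats helpers above compare exp against System.currentTimeMillis() / 1000 and iat against since / 1000, so the iat and exp columns appear to hold epoch seconds while the Java side mostly works in milliseconds. A small sketch of that unit handling, with hypothetical values:

// Sketch only: columns in epoch seconds, Java callers in milliseconds.
public class M2MTimeUnitsSketch {
    public static void main(String[] args) {
        long nowMs = System.currentTimeMillis();
        long nowSec = nowMs / 1000;                      // what the queries compare against

        long iatSec = nowSec - 60;                       // hypothetical: issued a minute ago
        long expSec = nowSec + 3600;                     // hypothetical: valid for an hour

        boolean alive = expSec > nowSec;                 // mirrors "exp > ?" in ...TokensAlive
        long sinceMs = nowMs - 24 * 60 * 60 * 1000L;     // "last 24 hours" in milliseconds
        boolean createdSince = iatSec >= sinceMs / 1000; // mirrors "iat >= ?" in ...CreatedSince

        System.out.println(alive + " " + createdSince);
    }
}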
public static void addOAuthLogoutChallenge(Start start, AppIdentifier appIdentifier, String challenge, String clientId,
String postLogoutRedirectionUri, String sessionHandle, String state, long timeCreated) throws SQLException, StorageQueryException {
String QUERY = "INSERT INTO " + Config.getConfig(start).getOAuthLogoutChallengesTable() +
" (app_id, challenge, client_id, post_logout_redirect_uri, session_handle, state, time_created) VALUES (?, ?, ?, ?, ?, ?, ?)";
update(start, QUERY, pst -> {
pst.setString(1, appIdentifier.getAppId());
pst.setString(2, challenge);
pst.setString(3, clientId);
pst.setString(4, postLogoutRedirectionUri);
pst.setString(5, sessionHandle);
pst.setString(6, state);
pst.setLong(7, timeCreated);
});
}
public static OAuthLogoutChallenge getOAuthLogoutChallenge(Start start, AppIdentifier appIdentifier, String challenge) throws SQLException, StorageQueryException {
String QUERY = "SELECT challenge, client_id, post_logout_redirect_uri, session_handle, state, time_created FROM " +
Config.getConfig(start).getOAuthLogoutChallengesTable() +
" WHERE app_id = ? AND challenge = ?";
return execute(start, QUERY, pst -> {
pst.setString(1, appIdentifier.getAppId());
pst.setString(2, challenge);
}, result -> {
if (result.next()) {
return new OAuthLogoutChallenge(
result.getString("challenge"),
result.getString("client_id"),
result.getString("post_logout_redirect_uri"),
result.getString("session_handle"),
result.getString("state"),
result.getLong("time_created")
);
}
return null;
});
}
public static void deleteOAuthLogoutChallenge(Start start, AppIdentifier appIdentifier, String challenge) throws SQLException, StorageQueryException {
String QUERY = "DELETE FROM " + Config.getConfig(start).getOAuthLogoutChallengesTable() +
" WHERE app_id = ? AND challenge = ?";
update(start, QUERY, pst -> {
pst.setString(1, appIdentifier.getAppId());
pst.setString(2, challenge);
});
}
public static void deleteOAuthLogoutChallengesBefore(Start start, long time) throws SQLException, StorageQueryException {
String QUERY = "DELETE FROM " + Config.getConfig(start).getOAuthLogoutChallengesTable() +
" WHERE time_created < ?";
update(start, QUERY, pst -> {
pst.setLong(1, time);
});
}
public static String getRefreshTokenMapping(Start start, AppIdentifier appIdentifier, String externalRefreshToken) throws SQLException, StorageQueryException {
String QUERY = "SELECT internal_refresh_token FROM " + Config.getConfig(start).getOAuthSessionsTable() +
" WHERE app_id = ? AND external_refresh_token = ?";
return execute(start, QUERY, pst -> {
pst.setString(1, appIdentifier.getAppId());
pst.setString(2, externalRefreshToken);
}, result -> {
if (result.next()) {
return result.getString("internal_refresh_token");
}
return null;
});
}
public static void deleteExpiredOAuthSessions(Start start, long exp) throws SQLException, StorageQueryException {
// delete expired OAuth sessions
String QUERY = "DELETE FROM " + Config.getConfig(start).getOAuthSessionsTable() +
" WHERE exp < ?";
update(start, QUERY, pst -> {
pst.setLong(1, exp);
});
}
public static void deleteExpiredOAuthM2MTokens(Start start, long exp) throws SQLException, StorageQueryException {
// delete expired M2M tokens
String QUERY = "DELETE FROM " + Config.getConfig(start).getOAuthM2MTokensTable() +
" WHERE exp < ?";
update(start, QUERY, pst -> {
pst.setLong(1, exp);
});
}
public static boolean isOAuthSessionExistsByJTI(Start start, AppIdentifier appIdentifier, String gid, String jti)
throws SQLException, StorageQueryException {
String SELECT = "SELECT jti FROM " + Config.getConfig(start).getOAuthSessionsTable()
+ " WHERE app_id = ? and gid = ?;";
return execute(start, SELECT, pst -> {
pst.setString(1, appIdentifier.getAppId());
pst.setString(2, gid);
}, result -> {
if(result.next()){
List<String> jtis = Arrays.stream(result.getString(1).split(",")).filter(s -> !s.isEmpty()).collect(
Collectors.toList());
return jtis.contains(jti);
}
return false;
});
}
public static boolean isOAuthSessionExistsByGID(Start start, AppIdentifier appIdentifier, String gid)
throws SQLException, StorageQueryException {
String SELECT = "SELECT count(*) FROM " + Config.getConfig(start).getOAuthSessionsTable()
+ " WHERE app_id = ? and gid = ?;";
return execute(start, SELECT, pst -> {
pst.setString(1, appIdentifier.getAppId());
pst.setString(2, gid);
}, result -> {
if(result.next()){
return result.getInt(1) > 0;
}
return false;
});
}
}

View File

@@ -60,16 +60,6 @@ public class PasswordlessQueries {
+ ");";
}
static String getQueryToCreatePasswordlessUsersEmailIndex(Start start) {
return "CREATE INDEX passwordless_users_email_index ON "
+ Config.getConfig(start).getPasswordlessUsersTable() + "(app_id, email);";
}
static String getQueryToCreatePasswordlessUsersPhoneNumberIndex(Start start) {
return "CREATE INDEX passwordless_users_phone_number_index ON "
+ Config.getConfig(start).getPasswordlessUsersTable() + "(app_id, phone_number);";
}
static String getQueryToCreatePasswordlessUserToTenantTable(Start start) {
String passwordlessUserToTenantTable = Config.getConfig(start).getPasswordlessUserToTenantTable();
// @formatter:off
@@ -89,16 +79,6 @@ public class PasswordlessQueries {
// @formatter:on
}
static String getQueryToCreatePasswordlessUserToTenantEmailIndex(Start start) {
return "CREATE INDEX passwordless_user_to_tenant_email_index ON "
+ Config.getConfig(start).getPasswordlessUserToTenantTable() + "(app_id, tenant_id, email);";
}
static String getQueryToCreatePasswordlessUserToTenantPhoneNumberIndex(Start start) {
return "CREATE INDEX passwordless_user_to_tenant_phone_number_index ON "
+ Config.getConfig(start).getPasswordlessUserToTenantTable() + "(app_id, tenant_id, phone_number);";
}
public static String getQueryToCreateDevicesTable(Start start) {
return "CREATE TABLE IF NOT EXISTS " + Config.getConfig(start).getPasswordlessDevicesTable() + " ("
+ "app_id VARCHAR(64) DEFAULT 'public',"

View File

@@ -1,458 +0,0 @@
/*
* Copyright (c) 2025, VRAI Labs and/or its affiliates. All rights reserved.
*
* This software is licensed under the Apache License, Version 2.0 (the
* "License") as published by the Apache Software Foundation.
*
* You may not use this file except in compliance with the License. You may
* obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package io.supertokens.inmemorydb.queries;
import java.sql.SQLException;
import java.sql.Types;
import java.util.ArrayList;
import java.util.List;
import com.google.gson.JsonArray;
import com.google.gson.JsonObject;
import com.google.gson.JsonParser;
import static io.supertokens.inmemorydb.QueryExecutorTemplate.execute;
import static io.supertokens.inmemorydb.QueryExecutorTemplate.update;
import io.supertokens.inmemorydb.Start;
import io.supertokens.inmemorydb.config.Config;
import io.supertokens.pluginInterface.exceptions.StorageQueryException;
import io.supertokens.pluginInterface.multitenancy.TenantIdentifier;
import io.supertokens.pluginInterface.saml.SAMLClaimsInfo;
import io.supertokens.pluginInterface.saml.SAMLClient;
import io.supertokens.pluginInterface.saml.SAMLRelayStateInfo;
public class SAMLQueries {
public static String getQueryToCreateSAMLClientsTable(Start start) {
String table = Config.getConfig(start).getSAMLClientsTable();
String tenantsTable = Config.getConfig(start).getTenantsTable();
// @formatter:off
return "CREATE TABLE IF NOT EXISTS " + table + " ("
+ "app_id VARCHAR(64) NOT NULL DEFAULT 'public',"
+ "tenant_id VARCHAR(64) NOT NULL DEFAULT 'public',"
+ "client_id VARCHAR(255) NOT NULL,"
+ "client_secret TEXT,"
+ "sso_login_url TEXT NOT NULL,"
+ "redirect_uris TEXT NOT NULL," // store JsonArray.toString()
+ "default_redirect_uri VARCHAR(1024) NOT NULL,"
+ "idp_entity_id VARCHAR(1024),"
+ "idp_signing_certificate TEXT,"
+ "allow_idp_initiated_login BOOLEAN NOT NULL DEFAULT FALSE,"
+ "enable_request_signing BOOLEAN NOT NULL DEFAULT TRUE,"
+ "created_at BIGINT NOT NULL,"
+ "updated_at BIGINT NOT NULL,"
+ "UNIQUE (app_id, tenant_id, idp_entity_id),"
+ "PRIMARY KEY (app_id, tenant_id, client_id),"
+ "FOREIGN KEY (app_id, tenant_id) REFERENCES " + tenantsTable + " (app_id, tenant_id) ON DELETE CASCADE"
+ ");";
// @formatter:on
}
public static String getQueryToCreateSAMLClientsAppIdTenantIdIndex(Start start) {
String table = Config.getConfig(start).getSAMLClientsTable();
return "CREATE INDEX IF NOT EXISTS saml_clients_app_tenant_index ON " + table + "(app_id, tenant_id);";
}
public static String getQueryToCreateSAMLRelayStateTable(Start start) {
String table = Config.getConfig(start).getSAMLRelayStateTable();
String tenantsTable = Config.getConfig(start).getTenantsTable();
// @formatter:off
return "CREATE TABLE IF NOT EXISTS " + table + " ("
+ "app_id VARCHAR(64) NOT NULL DEFAULT 'public',"
+ "tenant_id VARCHAR(64) NOT NULL DEFAULT 'public',"
+ "relay_state VARCHAR(255) NOT NULL,"
+ "client_id VARCHAR(255) NOT NULL,"
+ "state TEXT,"
+ "redirect_uri VARCHAR(1024) NOT NULL,"
+ "created_at BIGINT NOT NULL,"
+ "expires_at BIGINT NOT NULL,"
+ "PRIMARY KEY (relay_state)," // relayState must be unique
+ "FOREIGN KEY (app_id, tenant_id) REFERENCES " + tenantsTable + " (app_id, tenant_id) ON DELETE CASCADE"
+ ");";
// @formatter:on
}
public static String getQueryToCreateSAMLRelayStateAppIdTenantIdIndex(Start start) {
String table = Config.getConfig(start).getSAMLRelayStateTable();
return "CREATE INDEX IF NOT EXISTS saml_relay_state_app_tenant_index ON " + table + "(app_id, tenant_id);";
}
public static String getQueryToCreateSAMLRelayStateExpiresAtIndex(Start start) {
String table = Config.getConfig(start).getSAMLRelayStateTable();
return "CREATE INDEX IF NOT EXISTS saml_relay_state_expires_at_index ON " + table + "(expires_at);";
}
public static String getQueryToCreateSAMLClaimsTable(Start start) {
String table = Config.getConfig(start).getSAMLClaimsTable();
String tenantsTable = Config.getConfig(start).getTenantsTable();
// @formatter:off
return "CREATE TABLE IF NOT EXISTS " + table + " ("
+ "app_id VARCHAR(64) NOT NULL DEFAULT 'public',"
+ "tenant_id VARCHAR(64) NOT NULL DEFAULT 'public',"
+ "client_id VARCHAR(255) NOT NULL,"
+ "code VARCHAR(255) NOT NULL,"
+ "claims TEXT NOT NULL,"
+ "created_at BIGINT NOT NULL,"
+ "expires_at BIGINT NOT NULL,"
+ "PRIMARY KEY (code),"
+ "FOREIGN KEY (app_id, tenant_id) REFERENCES " + tenantsTable + " (app_id, tenant_id) ON DELETE CASCADE"
+ ");";
// @formatter:on
}
public static String getQueryToCreateSAMLClaimsAppIdTenantIdIndex(Start start) {
String table = Config.getConfig(start).getSAMLClaimsTable();
return "CREATE INDEX IF NOT EXISTS saml_claims_app_tenant_index ON " + table + "(app_id, tenant_id);";
}
public static String getQueryToCreateSAMLClaimsExpiresAtIndex(Start start) {
String table = Config.getConfig(start).getSAMLClaimsTable();
return "CREATE INDEX IF NOT EXISTS saml_claims_expires_at_index ON " + table + "(expires_at);";
}
public static void saveRelayStateInfo(Start start, TenantIdentifier tenantIdentifier,
String relayState, String clientId, String state, String redirectURI, long relayStateValidity)
throws StorageQueryException {
String table = Config.getConfig(start).getSAMLRelayStateTable();
String QUERY = "INSERT INTO " + table +
" (app_id, tenant_id, relay_state, client_id, state, redirect_uri, created_at, expires_at) VALUES (?, ?, ?, ?, ?, ?, ?, ?)";
try {
update(start, QUERY, pst -> {
pst.setString(1, tenantIdentifier.getAppId());
pst.setString(2, tenantIdentifier.getTenantId());
pst.setString(3, relayState);
pst.setString(4, clientId);
if (state != null) {
pst.setString(5, state);
} else {
pst.setNull(5, java.sql.Types.VARCHAR);
}
pst.setString(6, redirectURI);
pst.setLong(7, System.currentTimeMillis());
pst.setLong(8, System.currentTimeMillis() + relayStateValidity);
});
} catch (SQLException e) {
throw new StorageQueryException(e);
}
}
public static SAMLRelayStateInfo getRelayStateInfo(Start start, TenantIdentifier tenantIdentifier, String relayState)
throws StorageQueryException {
String table = Config.getConfig(start).getSAMLRelayStateTable();
String QUERY = "SELECT client_id, state, redirect_uri, expires_at FROM " + table
+ " WHERE app_id = ? AND tenant_id = ? AND relay_state = ? AND expires_at >= ?";
try {
return execute(start, QUERY, pst -> {
pst.setString(1, tenantIdentifier.getAppId());
pst.setString(2, tenantIdentifier.getTenantId());
pst.setString(3, relayState);
pst.setLong(4, System.currentTimeMillis());
}, result -> {
if (result.next()) {
String clientId = result.getString("client_id");
String state = result.getString("state"); // may be null
String redirectURI = result.getString("redirect_uri");
return new SAMLRelayStateInfo(relayState, clientId, state, redirectURI);
}
return null;
});
} catch (SQLException e) {
throw new StorageQueryException(e);
}
}
public static void saveSAMLClaims(Start start, TenantIdentifier tenantIdentifier, String clientId, String code, String claimsJson, long claimsValidity)
throws StorageQueryException {
String table = Config.getConfig(start).getSAMLClaimsTable();
String QUERY = "INSERT INTO " + table +
" (app_id, tenant_id, client_id, code, claims, created_at, expires_at) VALUES (?, ?, ?, ?, ?, ?, ?)";
try {
update(start, QUERY, pst -> {
pst.setString(1, tenantIdentifier.getAppId());
pst.setString(2, tenantIdentifier.getTenantId());
pst.setString(3, clientId);
pst.setString(4, code);
pst.setString(5, claimsJson);
pst.setLong(6, System.currentTimeMillis());
pst.setLong(7, System.currentTimeMillis() + claimsValidity);
});
} catch (SQLException e) {
throw new StorageQueryException(e);
}
}
public static SAMLClaimsInfo getSAMLClaimsAndRemoveCode(Start start, TenantIdentifier tenantIdentifier, String code)
throws StorageQueryException {
String table = Config.getConfig(start).getSAMLClaimsTable();
String QUERY = "SELECT client_id, claims FROM " + table + " WHERE app_id = ? AND tenant_id = ? AND code = ? AND expires_at >= ?";
try {
SAMLClaimsInfo claimsInfo = execute(start, QUERY, pst -> {
pst.setString(1, tenantIdentifier.getAppId());
pst.setString(2, tenantIdentifier.getTenantId());
pst.setString(3, code);
pst.setLong(4, System.currentTimeMillis());
}, result -> {
if (result.next()) {
String clientId = result.getString("client_id");
JsonObject claims = com.google.gson.JsonParser.parseString(result.getString("claims")).getAsJsonObject();
return new SAMLClaimsInfo(clientId, claims);
}
return null;
});
if (claimsInfo != null) {
String DELETE = "DELETE FROM " + table + " WHERE app_id = ? AND tenant_id = ? AND code = ?";
update(start, DELETE, pst -> {
pst.setString(1, tenantIdentifier.getAppId());
pst.setString(2, tenantIdentifier.getTenantId());
pst.setString(3, code);
});
}
return claimsInfo;
} catch (SQLException e) {
throw new StorageQueryException(e);
}
}
public static SAMLClient createOrUpdateSAMLClient(
Start start,
TenantIdentifier tenantIdentifier,
String clientId,
String clientSecret,
String ssoLoginURL,
String redirectURIsJson,
String defaultRedirectURI,
String idpEntityId,
String idpSigningCertificate,
boolean allowIDPInitiatedLogin,
boolean enableRequestSigning)
throws StorageQueryException, SQLException {
String table = Config.getConfig(start).getSAMLClientsTable();
String QUERY = "INSERT INTO " + table +
" (app_id, tenant_id, client_id, client_secret, sso_login_url, redirect_uris, default_redirect_uri, idp_entity_id, idp_signing_certificate, allow_idp_initiated_login, enable_request_signing, created_at, updated_at) " +
"VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) " +
"ON CONFLICT (app_id, tenant_id, client_id) DO UPDATE SET " +
"client_secret = ?, sso_login_url = ?, redirect_uris = ?, default_redirect_uri = ?, idp_entity_id = ?, idp_signing_certificate = ?, allow_idp_initiated_login = ?, enable_request_signing = ?, updated_at = ?";
long now = System.currentTimeMillis();
update(start, QUERY, pst -> {
pst.setString(1, tenantIdentifier.getAppId());
pst.setString(2, tenantIdentifier.getTenantId());
pst.setString(3, clientId);
if (clientSecret != null) {
pst.setString(4, clientSecret);
} else {
pst.setNull(4, Types.VARCHAR);
}
pst.setString(5, ssoLoginURL);
pst.setString(6, redirectURIsJson);
pst.setString(7, defaultRedirectURI);
if (idpEntityId != null) {
pst.setString(8, idpEntityId);
} else {
pst.setNull(8, java.sql.Types.VARCHAR);
}
if (idpSigningCertificate != null) {
pst.setString(9, idpSigningCertificate);
} else {
pst.setNull(9, Types.VARCHAR);
}
pst.setBoolean(10, allowIDPInitiatedLogin);
pst.setBoolean(11, enableRequestSigning);
pst.setLong(12, now);
pst.setLong(13, now);
if (clientSecret != null) {
pst.setString(14, clientSecret);
} else {
pst.setNull(14, Types.VARCHAR);
}
pst.setString(15, ssoLoginURL);
pst.setString(16, redirectURIsJson);
pst.setString(17, defaultRedirectURI);
if (idpEntityId != null) {
pst.setString(18, idpEntityId);
} else {
pst.setNull(18, java.sql.Types.VARCHAR);
}
if (idpSigningCertificate != null) {
pst.setString(19, idpSigningCertificate);
} else {
pst.setNull(19, Types.VARCHAR);
}
pst.setBoolean(20, allowIDPInitiatedLogin);
pst.setBoolean(21, enableRequestSigning);
pst.setLong(22, now);
});
return getSAMLClient(start, tenantIdentifier, clientId);
}
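One easy-to-miss detail of the upsert above: parameters 1 through 13 bind the INSERT values and parameters 14 through 22 repeat the same values for the DO UPDATE branch, so the two halves must stay in sync. A trimmed illustration of the same pattern, using a hypothetical two-column table:

// Sketch only (hypothetical table/columns): bind once for INSERT, again for DO UPDATE.
public class UpsertBindingSketch {
    public static void main(String[] args) {
        String sql = "INSERT INTO clients (client_id, sso_login_url) VALUES (?, ?) "
                + "ON CONFLICT (client_id) DO UPDATE SET sso_login_url = ?";
        String clientId = "client-1";
        String ssoLoginURL = "https://idp.example.com/sso";
        // parameter 1 -> clientId, parameter 2 -> ssoLoginURL (INSERT branch)
        // parameter 3 -> ssoLoginURL again (UPDATE branch)
        System.out.println(sql + " :: " + clientId + ", " + ssoLoginURL + ", " + ssoLoginURL);
    }
}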
public static SAMLClient getSAMLClient(Start start, TenantIdentifier tenantIdentifier, String clientId)
throws StorageQueryException {
String table = Config.getConfig(start).getSAMLClientsTable();
String QUERY = "SELECT client_id, client_secret, sso_login_url, redirect_uris, default_redirect_uri, idp_entity_id, idp_signing_certificate, allow_idp_initiated_login, enable_request_signing FROM " + table
+ " WHERE app_id = ? AND tenant_id = ? AND client_id = ?";
try {
return execute(start, QUERY, pst -> {
pst.setString(1, tenantIdentifier.getAppId());
pst.setString(2, tenantIdentifier.getTenantId());
pst.setString(3, clientId);
}, result -> {
if (result.next()) {
String fetchedClientId = result.getString("client_id");
String clientSecret = result.getString("client_secret");
String ssoLoginURL = result.getString("sso_login_url");
String redirectUrisJson = result.getString("redirect_uris");
String defaultRedirectURI = result.getString("default_redirect_uri");
String idpEntityId = result.getString("idp_entity_id");
String idpSigningCertificate = result.getString("idp_signing_certificate");
boolean allowIDPInitiatedLogin = result.getBoolean("allow_idp_initiated_login");
boolean enableRequestSigning = result.getBoolean("enable_request_signing");
JsonArray redirectURIs = JsonParser.parseString(redirectUrisJson).getAsJsonArray();
return new SAMLClient(fetchedClientId, clientSecret, ssoLoginURL, redirectURIs, defaultRedirectURI, idpEntityId, idpSigningCertificate, allowIDPInitiatedLogin, enableRequestSigning);
}
return null;
});
} catch (SQLException e) {
throw new StorageQueryException(e);
}
}
public static SAMLClient getSAMLClientByIDPEntityId(Start start, TenantIdentifier tenantIdentifier, String idpEntityId) throws StorageQueryException {
String table = Config.getConfig(start).getSAMLClientsTable();
String QUERY = "SELECT client_id, client_secret, sso_login_url, redirect_uris, default_redirect_uri, idp_entity_id, idp_signing_certificate, allow_idp_initiated_login, enable_request_signing FROM " + table
+ " WHERE app_id = ? AND tenant_id = ? AND idp_entity_id = ?";
try {
return execute(start, QUERY, pst -> {
pst.setString(1, tenantIdentifier.getAppId());
pst.setString(2, tenantIdentifier.getTenantId());
pst.setString(3, idpEntityId);
}, result -> {
if (result.next()) {
String fetchedClientId = result.getString("client_id");
String clientSecret = result.getString("client_secret");
String ssoLoginURL = result.getString("sso_login_url");
String redirectUrisJson = result.getString("redirect_uris");
String defaultRedirectURI = result.getString("default_redirect_uri");
String fetchedIdpEntityId = result.getString("idp_entity_id");
String idpSigningCertificate = result.getString("idp_signing_certificate");
boolean allowIDPInitiatedLogin = result.getBoolean("allow_idp_initiated_login");
boolean enableRequestSigning = result.getBoolean("enable_request_signing");
JsonArray redirectURIs = JsonParser.parseString(redirectUrisJson).getAsJsonArray();
return new SAMLClient(fetchedClientId, clientSecret, ssoLoginURL, redirectURIs, defaultRedirectURI, fetchedIdpEntityId, idpSigningCertificate, allowIDPInitiatedLogin, enableRequestSigning);
}
return null;
});
} catch (SQLException e) {
throw new StorageQueryException(e);
}
}
public static List<SAMLClient> getSAMLClients(Start start, TenantIdentifier tenantIdentifier)
throws StorageQueryException {
String table = Config.getConfig(start).getSAMLClientsTable();
String QUERY = "SELECT client_id, client_secret, sso_login_url, redirect_uris, default_redirect_uri, idp_entity_id, idp_signing_certificate, allow_idp_initiated_login, enable_request_signing FROM " + table
+ " WHERE app_id = ? AND tenant_id = ?";
try {
return execute(start, QUERY, pst -> {
pst.setString(1, tenantIdentifier.getAppId());
pst.setString(2, tenantIdentifier.getTenantId());
}, result -> {
List<SAMLClient> clients = new ArrayList<>();
while (result.next()) {
String fetchedClientId = result.getString("client_id");
String clientSecret = result.getString("client_secret");
String ssoLoginURL = result.getString("sso_login_url");
String redirectUrisJson = result.getString("redirect_uris");
String defaultRedirectURI = result.getString("default_redirect_uri");
String idpEntityId = result.getString("idp_entity_id");
String idpSigningCertificate = result.getString("idp_signing_certificate");
boolean allowIDPInitiatedLogin = result.getBoolean("allow_idp_initiated_login");
boolean enableRequestSigning = result.getBoolean("enable_request_signing");
JsonArray redirectURIs = JsonParser.parseString(redirectUrisJson).getAsJsonArray();
clients.add(new SAMLClient(fetchedClientId, clientSecret, ssoLoginURL, redirectURIs, defaultRedirectURI, idpEntityId, idpSigningCertificate, allowIDPInitiatedLogin, enableRequestSigning));
}
return clients;
});
} catch (SQLException e) {
throw new StorageQueryException(e);
}
}
public static boolean removeSAMLClient(Start start, TenantIdentifier tenantIdentifier, String clientId)
throws StorageQueryException {
String table = Config.getConfig(start).getSAMLClientsTable();
String QUERY = "DELETE FROM " + table + " WHERE app_id = ? AND tenant_id = ? AND client_id = ?";
try {
return update(start, QUERY, pst -> {
pst.setString(1, tenantIdentifier.getAppId());
pst.setString(2, tenantIdentifier.getTenantId());
pst.setString(3, clientId);
}) > 0;
} catch (SQLException e) {
throw new StorageQueryException(e);
}
}
public static void removeExpiredSAMLCodesAndRelayStates(Start start) throws StorageQueryException {
try {
{
String QUERY = "DELETE FROM " + Config.getConfig(start).getSAMLClaimsTable() + " WHERE expires_at <= ?";
update(start, QUERY, pst -> {
pst.setLong(1, System.currentTimeMillis());
});
}
{
String QUERY = "DELETE FROM " + Config.getConfig(start).getSAMLRelayStateTable() + " WHERE expires_at <= ?";
update(start, QUERY, pst -> {
pst.setLong(1, System.currentTimeMillis());
});
}
} catch (SQLException e) {
throw new StorageQueryException(e);
}
}
public static int countSAMLClients(Start start, TenantIdentifier tenantIdentifier) throws StorageQueryException {
String table = Config.getConfig(start).getSAMLClientsTable();
String QUERY = "SELECT COUNT(*) as c FROM " + table
+ " WHERE app_id = ? AND tenant_id = ?";
try {
return execute(start, QUERY, pst -> {
pst.setString(1, tenantIdentifier.getAppId());
pst.setString(2, tenantIdentifier.getTenantId());
}, result -> {
if (result.next()) {
return result.getInt("c");
}
return 0;
});
} catch (SQLException e) {
throw new StorageQueryException(e);
}
}
}
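Read together, the relay-state and claims helpers implement a one-time code exchange: saveRelayStateInfo stores state before redirecting to the IdP, getRelayStateInfo resolves it when the response returns, saveSAMLClaims parks the claims under a fresh code, and getSAMLClaimsAndRemoveCode consumes that code exactly once (expired rows are swept by removeExpiredSAMLCodesAndRelayStates). A storage-free sketch of the one-time-code behaviour, using an in-memory map instead of the SQL table:

// Sketch only: a code can be exchanged for its claims once; a second lookup returns null.
import java.util.HashMap;
import java.util.Map;

class OneTimeCodeSketch {
    private final Map<String, String> codeToClaims = new HashMap<>();

    void saveClaims(String code, String claimsJson) {
        codeToClaims.put(code, claimsJson);
    }

    String getClaimsAndRemoveCode(String code) {
        return codeToClaims.remove(code); // read + delete, like the SQL SELECT followed by DELETE
    }

    public static void main(String[] args) {
        OneTimeCodeSketch s = new OneTimeCodeSketch();
        s.saveClaims("code-abc", "{\"email\":\"user@example.com\"}");
        System.out.println(s.getClaimsAndRemoveCode("code-abc")); // claims JSON
        System.out.println(s.getClaimsAndRemoveCode("code-abc")); // null: already consumed
    }
}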

View File

@@ -76,11 +76,6 @@ public class SessionQueries {
+ Config.getConfig(start).getSessionInfoTable() + "(expires_at);";
}
static String getQueryToCreateSessionAppIdUserIdIndex(Start start) {
return "CREATE INDEX session_info_user_id_app_id_index ON "
+ Config.getConfig(start).getSessionInfoTable() + "(user_id, app_id);";
}
public static void createNewSession(Start start, TenantIdentifier tenantIdentifier, String sessionHandle,
String userId, String refreshTokenHash2,
JsonObject userDataInDatabase, long expiry, JsonObject userDataInJWT,
@@ -133,60 +128,21 @@ public class SessionQueries {
return null;
}
QUERY = "SELECT external_user_id, 0 as o " +
"FROM " + getConfig(start).getUserIdMappingTable() + " um2 " +
"WHERE um2.app_id = ? AND um2.supertokens_user_id IN (" +
"SELECT primary_or_recipe_user_id " +
"FROM " + getConfig(start).getUsersTable() + " " +
"WHERE app_id = ? AND user_id IN (" +
"SELECT user_id FROM (" +
"SELECT um1.supertokens_user_id as user_id, 0 as o1 " +
"FROM " + getConfig(start).getUserIdMappingTable() + " um1 " +
"WHERE um1.app_id = ? AND um1.external_user_id = ? " +
"UNION " +
"SELECT ?, 1 as o1 " +
"ORDER BY o1 ASC " +
") uid1" +
")" +
") " +
"UNION " +
"SELECT primary_or_recipe_user_id, 1 as o " +
"FROM " + getConfig(start).getUsersTable() + " " +
"WHERE app_id = ? AND user_id IN (" +
"SELECT user_ID FROM (" +
"SELECT um1.supertokens_user_id as user_id, 0 as o2 " +
"FROM " + getConfig(start).getUserIdMappingTable() + " um1 " +
"WHERE um1.app_id = ? AND um1.external_user_id = ? " +
"UNION " +
"SELECT ?, 1 as o2 " +
"ORDER BY o2 ASC " +
") uid2 " +
") " +
"ORDER BY o ASC " +
"LIMIT 1";
QUERY = "SELECT primary_or_recipe_user_id FROM " + getConfig(start).getUsersTable()
+ " WHERE app_id = ? AND user_id = ?";
String finalUserId = execute(con, QUERY, pst -> {
return execute(con, QUERY, pst -> {
pst.setString(1, tenantIdentifier.getAppId());
pst.setString(2, tenantIdentifier.getAppId());
pst.setString(3, tenantIdentifier.getAppId());
pst.setString(4, sessionInfo.recipeUserId);
pst.setString(5, sessionInfo.recipeUserId);
pst.setString(6, tenantIdentifier.getAppId());
pst.setString(7, tenantIdentifier.getAppId());
pst.setString(8, sessionInfo.recipeUserId);
pst.setString(9, sessionInfo.recipeUserId);
pst.setString(2, sessionInfo.recipeUserId);
}, result -> {
if (result.next()) {
return result.getString(1);
String primaryUserId = result.getString("primary_or_recipe_user_id");
if (primaryUserId != null) {
sessionInfo.userId = primaryUserId;
}
}
return sessionInfo.recipeUserId;
return sessionInfo;
});
if (finalUserId != null) {
sessionInfo.userId = finalUserId;
}
return sessionInfo;
}
public static void updateSessionInfo_Transaction(Start start, Connection con, TenantIdentifier tenantIdentifier,

View File

@@ -85,11 +85,6 @@ public class ThirdPartyQueries {
// @formatter:on
}
static String getQueryToCreateThirdPartyUserToTenantThirdPartyUserIdIndex(Start start) {
return "CREATE INDEX thirdparty_user_to_tenant_third_party_user_id_index ON "
+ Config.getConfig(start).getThirdPartyUserToTenantTable() + "(app_id, tenant_id, third_party_id, third_party_user_id);";
}
public static AuthRecipeUserInfo signUp(Start start, TenantIdentifier tenantIdentifier, String id, String email,
LoginMethod.ThirdParty thirdParty, long timeJoined)
throws StorageQueryException, StorageTransactionLogicException {

View File

@@ -107,16 +107,12 @@ public class UserIdMappingQueries {
String userId)
throws SQLException, StorageQueryException {
String QUERY = "SELECT * FROM " + Config.getConfig(start).getUserIdMappingTable()
+ " WHERE app_id = ? AND supertokens_user_id = ?"
+ " UNION ALL "
+ "SELECT * FROM " + Config.getConfig(start).getUserIdMappingTable()
+ " WHERE app_id = ? AND external_user_id = ?";
+ " WHERE app_id = ? AND (supertokens_user_id = ? OR external_user_id = ?)";
return execute(start, QUERY, pst -> {
pst.setString(1, appIdentifier.getAppId());
pst.setString(2, userId);
pst.setString(3, appIdentifier.getAppId());
pst.setString(4, userId);
pst.setString(3, userId);
}, result -> {
ArrayList<UserIdMapping> userIdMappingArray = new ArrayList<>();
while (result.next()) {
@@ -133,16 +129,12 @@ public class UserIdMappingQueries {
String userId)
throws SQLException, StorageQueryException {
String QUERY = "SELECT * FROM " + Config.getConfig(start).getUserIdMappingTable()
+ " WHERE app_id = ? AND supertokens_user_id = ?"
+ " UNION ALL "
+ "SELECT * FROM " + Config.getConfig(start).getUserIdMappingTable()
+ " WHERE app_id = ? AND external_user_id = ?";
+ " WHERE app_id = ? AND (supertokens_user_id = ? OR external_user_id = ?)";
return execute(sqlCon, QUERY, pst -> {
pst.setString(1, appIdentifier.getAppId());
pst.setString(2, userId);
pst.setString(3, appIdentifier.getAppId());
pst.setString(4, userId);
pst.setString(3, userId);
}, result -> {
ArrayList<UserIdMapping> userIdMappingArray = new ArrayList<>();
while (result.next()) {

View File

@@ -66,11 +66,6 @@ public class UserRolesQueries {
+ Config.getConfig(start).getUserRolesPermissionsTable() + "(app_id, permission);";
}
public static String getQueryToCreateUserRolesUserIdAppIdIndex(Start start) {
return "CREATE INDEX user_roles_app_id_user_id_index ON " + Config.getConfig(start).getUserRolesTable() +
"(app_id, user_id)";
}
public static String getQueryToCreateUserRolesTable(Start start) {
String tableName = Config.getConfig(start).getUserRolesTable();
// @formatter:off

Some files were not shown because too many files have changed in this diff