14 Commits

Author        SHA1        Message                                                     Date
Oscar Krause  84b092c1e6  added notes about cronjob for ha                            2023-03-01 08:17:30 +01:00
Oscar Krause  3921fc44f7  fixes                                                       2023-02-28 12:48:11 +01:00
Oscar Krause  9a370f817a  dont verify ssl cert on ha replication                      2023-02-28 11:21:52 +01:00
Oscar Krause  de28d000e7  ha improvements                                             2023-02-28 09:00:20 +01:00
Oscar Krause  a3cbcd8df7  fixes                                                       2023-02-28 08:43:40 +01:00
Oscar Krause  bb0eb38e93  merged dev into ha                                          2023-02-28 07:54:43 +01:00
Oscar Krause  32fbd78c3d  Merge branch 'dev' into ha (conflicts: README.md)           2023-02-28 07:53:08 +01:00
Oscar Krause  70a2fb69bb  added some ha notes                                         2023-02-28 07:50:14 +01:00
Oscar Krause  504eb776be  improvements                                                2023-02-28 07:50:04 +01:00
Oscar Krause  8593b3fd20  fixed fingerprints when HA is enabled                       2023-02-28 07:16:39 +01:00
Oscar Krause  4c556fde9f  implemented ha endpoints and configuration                  2023-02-27 10:41:21 +01:00
Oscar Krause  b9dad7f87c  implemented deserializer                                    2023-02-27 10:40:44 +01:00
Oscar Krause  21d6e48bcc  added httpx dependency                                      2023-02-27 10:40:14 +01:00
Oscar Krause  c49cf20550  added notes about database connections others than sqlite   2023-02-27 08:18:07 +01:00
14 changed files with 348 additions and 799 deletions

File: .DEBIAN/control

@@ -2,7 +2,7 @@ Package: fastapi-dls
 Version: 0.0
 Architecture: all
 Maintainer: Oscar Krause <oscar.krause@collinwebdesigns.de>
-Depends: python3, python3-fastapi, python3-uvicorn, python3-dotenv, python3-dateutil, python3-jose, python3-sqlalchemy, python3-pycryptodome, python3-markdown, uvicorn, openssl
+Depends: python3, python3-fastapi, python3-uvicorn, python3-dotenv, python3-dateutil, python3-jose, python3-sqlalchemy, python3-pycryptodome, python3-markdown, python3-httpx, uvicorn, openssl
 Recommends: curl
 Installed-Size: 10240
 Homepage: https://git.collinwebdesigns.de/oscar.krause/fastapi-dls

File: .PKGBUILD/PKGBUILD

@@ -22,9 +22,8 @@ sha256sums=('SKIP'
             '3dc60140c08122a8ec0e7fa7f0937eb8c1288058890ba09478420fc30ce9e30c')

 pkgver() {
-  echo -e "VERSION=$VERSION\nCOMMIT=$CI_COMMIT_SHA" > $srcdir/$pkgname/version.env
   source $srcdir/$pkgname/version.env
-  echo $VERSION
+  echo ${VERSION}
 }

 check() {

File: .UNRAID/FastAPI-DLS.xml (deleted)

@@ -1,48 +0,0 @@
<?xml version="1.0"?>
<Container version="2">
<Name>FastAPI-DLS</Name>
<Repository>collinwebdesigns/fastapi-dls:latest</Repository>
<Registry>https://hub.docker.com/r/collinwebdesigns/fastapi-dls</Registry>
<Network>br0</Network>
<MyIP></MyIP>
<Shell>sh</Shell>
<Privileged>false</Privileged>
<Support/>
<Project/>
<Overview>Source:&#xD;
https://git.collinwebdesigns.de/oscar.krause/fastapi-dls#docker&#xD;
&#xD;
Make sure you create these certificates before starting the container for the first time:&#xD;
```&#xD;
# Check https://git.collinwebdesigns.de/oscar.krause/fastapi-dls/-/tree/main/#docker for more information:&#xD;
WORKING_DIR=/mnt/user/appdata/fastapi-dls/cert&#xD;
mkdir -p $WORKING_DIR&#xD;
cd $WORKING_DIR&#xD;
# create instance private and public key for singing JWT's&#xD;
openssl genrsa -out $WORKING_DIR/instance.private.pem 2048 &#xD;
openssl rsa -in $WORKING_DIR/instance.private.pem -outform PEM -pubout -out $WORKING_DIR/instance.public.pem&#xD;
# create ssl certificate for integrated webserver (uvicorn) - because clients rely on ssl&#xD;
openssl req -x509 -nodes -days 3650 -newkey rsa:2048 -keyout $WORKING_DIR/webserver.key -out $WORKING_DIR/webserver.crt&#xD;
```&#xD;
</Overview>
<Category/>
<WebUI>https://[IP]:[PORT:443]</WebUI>
<TemplateURL/>
<Icon>https://git.collinwebdesigns.de/uploads/-/system/project/avatar/106/png-transparent-nvidia-grid-logo-business-nvidia-electronics-text-trademark.png?width=64</Icon>
<ExtraParams>--restart always</ExtraParams>
<PostArgs/>
<CPUset/>
<DateInstalled>1679161568</DateInstalled>
<DonateText/>
<DonateLink/>
<Requires/>
<Config Name="HTTPS Port" Target="" Default="443" Mode="tcp" Description="Same as DLS Port below." Type="Port" Display="always-hide" Required="true" Mask="false">443</Config>
<Config Name="App Cert" Target="/app/cert" Default="/mnt/user/appdata/fastapi-dls/cert" Mode="rw" Description="[REQUIRED] Read the description above to make this folder. &#13;&#10;&#13;&#10;You do not need to change the path." Type="Path" Display="always-hide" Required="true" Mask="false">/mnt/user/appdata/fastapi-dls/cert</Config>
<Config Name="DLS Port" Target="DSL_PORT" Default="443" Mode="" Description="Choose port you want to use. Make sure to change the HTTPS port above to match it." Type="Variable" Display="always-hide" Required="true" Mask="false">443</Config>
<Config Name="App database" Target="/app/database" Default="/mnt/user/appdata/fastapi-dls/data" Mode="rw" Description="[REQUIRED] Read the description above to make this folder. &#13;&#10;&#13;&#10;You do not need to change the path." Type="Path" Display="always-hide" Required="true" Mask="false">/mnt/user/appdata/fastapi-dls/data</Config>
<Config Name="DSL IP" Target="DLS_URL" Default="localhost" Mode="" Description="Put your container's IP (or your host's IP if it's shared)." Type="Variable" Display="always-hide" Required="true" Mask="false"></Config>
<Config Name="Time Zone" Target="TZ" Default="" Mode="" Description="Format example: America/New_York. MUST MATCH YOUR CURRENT TIMEZONE AND THE GUEST VMS TIMEZONE! Otherwise you'll get into issues, read the guide above." Type="Variable" Display="always-hide" Required="true" Mask="false"></Config>
<Config Name="Database" Target="DATABASE" Default="sqlite:////app/database/db.sqlite" Mode="" Description="Set to sqlite:////app/database/db.sqlite" Type="Variable" Display="advanced-hide" Required="true" Mask="false">sqlite:////app/database/db.sqlite</Config>
<Config Name="Debug" Target="DEBUG" Default="true" Mode="" Description="true to enable debugging, false to disable them." Type="Variable" Display="advanced-hide" Required="false" Mask="false">true</Config>
<Config Name="Lease" Target="LEASE_EXPIRE_DAYS" Default="90" Mode="" Description="90 days is the maximum value." Type="Variable" Display="advanced" Required="false" Mask="false">90</Config>
</Container>

File: .UNRAID/setup_vgpu_license.sh (deleted)

@@ -1,197 +0,0 @@
#!/bin/bash
# This script automates the licensing of the vGPU guest driver
# on Unraid boot. Set the Schedule to: "At Startup of Array".
#
# Relies on FastAPI-DLS for the licensing.
# It assumes FeatureType=1 (vGPU), change it as you see fit in line <114>
#
# Requires `eflutils` to be installed in the system for `nvidia-gridd` to run
# To Install it:
# 1) You might find it here: https://packages.slackware.com/ (choose the 64bit version of Slackware)
# 2) Download the package and put it in /boot/extra to be installed on boot
# 3) a. Reboot to install it, OR
# b. Run `upgradepkg --install-new /boot/extra/elfutils*`
# [i]: Make sure to have only one version of elfutils, otherwise you might run into issues
# Sources and docs:
# https://docs.nvidia.com/grid/15.0/grid-vgpu-user-guide/index.html#configuring-nls-licensed-client-on-linux
#
################################################
# MAKE SURE YOU CHANGE THESE VARIABLES #
################################################
###### CHANGE ME!
# IP and PORT of FastAPI-DLS
DLS_IP=192.168.0.123
DLS_PORT=443
# Token folder, must be on a filesystem that supports
# linux filesystem permissions (eg: ext4,xfs,btrfs...)
TOKEN_PATH=/mnt/user/system/nvidia
PING=$(which ping)
# Check if the License is applied
if [[ "$(nvidia-smi -q | grep "Expiry")" == *Expiry* ]]; then
echo " [i] Your vGPU Guest drivers are already licensed."
echo " [i] $(nvidia-smi -q | grep "Expiry")"
echo " [<] Exiting..."
exit 0
fi
# Check if the FastAPI-DLS server is reachable
# Check if the License is applied
MAX_RETRIES=30
for i in $(seq 1 $MAX_RETRIES); do
echo -ne "\r [>] Attempt $i to connect to $DLS_IP."
if ping -c 1 $DLS_IP >/dev/null 2>&1; then
echo -e "\n [*] Connection successful."
break
fi
if [ $i -eq $MAX_RETRIES ]; then
echo -e "\n [!] Connection failed after $MAX_RETRIES attempts."
echo -e "\n [<] Exiting..."
exit 1
fi
sleep 1
done
# Check if the token folder exists
if [ -d "${TOKEN_PATH}" ]; then
echo " [*] Token Folder exists. Proceeding..."
else
echo " [!] Token Folder does not exists or not ready yet. Exiting."
echo " [!] Token Folder Specified: ${TOKEN_PATH}"
exit 1
fi
# Check if elfutils are installed, otherwise nvidia-gridd service
# wont start
if [ "$(grep -R "elfutils" /var/log/packages/* | wc -l)" != 0 ]; then
echo " [*] Elfutils is installed, proceeding..."
else
echo " [!] Elfutils is not installed, downloading and installing..."
echo " [!] Downloading elfutils to /boot/extra"
echo " [i] This script will download elfutils from slackware64-15.0 repository."
echo " [i] If you have a different version of Unraid (6.11.5), you might want to"
echo " [i] download and install a suitable version manually from the slackware"
echo " [i] repository, and put it in /boot/extra to be install on boot."
echo " [i] You may also install it by running: "
echo " [i] upgradepkg --install-new /path/to/elfutils-*.txz"
echo ""
echo " [>] Downloading elfutils from slackware64-15.0 repository:"
wget -q -nc --show-progress --progress=bar:force:noscroll -P /boot/extra https://slackware.uk/slackware/slackware64-15.0/slackware64/l/elfutils-0.186-x86_64-1.txz 2>/dev/null \
|| { echo " [!] Error while downloading elfutils, please download it and install it manually."; exit 1; }
echo ""
if upgradepkg --install-new /boot/extra/elfutils-0.186-x86_64-1.txz
then
echo " [*] Elfutils installed and will be installed automatically on boot"
else
echo " [!] Error while installing, check logs..."
exit 1
fi
fi
echo " [~] Sleeping for 60 seconds before continuing..."
echo " [i] The script is waiting until the boot process settles down."
for i in {60..1}; do
printf "\r [~] %d seconds remaining" "$i"
sleep 1
done
printf "\n"
create_token () {
echo " [>] Creating new token..."
if ${PING} -c1 ${DLS_IP} > /dev/null 2>&1
then
# curl --insecure -L -X GET https://${DLS_IP}:${DLS_PORT}/-/client-token -o ${TOKEN_PATH}/client_configuration_token_"$(date '+%d-%m-%Y-%H-%M-%S')".tok || { echo " [!] Could not get the token, please check the server."; exit 1;}
wget -q -nc -4c --no-check-certificate --show-progress --progress=bar:force:noscroll -O "${TOKEN_PATH}"/client_configuration_token_"$(date '+%d-%m-%Y-%H-%M-%S')".tok https://${DLS_IP}:${DLS_PORT}/-/client-token \
|| { echo " [!] Could not get the token, please check the server."; exit 1;}
chmod 744 "${TOKEN_PATH}"/*.tok || { echo " [!] Could not chmod the tokens."; exit 1; }
echo ""
echo " [*] Token downloaded and stored in ${TOKEN_PATH}."
else
echo " [!] Could not get token, DLS server unavailable ."
exit 1
fi
}
setup_run () {
echo " [>] Setting up gridd.conf"
cp /etc/nvidia/gridd.conf.template /etc/nvidia/gridd.conf || { echo " [!] Error configuring gridd.conf, did you install the drivers correctly?"; exit 1; }
sed -i 's/FeatureType=0/FeatureType=1/g' /etc/nvidia/gridd.conf
echo "ClientConfigTokenPath=${TOKEN_PATH}" >> /etc/nvidia/gridd.conf
echo " [>] Creating /var/lib/nvidia folder structure"
mkdir -p /var/lib/nvidia/GridLicensing
echo " [>] Starting nvidia-gridd"
if pgrep nvidia-gridd >/dev/null 2>&1; then
echo " [!] nvidia-gridd service is running. Closing."
sh /usr/lib/nvidia/sysv/nvidia-gridd stop
stop_exit_code=$?
if [ $stop_exit_code -eq 0 ]; then
echo " [*] nvidia-gridd service stopped successfully."
else
echo " [!] Error while stopping nvidia-gridd service."
exit 1
fi
# Kill the service if it does not close
if pgrep nvidia-gridd >/dev/null 2>&1; then
kill -9 "$(pgrep nvidia-gridd)" || {
echo " [!] Error while closing nvidia-gridd service"
exit 1
}
fi
echo " [*] Restarting nvidia-gridd service."
sh /usr/lib/nvidia/sysv/nvidia-gridd start
if pgrep nvidia-gridd >/dev/null 2>&1; then
echo " [*] Service started, PID: $(pgrep nvidia-gridd)"
else
echo -e " [!] Error while starting nvidia-gridd service. Use strace -f nvidia-gridd to debug.\n [i] Check if elfutils is installed.\n [i] strace is not installed by default."
exit 1
fi
else
sh /usr/lib/nvidia/sysv/nvidia-gridd start
if pgrep nvidia-gridd >/dev/null 2>&1; then
echo " [*] Service started, PID: $(pgrep nvidia-gridd)"
else
echo -e " [!] Error while starting nvidia-gridd service. Use strace -f nvidia-gridd to debug.\n [i] Check if elfutils is installed.\n [i] strace is not installed by default."
exit 1
fi
fi
}
for token in "${TOKEN_PATH}"/*; do
if [ "${token: -4}" == ".tok" ]
then
echo " [*] Tokens found..."
setup_run
else
echo " [!] No Tokens found..."
create_token
setup_run
fi
done
while true; do
if nvidia-smi -q | grep "Expiry" >/dev/null 2>&1; then
echo " [>] vGPU licensed!"
echo " [i] $(nvidia-smi -q | grep "Expiry")"
break
else
echo -ne " [>] vGPU not licensed yet... Checking again in 5 seconds\c"
for i in {1..5}; do
sleep 1
echo -ne ".\c"
done
echo -ne "\r\c"
fi
done
echo " [>] Done..."
exit 0

File: .gitlab-ci.yml

@@ -20,38 +20,26 @@ build:docker:
     - if: $CI_PIPELINE_SOURCE == 'merge_request_event'
   tags: [ docker ]
   before_script:
-    - docker buildx inspect
-    - docker buildx create --use
+    - echo "COMMIT=${CI_COMMIT_SHA}" >> version.env # COMMIT=`git rev-parse HEAD`
   script:
     - docker login -u $CI_REGISTRY_USER -p $CI_REGISTRY_PASSWORD $CI_REGISTRY
-    - IMAGE=$CI_REGISTRY/$CI_PROJECT_PATH/$CI_COMMIT_REF_NAME:$CI_COMMIT_SHA
-    - docker buildx build --progress=plain --platform linux/amd64,linux/arm64 --build-arg VERSION=$CI_COMMIT_REF_NAME --build-arg COMMIT=$CI_COMMIT_SHA --tag $IMAGE --push .
-    - docker buildx imagetools inspect $IMAGE
-    - echo "CS_IMAGE=$IMAGE" > container_scanning.env
-  artifacts:
-    reports:
-      dotenv: container_scanning.env
+    - docker build . --tag ${CI_REGISTRY}/${CI_PROJECT_PATH}/${CI_BUILD_REF_NAME}:${CI_BUILD_REF}
+    - docker push ${CI_REGISTRY}/${CI_PROJECT_PATH}/${CI_BUILD_REF_NAME}:${CI_BUILD_REF}

 build:apt:
   image: debian:bookworm-slim
   interruptible: true
   stage: build
   rules:
-    - if: $CI_COMMIT_TAG
-      variables:
-        VERSION: $CI_COMMIT_REF_NAME
+    - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
     - if: $CI_COMMIT_BRANCH && $CI_COMMIT_BRANCH != $CI_DEFAULT_BRANCH
       changes:
         - app/**/*
         - .DEBIAN/**/*
-        - .gitlab-ci.yml
-      variables:
-        VERSION: "0.0.1"
     - if: $CI_PIPELINE_SOURCE == 'merge_request_event'
-      variables:
-        VERSION: "0.0.1"
   before_script:
-    - echo -e "VERSION=$VERSION\nCOMMIT=$CI_COMMIT_SHA" > version.env
-    - source version.env
+    - echo "COMMIT=${CI_COMMIT_SHA}" >> version.env
     # install build dependencies
     - apt-get update -qq && apt-get install -qq -y build-essential
     # create build directory for .deb sources
@@ -72,7 +60,7 @@ build:apt:
     # cd into "build/"
     - cd build/
   script:
-    # set version based on value in "$CI_COMMIT_REF_NAME"
+    # set version based on value in "$VERSION" (which is set above from version.env)
     - sed -i -E 's/(Version\:\s)0.0/\1'"$VERSION"'/g' DEBIAN/control
     # build
     - dpkg -b . build.deb
@@ -87,21 +75,14 @@ build:pacman:
   interruptible: true
   stage: build
   rules:
-    - if: $CI_COMMIT_TAG
-      variables:
-        VERSION: $CI_COMMIT_REF_NAME
+    - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
     - if: $CI_COMMIT_BRANCH && $CI_COMMIT_BRANCH != $CI_DEFAULT_BRANCH
       changes:
         - app/**/*
         - .PKGBUILD/**/*
-        - .gitlab-ci.yml
-      variables:
-        VERSION: "0.0.1"
     - if: $CI_PIPELINE_SOURCE == 'merge_request_event'
-      variables:
-        VERSION: "0.0.1"
   before_script:
-    #- echo -e "VERSION=$VERSION\nCOMMIT=$CI_COMMIT_SHA" > version.env
+    - echo "COMMIT=${CI_COMMIT_SHA}" >> version.env
     # install build dependencies
     - pacman -Syu --noconfirm git
     # create a build-user because "makepkg" don't like root user
@@ -116,7 +97,7 @@ build:pacman:
     # download dependencies
     - source PKGBUILD && pacman -Syu --noconfirm --needed --asdeps "${makedepends[@]}" "${depends[@]}"
     # build
-    - sudo --preserve-env -u build makepkg -s
+    - sudo -u build makepkg -s
   artifacts:
     expire_in: 1 week
     paths:
@@ -127,7 +108,6 @@ test:
   stage: test
   rules:
     - if: $CI_COMMIT_BRANCH
-    - if: $CI_COMMIT_TAG
     - if: $CI_PIPELINE_SOURCE == "merge_request_event"
   variables:
     DATABASE: sqlite:///../app/db.sqlite
@@ -212,26 +192,28 @@ code_quality:
     - if: $CODE_QUALITY_DISABLED
       when: never
     - if: $CI_PIPELINE_SOURCE == "merge_request_event"
+    - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH

 secret_detection:
   rules:
     - if: $SECRET_DETECTION_DISABLED
       when: never
     - if: $CI_PIPELINE_SOURCE == "merge_request_event"
-  before_script:
-    - git config --global --add safe.directory $CI_PROJECT_DIR
+    - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH

 semgrep-sast:
   rules:
     - if: $SAST_DISABLED
       when: never
     - if: $CI_PIPELINE_SOURCE == "merge_request_event"
+    - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH

 test_coverage:
   extends: test
   allow_failure: true
   rules:
     - if: $CI_PIPELINE_SOURCE == "merge_request_event"
+    - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
   script:
     - pip install pytest pytest-cov
     - coverage run -m pytest main.py
@@ -250,43 +232,51 @@ container_scanning:
     - if: $CONTAINER_SCANNING_DISABLED
       when: never
     - if: $CI_PIPELINE_SOURCE == "merge_request_event"
+    - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH

 gemnasium-python-dependency_scanning:
   rules:
     - if: $DEPENDENCY_SCANNING_DISABLED
       when: never
     - if: $CI_PIPELINE_SOURCE == "merge_request_event"
+    - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH

 .deploy:
   rules:
-    - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
     - if: $CI_COMMIT_TAG
+      when: never

 deploy:docker:
   extends: .deploy
   stage: deploy
+  rules:
+    - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
   before_script:
-    - echo "Building docker image for commit $CI_COMMIT_SHA with version $CI_COMMIT_REF_NAME"
+    - echo "COMMIT=${CI_COMMIT_SHA}" >> version.env
+    - source version.env
+    - echo "Building docker image for commit ${COMMIT} with version ${VERSION}"
   script:
-    - echo "========== GitLab-Registry =========="
+    - echo "GitLab-Registry"
     - docker login -u $CI_REGISTRY_USER -p $CI_REGISTRY_PASSWORD $CI_REGISTRY
-    - IMAGE=$CI_REGISTRY/$CI_PROJECT_PATH/$CI_COMMIT_REF_NAME
-    - docker build . --build-arg VERSION=$CI_COMMIT_REF_NAME --build-arg COMMIT=$CI_COMMIT_SHA --tag $IMAGE:$CI_COMMIT_REF_NAME
-    - docker build . --build-arg VERSION=$CI_COMMIT_REF_NAME --build-arg COMMIT=$CI_COMMIT_SHA --tag $IMAGE:latest
-    - docker push $IMAGE:$CI_COMMIT_REF_NAME
-    - docker push $IMAGE:latest
-    - echo "========== Docker-Hub =========="
+    - docker build . --tag ${CI_REGISTRY}/${CI_PROJECT_PATH}/${CI_BUILD_REF_NAME}:${VERSION}
+    - docker build . --tag ${CI_REGISTRY}/${CI_PROJECT_PATH}/${CI_BUILD_REF_NAME}:latest
+    - docker push ${CI_REGISTRY}/${CI_PROJECT_PATH}/${CI_BUILD_REF_NAME}:${VERSION}
+    - docker push ${CI_REGISTRY}/${CI_PROJECT_PATH}/${CI_BUILD_REF_NAME}:latest
+    - echo "Docker-Hub"
     - docker login -u $PUBLIC_REGISTRY_USER -p $PUBLIC_REGISTRY_TOKEN
-    - IMAGE=$PUBLIC_REGISTRY_USER/$CI_PROJECT_NAME
-    - docker build . --build-arg VERSION=$CI_COMMIT_REF_NAME --build-arg COMMIT=$CI_COMMIT_SHA --tag $IMAGE:$CI_COMMIT_REF_NAME
-    - docker build . --build-arg VERSION=$CI_COMMIT_REF_NAME --build-arg COMMIT=$CI_COMMIT_SHA --tag $IMAGE:latest
-    - docker push $IMAGE:$CI_COMMIT_REF_NAME
-    - docker push $IMAGE:latest
+    - docker build . --tag $PUBLIC_REGISTRY_USER/${CI_PROJECT_NAME}:${VERSION}
+    - docker build . --tag $PUBLIC_REGISTRY_USER/${CI_PROJECT_NAME}:latest
+    - docker push $PUBLIC_REGISTRY_USER/${CI_PROJECT_NAME}:${VERSION}
+    - docker push $PUBLIC_REGISTRY_USER/${CI_PROJECT_NAME}:latest

 deploy:apt:
   # doc: https://git.collinwebdesigns.de/help/user/packages/debian_repository/index.md#install-a-package
   extends: .deploy
   image: debian:bookworm-slim
   stage: deploy
+  rules:
+    - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
   needs:
     - job: build:apt
       artifacts: true
@@ -326,6 +316,8 @@ deploy:pacman:
   extends: .deploy
   image: archlinux:base-devel
   stage: deploy
+  rules:
+    - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
   needs:
     - job: build:pacman
       artifacts: true
@@ -333,9 +325,9 @@ deploy:pacman:
     - source .PKGBUILD/PKGBUILD
     - source version.env
     # fastapi-dls-1.0-1-any.pkg.tar.zst
-    - BUILD_NAME=${pkgname}-${CI_COMMIT_REF_NAME}-${pkgrel}-any.pkg.tar.zst
+    - BUILD_NAME=${pkgname}-${VERSION}-${pkgrel}-any.pkg.tar.zst
     - PACKAGE_NAME=${pkgname}
-    - PACKAGE_VERSION=${CI_COMMIT_REF_NAME}
+    - PACKAGE_VERSION=${VERSION}
     - PACKAGE_ARCH=any
     - EXPORT_NAME=${BUILD_NAME}
     - 'echo "PACKAGE_NAME: ${PACKAGE_NAME}"'
@@ -347,15 +339,19 @@ deploy:pacman:
 release:
   image: registry.gitlab.com/gitlab-org/release-cli:latest
   stage: .post
-  needs: [ test ]
+  needs:
+    - job: test
+      artifacts: true
   rules:
     - if: $CI_COMMIT_TAG
+      when: never
+    - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
   script:
-    - echo "Running release-job for $CI_COMMIT_TAG"
+    - echo "Running release-job for $VERSION"
   release:
-    name: $CI_PROJECT_TITLE $CI_COMMIT_TAG
-    description: Release of $CI_PROJECT_TITLE version $CI_COMMIT_TAG
-    tag_name: $CI_COMMIT_TAG
+    name: $CI_PROJECT_TITLE $VERSION
+    description: Release of $CI_PROJECT_TITLE version $VERSION
+    tag_name: $VERSION
     ref: $CI_COMMIT_SHA
   assets:
     links:

File: Dockerfile

@@ -1,9 +1,5 @@
 FROM python:3.11-alpine
-
-ARG VERSION
-ARG COMMIT=""
-RUN echo -e "VERSION=$VERSION\nCOMMIT=$COMMIT" > /version.env

 COPY requirements.txt /tmp/requirements.txt
 RUN apk update \
@@ -15,6 +11,7 @@ RUN apk update \
     && apk del build-deps

 COPY app /app
+COPY version.env /version.env
 COPY README.md /README.md

 HEALTHCHECK --start-period=30s --interval=10s --timeout=5s --retries=3 CMD curl --insecure --fail https://localhost/-/health || exit 1
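With this change the image no longer bakes `/version.env` from build args; it copies a pre-existing `version.env`, which the CI jobs append `COMMIT=...` to. A local build outside CI therefore has to create the file first; a minimal sketch, with placeholder values:

```shell
# version.env must exist before the build (the VERSION value here is an assumption)
echo -e "VERSION=0.0.1\nCOMMIT=$(git rev-parse HEAD)" > version.env
docker build . --tag fastapi-dls:local
```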

File: README.md (108 lines changed)

@@ -9,9 +9,9 @@ Only the clients need a connection to this service on configured port.

 **Official Links**

-- https://git.collinwebdesigns.de/oscar.krause/fastapi-dls (Private Git)
-- https://gitea.publichub.eu/oscar.krause/fastapi-dls (Public Git)
-- https://hub.docker.com/r/collinwebdesigns/fastapi-dls (Docker-Hub `collinwebdesigns/fastapi-dls:latest`)
+- https://git.collinwebdesigns.de/oscar.krause/fastapi-dls
+- https://gitea.publichub.eu/oscar.krause/fastapi-dls
+- Docker Image `collinwebdesigns/fastapi-dls:latest`

 *All other repositories are forks! (which is no bad - just for information and bug reports)*
@@ -32,6 +32,17 @@ Tested with Ubuntu 22.10 (from Proxmox templates), actually its consuming 100mb
 - Make sure your timezone is set correct on you fastapi-dls server and your client

+**HA Setup Notes**
+
+- only *failover mode* is supported by team-green (see *high availability* in the official user guide)
+- make sure you're using the same configuration on each node
+- use the same `instance.private.pem` and `instance.private.key` on each node
+- add a `cronjob` on each node with `curl -X GET --insecure https://localhost/-/ha/replicate`
+
+If you want *real* HA, you should use a proxy in front of this service and a clustered database in the backend.
+This is neither documented nor supported by me, but it *can* work. Please ask the community for help.
+Maybe the simplest solution for HA-ing this service is a Docker Swarm with redundant storage and database.
+
 ## Docker

 Docker-Images are available here:
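The cronjob from the HA notes above could look like this on each node; the one-minute schedule and the `/etc/cron.d` location are assumptions, only the `curl` command itself comes from the notes:

```shell
# /etc/cron.d/fastapi-dls-ha (assumed path and schedule)
# push origins and leases to the partner node once per minute
* * * * * root curl -X GET --insecure https://localhost/-/ha/replicate
```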
@@ -145,9 +156,9 @@ This is only to test whether the service starts successfully.

 ```shell
 cd /opt/fastapi-dls/app
-sudo -u www-data /opt/fastapi-dls/venv/bin/uvicorn main:app --app-dir=/opt/fastapi-dls/app
-# or
 su - www-data -c "/opt/fastapi-dls/venv/bin/uvicorn main:app --app-dir=/opt/fastapi-dls/app"
+# or
+sudo -u www-data -c "/opt/fastapi-dls/venv/bin/uvicorn main:app --app-dir=/opt/fastapi-dls/app"
 ```

 **Create config file**
@@ -247,8 +258,6 @@ This is only to test whether the service starts successfully.
 BASE_DIR=/opt/fastapi-dls
 SERVICE_USER=dls
 cd ${BASE_DIR}
-sudo -u ${SERVICE_USER} ${BASE_DIR}/venv/bin/uvicorn main:app --app-dir=${BASE_DIR}/app
-# or
 su - ${SERVICE_USER} -c "${BASE_DIR}/venv/bin/uvicorn main:app --app-dir=${BASE_DIR}/app"
 ```
@@ -310,7 +319,7 @@ Packages are available here:
 Successful tested with:

-- Debian 12 (Bookworm)
+- Debian 12 (Bookworm) (works but not recommended because it is currently in *testing* state)
 - Ubuntu 22.10 (Kinetic Kudu)

 Not working with:
@@ -354,19 +363,6 @@ pacman -U --noconfirm fastapi-dls.pkg.tar.zst

 Start with `systemctl start fastapi-dls.service` and enable autostart with `systemctl enable fastapi-dls.service`.

-## unRAID
-
-1. Download [this xml file](.UNRAID/FastAPI-DLS.xml)
-2. Put it in /boot/config/plugins/dockerMan/templates-user/
-3. Go to Docker page, scroll down to `Add Container`, click on Template list and choose `FastAPI-DLS`
-4. Open terminal/ssh, follow the instructions in overview description
-5. Setup your container `IP`, `Port`, `DLS_URL` and `DLS_PORT`
-6. Apply and let it boot up
-
-*Unraid users must also make sure they have Host access to custom networks enabled if unraid is the vgpu guest*.
-
-Continue [here](#unraid-guest) for docker guest setup.

 ## Let's Encrypt Certificate (optional)

 If you're using installation via docker, you can use `traefik`. Please refer to their documentation.
@@ -385,23 +381,28 @@ After first success you have to replace `--issue` with `--renew`.

 # Configuration

 | Variable               | Default                                | Usage                                                                                                                |
-|------------------------|----------------------------------------|------------------------------------------------------------------------------------------------------|
+|------------------------|----------------------------------------|----------------------------------------------------------------------------------------------------------------------|
 | `DEBUG`                | `false`                                | Toggles `fastapi` debug mode                                                                                         |
 | `DLS_URL`              | `localhost`                            | Used in client-token to tell guest driver where dls instance is reachable                                            |
 | `DLS_PORT`             | `443`                                  | Used in client-token to tell guest driver where dls instance is reachable                                            |
+| `HA_REPLICATE`         |                                        | `DLS_URL` + `DLS_PORT` of primary DLS instance, e.g. `dls-node:443` (for HA only **two** nodes are supported!) \*1   |
+| `HA_ROLE`              |                                        | `PRIMARY` or `SECONDARY`                                                                                             |
 | `TOKEN_EXPIRE_DAYS`    | `1`                                    | Client auth-token validity (used for authenticate client against api, **not `.tok` file!**)                          |
 | `LEASE_EXPIRE_DAYS`    | `90`                                   | Lease time in days                                                                                                   |
-| `LEASE_RENEWAL_PERIOD` | `0.15`                                 | The percentage of the lease period that must elapse before a licensed client can renew a license \*1                 |
+| `LEASE_RENEWAL_PERIOD` | `0.15`                                 | The percentage of the lease period that must elapse before a licensed client can renew a license \*2                 |
 | `DATABASE`             | `sqlite:///db.sqlite`                  | See [official SQLAlchemy docs](https://docs.sqlalchemy.org/en/14/core/engines.html)                                  |
-| `CORS_ORIGINS`         | `https://{DLS_URL}`                    | Sets `Access-Control-Allow-Origin` header (comma separated string) \*2                                               |
+| `CORS_ORIGINS`         | `https://{DLS_URL}`                    | Sets `Access-Control-Allow-Origin` header (comma separated string) \*3                                               |
 | `SITE_KEY_XID`         | `00000000-0000-0000-0000-000000000000` | Site identification uuid                                                                                             |
 | `INSTANCE_REF`         | `10000000-0000-0000-0000-000000000001` | Instance identification uuid                                                                                         |
 | `ALLOTMENT_REF`        | `20000000-0000-0000-0000-000000000001` | Allotment identification uuid                                                                                        |
-| `INSTANCE_KEY_RSA`     | `<app-dir>/cert/instance.private.pem`  | Site-wide private RSA key for singing JWTs \*3                                                                       |
-| `INSTANCE_KEY_PUB`     | `<app-dir>/cert/instance.public.pem`   | Site-wide public key \*3                                                                                             |
+| `INSTANCE_KEY_RSA`     | `<app-dir>/cert/instance.private.pem`  | Site-wide private RSA key for signing JWTs \*4                                                                       |
+| `INSTANCE_KEY_PUB`     | `<app-dir>/cert/instance.public.pem`   | Site-wide public key \*4                                                                                             |

-\*1 For example, if the lease period is one day and the renewal period is 20%, the client attempts to renew its license
+\*1 If you want to use HA, this value should point to the `secondary` on the `primary` and to the `primary` on the
+`secondary`. Don't use the same database for both instances!
+
+\*2 For example, if the lease period is one day and the renewal period is 20%, the client attempts to renew its license
 every 4.8 hours. If network connectivity is lost, the loss of connectivity is detected during license renewal and the
 client has 19.2 hours in which to re-establish connectivity before its license expires.
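Putting `HA_REPLICATE` and `HA_ROLE` together, a two-node failover pair might be configured like this; the hostnames `dls-a`/`dls-b` are placeholders, and per \*1 each node points `HA_REPLICATE` at the other and keeps its own database:

```shell
# node dls-a (env file)
DLS_URL=dls-a
DLS_PORT=443
HA_ROLE=PRIMARY
HA_REPLICATE=dls-b:443

# node dls-b (env file)
DLS_URL=dls-b
DLS_PORT=443
HA_ROLE=SECONDARY
HA_REPLICATE=dls-a:443
```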
@@ -415,15 +416,9 @@ client has 19.2 hours in which to re-establish connectivity before its license expires.

 Successfully tested with this package versions:

-| vGPU Suftware | vGPU Manager | Linux Driver | Windows Driver | Release Date  |
-|---------------|--------------|--------------|----------------|---------------|
-| `15.2`        | `525.105.14` | `525.105.17` | `528.89`       | March 2023    |
-| `15.1`        | `525.85.07`  | `525.85.05`  | `528.24`       | January 2023  |
-| `15.0`        | `525.60.12`  | `525.60.13`  | `527.41`       | December 2022 |
-| `14.4`        | `510.108.03` | `510.108.03` | `514.08`       | December 2022 |
-| `14.3`        | `510.108.03` | `510.108.03` | `513.91`       | November 2022 |
-
-- https://docs.nvidia.com/grid/index.html
+- `14.3` (Linux-Host: `510.108.03`, Linux-Guest: `510.108.03`, Windows-Guest: `513.91`)
+- `14.4` (Linux-Host: `510.108.03`, Linux-Guest: `510.108.03`, Windows-Guest: `514.08`)
+- `15.0` (Linux-Host: `525.60.12`, Linux-Guest: `525.60.13`, Windows-Guest: `527.41`)

 ## Linux
@@ -475,7 +470,7 @@ Restart-Service NVDisplay.ContainerLocalSystem
 Check licensing status:

 ```shell
-& 'nvidia-smi' -q | Select-String "License"
+& 'C:\Program Files\NVIDIA Corporation\NVSMI\nvidia-smi.exe' -q | Select-String "License"
 ```

 Output should be something like:
@@ -487,19 +482,6 @@ vGPU Software Licensed Product

 Done. For more information check [troubleshoot section](#troubleshoot).

-## unRAID Guest
-
-1. Make sure you create a folder in a linux filesystem (BTRFS/XFS/EXT4...), I recommend `/mnt/user/system/nvidia` (this is where docker and libvirt preferences are saved, so it's a good place to have that)
-2. Edit the script to put your `DLS_IP`, `DLS_PORT` and `TOKEN_PATH`, properly
-3. Install `User Scripts` plugin from *Community Apps* (the Apps page, or google User Scripts Unraid if you're not using CA)
-4. Go to `Settings > Users Scripts > Add New Script`
-5. Give it a name (the name must not contain spaces preferably)
-6. Click on the *gear icon* to the left of the script name then edit script
-7. Paste the script and save
-8. Set schedule to `At First Array Start Only`
-9. Click on Apply
-
 # Endpoints

 ### `GET /`
@@ -702,8 +684,4 @@ The error message can safely be ignored (since we have no license limitation :P)

 Thanks to vGPU community and all who uses this project and report bugs.

-Special thanks to
-
-- @samicrusader who created build file for ArchLinux
-- @cyrus who wrote the section for openSUSE
-- @midi who wrote the section for unRAID
+Special thanks to @samicrusader who created the build file for ArchLinux and @cyrus who wrote the section for openSUSE.

File: ROADMAP.md (deleted)

@@ -1,27 +0,0 @@
# Roadmap
I am planning to implement the following features in the future.
## HA - High Availability
Support Failover-Mode (secondary ip address) as in official DLS.
**Note**: There is no Load-Balancing / Round-Robin HA Mode supported! If you want to use that, consider to use
Docker-Swarm with shared/cluster database (e.g. postgres).
*See [ha branch](https://git.collinwebdesigns.de/oscar.krause/fastapi-dls/-/tree/ha) for current status.*
## UI - User Interface
Add a user interface to manage origins and leases.
*See [ui branch](https://git.collinwebdesigns.de/oscar.krause/fastapi-dls/-/tree/ui) for current status.*
## Config Database
Instead of using environment variables, configuration files and manually create certificates, store configs and
certificates in database (like origins and leases). Also, there should be provided a startup assistant to prefill
required attributes and create instance-certificates. This is more user-friendly and should improve fist setup.

File: app/main.py

@@ -6,45 +6,52 @@ from os.path import join, dirname
 from os import getenv as env

 from dotenv import load_dotenv
-from fastapi import FastAPI
+from fastapi import FastAPI, BackgroundTasks
 from fastapi.requests import Request
 from json import loads as json_loads
-from datetime import datetime
+from datetime import datetime, timedelta
 from dateutil.relativedelta import relativedelta
 from calendar import timegm
-from jose import jws, jwt, JWTError
+from jose import jws, jwk, jwt, JWTError
 from jose.constants import ALGORITHMS
 from starlette.middleware.cors import CORSMiddleware
 from starlette.responses import StreamingResponse, JSONResponse as JSONr, HTMLResponse as HTMLr, Response, RedirectResponse
 from sqlalchemy import create_engine
 from sqlalchemy.orm import sessionmaker

-from orm import init as db_init, migrate, Site, Instance, Origin, Lease
+from util import load_key, load_file, ha_replicate
+from orm import Origin, Lease, init as db_init, migrate

 load_dotenv('../version.env')

-# get local timezone
 TZ = datetime.now().astimezone().tzinfo

-# fetch version info
 VERSION, COMMIT, DEBUG = env('VERSION', 'unknown'), env('COMMIT', 'unknown'), bool(env('DEBUG', False))

-# fastapi setup
-config = dict(openapi_url='/-/openapi.json', docs_url=None, redoc_url=None)
+config = dict(openapi_url=None, docs_url=None, redoc_url=None)  # dict(openapi_url='/-/openapi.json', docs_url='/-/docs', redoc_url='/-/redoc')
 app = FastAPI(title='FastAPI-DLS', description='Minimal Delegated License Service (DLS).', version=VERSION, **config)

-# database setup
 db = create_engine(str(env('DATABASE', 'sqlite:///db.sqlite')))
 db_init(db), migrate(db)

-# DLS setup (static)
+# everything prefixed with "INSTANCE_*" is used as "SERVICE_INSTANCE_*" or "SI_*" in official dls service
 DLS_URL = str(env('DLS_URL', 'localhost'))
 DLS_PORT = int(env('DLS_PORT', '443'))
+HA_REPLICATE, HA_ROLE = env('HA_REPLICATE', None), env('HA_ROLE', None)  # only failover is supported
+SITE_KEY_XID = str(env('SITE_KEY_XID', '00000000-0000-0000-0000-000000000000'))
+INSTANCE_REF = str(env('INSTANCE_REF', '10000000-0000-0000-0000-000000000001'))
+ALLOTMENT_REF = str(env('ALLOTMENT_REF', '20000000-0000-0000-0000-000000000001'))
+INSTANCE_KEY_RSA = load_key(str(env('INSTANCE_KEY_RSA', join(dirname(__file__), 'cert/instance.private.pem'))))
+INSTANCE_KEY_PUB = load_key(str(env('INSTANCE_KEY_PUB', join(dirname(__file__), 'cert/instance.public.pem'))))
+TOKEN_EXPIRE_DELTA = relativedelta(days=int(env('TOKEN_EXPIRE_DAYS', 1)), hours=int(env('TOKEN_EXPIRE_HOURS', 0)))
+LEASE_EXPIRE_DELTA = relativedelta(days=int(env('LEASE_EXPIRE_DAYS', 90)), hours=int(env('LEASE_EXPIRE_HOURS', 0)))
+LEASE_RENEWAL_PERIOD = float(env('LEASE_RENEWAL_PERIOD', 0.15))
+LEASE_RENEWAL_DELTA = timedelta(days=int(env('LEASE_EXPIRE_DAYS', 90)), hours=int(env('LEASE_EXPIRE_HOURS', 0)))
+CLIENT_TOKEN_EXPIRE_DELTA = relativedelta(years=12)
 CORS_ORIGINS = str(env('CORS_ORIGINS', '')).split(',') if (env('CORS_ORIGINS')) else [f'https://{DLS_URL}']

-ALLOTMENT_REF = str(env('ALLOTMENT_REF', '20000000-0000-0000-0000-000000000001'))  # todo
+jwt_encode_key = jwk.construct(INSTANCE_KEY_RSA.export_key().decode('utf-8'), algorithm=ALGORITHMS.RS256)
+jwt_decode_key = jwk.construct(INSTANCE_KEY_PUB.export_key().decode('utf-8'), algorithm=ALGORITHMS.RS256)

-# fastapi middleware
 app.debug = DEBUG
 app.add_middleware(
CORSMiddleware, CORSMiddleware,
@@ -54,25 +61,12 @@ app.add_middleware(
allow_headers=['*'], allow_headers=['*'],
) )
# logging
logging.basicConfig() logging.basicConfig()
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG if DEBUG else logging.INFO) logger.setLevel(logging.DEBUG if DEBUG else logging.INFO)
def validate_settings(): def __get_token(request: Request) -> dict:
session = sessionmaker(bind=db)()
lease_expire_delta_min, lease_expire_delta_max = 86_400, 7_776_000
for instance in session.query(Instance).all():
lease_expire_delta = instance.lease_expire_delta
if lease_expire_delta < 86_400 or lease_expire_delta > 7_776_000:
logging.warning(f'> [ instance ]: {instance.instance_ref}: "lease_expire_delta" should be between {lease_expire_delta_min} and {lease_expire_delta_max}')
session.close()
def __get_token(request: Request, jwt_decode_key: "jose.jwt") -> dict:
authorization_header = request.headers.get('authorization') authorization_header = request.headers.get('authorization')
token = authorization_header.split(' ')[1] token = authorization_header.split(' ')[1]
return jwt.decode(token=token, key=jwt_decode_key, algorithms=ALGORITHMS.RS256, options={'verify_aud': False}) return jwt.decode(token=token, key=jwt_decode_key, algorithms=ALGORITHMS.RS256, options={'verify_aud': False})
@@ -89,26 +83,24 @@ async def _index():

 @app.get('/-/health', summary='* Health')
-async def _health():
+async def _health(request: Request):
     return JSONr({'status': 'up'})


 @app.get('/-/config', summary='* Config', description='returns environment variables.')
 async def _config():
-    default_site, default_instance = Site.get_default_site(db), Instance.get_default_instance(db)
-
     return JSONr({
         'VERSION': str(VERSION),
         'COMMIT': str(COMMIT),
         'DEBUG': str(DEBUG),
         'DLS_URL': str(DLS_URL),
         'DLS_PORT': str(DLS_PORT),
-        'SITE_KEY_XID': str(default_site.site_key),
-        'INSTANCE_REF': str(default_instance.instance_ref),
+        'SITE_KEY_XID': str(SITE_KEY_XID),
+        'INSTANCE_REF': str(INSTANCE_REF),
         'ALLOTMENT_REF': [str(ALLOTMENT_REF)],
-        'TOKEN_EXPIRE_DELTA': str(default_instance.get_token_expire_delta()),
-        'LEASE_EXPIRE_DELTA': str(default_instance.get_lease_expire_delta()),
-        'LEASE_RENEWAL_PERIOD': str(default_instance.lease_renewal_period),
+        'TOKEN_EXPIRE_DELTA': str(TOKEN_EXPIRE_DELTA),
+        'LEASE_EXPIRE_DELTA': str(LEASE_EXPIRE_DELTA),
+        'LEASE_RENEWAL_PERIOD': str(LEASE_RENEWAL_PERIOD),
         'CORS_ORIGINS': str(CORS_ORIGINS),
         'TZ': str(TZ),
     })
@@ -117,8 +109,6 @@ async def _config():
 @app.get('/-/readme', summary='* Readme')
 async def _readme():
     from markdown import markdown
-    from util import load_file
-
     content = load_file('../README.md').decode('utf-8')
     return HTMLr(markdown(text=content, extensions=['tables', 'fenced_code', 'md_in_html', 'nl2br', 'toc']))
@@ -168,7 +158,8 @@ async def _origins(request: Request, leases: bool = False):
     for origin in session.query(Origin).all():
         x = origin.serialize()
         if leases:
-            x['leases'] = list(map(lambda _: _.serialize(), Lease.find_by_origin_ref(db, origin.origin_ref)))
+            serialize = dict(renewal_period=LEASE_RENEWAL_PERIOD, renewal_delta=LEASE_RENEWAL_DELTA)
+            x['leases'] = list(map(lambda _: _.serialize(**serialize), Lease.find_by_origin_ref(db, origin.origin_ref)))
         response.append(x)
     session.close()
     return JSONr(response)
@@ -185,7 +176,8 @@ async def _leases(request: Request, origin: bool = False):
     session = sessionmaker(bind=db)()
     response = []
     for lease in session.query(Lease).all():
-        x = lease.serialize()
+        serialize = dict(renewal_period=LEASE_RENEWAL_PERIOD, renewal_delta=LEASE_RENEWAL_DELTA)
+        x = lease.serialize(**serialize)
         if origin:
             lease_origin = session.query(Origin).filter(Origin.origin_ref == lease.origin_ref).first()
             if lease_origin is not None:
@@ -195,12 +187,6 @@ async def _leases(request: Request, origin: bool = False):
     return JSONr(response)


-@app.delete('/-/leases/expired', summary='* Leases')
-async def _lease_delete_expired(request: Request):
-    Lease.delete_expired(db)
-    return Response(status_code=201)
-
-
 @app.delete('/-/lease/{lease_ref}', summary='* Lease')
 async def _lease_delete(request: Request, lease_ref: str):
     if Lease.delete(db, lease_ref) == 1:
@@ -212,13 +198,37 @@ async def _lease_delete(request: Request, lease_ref: str):
 @app.get('/-/client-token', summary='* Client-Token', description='creates a new messenger token for this service instance')
 async def _client_token():
     cur_time = datetime.utcnow()
+    exp_time = cur_time + CLIENT_TOKEN_EXPIRE_DELTA

-    default_instance = Instance.get_default_instance(db)
-    public_key = default_instance.get_public_key()
-    # todo: implemented request parameter to support different instances
-    jwt_encode_key = default_instance.get_jwt_encode_key()
-    exp_time = cur_time + default_instance.get_client_token_expire_delta()
+    if HA_REPLICATE is not None and HA_ROLE.lower() == "secondary":
+        return RedirectResponse(f'https://{HA_REPLICATE}/-/client-token')
+
+    idx_port, idx_node = 0, 0
+
+    def create_svc_port_set(port: int):
+        idx = idx_port
+        return {
+            "idx": idx,
+            "d_name": "DLS",
+            "svc_port_map": [{"service": "auth", "port": port}, {"service": "lease", "port": port}]
+        }
+
+    def create_node_url(url: str, svc_port_set_idx: int):
+        idx = idx_node
+        return {"idx": idx, "url": url, "url_qr": url, "svc_port_set_idx": svc_port_set_idx}
+
+    service_instance_configuration = {
+        "nls_service_instance_ref": INSTANCE_REF,
+        "svc_port_set_list": [create_svc_port_set(DLS_PORT)],
+        "node_url_list": [create_node_url(DLS_URL, idx_port)]
+    }
+    idx_port += 1
+    idx_node += 1
+
+    if HA_REPLICATE is not None and HA_ROLE.lower() == "primary":
+        SEC_URL, SEC_PORT, *invalid = HA_REPLICATE.split(':')
+        service_instance_configuration['svc_port_set_list'].append(create_svc_port_set(SEC_PORT))
+        service_instance_configuration['node_url_list'].append(create_node_url(SEC_URL, idx_port))

     payload = {
         "jti": str(uuid4()),
@@ -230,23 +240,13 @@ async def _client_token():
         "update_mode": "ABSOLUTE",
         "scope_ref_list": [ALLOTMENT_REF],
         "fulfillment_class_ref_list": [],
-        "service_instance_configuration": {
-            "nls_service_instance_ref": default_instance.instance_ref,
-            "svc_port_set_list": [
-                {
-                    "idx": 0,
-                    "d_name": "DLS",
-                    "svc_port_map": [{"service": "auth", "port": DLS_PORT}, {"service": "lease", "port": DLS_PORT}]
-                }
-            ],
-            "node_url_list": [{"idx": 0, "url": DLS_URL, "url_qr": DLS_URL, "svc_port_set_idx": 0}]
-        },
+        "service_instance_configuration": service_instance_configuration,
         "service_instance_public_key_configuration": {
             "service_instance_public_key_me": {
-                "mod": hex(public_key.public_key().n)[2:],
-                "exp": int(public_key.public_key().e),
+                "mod": hex(INSTANCE_KEY_PUB.public_key().n)[2:],
+                "exp": int(INSTANCE_KEY_PUB.public_key().e),
             },
-            "service_instance_public_key_pem": public_key.export_key().decode('utf-8'),
+            "service_instance_public_key_pem": INSTANCE_KEY_PUB.export_key().decode('utf-8'),
             "key_retention_mode": "LATEST_ONLY"
         },
     }
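Because a `SECONDARY` node now answers `/-/client-token` with a redirect to `HA_REPLICATE`, a manual token download has to follow redirects; a sketch with a placeholder hostname:

```shell
# -L follows the secondary's redirect to the primary
curl --insecure -L -X GET https://dls-b:443/-/client-token \
  -o client_configuration_token_$(date '+%d-%m-%Y-%H-%M-%S').tok
```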
@@ -260,6 +260,67 @@ async def _client_token():
     return response


+@app.get('/-/ha/replicate', summary='* HA replicate - trigger')
+async def _ha_replicate_to_ha(request: Request, background_tasks: BackgroundTasks):
+    if HA_REPLICATE is None or HA_ROLE is None:
+        logger.warning('HA replicate endpoint triggered, but no value for "HA_REPLICATE" or "HA_ROLE" is set!')
+        return JSONr(status_code=503, content={'status': 503, 'detail': 'no value for "HA_REPLICATE" or "HA_ROLE" set'})
+
+    session = sessionmaker(bind=db)()
+    origins = [origin.serialize() for origin in session.query(Origin).all()]
+    leases = [lease.serialize(renewal_period=LEASE_RENEWAL_PERIOD, renewal_delta=LEASE_RENEWAL_DELTA) for lease in session.query(Lease).all()]
+
+    background_tasks.add_task(ha_replicate, logger, HA_REPLICATE, HA_ROLE, VERSION, DLS_URL, DLS_PORT, SITE_KEY_XID, INSTANCE_REF, origins, leases)
+    return JSONr(status_code=202, content=None)
+
+
+@app.put('/-/ha/replicate', summary='* HA replicate')
+async def _ha_replicate_by_ha(request: Request):
+    j, cur_time = json_loads((await request.body()).decode('utf-8')), datetime.utcnow()
+
+    if HA_REPLICATE is None:
+        logger.warning(f'HA replicate endpoint triggered, but no value for "HA_REPLICATE" is set!')
+        return JSONr(status_code=503, content={'status': 503, 'detail': 'no value for "HA_REPLICATE" set'})
+
+    version = j.get('VERSION')
+    if version != VERSION:
+        logger.error(f'Version mismatch on HA replication task!')
+        return JSONr(status_code=503, content={'status': 503, 'detail': 'Mismatch for "VERSION"'})
+
+    site_key_xid = j.get('SITE_KEY_XID')
+    if site_key_xid != SITE_KEY_XID:
+        logger.error(f'Site-Key mismatch on HA replication task!')
+        return JSONr(status_code=503, content={'status': 503, 'detail': 'Mismatch for "SITE_KEY_XID"'})
+
+    instance_ref = j.get('INSTANCE_REF')
+    if instance_ref != INSTANCE_REF:
+        logger.error(f'Instance-Ref mismatch on HA replication task!')
+        return JSONr(status_code=503, content={'status': 503, 'detail': 'Mismatch for "INSTANCE_REF"'})
+
+    sync_timestamp, max_seconds_behind = datetime.fromisoformat(j.get('sync_timestamp')), 30
+    if sync_timestamp <= cur_time - timedelta(seconds=max_seconds_behind):
+        logger.error(f'Request time more than {max_seconds_behind}s behind!')
+        return JSONr(status_code=503, content={'status': 503, 'detail': 'Request time behind'})
+
+    origins, leases = j.get('origins'), j.get('leases')
+    for origin in origins:
+        origin_ref = origin.get('origin_ref')
+        logging.info(f'> [ ha ]: origin {origin_ref}')
+        data = Origin.deserialize(origin)
+        Origin.create_or_update(db, data)
+
+    for lease in leases:
+        lease_ref = lease.get('lease_ref')
+        x = Lease.find_by_lease_ref(db, lease_ref)
+        if x is not None and x.lease_updated > sync_timestamp:
+            continue
+        logging.info(f'> [ ha ]: lease {lease_ref}')
+        data = Lease.deserialize(lease)
+        Lease.create_or_update(db, data)
+
+    return JSONr(status_code=202, content=None)
+
+
 # venv/lib/python3.9/site-packages/nls_services_auth/test/test_origins_controller.py
 @app.post('/auth/v1/origin', description='find or create an origin')
 async def auth_v1_origin(request: Request):
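The two endpoints above split the replication work: `GET /-/ha/replicate` serializes local origins and leases and pushes them to the partner via the `ha_replicate` background task, while `PUT /-/ha/replicate` receives such a push and applies it (skipping leases that are locally newer than `sync_timestamp`). A manual trigger for testing might look like this:

```shell
# expect HTTP 202 when the background sync task is accepted,
# or 503 when HA_REPLICATE / HA_ROLE are not configured
curl -X GET --insecure -i https://localhost/-/ha/replicate
```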
@@ -328,16 +389,13 @@ async def auth_v1_code(request: Request):
     delta = relativedelta(minutes=15)
     expires = cur_time + delta

-    default_site = Site.get_default_site(db)
-    jwt_encode_key = Instance.get_default_instance(db).get_jwt_encode_key()
-
     payload = {
         'iat': timegm(cur_time.timetuple()),
         'exp': timegm(expires.timetuple()),
         'challenge': j.get('code_challenge'),
         'origin_ref': j.get('origin_ref'),
-        'key_ref': default_site.site_key,
-        'kid': default_site.site_key,
+        'key_ref': SITE_KEY_XID,
+        'kid': SITE_KEY_XID
     }

     auth_code = jws.sign(payload, key=jwt_encode_key, headers={'kid': payload.get('kid')}, algorithm=ALGORITHMS.RS256)
@@ -357,11 +415,8 @@
 async def auth_v1_token(request: Request):
     j, cur_time = json_loads((await request.body()).decode('utf-8')), datetime.utcnow()

-    default_site, default_instance = Site.get_default_site(db), Instance.get_default_instance(db)
-    jwt_encode_key, jwt_decode_key = default_instance.get_jwt_encode_key(), default_instance.get_jwt_decode_key()
-
     try:
-        payload = jwt.decode(token=j.get('auth_code'), key=jwt_decode_key, algorithms=[ALGORITHMS.RS256])
+        payload = jwt.decode(token=j.get('auth_code'), key=jwt_decode_key)
     except JWTError as e:
         return JSONr(status_code=400, content={'status': 400, 'title': 'invalid token', 'detail': str(e)})
@@ -373,7 +428,7 @@ async def auth_v1_token(request: Request):
     if payload.get('challenge') != challenge:
         return JSONr(status_code=401, content={'status': 401, 'detail': 'expected challenge did not match verifier'})

-    access_expires_on = cur_time + default_instance.get_token_expire_delta()
+    access_expires_on = cur_time + TOKEN_EXPIRE_DELTA

     new_payload = {
         'iat': timegm(cur_time.timetuple()),
@@ -382,8 +437,8 @@ async def auth_v1_token(request: Request):
         'aud': 'https://cls.nvidia.org',
         'exp': timegm(access_expires_on.timetuple()),
         'origin_ref': origin_ref,
-        'key_ref': default_site.site_key,
-        'kid': default_site.site_key,
+        'key_ref': SITE_KEY_XID,
+        'kid': SITE_KEY_XID,
     }

     auth_token = jwt.encode(new_payload, key=jwt_encode_key, headers={'kid': payload.get('kid')}, algorithm=ALGORITHMS.RS256)
@@ -400,13 +455,10 @@ async def auth_v1_token(request: Request):
 # venv/lib/python3.9/site-packages/nls_services_lease/test/test_lease_multi_controller.py
 @app.post('/leasing/v1/lessor', description='request multiple leases (borrow) for current origin')
 async def leasing_v1_lessor(request: Request):
-    j, cur_time = json_loads((await request.body()).decode('utf-8')), datetime.utcnow()
-
-    default_instance = Instance.get_default_instance(db)
-    jwt_decode_key = default_instance.get_jwt_decode_key()
+    j, token, cur_time = json_loads((await request.body()).decode('utf-8')), __get_token(request), datetime.utcnow()

     try:
-        token = __get_token(request, jwt_decode_key)
+        token = __get_token(request)
     except JWTError:
         return JSONr(status_code=401, content={'status': 401, 'detail': 'token is not valid'})
@@ -420,7 +472,7 @@ async def leasing_v1_lessor(request: Request):
# return JSONr(status_code=500, detail=f'no service instances found for scopes: ["{scope_ref}"]') # return JSONr(status_code=500, detail=f'no service instances found for scopes: ["{scope_ref}"]')
lease_ref = str(uuid4()) lease_ref = str(uuid4())
expires = cur_time + default_instance.get_lease_expire_delta() expires = cur_time + LEASE_EXPIRE_DELTA
lease_result_list.append({ lease_result_list.append({
"ordinal": 0, "ordinal": 0,
# https://docs.nvidia.com/license-system/latest/nvidia-license-system-user-guide/index.html # https://docs.nvidia.com/license-system/latest/nvidia-license-system-user-guide/index.html
@@ -428,13 +480,13 @@ async def leasing_v1_lessor(request: Request):
"ref": lease_ref, "ref": lease_ref,
"created": cur_time.isoformat(), "created": cur_time.isoformat(),
"expires": expires.isoformat(), "expires": expires.isoformat(),
"recommended_lease_renewal": default_instance.lease_renewal_period, "recommended_lease_renewal": LEASE_RENEWAL_PERIOD,
"offline_lease": "true", "offline_lease": "true",
"license_type": "CONCURRENT_COUNTED_SINGLE" "license_type": "CONCURRENT_COUNTED_SINGLE"
} }
}) })
data = Lease(instance_ref=default_instance.instance_ref, origin_ref=origin_ref, lease_ref=lease_ref, lease_created=cur_time, lease_expires=expires) data = Lease(origin_ref=origin_ref, lease_ref=lease_ref, lease_created=cur_time, lease_expires=expires)
Lease.create_or_update(db, data) Lease.create_or_update(db, data)
response = { response = {
@@ -451,14 +503,7 @@ async def leasing_v1_lessor(request: Request):
# venv/lib/python3.9/site-packages/nls_dal_service_instance_dls/schema/service_instance/V1_0_21__product_mapping.sql # venv/lib/python3.9/site-packages/nls_dal_service_instance_dls/schema/service_instance/V1_0_21__product_mapping.sql
@app.get('/leasing/v1/lessor/leases', description='get active leases for current origin') @app.get('/leasing/v1/lessor/leases', description='get active leases for current origin')
async def leasing_v1_lessor_lease(request: Request): async def leasing_v1_lessor_lease(request: Request):
cur_time = datetime.utcnow() token, cur_time = __get_token(request), datetime.utcnow()
jwt_decode_key = Instance.get_default_instance(db).get_jwt_decode_key()
try:
token = __get_token(request, jwt_decode_key)
except JWTError:
return JSONr(status_code=401, content={'status': 401, 'detail': 'token is not valid'})
origin_ref = token.get('origin_ref') origin_ref = token.get('origin_ref')
@@ -478,15 +523,7 @@ async def leasing_v1_lessor_lease(request: Request):
# venv/lib/python3.9/site-packages/nls_core_lease/lease_single.py # venv/lib/python3.9/site-packages/nls_core_lease/lease_single.py
@app.put('/leasing/v1/lease/{lease_ref}', description='renew a lease') @app.put('/leasing/v1/lease/{lease_ref}', description='renew a lease')
async def leasing_v1_lease_renew(request: Request, lease_ref: str): async def leasing_v1_lease_renew(request: Request, lease_ref: str):
cur_time = datetime.utcnow() token, cur_time = __get_token(request), datetime.utcnow()
default_instance = Instance.get_default_instance(db)
jwt_decode_key = default_instance.get_jwt_decode_key()
try:
token = __get_token(request, jwt_decode_key)
except JWTError:
return JSONr(status_code=401, content={'status': 401, 'detail': 'token is not valid'})
origin_ref = token.get('origin_ref') origin_ref = token.get('origin_ref')
logging.info(f'> [ renew ]: {origin_ref}: renew {lease_ref}') logging.info(f'> [ renew ]: {origin_ref}: renew {lease_ref}')
@@ -495,11 +532,11 @@ async def leasing_v1_lease_renew(request: Request, lease_ref: str):
if entity is None: if entity is None:
return JSONr(status_code=404, content={'status': 404, 'detail': 'requested lease not available'}) return JSONr(status_code=404, content={'status': 404, 'detail': 'requested lease not available'})
expires = cur_time + default_instance.get_lease_expire_delta() expires = cur_time + LEASE_EXPIRE_DELTA
response = { response = {
"lease_ref": lease_ref, "lease_ref": lease_ref,
"expires": expires.isoformat(), "expires": expires.isoformat(),
"recommended_lease_renewal": default_instance.lease_renewal_period, "recommended_lease_renewal": LEASE_RENEWAL_PERIOD,
"offline_lease": True, "offline_lease": True,
"prompts": None, "prompts": None,
"sync_timestamp": cur_time.isoformat(), "sync_timestamp": cur_time.isoformat(),
@@ -513,14 +550,7 @@ async def leasing_v1_lease_renew(request: Request, lease_ref: str):
# venv/lib/python3.9/site-packages/nls_services_lease/test/test_lease_single_controller.py # venv/lib/python3.9/site-packages/nls_services_lease/test/test_lease_single_controller.py
@app.delete('/leasing/v1/lease/{lease_ref}', description='release (return) a lease') @app.delete('/leasing/v1/lease/{lease_ref}', description='release (return) a lease')
async def leasing_v1_lease_delete(request: Request, lease_ref: str): async def leasing_v1_lease_delete(request: Request, lease_ref: str):
cur_time = datetime.utcnow() token, cur_time = __get_token(request), datetime.utcnow()
jwt_decode_key = Instance.get_default_instance(db).get_jwt_decode_key()
try:
token = __get_token(request, jwt_decode_key)
except JWTError:
return JSONr(status_code=401, content={'status': 401, 'detail': 'token is not valid'})
origin_ref = token.get('origin_ref') origin_ref = token.get('origin_ref')
logging.info(f'> [ return ]: {origin_ref}: return {lease_ref}') logging.info(f'> [ return ]: {origin_ref}: return {lease_ref}')
@@ -546,14 +576,7 @@ async def leasing_v1_lease_delete(request: Request, lease_ref: str):
# venv/lib/python3.9/site-packages/nls_services_lease/test/test_lease_multi_controller.py # venv/lib/python3.9/site-packages/nls_services_lease/test/test_lease_multi_controller.py
@app.delete('/leasing/v1/lessor/leases', description='release all leases') @app.delete('/leasing/v1/lessor/leases', description='release all leases')
async def leasing_v1_lessor_lease_remove(request: Request): async def leasing_v1_lessor_lease_remove(request: Request):
cur_time = datetime.utcnow() token, cur_time = __get_token(request), datetime.utcnow()
jwt_decode_key = Instance.get_default_instance(db).get_jwt_decode_key()
try:
token = __get_token(request, jwt_decode_key)
except JWTError:
return JSONr(status_code=401, content={'status': 401, 'detail': 'token is not valid'})
origin_ref = token.get('origin_ref') origin_ref = token.get('origin_ref')
@@ -575,8 +598,6 @@ async def leasing_v1_lessor_lease_remove(request: Request):
async def leasing_v1_lessor_shutdown(request: Request): async def leasing_v1_lessor_shutdown(request: Request):
j, cur_time = json_loads((await request.body()).decode('utf-8')), datetime.utcnow() j, cur_time = json_loads((await request.body()).decode('utf-8')), datetime.utcnow()
jwt_decode_key = Instance.get_default_instance(db).get_jwt_decode_key()
token = j.get('token') token = j.get('token')
token = jwt.decode(token=token, key=jwt_decode_key, algorithms=ALGORITHMS.RS256, options={'verify_aud': False}) token = jwt.decode(token=token, key=jwt_decode_key, algorithms=ALGORITHMS.RS256, options={'verify_aud': False})
origin_ref = token.get('origin_ref') origin_ref = token.get('origin_ref')
@@ -597,22 +618,30 @@ async def leasing_v1_lessor_shutdown(request: Request):
@app.on_event('startup') @app.on_event('startup')
async def app_on_startup(): async def app_on_startup():
default_instance = Instance.get_default_instance(db)
lease_renewal_period = default_instance.lease_renewal_period
lease_renewal_delta = default_instance.get_lease_renewal_delta()
client_token_expire_delta = default_instance.get_client_token_expire_delta()
logger.info(f''' logger.info(f'''
Using timezone: {str(TZ)}. Make sure this is correct and match your clients! Using timezone: {str(TZ)}. Make sure this is correct and match your clients!
Your clients will renew their license every {str(Lease.calculate_renewal(lease_renewal_period, lease_renewal_delta))}. Your clients renew their license every {str(Lease.calculate_renewal(LEASE_RENEWAL_PERIOD, LEASE_RENEWAL_DELTA))}.
If the renewal fails, the license is valid for {str(lease_renewal_delta)}. If the renewal fails, the license is {str(LEASE_RENEWAL_DELTA)} valid.
Your client-token file (.tok) is valid for {str(client_token_expire_delta)}. Your client-token file (.tok) is valid for {str(CLIENT_TOKEN_EXPIRE_DELTA)}.
''') ''')
validate_settings() if HA_REPLICATE is not None and HA_ROLE is not None:
from hashlib import sha1
sha1digest = sha1(INSTANCE_KEY_RSA.export_key()).hexdigest()
fingerprint_key = ':'.join(sha1digest[i: i + 2] for i in range(0, len(sha1digest), 2))
sha1digest = sha1(INSTANCE_KEY_PUB.export_key()).hexdigest()
fingerprint_pub = ':'.join(sha1digest[i: i + 2] for i in range(0, len(sha1digest), 2))
logger.info(f'''
HA mode is enabled. Make sure theses fingerprints matches on all your nodes:
- INSTANCE_KEY_RSA: "{str(fingerprint_key)}"
- INSTANCE_KEY_PUB: "{str(fingerprint_pub)}"
This node ({HA_ROLE}) listens to "https://{DLS_URL}:{DLS_PORT}" and replicates to "https://{HA_REPLICATE}".
''')
if __name__ == '__main__': if __name__ == '__main__':
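
The fingerprint logging added to app_on_startup() is what operators use to confirm that both HA nodes hold the same key pair. The digest can also be recomputed outside the server; a minimal sketch, assuming a pycryptodome PEM key pair at hypothetical paths:

```
# Sketch: recompute the INSTANCE_KEY_RSA / INSTANCE_KEY_PUB fingerprints that
# app_on_startup() logs, so they can be compared across HA nodes.
from hashlib import sha1

try:
    from Crypto.PublicKey import RSA  # Crypto | Cryptodome on Debian
except ModuleNotFoundError:
    from Cryptodome.PublicKey import RSA


def fingerprint(pem_path: str) -> str:
    with open(pem_path, 'rb') as f:
        key = RSA.import_key(f.read())
    digest = sha1(key.export_key()).hexdigest()
    return ':'.join(digest[i:i + 2] for i in range(0, len(digest), 2))


# hypothetical paths - point these at the same files on every node
print(fingerprint('app/cert/instance.private.pem'))
print(fingerprint('app/cert/instance.public.pem'))
```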


@@ -1,143 +1,18 @@
-import logging
 from datetime import datetime, timedelta
 from dateutil.relativedelta import relativedelta
-from sqlalchemy import Column, VARCHAR, CHAR, ForeignKey, DATETIME, update, and_, inspect, text, BLOB, INT, FLOAT
+from sqlalchemy import Column, VARCHAR, CHAR, ForeignKey, DATETIME, update, and_, inspect, text
 from sqlalchemy.engine import Engine
-from sqlalchemy.orm import sessionmaker, declarative_base, Session, relationship
-from app.util import parse_key
-logging.basicConfig()
-logger = logging.getLogger(__name__)
-logger.setLevel(logging.INFO)
+from sqlalchemy.orm import sessionmaker, declarative_base
 Base = declarative_base()
-class Site(Base):
-    __tablename__ = "site"
-    INITIAL_SITE_KEY_XID = '00000000-0000-0000-0000-000000000000'
-    INITIAL_SITE_NAME = 'default'
-    site_key = Column(CHAR(length=36), primary_key=True, unique=True, index=True)  # uuid4, SITE_KEY_XID
-    name = Column(VARCHAR(length=256), nullable=False)
-    def __str__(self):
-        return f'SITE_KEY_XID: {self.site_key}'
-    @staticmethod
-    def create_statement(engine: Engine):
-        from sqlalchemy.schema import CreateTable
-        return CreateTable(Site.__table__).compile(engine)
-    @staticmethod
-    def get_default_site(engine: Engine) -> "Site":
-        session = sessionmaker(bind=engine)()
-        entity = session.query(Site).filter(Site.site_key == Site.INITIAL_SITE_KEY_XID).first()
-        session.close()
-        return entity
-class Instance(Base):
-    __tablename__ = "instance"
-    DEFAULT_INSTANCE_REF = '10000000-0000-0000-0000-000000000001'
-    DEFAULT_TOKEN_EXPIRE_DELTA = 86_400  # 1 day
-    DEFAULT_LEASE_EXPIRE_DELTA = 7_776_000  # 90 days
-    DEFAULT_LEASE_RENEWAL_PERIOD = 0.15
-    DEFAULT_CLIENT_TOKEN_EXPIRE_DELTA = 378_432_000  # 12 years
-    # 1 day = 86400 (min. in production setup, max 90 days), 1 hour = 3600
-    instance_ref = Column(CHAR(length=36), primary_key=True, unique=True, index=True)  # uuid4, INSTANCE_REF
-    site_key = Column(CHAR(length=36), ForeignKey(Site.site_key, ondelete='CASCADE'), nullable=False, index=True)  # uuid4
-    private_key = Column(BLOB(length=2048), nullable=False)
-    public_key = Column(BLOB(length=512), nullable=False)
-    token_expire_delta = Column(INT(), nullable=False, default=DEFAULT_TOKEN_EXPIRE_DELTA, comment='in seconds')
-    lease_expire_delta = Column(INT(), nullable=False, default=DEFAULT_LEASE_EXPIRE_DELTA, comment='in seconds')
-    lease_renewal_period = Column(FLOAT(precision=2), nullable=False, default=DEFAULT_LEASE_RENEWAL_PERIOD)
-    client_token_expire_delta = Column(INT(), nullable=False, default=DEFAULT_CLIENT_TOKEN_EXPIRE_DELTA, comment='in seconds')
-    __origin = relationship(Site, foreign_keys=[site_key])
-    def __str__(self):
-        return f'INSTANCE_REF: {self.instance_ref} (SITE_KEY_XID: {self.site_key})'
-    @staticmethod
-    def create_statement(engine: Engine):
-        from sqlalchemy.schema import CreateTable
-        return CreateTable(Instance.__table__).compile(engine)
-    @staticmethod
-    def create_or_update(engine: Engine, instance: "Instance"):
-        session = sessionmaker(bind=engine)()
-        entity = session.query(Instance).filter(Instance.instance_ref == instance.instance_ref).first()
-        if entity is None:
-            session.add(instance)
-        else:
-            x = dict(
-                site_key=instance.site_key,
-                private_key=instance.private_key,
-                public_key=instance.public_key,
-                token_expire_delta=instance.token_expire_delta,
-                lease_expire_delta=instance.lease_expire_delta,
-                lease_renewal_period=instance.lease_renewal_period,
-                client_token_expire_delta=instance.client_token_expire_delta,
-            )
-            session.execute(update(Instance).where(Instance.instance_ref == instance.instance_ref).values(**x))
-        session.commit()
-        session.flush()
-        session.close()
-    # todo: validate on startup that "lease_expire_delta" is between 1 day and 90 days
-    @staticmethod
-    def get_default_instance(engine: Engine) -> "Instance":
-        session = sessionmaker(bind=engine)()
-        site = Site.get_default_site(engine)
-        entity = session.query(Instance).filter(Instance.site_key == site.site_key).first()
-        session.close()
-        return entity
-    def get_token_expire_delta(self) -> "dateutil.relativedelta.relativedelta":
-        return relativedelta(seconds=self.token_expire_delta)
-    def get_lease_expire_delta(self) -> "dateutil.relativedelta.relativedelta":
-        return relativedelta(seconds=self.lease_expire_delta)
-    def get_lease_renewal_delta(self) -> "datetime.timedelta":
-        return timedelta(seconds=self.lease_expire_delta)
-    def get_client_token_expire_delta(self) -> "dateutil.relativedelta.relativedelta":
-        return relativedelta(seconds=self.client_token_expire_delta)
-    def __get_private_key(self) -> "RsaKey":
-        return parse_key(self.private_key)
-    def get_public_key(self) -> "RsaKey":
-        return parse_key(self.public_key)
-    def get_jwt_encode_key(self) -> "jose.jkw":
-        from jose import jwk
-        from jose.constants import ALGORITHMS
-        return jwk.construct(self.__get_private_key().export_key().decode('utf-8'), algorithm=ALGORITHMS.RS256)
-    def get_jwt_decode_key(self) -> "jose.jwt":
-        from jose import jwk
-        from jose.constants import ALGORITHMS
-        return jwk.construct(self.get_public_key().export_key().decode('utf-8'), algorithm=ALGORITHMS.RS256)
-    def get_private_key_str(self, encoding: str = 'utf-8') -> str:
-        return self.private_key.decode(encoding)
-    def get_public_key_str(self, encoding: str = 'utf-8') -> str:
-        return self.private_key.decode(encoding)
 class Origin(Base):
     __tablename__ = "origin"
     origin_ref = Column(CHAR(length=36), primary_key=True, unique=True, index=True)  # uuid4
     # service_instance_xid = Column(CHAR(length=36), nullable=False, index=True)  # uuid4  # not necessary, we only support one service_instance_xid ('INSTANCE_REF')
     hostname = Column(VARCHAR(length=256), nullable=True)
     guest_driver_version = Column(VARCHAR(length=10), nullable=True)
@@ -157,6 +32,16 @@ class Origin(Base):
             'os_version': self.os_version,
         }
+    @staticmethod
+    def deserialize(j) -> "Origin":
+        return Origin(
+            origin_ref=j.get('origin_ref'),
+            hostname=j.get('hostname'),
+            guest_driver_version=j.get('guest_driver_version'),
+            os_platform=j.get('os_platform'),
+            os_version=j.get('os_version'),
+        )
     @staticmethod
     def create_statement(engine: Engine):
         from sqlalchemy.schema import CreateTable
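
Origin.deserialize() (and Lease.deserialize() further down) is the receiving half of HA replication: the peer posts plain JSON to /-/ha/replicate, and the replica rebuilds ORM rows from it. A small round-trip sketch; all field values are made up:

```
# Sketch: JSON -> ORM round-trip as used by the replication receiver.
# The payload shape mirrors Origin.serialize(); the values are placeholders.
from app.orm import Origin

payload = {
    'origin_ref': '00000000-0000-0000-0000-000000000000',
    'hostname': 'vgpu-guest-01',
    'guest_driver_version': '525.85.05',
    'os_platform': 'Debian GNU/Linux 11',
    'os_version': '11',
}

origin = Origin.deserialize(payload)
print(origin.hostname)  # 'vgpu-guest-01'
```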
@@ -195,24 +80,18 @@ class Origin(Base):
 class Lease(Base):
     __tablename__ = "lease"
-    instance_ref = Column(CHAR(length=36), ForeignKey(Instance.instance_ref, ondelete='CASCADE'), nullable=False, index=True)  # uuid4
     lease_ref = Column(CHAR(length=36), primary_key=True, nullable=False, index=True)  # uuid4
     origin_ref = Column(CHAR(length=36), ForeignKey(Origin.origin_ref, ondelete='CASCADE'), nullable=False, index=True)  # uuid4
     # scope_ref = Column(CHAR(length=36), nullable=False, index=True)  # uuid4  # not necessary, we only support one scope_ref ('ALLOTMENT_REF')
     lease_created = Column(DATETIME(), nullable=False)
     lease_expires = Column(DATETIME(), nullable=False)
     lease_updated = Column(DATETIME(), nullable=False)
-    __instance = relationship(Instance, foreign_keys=[instance_ref])
-    __origin = relationship(Origin, foreign_keys=[origin_ref])
     def __repr__(self):
         return f'Lease(origin_ref={self.origin_ref}, lease_ref={self.lease_ref}, expires={self.lease_expires})'
-    def serialize(self) -> dict:
-        renewal_period = self.__instance.lease_renewal_period
-        renewal_delta = self.__instance.get_lease_renewal_delta
+    def serialize(self, renewal_period: float, renewal_delta: timedelta) -> dict:
         lease_renewal = int(Lease.calculate_renewal(renewal_period, renewal_delta).total_seconds())
         lease_renewal = self.lease_updated + relativedelta(seconds=lease_renewal)
@@ -226,6 +105,16 @@ class Lease(Base):
             'lease_renewal': lease_renewal.isoformat(),
         }
+    @staticmethod
+    def deserialize(j) -> "Lease":
+        return Lease(
+            lease_ref=j.get('lease_ref'),
+            origin_ref=j.get('origin_ref'),
+            lease_created=datetime.fromisoformat(j.get('lease_created')),
+            lease_expires=datetime.fromisoformat(j.get('lease_expires')),
+            lease_updated=datetime.fromisoformat(j.get('lease_updated')),
+        )
     @staticmethod
     def create_statement(engine: Engine):
         from sqlalchemy.schema import CreateTable
@@ -291,14 +180,6 @@ class Lease(Base):
         session.close()
         return deletions
-    @staticmethod
-    def delete_expired(engine: Engine) -> int:
-        session = sessionmaker(bind=engine)()
-        deletions = session.query(Lease).filter(Lease.lease_expires <= datetime.utcnow()).delete()
-        session.commit()
-        session.close()
-        return deletions
     @staticmethod
     def calculate_renewal(renewal_period: float, delta: timedelta) -> timedelta:
         """
@@ -322,104 +203,38 @@ class Lease(Base):
         return renew
-def init_default_site(session: Session):
-    from uuid import uuid4
-    from app.util import generate_key
-    private_key = generate_key()
-    public_key = private_key.public_key()
-    site = Site(
-        site_key=Site.INITIAL_SITE_KEY_XID,
-        name=Site.INITIAL_SITE_NAME
-    )
-    session.add(site)
-    session.commit()
-    instance = Instance(
-        instance_ref=Instance.DEFAULT_INSTANCE_REF,
-        site_key=site.site_key,
-        private_key=private_key.export_key(),
-        public_key=public_key.export_key(),
-    )
-    session.add(instance)
-    session.commit()
 def init(engine: Engine):
-    tables = [Site, Instance, Origin, Lease]
+    tables = [Origin, Lease]
     db = inspect(engine)
     session = sessionmaker(bind=engine)()
     for table in tables:
-        exists = db.dialect.has_table(engine.connect(), table.__tablename__)
-        logger.info(f'> Table "{table.__tablename__:<16}" exists: {exists}')
-        if not exists:
+        if not db.dialect.has_table(engine.connect(), table.__tablename__):
             session.execute(text(str(table.create_statement(engine))))
             session.commit()
-    # create default site
-    cnt = session.query(Site).count()
-    if cnt == 0:
-        init_default_site(session)
-    session.flush()
     session.close()
 def migrate(engine: Engine):
-    from os import getenv as env
-    from os.path import join, dirname, isfile
-    from util import load_key
     db = inspect(engine)
-    # todo: add update guide to use 1.LATEST to 2.0
-    def upgrade_1_x_to_2_0():
-        site = Site.get_default_site(engine)
-        logger.info(site)
-        instance = Instance.get_default_instance(engine)
-        logger.info(instance)
-        # SITE_KEY_XID
-        if site_key := env('SITE_KEY_XID', None) is not None:
-            site.site_key = str(site_key)
-        # INSTANCE_REF
-        if instance_ref := env('INSTANCE_REF', None) is not None:
-            instance.instance_ref = str(instance_ref)
-        # ALLOTMENT_REF
-        if allotment_ref := env('ALLOTMENT_REF', None) is not None:
-            pass  # todo
-        # INSTANCE_KEY_RSA, INSTANCE_KEY_PUB
-        default_instance_private_key_path = str(join(dirname(__file__), 'cert/instance.private.pem'))
-        if instance_private_key := env('INSTANCE_KEY_RSA', None) is not None:
-            instance.private_key = load_key(str(instance_private_key))
-        elif isfile(default_instance_private_key_path):
-            instance.private_key = load_key(default_instance_private_key_path)
-        default_instance_public_key_path = str(join(dirname(__file__), 'cert/instance.public.pem'))
-        if instance_public_key := env('INSTANCE_KEY_PUB', None) is not None:
-            instance.public_key = load_key(str(instance_public_key))
-        elif isfile(default_instance_public_key_path):
-            instance.public_key = load_key(default_instance_public_key_path)
-        # TOKEN_EXPIRE_DELTA
-        if token_expire_delta := env('TOKEN_EXPIRE_DAYS', None) not in (None, 0):
-            instance.token_expire_delta = token_expire_delta * 86_400
-        if token_expire_delta := env('TOKEN_EXPIRE_HOURS', None) not in (None, 0):
-            instance.token_expire_delta = token_expire_delta * 3_600
-        # LEASE_EXPIRE_DELTA, LEASE_RENEWAL_DELTA
-        if lease_expire_delta := env('LEASE_EXPIRE_DAYS', None) not in (None, 0):
-            instance.lease_expire_delta = lease_expire_delta * 86_400
-        if lease_expire_delta := env('LEASE_EXPIRE_HOURS', None) not in (None, 0):
-            instance.lease_expire_delta = lease_expire_delta * 3_600
-        # LEASE_RENEWAL_PERIOD
-        if lease_renewal_period := env('LEASE_RENEWAL_PERIOD', None) is not None:
-            instance.lease_renewal_period = lease_renewal_period
-    # todo: update site, instance
-    upgrade_1_x_to_2_0()
+    def upgrade_1_0_to_1_1():
+        x = db.dialect.get_columns(engine.connect(), Lease.__tablename__)
+        x = next(_ for _ in x if _['name'] == 'origin_ref')
+        if x['primary_key'] > 0:
+            print('Found old database schema with "origin_ref" as primary-key in "lease" table. Dropping table!')
+            print('  Your leases are recreated on next renewal!')
+            print('  If an error message appears on the client, you can ignore it.')
+            Lease.__table__.drop(bind=engine)
+            init(engine)
+    # def upgrade_1_2_to_1_3():
+    #     x = db.dialect.get_columns(engine.connect(), Lease.__tablename__)
+    #     x = next((_ for _ in x if _['name'] == 'scope_ref'), None)
+    #     if x is None:
+    #         Lease.scope_ref.compile()
+    #         column_name = Lease.scope_ref.name
+    #         column_type = Lease.scope_ref.type.compile(engine.dialect)
+    #         engine.execute(f'ALTER TABLE "{Lease.__tablename__}" ADD COLUMN "{column_name}" {column_type}')
+    upgrade_1_0_to_1_1()
    # upgrade_1_2_to_1_3()


@@ -16,18 +16,6 @@ def load_key(filename) -> "RsaKey":
     return RSA.import_key(extern_key=load_file(filename), passphrase=None)
-def parse_key(content: bytes) -> "RsaKey":
-    try:
-        # Crypto | Cryptodome on Debian
-        from Crypto.PublicKey import RSA
-        from Crypto.PublicKey.RSA import RsaKey
-    except ModuleNotFoundError:
-        from Cryptodome.PublicKey import RSA
-        from Cryptodome.PublicKey.RSA import RsaKey
-    return RSA.import_key(extern_key=content, passphrase=None)
 def generate_key() -> "RsaKey":
     try:
         # Crypto | Cryptodome on Debian
@@ -38,3 +26,29 @@ def generate_key() -> "RsaKey":
         from Cryptodome.PublicKey.RSA import RsaKey
     return RSA.generate(bits=2048)
+def ha_replicate(logger: "logging.Logger", ha_replicate: str, ha_role: str, version: str, dls_url: str, dls_port: int, site_key_xid: str, instance_ref: str, origins: list, leases: list) -> bool:
+    from datetime import datetime
+    import httpx
+    if f'{dls_url}:{dls_port}' == ha_replicate:
+        logger.error(f'Failed to replicate this node ({ha_role}) to "{ha_replicate}": can\'t replicate to itself')
+        return False
+    data = {
+        'VERSION': str(version),
+        'HA_REPLICATE': f'{dls_url}:{dls_port}',
+        'SITE_KEY_XID': str(site_key_xid),
+        'INSTANCE_REF': str(instance_ref),
+        'origins': origins,
+        'leases': leases,
+        'sync_timestamp': datetime.utcnow().isoformat(),
+    }
+    r = httpx.put(f'https://{ha_replicate}/-/ha/replicate', json=data, verify=False)
+    if r.status_code == 202:
+        logger.info(f'Successfully replicated this node ({ha_role}) to "{ha_replicate}".')
+        return True
+    logger.error(f'Failed to replicate this node ({ha_role}) to "{ha_replicate}": {r.status_code} - {r.content}')
+    return False
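
ha_replicate() is the sending half of replication: it PUTs this node's origins and leases to the peer, deliberately skipping TLS verification (verify=False). A usage sketch; the host names and identifiers are placeholders, and in a real deployment this runs periodically rather than by hand:

```
# Sketch: trigger one replication run manually (all values are placeholders).
import logging

from app.util import ha_replicate

logger = logging.getLogger('ha')

ok = ha_replicate(
    logger,
    ha_replicate='dls-standby.example.org:443',  # hypothetical peer node
    ha_role='PRIMARY',
    version='1.3.5',
    dls_url='dls-primary.example.org',
    dls_port=443,
    site_key_xid='00000000-0000-0000-0000-000000000000',
    instance_ref='10000000-0000-0000-0000-000000000001',
    origins=[],  # Origin.serialize() dicts in a real run
    leases=[],   # Lease.serialize(...) dicts in a real run
)
print('replicated' if ok else 'replication failed')
```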


@@ -1,8 +1,9 @@
-fastapi==0.97.0
-uvicorn[standard]==0.22.0
+fastapi==0.92.0
+uvicorn[standard]==0.20.0
 python-jose==3.3.0
-pycryptodome==3.18.0
+pycryptodome==3.17
 python-dateutil==2.8.2
-sqlalchemy==2.0.16
-markdown==3.4.3
-python-dotenv==1.0.0
+sqlalchemy==2.0.3
+markdown==3.4.1
+python-dotenv==0.21.1
+httpx==0.23.3


@@ -1,15 +1,14 @@
-from os import getenv as env
 from base64 import b64encode as b64enc
 from hashlib import sha256
 from calendar import timegm
 from datetime import datetime
-from uuid import UUID, uuid4
+from os.path import dirname, join
+from uuid import uuid4, UUID
 from dateutil.relativedelta import relativedelta
-from jose import jwt
+from jose import jwt, jwk
 from jose.constants import ALGORITHMS
 from starlette.testclient import TestClient
-from sqlalchemy import create_engine
 import sys
 # add relative path to use packages as they were in the app/ dir
@@ -17,23 +16,20 @@ sys.path.append('../')
 sys.path.append('../app')
 from app import main
-from app.orm import init as db_init, migrate, Site, Instance
+from app.util import load_key
+client = TestClient(main.app)
 ORIGIN_REF, ALLOTMENT_REF, SECRET = str(uuid4()), '20000000-0000-0000-0000-000000000001', 'HelloWorld'
-# fastapi setup
-client = TestClient(main.app)
-# database setup
-db = create_engine(str(env('DATABASE', 'sqlite:///db.sqlite')))
-db_init(db), migrate(db)
-# test vars
-DEFAULT_SITE, DEFAULT_INSTANCE = Site.get_default_site(db), Instance.get_default_instance(db)
-SITE_KEY = DEFAULT_SITE.site_key
-jwt_encode_key, jwt_decode_key = DEFAULT_INSTANCE.get_jwt_encode_key(), DEFAULT_INSTANCE.get_jwt_decode_key()
+# INSTANCE_KEY_RSA = generate_key()
+# INSTANCE_KEY_PUB = INSTANCE_KEY_RSA.public_key()
+INSTANCE_KEY_RSA = load_key(str(join(dirname(__file__), '../app/cert/instance.private.pem')))
+INSTANCE_KEY_PUB = load_key(str(join(dirname(__file__), '../app/cert/instance.public.pem')))
+jwt_encode_key = jwk.construct(INSTANCE_KEY_RSA.export_key().decode('utf-8'), algorithm=ALGORITHMS.RS256)
+jwt_decode_key = jwk.construct(INSTANCE_KEY_PUB.export_key().decode('utf-8'), algorithm=ALGORITHMS.RS256)
 def __bearer_token(origin_ref: str) -> str:
@@ -42,12 +38,6 @@ def __bearer_token(origin_ref: str) -> str:
     return token
-def test_initial_default_site_and_instance():
-    default_site, default_instance = Site.get_default_site(db), Instance.get_default_instance(db)
-    assert default_site.site_key == Site.INITIAL_SITE_KEY_XID
-    assert default_instance.instance_ref == Instance.DEFAULT_INSTANCE_REF
 def test_index():
     response = client.get('/')
     assert response.status_code == 200
@@ -163,7 +153,8 @@ def test_auth_v1_token():
         "kid": "00000000-0000-0000-0000-000000000000"
     }
     payload = {
-        "auth_code": jwt.encode(payload, key=jwt_encode_key, headers={'kid': payload.get('kid')}, algorithm=ALGORITHMS.RS256),
+        "auth_code": jwt.encode(payload, key=jwt_encode_key, headers={'kid': payload.get('kid')},
+                                algorithm=ALGORITHMS.RS256),
         "code_verifier": SECRET,
     }
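
The test setup above signs tokens with a JWK built from the instance private key and verifies them with the public one, mirroring the server's auth flow. A minimal round-trip sketch with placeholder claims and hypothetical paths:

```
# Sketch: sign with the private JWK, verify with the public JWK (python-jose).
from calendar import timegm
from datetime import datetime, timedelta

from jose import jwt, jwk
from jose.constants import ALGORITHMS

private_pem = open('../app/cert/instance.private.pem').read()  # hypothetical path
public_pem = open('../app/cert/instance.public.pem').read()

encode_key = jwk.construct(private_pem, algorithm=ALGORITHMS.RS256)
decode_key = jwk.construct(public_pem, algorithm=ALGORITHMS.RS256)

now = datetime.utcnow()
claims = {
    'iat': timegm(now.timetuple()),
    'exp': timegm((now + timedelta(hours=1)).timetuple()),
    'origin_ref': 'example',  # placeholder claim
}

token = jwt.encode(claims, key=encode_key, algorithm=ALGORITHMS.RS256)
assert jwt.decode(token=token, key=decode_key, algorithms=ALGORITHMS.RS256)['origin_ref'] == 'example'
```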

version.env (new file)

@@ -0,0 +1 @@
+VERSION=1.3.5