Mirror of https://gitea.publichub.eu/oscar.krause/fastapi-dls.git (synced 2026-04-06 00:45:47 +03:00)

Compare commits: 1.3.5...248c70a862 (54 commits)
| SHA1 |
|---|
| 248c70a862 |
| 39a2408d8d |
| 18807401e4 |
| 5e47ad7729 |
| 20448bc587 |
| 5e945bc43a |
| b4150fa527 |
| 38e1a1725c |
| c79636b1c2 |
| 8de9a89e56 |
| 801d1786ef |
| 7e5f8b6c8a |
| 98da86fc2e |
| 14cf6a953f |
| 6a5d3cb2f7 |
| 774a1c21a1 |
| d1a77df0e1 |
| c9c73f6cf2 |
| b216dcb3dd |
| d2e4042932 |
| 04a1ee0948 |
| c1b5f83f44 |
| 9d1422cbdf |
| 7b7f14bd82 |
| f72c0f7db3 |
| 76d8753f28 |
| 593db0e789 |
| 3d9e3cb88f |
| 995b944135 |
| e200c84345 |
| 04ff36c94d |
| 89704bc2a1 |
| 6395214fa0 |
| c8e000eb3e |
| c8e5676c01 |
| 6f11bc414c |
| 1fc5ac8378 |
| 87334fbfad |
| 0fac033657 |
| 7cd4e6fde0 |
| a22b56edbe |
| e42dc6aa86 |
| 86f703a36c |
| 71795cc7a2 |
| 4ef041bb54 |
| 88c8fb98da |
| a7b4a4b631 |
| 7ccb254cbf |
| 1d5d3b31fb |
| 7af2e02627 |
| 938fc6bd60 |
| 1b9ebb48b1 |
| 4972f00822 |
| 210a36c07f |
@@ -22,8 +22,9 @@ sha256sums=('SKIP'
            '3dc60140c08122a8ec0e7fa7f0937eb8c1288058890ba09478420fc30ce9e30c')

pkgver() {
    echo -e "VERSION=$VERSION\nCOMMIT=$CI_COMMIT_SHA" > $srcdir/$pkgname/version.env
    source $srcdir/$pkgname/version.env
    echo ${VERSION}
    echo $VERSION
}

check() {
48
.UNRAID/FastAPI-DLS.xml
Normal file
@@ -0,0 +1,48 @@
|
||||
<?xml version="1.0"?>
|
||||
<Container version="2">
|
||||
<Name>FastAPI-DLS</Name>
|
||||
<Repository>collinwebdesigns/fastapi-dls:latest</Repository>
|
||||
<Registry>https://hub.docker.com/r/collinwebdesigns/fastapi-dls</Registry>
|
||||
<Network>br0</Network>
|
||||
<MyIP></MyIP>
|
||||
<Shell>sh</Shell>
|
||||
<Privileged>false</Privileged>
|
||||
<Support/>
|
||||
<Project/>
|
||||
<Overview>Source:
|
||||
https://git.collinwebdesigns.de/oscar.krause/fastapi-dls#docker
|
||||
|
||||
Make sure you create these certificates before starting the container for the first time:
|
||||
```
|
||||
# Check https://git.collinwebdesigns.de/oscar.krause/fastapi-dls/-/tree/main/#docker for more information:
|
||||
WORKING_DIR=/mnt/user/appdata/fastapi-dls/cert
|
||||
mkdir -p $WORKING_DIR
|
||||
cd $WORKING_DIR
|
||||
# create instance private and public key for signing JWTs
|
||||
openssl genrsa -out $WORKING_DIR/instance.private.pem 2048
|
||||
openssl rsa -in $WORKING_DIR/instance.private.pem -outform PEM -pubout -out $WORKING_DIR/instance.public.pem
|
||||
# create ssl certificate for integrated webserver (uvicorn) - because clients rely on ssl
|
||||
openssl req -x509 -nodes -days 3650 -newkey rsa:2048 -keyout $WORKING_DIR/webserver.key -out $WORKING_DIR/webserver.crt
|
||||
```
|
||||
</Overview>
|
||||
<Category/>
|
||||
<WebUI>https://[IP]:[PORT:443]</WebUI>
|
||||
<TemplateURL/>
|
||||
<Icon>https://git.collinwebdesigns.de/uploads/-/system/project/avatar/106/png-transparent-nvidia-grid-logo-business-nvidia-electronics-text-trademark.png?width=64</Icon>
|
||||
<ExtraParams>--restart always</ExtraParams>
|
||||
<PostArgs/>
|
||||
<CPUset/>
|
||||
<DateInstalled>1679161568</DateInstalled>
|
||||
<DonateText/>
|
||||
<DonateLink/>
|
||||
<Requires/>
|
||||
<Config Name="HTTPS Port" Target="" Default="443" Mode="tcp" Description="Same as DLS Port below." Type="Port" Display="always-hide" Required="true" Mask="false">443</Config>
|
||||
<Config Name="App Cert" Target="/app/cert" Default="/mnt/user/appdata/fastapi-dls/cert" Mode="rw" Description="[REQUIRED] Read the description above to make this folder. You do not need to change the path." Type="Path" Display="always-hide" Required="true" Mask="false">/mnt/user/appdata/fastapi-dls/cert</Config>
|
||||
<Config Name="DLS Port" Target="DSL_PORT" Default="443" Mode="" Description="Choose port you want to use. Make sure to change the HTTPS port above to match it." Type="Variable" Display="always-hide" Required="true" Mask="false">443</Config>
|
||||
<Config Name="App database" Target="/app/database" Default="/mnt/user/appdata/fastapi-dls/data" Mode="rw" Description="[REQUIRED] Read the description above to make this folder. You do not need to change the path." Type="Path" Display="always-hide" Required="true" Mask="false">/mnt/user/appdata/fastapi-dls/data</Config>
|
||||
<Config Name="DSL IP" Target="DLS_URL" Default="localhost" Mode="" Description="Put your container's IP (or your host's IP if it's shared)." Type="Variable" Display="always-hide" Required="true" Mask="false"></Config>
|
||||
<Config Name="Time Zone" Target="TZ" Default="" Mode="" Description="Format example: America/New_York. MUST MATCH YOUR CURRENT TIMEZONE AND THE GUEST VMS TIMEZONE! Otherwise you'll get into issues, read the guide above." Type="Variable" Display="always-hide" Required="true" Mask="false"></Config>
|
||||
<Config Name="Database" Target="DATABASE" Default="sqlite:////app/database/db.sqlite" Mode="" Description="Set to sqlite:////app/database/db.sqlite" Type="Variable" Display="advanced-hide" Required="true" Mask="false">sqlite:////app/database/db.sqlite</Config>
|
||||
<Config Name="Debug" Target="DEBUG" Default="true" Mode="" Description="true to enable debugging, false to disable them." Type="Variable" Display="advanced-hide" Required="false" Mask="false">true</Config>
|
||||
<Config Name="Lease" Target="LEASE_EXPIRE_DAYS" Default="90" Mode="" Description="90 days is the maximum value." Type="Variable" Display="advanced" Required="false" Mask="false">90</Config>
|
||||
</Container>
|
||||
197
.UNRAID/setup_vgpu_license.sh
Normal file
@@ -0,0 +1,197 @@
|
||||
#!/bin/bash
|
||||
|
||||
# This script automates the licensing of the vGPU guest driver
|
||||
# on Unraid boot. Set the Schedule to: "At Startup of Array".
|
||||
#
|
||||
# Relies on FastAPI-DLS for the licensing.
|
||||
# It assumes FeatureType=1 (vGPU), change it as you see fit in line <114>
|
||||
#
|
||||
# Requires `elfutils` to be installed in the system for `nvidia-gridd` to run
|
||||
# To Install it:
|
||||
# 1) You might find it here: https://packages.slackware.com/ (choose the 64bit version of Slackware)
|
||||
# 2) Download the package and put it in /boot/extra to be installed on boot
|
||||
# 3) a. Reboot to install it, OR
|
||||
# b. Run `upgradepkg --install-new /boot/extra/elfutils*`
|
||||
# [i]: Make sure to have only one version of elfutils, otherwise you might run into issues
|
||||
|
||||
# Sources and docs:
|
||||
# https://docs.nvidia.com/grid/15.0/grid-vgpu-user-guide/index.html#configuring-nls-licensed-client-on-linux
|
||||
#
|
||||
|
||||
################################################
|
||||
# MAKE SURE YOU CHANGE THESE VARIABLES #
|
||||
################################################
|
||||
|
||||
###### CHANGE ME!
|
||||
# IP and PORT of FastAPI-DLS
|
||||
DLS_IP=192.168.0.123
|
||||
DLS_PORT=443
|
||||
# Token folder, must be on a filesystem that supports
|
||||
# Linux filesystem permissions (e.g. ext4, xfs, btrfs...)
|
||||
TOKEN_PATH=/mnt/user/system/nvidia
|
||||
PING=$(which ping)
|
||||
|
||||
# Check if the License is applied
|
||||
if [[ "$(nvidia-smi -q | grep "Expiry")" == *Expiry* ]]; then
|
||||
echo " [i] Your vGPU Guest drivers are already licensed."
|
||||
echo " [i] $(nvidia-smi -q | grep "Expiry")"
|
||||
echo " [<] Exiting..."
|
||||
exit 0
|
||||
fi
|
||||
|
||||
# Check if the FastAPI-DLS server is reachable
|
||||
MAX_RETRIES=30
|
||||
for i in $(seq 1 $MAX_RETRIES); do
|
||||
echo -ne "\r [>] Attempt $i to connect to $DLS_IP."
|
||||
if ping -c 1 $DLS_IP >/dev/null 2>&1; then
|
||||
echo -e "\n [*] Connection successful."
|
||||
break
|
||||
fi
|
||||
if [ $i -eq $MAX_RETRIES ]; then
|
||||
echo -e "\n [!] Connection failed after $MAX_RETRIES attempts."
|
||||
echo -e "\n [<] Exiting..."
|
||||
exit 1
|
||||
fi
|
||||
sleep 1
|
||||
done
|
||||
|
||||
# Check if the token folder exists
|
||||
if [ -d "${TOKEN_PATH}" ]; then
|
||||
echo " [*] Token Folder exists. Proceeding..."
|
||||
else
|
||||
echo " [!] Token Folder does not exists or not ready yet. Exiting."
|
||||
echo " [!] Token Folder Specified: ${TOKEN_PATH}"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Check if elfutils is installed, otherwise the nvidia-gridd service
# won't start
|
||||
if [ "$(grep -R "elfutils" /var/log/packages/* | wc -l)" != 0 ]; then
|
||||
echo " [*] Elfutils is installed, proceeding..."
|
||||
else
|
||||
echo " [!] Elfutils is not installed, downloading and installing..."
|
||||
echo " [!] Downloading elfutils to /boot/extra"
|
||||
echo " [i] This script will download elfutils from slackware64-15.0 repository."
|
||||
echo " [i] If you have a different version of Unraid (6.11.5), you might want to"
|
||||
echo " [i] download and install a suitable version manually from the slackware"
|
||||
echo " [i] repository, and put it in /boot/extra to be install on boot."
|
||||
echo " [i] You may also install it by running: "
|
||||
echo " [i] upgradepkg --install-new /path/to/elfutils-*.txz"
|
||||
echo ""
|
||||
echo " [>] Downloading elfutils from slackware64-15.0 repository:"
|
||||
wget -q -nc --show-progress --progress=bar:force:noscroll -P /boot/extra https://slackware.uk/slackware/slackware64-15.0/slackware64/l/elfutils-0.186-x86_64-1.txz 2>/dev/null \
|
||||
|| { echo " [!] Error while downloading elfutils, please download it and install it manually."; exit 1; }
|
||||
echo ""
|
||||
if upgradepkg --install-new /boot/extra/elfutils-0.186-x86_64-1.txz
|
||||
then
|
||||
echo " [*] Elfutils installed and will be installed automatically on boot"
|
||||
else
|
||||
echo " [!] Error while installing, check logs..."
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
|
||||
echo " [~] Sleeping for 60 seconds before continuing..."
|
||||
echo " [i] The script is waiting until the boot process settles down."
|
||||
|
||||
for i in {60..1}; do
|
||||
printf "\r [~] %d seconds remaining" "$i"
|
||||
sleep 1
|
||||
done
|
||||
|
||||
printf "\n"
|
||||
|
||||
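# create_token(): download a fresh client-configuration token from the FastAPI-DLS server
# (via wget; a curl alternative is kept commented out below) and store it as a timestamped
# .tok file in ${TOKEN_PATH} with mode 744.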
create_token () {
|
||||
echo " [>] Creating new token..."
|
||||
if ${PING} -c1 ${DLS_IP} > /dev/null 2>&1
|
||||
then
|
||||
# curl --insecure -L -X GET https://${DLS_IP}:${DLS_PORT}/-/client-token -o ${TOKEN_PATH}/client_configuration_token_"$(date '+%d-%m-%Y-%H-%M-%S')".tok || { echo " [!] Could not get the token, please check the server."; exit 1;}
|
||||
wget -q -nc -4c --no-check-certificate --show-progress --progress=bar:force:noscroll -O "${TOKEN_PATH}"/client_configuration_token_"$(date '+%d-%m-%Y-%H-%M-%S')".tok https://${DLS_IP}:${DLS_PORT}/-/client-token \
|
||||
|| { echo " [!] Could not get the token, please check the server."; exit 1;}
|
||||
chmod 744 "${TOKEN_PATH}"/*.tok || { echo " [!] Could not chmod the tokens."; exit 1; }
|
||||
echo ""
|
||||
echo " [*] Token downloaded and stored in ${TOKEN_PATH}."
|
||||
else
|
||||
echo " [!] Could not get token, DLS server unavailable ."
|
||||
exit 1
|
||||
fi
|
||||
}
|
||||
|
||||
setup_run () {
|
||||
echo " [>] Setting up gridd.conf"
|
||||
cp /etc/nvidia/gridd.conf.template /etc/nvidia/gridd.conf || { echo " [!] Error configuring gridd.conf, did you install the drivers correctly?"; exit 1; }
|
||||
sed -i 's/FeatureType=0/FeatureType=1/g' /etc/nvidia/gridd.conf
|
||||
echo "ClientConfigTokenPath=${TOKEN_PATH}" >> /etc/nvidia/gridd.conf
|
||||
echo " [>] Creating /var/lib/nvidia folder structure"
|
||||
mkdir -p /var/lib/nvidia/GridLicensing
|
||||
echo " [>] Starting nvidia-gridd"
|
||||
if pgrep nvidia-gridd >/dev/null 2>&1; then
|
||||
echo " [!] nvidia-gridd service is running. Closing."
|
||||
sh /usr/lib/nvidia/sysv/nvidia-gridd stop
|
||||
stop_exit_code=$?
|
||||
if [ $stop_exit_code -eq 0 ]; then
|
||||
echo " [*] nvidia-gridd service stopped successfully."
|
||||
else
|
||||
echo " [!] Error while stopping nvidia-gridd service."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Kill the service if it does not close
|
||||
if pgrep nvidia-gridd >/dev/null 2>&1; then
|
||||
kill -9 "$(pgrep nvidia-gridd)" || {
|
||||
echo " [!] Error while closing nvidia-gridd service"
|
||||
exit 1
|
||||
}
|
||||
fi
|
||||
|
||||
echo " [*] Restarting nvidia-gridd service."
|
||||
sh /usr/lib/nvidia/sysv/nvidia-gridd start
|
||||
|
||||
if pgrep nvidia-gridd >/dev/null 2>&1; then
|
||||
echo " [*] Service started, PID: $(pgrep nvidia-gridd)"
|
||||
else
|
||||
echo -e " [!] Error while starting nvidia-gridd service. Use strace -f nvidia-gridd to debug.\n [i] Check if elfutils is installed.\n [i] strace is not installed by default."
|
||||
exit 1
|
||||
fi
|
||||
else
|
||||
sh /usr/lib/nvidia/sysv/nvidia-gridd start
|
||||
|
||||
if pgrep nvidia-gridd >/dev/null 2>&1; then
|
||||
echo " [*] Service started, PID: $(pgrep nvidia-gridd)"
|
||||
else
|
||||
echo -e " [!] Error while starting nvidia-gridd service. Use strace -f nvidia-gridd to debug.\n [i] Check if elfutils is installed.\n [i] strace is not installed by default."
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
}
|
||||
|
||||
for token in "${TOKEN_PATH}"/*; do
|
||||
if [ "${token: -4}" == ".tok" ]
|
||||
then
|
||||
echo " [*] Tokens found..."
|
||||
setup_run
|
||||
else
|
||||
echo " [!] No Tokens found..."
|
||||
create_token
|
||||
setup_run
|
||||
fi
|
||||
done
|
||||
|
||||
while true; do
|
||||
if nvidia-smi -q | grep "Expiry" >/dev/null 2>&1; then
|
||||
echo " [>] vGPU licensed!"
|
||||
echo " [i] $(nvidia-smi -q | grep "Expiry")"
|
||||
break
|
||||
else
|
||||
echo -ne " [>] vGPU not licensed yet... Checking again in 5 seconds\c"
|
||||
for i in {1..5}; do
|
||||
sleep 1
|
||||
echo -ne ".\c"
|
||||
done
|
||||
echo -ne "\r\c"
|
||||
fi
|
||||
done
|
||||
|
||||
echo " [>] Done..."
|
||||
exit 0
|
||||
152
.gitlab-ci.yml
@@ -1,3 +1,10 @@
|
||||
include:
|
||||
- template: Jobs/Code-Quality.gitlab-ci.yml
|
||||
- template: Jobs/Secret-Detection.gitlab-ci.yml
|
||||
- template: Jobs/SAST.gitlab-ci.yml
|
||||
- template: Jobs/Container-Scanning.gitlab-ci.yml
|
||||
- template: Jobs/Dependency-Scanning.gitlab-ci.yml
|
||||
|
||||
cache:
|
||||
key: one-key-to-rule-them-all
|
||||
|
||||
@@ -13,26 +20,38 @@ build:docker:
|
||||
- if: $CI_PIPELINE_SOURCE == 'merge_request_event'
|
||||
tags: [ docker ]
|
||||
before_script:
|
||||
- echo "COMMIT=${CI_COMMIT_SHA}" >> version.env # COMMIT=`git rev-parse HEAD`
|
||||
- docker buildx inspect
|
||||
- docker buildx create --use
|
||||
script:
|
||||
- docker login -u $CI_REGISTRY_USER -p $CI_REGISTRY_PASSWORD $CI_REGISTRY
|
||||
- docker build . --tag ${CI_REGISTRY}/${CI_PROJECT_PATH}/${CI_BUILD_REF_NAME}:${CI_BUILD_REF}
|
||||
- docker push ${CI_REGISTRY}/${CI_PROJECT_PATH}/${CI_BUILD_REF_NAME}:${CI_BUILD_REF}
|
||||
- IMAGE=$CI_REGISTRY/$CI_PROJECT_PATH/$CI_COMMIT_REF_NAME:$CI_COMMIT_SHA
|
||||
- docker buildx build --progress=plain --platform linux/amd64,linux/arm64 --build-arg VERSION=$CI_COMMIT_REF_NAME --build-arg COMMIT=$CI_COMMIT_SHA --tag $IMAGE --push .
|
||||
- docker buildx imagetools inspect $IMAGE
|
||||
- echo "CS_IMAGE=$IMAGE" > container_scanning.env
|
||||
artifacts:
|
||||
reports:
|
||||
dotenv: container_scanning.env
|
||||
|
||||
build:apt:
|
||||
image: debian:bookworm-slim
|
||||
interruptible: true
|
||||
stage: build
|
||||
rules:
|
||||
- if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
|
||||
- if: $CI_COMMIT_TAG
|
||||
variables:
|
||||
VERSION: $CI_COMMIT_REF_NAME
|
||||
- if: $CI_COMMIT_BRANCH && $CI_COMMIT_BRANCH != $CI_DEFAULT_BRANCH
|
||||
changes:
|
||||
- app/**/*
|
||||
- .DEBIAN/**/*
|
||||
- .gitlab-ci.yml
|
||||
variables:
|
||||
VERSION: "0.0.1"
|
||||
- if: $CI_PIPELINE_SOURCE == 'merge_request_event'
|
||||
variables:
|
||||
VERSION: "0.0.1"
|
||||
before_script:
|
||||
- echo "COMMIT=${CI_COMMIT_SHA}" >> version.env
|
||||
- source version.env
|
||||
- echo -e "VERSION=$VERSION\nCOMMIT=$CI_COMMIT_SHA" > version.env
|
||||
# install build dependencies
|
||||
- apt-get update -qq && apt-get install -qq -y build-essential
|
||||
# create build directory for .deb sources
|
||||
@@ -53,7 +72,7 @@ build:apt:
|
||||
# cd into "build/"
|
||||
- cd build/
|
||||
script:
|
||||
# set version based on value in "$VERSION" (which is set above from version.env)
|
||||
# set version based on value in "$CI_COMMIT_REF_NAME"
|
||||
- sed -i -E 's/(Version\:\s)0.0/\1'"$VERSION"'/g' DEBIAN/control
|
||||
# build
|
||||
- dpkg -b . build.deb
|
||||
@@ -68,14 +87,21 @@ build:pacman:
|
||||
interruptible: true
|
||||
stage: build
|
||||
rules:
|
||||
- if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
|
||||
- if: $CI_COMMIT_TAG
|
||||
variables:
|
||||
VERSION: $CI_COMMIT_REF_NAME
|
||||
- if: $CI_COMMIT_BRANCH && $CI_COMMIT_BRANCH != $CI_DEFAULT_BRANCH
|
||||
changes:
|
||||
- app/**/*
|
||||
- .PKGBUILD/**/*
|
||||
- .gitlab-ci.yml
|
||||
variables:
|
||||
VERSION: "0.0.1"
|
||||
- if: $CI_PIPELINE_SOURCE == 'merge_request_event'
|
||||
variables:
|
||||
VERSION: "0.0.1"
|
||||
before_script:
|
||||
- echo "COMMIT=${CI_COMMIT_SHA}" >> version.env
|
||||
#- echo -e "VERSION=$VERSION\nCOMMIT=$CI_COMMIT_SHA" > version.env
|
||||
# install build dependencies
|
||||
- pacman -Syu --noconfirm git
|
||||
# create a build-user because "makepkg" doesn't like the root user
|
||||
@@ -90,7 +116,7 @@ build:pacman:
|
||||
# download dependencies
|
||||
- source PKGBUILD && pacman -Syu --noconfirm --needed --asdeps "${makedepends[@]}" "${depends[@]}"
|
||||
# build
|
||||
- sudo -u build makepkg -s
|
||||
- sudo --preserve-env -u build makepkg -s
|
||||
artifacts:
|
||||
expire_in: 1 week
|
||||
paths:
|
||||
@@ -101,6 +127,7 @@ test:
|
||||
stage: test
|
||||
rules:
|
||||
- if: $CI_COMMIT_BRANCH
|
||||
- if: $CI_COMMIT_TAG
|
||||
- if: $CI_PIPELINE_SOURCE == "merge_request_event"
|
||||
variables:
|
||||
DATABASE: sqlite:///../app/db.sqlite
|
||||
@@ -112,10 +139,11 @@ test:
|
||||
- openssl rsa -in app/cert/instance.private.pem -outform PEM -pubout -out app/cert/instance.public.pem
|
||||
- cd test
|
||||
script:
|
||||
- pytest main.py
|
||||
- python -m pytest main.py --junitxml=report.xml
|
||||
artifacts:
|
||||
reports:
|
||||
dotenv: version.env
|
||||
junit: ['**/report.xml']
|
||||
|
||||
.test:linux:
|
||||
stage: test
|
||||
@@ -179,42 +207,86 @@ test:archlinux:
|
||||
- pacman -Sy
|
||||
- pacman -U --noconfirm *.pkg.tar.zst
|
||||
|
||||
code_quality:
|
||||
rules:
|
||||
- if: $CODE_QUALITY_DISABLED
|
||||
when: never
|
||||
- if: $CI_PIPELINE_SOURCE == "merge_request_event"
|
||||
|
||||
secret_detection:
|
||||
rules:
|
||||
- if: $SECRET_DETECTION_DISABLED
|
||||
when: never
|
||||
- if: $CI_PIPELINE_SOURCE == "merge_request_event"
|
||||
before_script:
|
||||
- git config --global --add safe.directory $CI_PROJECT_DIR
|
||||
|
||||
semgrep-sast:
|
||||
rules:
|
||||
- if: $SAST_DISABLED
|
||||
when: never
|
||||
- if: $CI_PIPELINE_SOURCE == "merge_request_event"
|
||||
|
||||
test_coverage:
|
||||
extends: test
|
||||
allow_failure: true
|
||||
rules:
|
||||
- if: $CI_PIPELINE_SOURCE == "merge_request_event"
|
||||
script:
|
||||
- pip install pytest pytest-cov
|
||||
- coverage run -m pytest main.py
|
||||
- coverage report
|
||||
- coverage xml
|
||||
coverage: '/(?i)total.*? (100(?:\.0+)?\%|[1-9]?\d(?:\.\d+)?\%)$/'
|
||||
artifacts:
|
||||
reports:
|
||||
coverage_report:
|
||||
coverage_format: cobertura
|
||||
path: '**/coverage.xml'
|
||||
|
||||
container_scanning:
|
||||
dependencies: [ build:docker ]
|
||||
rules:
|
||||
- if: $CONTAINER_SCANNING_DISABLED
|
||||
when: never
|
||||
- if: $CI_PIPELINE_SOURCE == "merge_request_event"
|
||||
|
||||
gemnasium-python-dependency_scanning:
|
||||
rules:
|
||||
- if: $DEPENDENCY_SCANNING_DISABLED
|
||||
when: never
|
||||
- if: $CI_PIPELINE_SOURCE == "merge_request_event"
|
||||
|
||||
.deploy:
|
||||
rules:
|
||||
- if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
|
||||
- if: $CI_COMMIT_TAG
|
||||
when: never
|
||||
|
||||
deploy:docker:
|
||||
extends: .deploy
|
||||
stage: deploy
|
||||
rules:
|
||||
- if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
|
||||
before_script:
|
||||
- echo "COMMIT=${CI_COMMIT_SHA}" >> version.env
|
||||
- source version.env
|
||||
- echo "Building docker image for commit ${COMMIT} with version ${VERSION}"
|
||||
- echo "Building docker image for commit $CI_COMMIT_SHA with version $CI_COMMIT_REF_NAME"
|
||||
script:
|
||||
- echo "GitLab-Registry"
|
||||
- echo "========== GitLab-Registry =========="
|
||||
- docker login -u $CI_REGISTRY_USER -p $CI_REGISTRY_PASSWORD $CI_REGISTRY
|
||||
- docker build . --tag ${CI_REGISTRY}/${CI_PROJECT_PATH}/${CI_BUILD_REF_NAME}:${VERSION}
|
||||
- docker build . --tag ${CI_REGISTRY}/${CI_PROJECT_PATH}/${CI_BUILD_REF_NAME}:latest
|
||||
- docker push ${CI_REGISTRY}/${CI_PROJECT_PATH}/${CI_BUILD_REF_NAME}:${VERSION}
|
||||
- docker push ${CI_REGISTRY}/${CI_PROJECT_PATH}/${CI_BUILD_REF_NAME}:latest
|
||||
- echo "Docker-Hub"
|
||||
- IMAGE=$CI_REGISTRY/$CI_PROJECT_PATH/$CI_COMMIT_REF_NAME
|
||||
- docker build . --build-arg VERSION=$CI_COMMIT_REF_NAME --build-arg COMMIT=$CI_COMMIT_SHA --tag $IMAGE:$CI_COMMIT_REF_NAME
|
||||
- docker build . --build-arg VERSION=$CI_COMMIT_REF_NAME --build-arg COMMIT=$CI_COMMIT_SHA --tag $IMAGE:latest
|
||||
- docker push $IMAGE:$CI_COMMIT_REF_NAME
|
||||
- docker push $IMAGE:latest
|
||||
- echo "========== Docker-Hub =========="
|
||||
- docker login -u $PUBLIC_REGISTRY_USER -p $PUBLIC_REGISTRY_TOKEN
|
||||
- docker build . --tag $PUBLIC_REGISTRY_USER/${CI_PROJECT_NAME}:${VERSION}
|
||||
- docker build . --tag $PUBLIC_REGISTRY_USER/${CI_PROJECT_NAME}:latest
|
||||
- docker push $PUBLIC_REGISTRY_USER/${CI_PROJECT_NAME}:${VERSION}
|
||||
- docker push $PUBLIC_REGISTRY_USER/${CI_PROJECT_NAME}:latest
|
||||
- IMAGE=$PUBLIC_REGISTRY_USER/$CI_PROJECT_NAME
|
||||
- docker build . --build-arg VERSION=$CI_COMMIT_REF_NAME --build-arg COMMIT=$CI_COMMIT_SHA --tag $IMAGE:$CI_COMMIT_REF_NAME
|
||||
- docker build . --build-arg VERSION=$CI_COMMIT_REF_NAME --build-arg COMMIT=$CI_COMMIT_SHA --tag $IMAGE:latest
|
||||
- docker push $IMAGE:$CI_COMMIT_REF_NAME
|
||||
- docker push $IMAGE:latest
|
||||
|
||||
deploy:apt:
|
||||
# doc: https://git.collinwebdesigns.de/help/user/packages/debian_repository/index.md#install-a-package
|
||||
extends: .deploy
|
||||
image: debian:bookworm-slim
|
||||
stage: deploy
|
||||
rules:
|
||||
- if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
|
||||
needs:
|
||||
- job: build:apt
|
||||
artifacts: true
|
||||
@@ -254,8 +326,6 @@ deploy:pacman:
|
||||
extends: .deploy
|
||||
image: archlinux:base-devel
|
||||
stage: deploy
|
||||
rules:
|
||||
- if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
|
||||
needs:
|
||||
- job: build:pacman
|
||||
artifacts: true
|
||||
@@ -263,9 +333,9 @@ deploy:pacman:
|
||||
- source .PKGBUILD/PKGBUILD
|
||||
- source version.env
|
||||
# fastapi-dls-1.0-1-any.pkg.tar.zst
|
||||
- BUILD_NAME=${pkgname}-${VERSION}-${pkgrel}-any.pkg.tar.zst
|
||||
- BUILD_NAME=${pkgname}-${CI_COMMIT_REF_NAME}-${pkgrel}-any.pkg.tar.zst
|
||||
- PACKAGE_NAME=${pkgname}
|
||||
- PACKAGE_VERSION=${VERSION}
|
||||
- PACKAGE_VERSION=${CI_COMMIT_REF_NAME}
|
||||
- PACKAGE_ARCH=any
|
||||
- EXPORT_NAME=${BUILD_NAME}
|
||||
- 'echo "PACKAGE_NAME: ${PACKAGE_NAME}"'
|
||||
@@ -277,19 +347,15 @@ deploy:pacman:
|
||||
release:
|
||||
image: registry.gitlab.com/gitlab-org/release-cli:latest
|
||||
stage: .post
|
||||
needs:
|
||||
- job: test
|
||||
artifacts: true
|
||||
needs: [ test ]
|
||||
rules:
|
||||
- if: $CI_COMMIT_TAG
|
||||
when: never
|
||||
- if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
|
||||
script:
|
||||
- echo "Running release-job for $VERSION"
|
||||
- echo "Running release-job for $CI_COMMIT_TAG"
|
||||
release:
|
||||
name: $CI_PROJECT_TITLE $VERSION
|
||||
description: Release of $CI_PROJECT_TITLE version $VERSION
|
||||
tag_name: $VERSION
|
||||
name: $CI_PROJECT_TITLE $CI_COMMIT_TAG
|
||||
description: Release of $CI_PROJECT_TITLE version $CI_COMMIT_TAG
|
||||
tag_name: $CI_COMMIT_TAG
|
||||
ref: $CI_COMMIT_SHA
|
||||
assets:
|
||||
links:
|
||||
|
||||
@@ -1,5 +1,9 @@
|
||||
FROM python:3.11-alpine
|
||||
|
||||
ARG VERSION
|
||||
ARG COMMIT=""
|
||||
RUN echo -e "VERSION=$VERSION\nCOMMIT=$COMMIT" > /version.env
|
||||
|
||||
COPY requirements.txt /tmp/requirements.txt
|
||||
|
||||
RUN apk update \
|
||||
@@ -11,7 +15,6 @@ RUN apk update \
|
||||
&& apk del build-deps
|
||||
|
||||
COPY app /app
|
||||
COPY version.env /version.env
|
||||
COPY README.md /README.md
|
||||
|
||||
HEALTHCHECK --start-period=30s --interval=10s --timeout=5s --retries=3 CMD curl --insecure --fail https://localhost/-/health || exit 1
|
||||
|
||||
66
README.md
@@ -9,9 +9,9 @@ Only the clients need a connection to this service on configured port.
|
||||
|
||||
**Official Links**
|
||||
|
||||
- https://git.collinwebdesigns.de/oscar.krause/fastapi-dls
|
||||
- https://gitea.publichub.eu/oscar.krause/fastapi-dls
|
||||
- Docker Image `collinwebdesigns/fastapi-dls:latest`
|
||||
- https://git.collinwebdesigns.de/oscar.krause/fastapi-dls (Private Git)
|
||||
- https://gitea.publichub.eu/oscar.krause/fastapi-dls (Public Git)
|
||||
- https://hub.docker.com/r/collinwebdesigns/fastapi-dls (Docker-Hub `collinwebdesigns/fastapi-dls:latest`)
|
||||
|
||||
*All other repositories are forks! (which is not bad - just for information and bug reports)*
|
||||
|
||||
@@ -39,6 +39,8 @@ Docker-Images are available here:
|
||||
- [Docker-Hub](https://hub.docker.com/repository/docker/collinwebdesigns/fastapi-dls): `collinwebdesigns/fastapi-dls:latest`
|
||||
- [GitLab-Registry](https://git.collinwebdesigns.de/oscar.krause/fastapi-dls/container_registry): `registry.git.collinwebdesigns.de/oscar.krause/fastapi-dls/main:latest`
|
||||
|
||||
The images include database drivers for `postgres`, `mysql`, `mariadb` and `sqlite`.
|
||||
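For reference, `DATABASE` takes an SQLAlchemy-style connection URL (it is passed straight to `create_engine` in `app/main.py`). The values below are only illustrative sketches: hosts and credentials are placeholders, and the exact driver prefix depends on the drivers shipped in the image.

```shell
# sqlite (default, file-backed)
DATABASE=sqlite:////app/database/db.sqlite
# postgres (placeholder credentials/host)
DATABASE=postgresql://dls:changeme@db.example.local/dls
# mariadb/mysql (placeholder credentials/host; driver prefix may differ)
DATABASE=mysql://dls:changeme@db.example.local/dls
```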
|
||||
**Run this on the Docker-Host**
|
||||
|
||||
```shell
|
||||
@@ -143,9 +145,9 @@ This is only to test whether the service starts successfully.
|
||||
|
||||
```shell
|
||||
cd /opt/fastapi-dls/app
|
||||
su - www-data -c "/opt/fastapi-dls/venv/bin/uvicorn main:app --app-dir=/opt/fastapi-dls/app"
|
||||
sudo -u www-data /opt/fastapi-dls/venv/bin/uvicorn main:app --app-dir=/opt/fastapi-dls/app
|
||||
# or
|
||||
sudo -u www-data -c "/opt/fastapi-dls/venv/bin/uvicorn main:app --app-dir=/opt/fastapi-dls/app"
|
||||
su - www-data -c "/opt/fastapi-dls/venv/bin/uvicorn main:app --app-dir=/opt/fastapi-dls/app"
|
||||
```
|
||||
|
||||
**Create config file**
|
||||
@@ -245,6 +247,8 @@ This is only to test whether the service starts successfully.
|
||||
BASE_DIR=/opt/fastapi-dls
|
||||
SERVICE_USER=dls
|
||||
cd ${BASE_DIR}
|
||||
sudo -u ${SERVICE_USER} ${BASE_DIR}/venv/bin/uvicorn main:app --app-dir=${BASE_DIR}/app
|
||||
# or
|
||||
su - ${SERVICE_USER} -c "${BASE_DIR}/venv/bin/uvicorn main:app --app-dir=${BASE_DIR}/app"
|
||||
```
|
||||
|
||||
@@ -306,7 +310,7 @@ Packages are available here:
|
||||
|
||||
Successfully tested with:
|
||||
|
||||
- Debian 12 (Bookworm) (works but not recommended because it is currently in *testing* state)
|
||||
- Debian 12 (Bookworm)
|
||||
- Ubuntu 22.10 (Kinetic Kudu)
|
||||
|
||||
Not working with:
|
||||
@@ -350,6 +354,19 @@ pacman -U --noconfirm fastapi-dls.pkg.tar.zst
|
||||
|
||||
Start with `systemctl start fastapi-dls.service` and enable autostart with `systemctl enable fastapi-dls.service`.
|
||||
|
||||
## unRAID
|
||||
|
||||
1. Download [this xml file](.UNRAID/FastAPI-DLS.xml)
|
||||
2. Put it in /boot/config/plugins/dockerMan/templates-user/
|
||||
3. Go to Docker page, scroll down to `Add Container`, click on Template list and choose `FastAPI-DLS`
|
||||
4. Open a terminal/SSH session and follow the instructions in the overview description
|
||||
5. Set up your container `IP`, `Port`, `DLS_URL` and `DLS_PORT`
|
||||
6. Apply and let it boot up (the template settings roughly correspond to the `docker run` sketch after this list)
|
||||
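For reference, the template's fields map roughly onto the following `docker run` command. This is only a sketch: the IP, ports and paths mirror the example values used in the template description and elsewhere in this README, so adjust them to your setup.

```shell
# sketch only: values mirror the unRAID template defaults, adjust to your environment
docker run -d --restart always --name FastAPI-DLS \
  -e DLS_URL=192.168.0.123 -e DLS_PORT=443 \
  -e LEASE_EXPIRE_DAYS=90 \
  -e DATABASE=sqlite:////app/database/db.sqlite \
  -v /mnt/user/appdata/fastapi-dls/cert:/app/cert \
  -v /mnt/user/appdata/fastapi-dls/data:/app/database \
  -p 443:443 \
  collinwebdesigns/fastapi-dls:latest
```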
|
||||
*Unraid users must also make sure they have "Host access to custom networks" enabled if Unraid itself is the vGPU guest.*
|
||||
|
||||
Continue [here](#unraid-guest) for docker guest setup.
|
||||
|
||||
## Let's Encrypt Certificate (optional)
|
||||
|
||||
If you installed via docker, you can use `traefik`. Please refer to their documentation.
|
||||
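As a rough illustration only (not part of this project's documentation), the container could be attached to an existing traefik v2 instance via labels; the hostname, entrypoint, certificate-resolver and network names below are placeholders for whatever your traefik setup uses.

```shell
# sketch only: assumes an existing traefik v2 instance watching the "proxy" network,
# with an entrypoint "websecure" and a certificate resolver named "letsencrypt"
docker run -d --name fastapi-dls --network proxy \
  -v /opt/fastapi-dls/cert:/app/cert \
  -e DLS_URL=dls.example.org -e DLS_PORT=443 \
  -l 'traefik.enable=true' \
  -l 'traefik.http.routers.dls.rule=Host(`dls.example.org`)' \
  -l 'traefik.http.routers.dls.entrypoints=websecure' \
  -l 'traefik.http.routers.dls.tls.certresolver=letsencrypt' \
  -l 'traefik.http.services.dls.loadbalancer.server.port=443' \
  -l 'traefik.http.services.dls.loadbalancer.server.scheme=https' \
  collinwebdesigns/fastapi-dls:latest
```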
@@ -388,9 +405,9 @@ After first success you have to replace `--issue` with `--renew`.
|
||||
every 4.8 hours. If network connectivity is lost, the loss of connectivity is detected during license renewal and the
|
||||
client has 19.2 hours in which to re-establish connectivity before its license expires.
|
||||
|
||||
\*2 Always use `https`, since guest-drivers only support secure connections!
|
||||
\*3 Always use `https`, since guest-drivers only support secure connections!
|
||||
|
||||
\*3 If you recreate instance keys you need to **recreate client-token for each guest**!
|
||||
\*4 If you recreate instance keys you need to **recreate client-token for each guest**!
|
||||
|
||||
# Setup (Client)
|
||||
|
||||
@@ -398,9 +415,15 @@ client has 19.2 hours in which to re-establish connectivity before its license e
|
||||
|
||||
Successfully tested with these package versions:
|
||||
|
||||
- `14.3` (Linux-Host: `510.108.03`, Linux-Guest: `510.108.03`, Windows-Guest: `513.91`)
|
||||
- `14.4` (Linux-Host: `510.108.03`, Linux-Guest: `510.108.03`, Windows-Guest: `514.08`)
|
||||
- `15.0` (Linux-Host: `525.60.12`, Linux-Guest: `525.60.13`, Windows-Guest: `527.41`)
|
||||
| vGPU Software | vGPU Manager | Linux Driver | Windows Driver | Release Date |
|
||||
|---------------|--------------|--------------|----------------|---------------|
|
||||
| `15.2` | `525.105.14` | `525.105.17` | `528.89` | March 2023 |
|
||||
| `15.1` | `525.85.07` | `525.85.05` | `528.24` | January 2023 |
|
||||
| `15.0` | `525.60.12` | `525.60.13` | `527.41` | December 2022 |
|
||||
| `14.4` | `510.108.03` | `510.108.03` | `514.08` | December 2022 |
|
||||
| `14.3` | `510.108.03` | `510.108.03` | `513.91` | November 2022 |
|
||||
|
||||
- https://docs.nvidia.com/grid/index.html
|
||||
|
||||
## Linux
|
||||
|
||||
@@ -452,7 +475,7 @@ Restart-Service NVDisplay.ContainerLocalSystem
|
||||
Check licensing status:
|
||||
|
||||
```shell
|
||||
& 'C:\Program Files\NVIDIA Corporation\NVSMI\nvidia-smi.exe' -q | Select-String "License"
|
||||
& 'nvidia-smi' -q | Select-String "License"
|
||||
```
|
||||
|
||||
Output should be something like:
|
||||
@@ -464,6 +487,19 @@ vGPU Software Licensed Product
|
||||
|
||||
Done. For more information check [troubleshoot section](#troubleshoot).
|
||||
|
||||
## unRAID Guest
|
||||
|
||||
1. Make sure you create a folder on a Linux filesystem (BTRFS/XFS/EXT4...); `/mnt/user/system/nvidia` is recommended (this is where docker and libvirt preferences are saved, so it's a good place for it)
|
||||
2. Edit the script and set your `DLS_IP`, `DLS_PORT` and `TOKEN_PATH` appropriately (see the snippet after this list)
|
||||
3. Install the `User Scripts` plugin from *Community Apps* (the Apps page; search for "User Scripts Unraid" if you're not using CA)
|
||||
4. Go to `Settings > User Scripts > Add New Script`
|
||||
5. Give it a name (preferably without spaces)
|
||||
6. Click on the *gear icon* to the left of the script name, then choose *Edit Script*
|
||||
7. Paste the script and save
|
||||
8. Set schedule to `At First Array Start Only`
|
||||
9. Click on Apply
|
||||
|
||||
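The three variables referenced in step 2 sit near the top of `setup_vgpu_license.sh`; shown here with the script's defaults:

```shell
DLS_IP=192.168.0.123
DLS_PORT=443
TOKEN_PATH=/mnt/user/system/nvidia
```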
|
||||
# Endpoints
|
||||
|
||||
### `GET /`
|
||||
@@ -666,4 +702,8 @@ The error message can safely be ignored (since we have no license limitation :P)
|
||||
|
||||
Thanks to the vGPU community and to everyone who uses this project and reports bugs.
|
||||
|
||||
Special thanks to @samicrusader who created build file for ArchLinux and @cyrus who wrote the section for openSUSE.
|
||||
Special thanks to
|
||||
|
||||
- @samicrusader who created the build file for ArchLinux
|
||||
- @cyrus who wrote the section for openSUSE
|
||||
- @midi who wrote the section for unRAID
|
||||
|
||||
27
ROADMAP.md
Normal file
@@ -0,0 +1,27 @@
|
||||
# Roadmap
|
||||
|
||||
I am planning to implement the following features in the future.
|
||||
|
||||
|
||||
## HA - High Availability
|
||||
|
||||
Support Failover-Mode (secondary IP address) as in the official DLS.
|
||||
|
||||
**Note**: There is no Load-Balancing / Round-Robin HA mode supported! If you want that, consider using
Docker-Swarm with a shared/clustered database (e.g. postgres).
|
||||
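To illustrate that suggestion, a minimal Docker Swarm sketch could run several replicas against one shared postgres database; this is not an official setup, and the service names, hostnames and credentials below are placeholders.

```shell
# sketch only: placeholder names/credentials; requires an initialized swarm ("docker swarm init")
docker network create --driver overlay dls-net
docker service create --name dls-db --network dls-net \
  -e POSTGRES_USER=dls -e POSTGRES_PASSWORD=changeme -e POSTGRES_DB=dls \
  postgres:15
docker service create --name dls --network dls-net --replicas 2 -p 443:443 \
  -e DLS_URL=dls.example.org -e DLS_PORT=443 \
  -e DATABASE=postgresql://dls:changeme@dls-db/dls \
  --mount type=bind,source=/opt/fastapi-dls/cert,target=/app/cert \
  collinwebdesigns/fastapi-dls:latest
```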
|
||||
*See [ha branch](https://git.collinwebdesigns.de/oscar.krause/fastapi-dls/-/tree/ha) for current status.*
|
||||
|
||||
|
||||
## UI - User Interface
|
||||
|
||||
Add a user interface to manage origins and leases.
|
||||
|
||||
*See [ui branch](https://git.collinwebdesigns.de/oscar.krause/fastapi-dls/-/tree/ui) for current status.*
|
||||
|
||||
|
||||
## Config Database
|
||||
|
||||
Instead of using environment variables, configuration files and manually created certificates, store configs and
certificates in the database (like origins and leases). Also, a startup assistant should be provided to prefill
required attributes and create instance-certificates. This is more user-friendly and should improve first-time setup.
|
||||
177
app/main.py
@@ -9,48 +9,42 @@ from dotenv import load_dotenv
|
||||
from fastapi import FastAPI
|
||||
from fastapi.requests import Request
|
||||
from json import loads as json_loads
|
||||
from datetime import datetime, timedelta
|
||||
from datetime import datetime
|
||||
from dateutil.relativedelta import relativedelta
|
||||
from calendar import timegm
|
||||
from jose import jws, jwk, jwt, JWTError
|
||||
from jose import jws, jwt, JWTError
|
||||
from jose.constants import ALGORITHMS
|
||||
from starlette.middleware.cors import CORSMiddleware
|
||||
from starlette.responses import StreamingResponse, JSONResponse as JSONr, HTMLResponse as HTMLr, Response, RedirectResponse
|
||||
from sqlalchemy import create_engine
|
||||
from sqlalchemy.orm import sessionmaker
|
||||
|
||||
from util import load_key, load_file
|
||||
from orm import Origin, Lease, init as db_init, migrate
|
||||
from orm import init as db_init, migrate, Site, Instance, Origin, Lease
|
||||
|
||||
load_dotenv('../version.env')
|
||||
|
||||
# get local timezone
|
||||
TZ = datetime.now().astimezone().tzinfo
|
||||
|
||||
# fetch version info
|
||||
VERSION, COMMIT, DEBUG = env('VERSION', 'unknown'), env('COMMIT', 'unknown'), bool(env('DEBUG', False))
|
||||
|
||||
config = dict(openapi_url=None, docs_url=None, redoc_url=None) # dict(openapi_url='/-/openapi.json', docs_url='/-/docs', redoc_url='/-/redoc')
|
||||
# fastapi setup
|
||||
config = dict(openapi_url='/-/openapi.json', docs_url=None, redoc_url=None)
|
||||
app = FastAPI(title='FastAPI-DLS', description='Minimal Delegated License Service (DLS).', version=VERSION, **config)
|
||||
|
||||
# database setup
|
||||
db = create_engine(str(env('DATABASE', 'sqlite:///db.sqlite')))
|
||||
db_init(db), migrate(db)
|
||||
|
||||
# everything prefixed with "INSTANCE_*" is used as "SERVICE_INSTANCE_*" or "SI_*" in official dls service
|
||||
# DLS setup (static)
|
||||
DLS_URL = str(env('DLS_URL', 'localhost'))
|
||||
DLS_PORT = int(env('DLS_PORT', '443'))
|
||||
SITE_KEY_XID = str(env('SITE_KEY_XID', '00000000-0000-0000-0000-000000000000'))
|
||||
INSTANCE_REF = str(env('INSTANCE_REF', '10000000-0000-0000-0000-000000000001'))
|
||||
ALLOTMENT_REF = str(env('ALLOTMENT_REF', '20000000-0000-0000-0000-000000000001'))
|
||||
INSTANCE_KEY_RSA = load_key(str(env('INSTANCE_KEY_RSA', join(dirname(__file__), 'cert/instance.private.pem'))))
|
||||
INSTANCE_KEY_PUB = load_key(str(env('INSTANCE_KEY_PUB', join(dirname(__file__), 'cert/instance.public.pem'))))
|
||||
TOKEN_EXPIRE_DELTA = relativedelta(days=int(env('TOKEN_EXPIRE_DAYS', 1)), hours=int(env('TOKEN_EXPIRE_HOURS', 0)))
|
||||
LEASE_EXPIRE_DELTA = relativedelta(days=int(env('LEASE_EXPIRE_DAYS', 90)), hours=int(env('LEASE_EXPIRE_HOURS', 0)))
|
||||
LEASE_RENEWAL_PERIOD = float(env('LEASE_RENEWAL_PERIOD', 0.15))
|
||||
LEASE_RENEWAL_DELTA = timedelta(days=int(env('LEASE_EXPIRE_DAYS', 90)), hours=int(env('LEASE_EXPIRE_HOURS', 0)))
|
||||
CLIENT_TOKEN_EXPIRE_DELTA = relativedelta(years=12)
|
||||
CORS_ORIGINS = str(env('CORS_ORIGINS', '')).split(',') if (env('CORS_ORIGINS')) else [f'https://{DLS_URL}']
|
||||
|
||||
jwt_encode_key = jwk.construct(INSTANCE_KEY_RSA.export_key().decode('utf-8'), algorithm=ALGORITHMS.RS256)
|
||||
jwt_decode_key = jwk.construct(INSTANCE_KEY_PUB.export_key().decode('utf-8'), algorithm=ALGORITHMS.RS256)
|
||||
ALLOTMENT_REF = str(env('ALLOTMENT_REF', '20000000-0000-0000-0000-000000000001')) # todo
|
||||
|
||||
# fastapi middleware
|
||||
app.debug = DEBUG
|
||||
app.add_middleware(
|
||||
CORSMiddleware,
|
||||
@@ -60,12 +54,25 @@ app.add_middleware(
|
||||
allow_headers=['*'],
|
||||
)
|
||||
|
||||
# logging
|
||||
logging.basicConfig()
|
||||
logger = logging.getLogger(__name__)
|
||||
logger.setLevel(logging.DEBUG if DEBUG else logging.INFO)
|
||||
|
||||
|
||||
def __get_token(request: Request) -> dict:
|
||||
def validate_settings():
|
||||
session = sessionmaker(bind=db)()
|
||||
|
||||
lease_expire_delta_min, lease_expire_delta_max = 86_400, 7_776_000
|
||||
for instance in session.query(Instance).all():
|
||||
lease_expire_delta = instance.lease_expire_delta
|
||||
if lease_expire_delta < 86_400 or lease_expire_delta > 7_776_000:
|
||||
logging.warning(f'> [ instance ]: {instance.instance_ref}: "lease_expire_delta" should be between {lease_expire_delta_min} and {lease_expire_delta_max}')
|
||||
|
||||
session.close()
|
||||
|
||||
|
||||
def __get_token(request: Request, jwt_decode_key: "jose.jwt") -> dict:
|
||||
authorization_header = request.headers.get('authorization')
|
||||
token = authorization_header.split(' ')[1]
|
||||
return jwt.decode(token=token, key=jwt_decode_key, algorithms=ALGORITHMS.RS256, options={'verify_aud': False})
|
||||
@@ -82,24 +89,26 @@ async def _index():
|
||||
|
||||
|
||||
@app.get('/-/health', summary='* Health')
|
||||
async def _health(request: Request):
|
||||
async def _health():
|
||||
return JSONr({'status': 'up'})
|
||||
|
||||
|
||||
@app.get('/-/config', summary='* Config', description='returns environment variables.')
|
||||
async def _config():
|
||||
default_site, default_instance = Site.get_default_site(db), Instance.get_default_instance(db)
|
||||
|
||||
return JSONr({
|
||||
'VERSION': str(VERSION),
|
||||
'COMMIT': str(COMMIT),
|
||||
'DEBUG': str(DEBUG),
|
||||
'DLS_URL': str(DLS_URL),
|
||||
'DLS_PORT': str(DLS_PORT),
|
||||
'SITE_KEY_XID': str(SITE_KEY_XID),
|
||||
'INSTANCE_REF': str(INSTANCE_REF),
|
||||
'SITE_KEY_XID': str(default_site.site_key),
|
||||
'INSTANCE_REF': str(default_instance.instance_ref),
|
||||
'ALLOTMENT_REF': [str(ALLOTMENT_REF)],
|
||||
'TOKEN_EXPIRE_DELTA': str(TOKEN_EXPIRE_DELTA),
|
||||
'LEASE_EXPIRE_DELTA': str(LEASE_EXPIRE_DELTA),
|
||||
'LEASE_RENEWAL_PERIOD': str(LEASE_RENEWAL_PERIOD),
|
||||
'TOKEN_EXPIRE_DELTA': str(default_instance.get_token_expire_delta()),
|
||||
'LEASE_EXPIRE_DELTA': str(default_instance.get_lease_expire_delta()),
|
||||
'LEASE_RENEWAL_PERIOD': str(default_instance.lease_renewal_period),
|
||||
'CORS_ORIGINS': str(CORS_ORIGINS),
|
||||
'TZ': str(TZ),
|
||||
})
|
||||
@@ -108,6 +117,8 @@ async def _config():
|
||||
@app.get('/-/readme', summary='* Readme')
|
||||
async def _readme():
|
||||
from markdown import markdown
|
||||
from util import load_file
|
||||
|
||||
content = load_file('../README.md').decode('utf-8')
|
||||
return HTMLr(markdown(text=content, extensions=['tables', 'fenced_code', 'md_in_html', 'nl2br', 'toc']))
|
||||
|
||||
@@ -157,8 +168,7 @@ async def _origins(request: Request, leases: bool = False):
|
||||
for origin in session.query(Origin).all():
|
||||
x = origin.serialize()
|
||||
if leases:
|
||||
serialize = dict(renewal_period=LEASE_RENEWAL_PERIOD, renewal_delta=LEASE_RENEWAL_DELTA)
|
||||
x['leases'] = list(map(lambda _: _.serialize(**serialize), Lease.find_by_origin_ref(db, origin.origin_ref)))
|
||||
x['leases'] = list(map(lambda _: _.serialize(), Lease.find_by_origin_ref(db, origin.origin_ref)))
|
||||
response.append(x)
|
||||
session.close()
|
||||
return JSONr(response)
|
||||
@@ -175,8 +185,7 @@ async def _leases(request: Request, origin: bool = False):
|
||||
session = sessionmaker(bind=db)()
|
||||
response = []
|
||||
for lease in session.query(Lease).all():
|
||||
serialize = dict(renewal_period=LEASE_RENEWAL_PERIOD, renewal_delta=LEASE_RENEWAL_DELTA)
|
||||
x = lease.serialize(**serialize)
|
||||
x = lease.serialize()
|
||||
if origin:
|
||||
lease_origin = session.query(Origin).filter(Origin.origin_ref == lease.origin_ref).first()
|
||||
if lease_origin is not None:
|
||||
@@ -186,6 +195,12 @@ async def _leases(request: Request, origin: bool = False):
|
||||
return JSONr(response)
|
||||
|
||||
|
||||
@app.delete('/-/leases/expired', summary='* Leases')
|
||||
async def _lease_delete_expired(request: Request):
|
||||
Lease.delete_expired(db)
|
||||
return Response(status_code=201)
|
||||
|
||||
|
||||
@app.delete('/-/lease/{lease_ref}', summary='* Lease')
|
||||
async def _lease_delete(request: Request, lease_ref: str):
|
||||
if Lease.delete(db, lease_ref) == 1:
|
||||
@@ -197,7 +212,13 @@ async def _lease_delete(request: Request, lease_ref: str):
|
||||
@app.get('/-/client-token', summary='* Client-Token', description='creates a new messenger token for this service instance')
|
||||
async def _client_token():
|
||||
cur_time = datetime.utcnow()
|
||||
exp_time = cur_time + CLIENT_TOKEN_EXPIRE_DELTA
|
||||
|
||||
default_instance = Instance.get_default_instance(db)
|
||||
public_key = default_instance.get_public_key()
|
||||
# todo: implement request parameter to support different instances
|
||||
jwt_encode_key = default_instance.get_jwt_encode_key()
|
||||
|
||||
exp_time = cur_time + default_instance.get_client_token_expire_delta()
|
||||
|
||||
payload = {
|
||||
"jti": str(uuid4()),
|
||||
@@ -210,7 +231,7 @@ async def _client_token():
|
||||
"scope_ref_list": [ALLOTMENT_REF],
|
||||
"fulfillment_class_ref_list": [],
|
||||
"service_instance_configuration": {
|
||||
"nls_service_instance_ref": INSTANCE_REF,
|
||||
"nls_service_instance_ref": default_instance.instance_ref,
|
||||
"svc_port_set_list": [
|
||||
{
|
||||
"idx": 0,
|
||||
@@ -222,10 +243,10 @@ async def _client_token():
|
||||
},
|
||||
"service_instance_public_key_configuration": {
|
||||
"service_instance_public_key_me": {
|
||||
"mod": hex(INSTANCE_KEY_PUB.public_key().n)[2:],
|
||||
"exp": int(INSTANCE_KEY_PUB.public_key().e),
|
||||
"mod": hex(public_key.public_key().n)[2:],
|
||||
"exp": int(public_key.public_key().e),
|
||||
},
|
||||
"service_instance_public_key_pem": INSTANCE_KEY_PUB.export_key().decode('utf-8'),
|
||||
"service_instance_public_key_pem": public_key.export_key().decode('utf-8'),
|
||||
"key_retention_mode": "LATEST_ONLY"
|
||||
},
|
||||
}
|
||||
@@ -307,13 +328,16 @@ async def auth_v1_code(request: Request):
|
||||
delta = relativedelta(minutes=15)
|
||||
expires = cur_time + delta
|
||||
|
||||
default_site = Site.get_default_site(db)
|
||||
jwt_encode_key = Instance.get_default_instance(db).get_jwt_encode_key()
|
||||
|
||||
payload = {
|
||||
'iat': timegm(cur_time.timetuple()),
|
||||
'exp': timegm(expires.timetuple()),
|
||||
'challenge': j.get('code_challenge'),
|
||||
'origin_ref': j.get('origin_ref'),
|
||||
'key_ref': SITE_KEY_XID,
|
||||
'kid': SITE_KEY_XID
|
||||
'key_ref': default_site.site_key,
|
||||
'kid': default_site.site_key,
|
||||
}
|
||||
|
||||
auth_code = jws.sign(payload, key=jwt_encode_key, headers={'kid': payload.get('kid')}, algorithm=ALGORITHMS.RS256)
|
||||
@@ -333,8 +357,11 @@ async def auth_v1_code(request: Request):
|
||||
async def auth_v1_token(request: Request):
|
||||
j, cur_time = json_loads((await request.body()).decode('utf-8')), datetime.utcnow()
|
||||
|
||||
default_site, default_instance = Site.get_default_site(db), Instance.get_default_instance(db)
|
||||
jwt_encode_key, jwt_decode_key = default_instance.get_jwt_encode_key(), default_instance.get_jwt_decode_key()
|
||||
|
||||
try:
|
||||
payload = jwt.decode(token=j.get('auth_code'), key=jwt_decode_key)
|
||||
payload = jwt.decode(token=j.get('auth_code'), key=jwt_decode_key, algorithms=[ALGORITHMS.RS256])
|
||||
except JWTError as e:
|
||||
return JSONr(status_code=400, content={'status': 400, 'title': 'invalid token', 'detail': str(e)})
|
||||
|
||||
@@ -346,7 +373,7 @@ async def auth_v1_token(request: Request):
|
||||
if payload.get('challenge') != challenge:
|
||||
return JSONr(status_code=401, content={'status': 401, 'detail': 'expected challenge did not match verifier'})
|
||||
|
||||
access_expires_on = cur_time + TOKEN_EXPIRE_DELTA
|
||||
access_expires_on = cur_time + default_instance.get_token_expire_delta()
|
||||
|
||||
new_payload = {
|
||||
'iat': timegm(cur_time.timetuple()),
|
||||
@@ -355,8 +382,8 @@ async def auth_v1_token(request: Request):
|
||||
'aud': 'https://cls.nvidia.org',
|
||||
'exp': timegm(access_expires_on.timetuple()),
|
||||
'origin_ref': origin_ref,
|
||||
'key_ref': SITE_KEY_XID,
|
||||
'kid': SITE_KEY_XID,
|
||||
'key_ref': default_site.site_key,
|
||||
'kid': default_site.site_key,
|
||||
}
|
||||
|
||||
auth_token = jwt.encode(new_payload, key=jwt_encode_key, headers={'kid': payload.get('kid')}, algorithm=ALGORITHMS.RS256)
|
||||
@@ -373,10 +400,13 @@ async def auth_v1_token(request: Request):
|
||||
# venv/lib/python3.9/site-packages/nls_services_lease/test/test_lease_multi_controller.py
|
||||
@app.post('/leasing/v1/lessor', description='request multiple leases (borrow) for current origin')
|
||||
async def leasing_v1_lessor(request: Request):
|
||||
j, token, cur_time = json_loads((await request.body()).decode('utf-8')), __get_token(request), datetime.utcnow()
|
||||
j, cur_time = json_loads((await request.body()).decode('utf-8')), datetime.utcnow()
|
||||
|
||||
default_instance = Instance.get_default_instance(db)
|
||||
jwt_decode_key = default_instance.get_jwt_decode_key()
|
||||
|
||||
try:
|
||||
token = __get_token(request)
|
||||
token = __get_token(request, jwt_decode_key)
|
||||
except JWTError:
|
||||
return JSONr(status_code=401, content={'status': 401, 'detail': 'token is not valid'})
|
||||
|
||||
@@ -390,7 +420,7 @@ async def leasing_v1_lessor(request: Request):
|
||||
# return JSONr(status_code=500, detail=f'no service instances found for scopes: ["{scope_ref}"]')
|
||||
|
||||
lease_ref = str(uuid4())
|
||||
expires = cur_time + LEASE_EXPIRE_DELTA
|
||||
expires = cur_time + default_instance.get_lease_expire_delta()
|
||||
lease_result_list.append({
|
||||
"ordinal": 0,
|
||||
# https://docs.nvidia.com/license-system/latest/nvidia-license-system-user-guide/index.html
|
||||
@@ -398,13 +428,13 @@ async def leasing_v1_lessor(request: Request):
|
||||
"ref": lease_ref,
|
||||
"created": cur_time.isoformat(),
|
||||
"expires": expires.isoformat(),
|
||||
"recommended_lease_renewal": LEASE_RENEWAL_PERIOD,
|
||||
"recommended_lease_renewal": default_instance.lease_renewal_period,
|
||||
"offline_lease": "true",
|
||||
"license_type": "CONCURRENT_COUNTED_SINGLE"
|
||||
}
|
||||
})
|
||||
|
||||
data = Lease(origin_ref=origin_ref, lease_ref=lease_ref, lease_created=cur_time, lease_expires=expires)
|
||||
data = Lease(instance_ref=default_instance.instance_ref, origin_ref=origin_ref, lease_ref=lease_ref, lease_created=cur_time, lease_expires=expires)
|
||||
Lease.create_or_update(db, data)
|
||||
|
||||
response = {
|
||||
@@ -421,7 +451,14 @@ async def leasing_v1_lessor(request: Request):
|
||||
# venv/lib/python3.9/site-packages/nls_dal_service_instance_dls/schema/service_instance/V1_0_21__product_mapping.sql
|
||||
@app.get('/leasing/v1/lessor/leases', description='get active leases for current origin')
|
||||
async def leasing_v1_lessor_lease(request: Request):
|
||||
token, cur_time = __get_token(request), datetime.utcnow()
|
||||
cur_time = datetime.utcnow()
|
||||
|
||||
jwt_decode_key = Instance.get_default_instance(db).get_jwt_decode_key()
|
||||
|
||||
try:
|
||||
token = __get_token(request, jwt_decode_key)
|
||||
except JWTError:
|
||||
return JSONr(status_code=401, content={'status': 401, 'detail': 'token is not valid'})
|
||||
|
||||
origin_ref = token.get('origin_ref')
|
||||
|
||||
@@ -441,7 +478,15 @@ async def leasing_v1_lessor_lease(request: Request):
|
||||
# venv/lib/python3.9/site-packages/nls_core_lease/lease_single.py
|
||||
@app.put('/leasing/v1/lease/{lease_ref}', description='renew a lease')
|
||||
async def leasing_v1_lease_renew(request: Request, lease_ref: str):
|
||||
token, cur_time = __get_token(request), datetime.utcnow()
|
||||
cur_time = datetime.utcnow()
|
||||
|
||||
default_instance = Instance.get_default_instance(db)
|
||||
jwt_decode_key = default_instance.get_jwt_decode_key()
|
||||
|
||||
try:
|
||||
token = __get_token(request, jwt_decode_key)
|
||||
except JWTError:
|
||||
return JSONr(status_code=401, content={'status': 401, 'detail': 'token is not valid'})
|
||||
|
||||
origin_ref = token.get('origin_ref')
|
||||
logging.info(f'> [ renew ]: {origin_ref}: renew {lease_ref}')
|
||||
@@ -450,11 +495,11 @@ async def leasing_v1_lease_renew(request: Request, lease_ref: str):
|
||||
if entity is None:
|
||||
return JSONr(status_code=404, content={'status': 404, 'detail': 'requested lease not available'})
|
||||
|
||||
expires = cur_time + LEASE_EXPIRE_DELTA
|
||||
expires = cur_time + default_instance.get_lease_expire_delta()
|
||||
response = {
|
||||
"lease_ref": lease_ref,
|
||||
"expires": expires.isoformat(),
|
||||
"recommended_lease_renewal": LEASE_RENEWAL_PERIOD,
|
||||
"recommended_lease_renewal": default_instance.lease_renewal_period,
|
||||
"offline_lease": True,
|
||||
"prompts": None,
|
||||
"sync_timestamp": cur_time.isoformat(),
|
||||
@@ -468,7 +513,14 @@ async def leasing_v1_lease_renew(request: Request, lease_ref: str):
|
||||
# venv/lib/python3.9/site-packages/nls_services_lease/test/test_lease_single_controller.py
|
||||
@app.delete('/leasing/v1/lease/{lease_ref}', description='release (return) a lease')
|
||||
async def leasing_v1_lease_delete(request: Request, lease_ref: str):
|
||||
token, cur_time = __get_token(request), datetime.utcnow()
|
||||
cur_time = datetime.utcnow()
|
||||
|
||||
jwt_decode_key = Instance.get_default_instance(db).get_jwt_decode_key()
|
||||
|
||||
try:
|
||||
token = __get_token(request, jwt_decode_key)
|
||||
except JWTError:
|
||||
return JSONr(status_code=401, content={'status': 401, 'detail': 'token is not valid'})
|
||||
|
||||
origin_ref = token.get('origin_ref')
|
||||
logging.info(f'> [ return ]: {origin_ref}: return {lease_ref}')
|
||||
@@ -494,7 +546,14 @@ async def leasing_v1_lease_delete(request: Request, lease_ref: str):
|
||||
# venv/lib/python3.9/site-packages/nls_services_lease/test/test_lease_multi_controller.py
|
||||
@app.delete('/leasing/v1/lessor/leases', description='release all leases')
|
||||
async def leasing_v1_lessor_lease_remove(request: Request):
|
||||
token, cur_time = __get_token(request), datetime.utcnow()
|
||||
cur_time = datetime.utcnow()
|
||||
|
||||
jwt_decode_key = Instance.get_default_instance(db).get_jwt_decode_key()
|
||||
|
||||
try:
|
||||
token = __get_token(request, jwt_decode_key)
|
||||
except JWTError:
|
||||
return JSONr(status_code=401, content={'status': 401, 'detail': 'token is not valid'})
|
||||
|
||||
origin_ref = token.get('origin_ref')
|
||||
|
||||
@@ -516,6 +575,8 @@ async def leasing_v1_lessor_lease_remove(request: Request):
|
||||
async def leasing_v1_lessor_shutdown(request: Request):
|
||||
j, cur_time = json_loads((await request.body()).decode('utf-8')), datetime.utcnow()
|
||||
|
||||
jwt_decode_key = Instance.get_default_instance(db).get_jwt_decode_key()
|
||||
|
||||
token = j.get('token')
|
||||
token = jwt.decode(token=token, key=jwt_decode_key, algorithms=ALGORITHMS.RS256, options={'verify_aud': False})
|
||||
origin_ref = token.get('origin_ref')
|
||||
@@ -536,15 +597,23 @@ async def leasing_v1_lessor_shutdown(request: Request):
|
||||
|
||||
@app.on_event('startup')
|
||||
async def app_on_startup():
|
||||
default_instance = Instance.get_default_instance(db)
|
||||
|
||||
lease_renewal_period = default_instance.lease_renewal_period
|
||||
lease_renewal_delta = default_instance.get_lease_renewal_delta()
|
||||
client_token_expire_delta = default_instance.get_client_token_expire_delta()
|
||||
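# assumption: Lease.calculate_renewal(period, delta) scales the renewal delta by the period,
# so the defaults (lease_renewal_period=0.15, 90-day lease) would mean a renewal roughly
# every 0.15 * 90 = 13.5 days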
|
||||
logger.info(f'''
|
||||
Using timezone: {str(TZ)}. Make sure this is correct and match your clients!
|
||||
|
||||
Your clients renew their license every {str(Lease.calculate_renewal(LEASE_RENEWAL_PERIOD, LEASE_RENEWAL_DELTA))}.
|
||||
If the renewal fails, the license is {str(LEASE_RENEWAL_DELTA)} valid.
|
||||
Your clients will renew their license every {str(Lease.calculate_renewal(lease_renewal_period, lease_renewal_delta))}.
|
||||
If the renewal fails, the license is valid for {str(lease_renewal_delta)}.
|
||||
|
||||
Your client-token file (.tok) is valid for {str(CLIENT_TOKEN_EXPIRE_DELTA)}.
|
||||
Your client-token file (.tok) is valid for {str(client_token_expire_delta)}.
|
||||
''')
|
||||
|
||||
validate_settings()
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
import uvicorn
|
||||
|
||||
259
app/orm.py
@@ -1,18 +1,143 @@
|
||||
import logging
|
||||
from datetime import datetime, timedelta
|
||||
from dateutil.relativedelta import relativedelta
|
||||
|
||||
from sqlalchemy import Column, VARCHAR, CHAR, ForeignKey, DATETIME, update, and_, inspect, text
|
||||
from sqlalchemy import Column, VARCHAR, CHAR, ForeignKey, DATETIME, update, and_, inspect, text, BLOB, INT, FLOAT
|
||||
from sqlalchemy.engine import Engine
|
||||
from sqlalchemy.orm import sessionmaker, declarative_base
|
||||
from sqlalchemy.orm import sessionmaker, declarative_base, Session, relationship
|
||||
|
||||
from app.util import parse_key
|
||||
|
||||
logging.basicConfig()
|
||||
logger = logging.getLogger(__name__)
|
||||
logger.setLevel(logging.INFO)
|
||||
|
||||
Base = declarative_base()
|
||||
|
||||
|
||||
class Site(Base):
|
||||
__tablename__ = "site"
|
||||
|
||||
INITIAL_SITE_KEY_XID = '00000000-0000-0000-0000-000000000000'
|
||||
INITIAL_SITE_NAME = 'default'
|
||||
|
||||
site_key = Column(CHAR(length=36), primary_key=True, unique=True, index=True) # uuid4, SITE_KEY_XID
|
||||
name = Column(VARCHAR(length=256), nullable=False)
|
||||
|
||||
def __str__(self):
|
||||
return f'SITE_KEY_XID: {self.site_key}'
|
||||
|
||||
@staticmethod
|
||||
def create_statement(engine: Engine):
|
||||
from sqlalchemy.schema import CreateTable
|
||||
return CreateTable(Site.__table__).compile(engine)
|
||||
|
||||
@staticmethod
|
||||
def get_default_site(engine: Engine) -> "Site":
|
||||
session = sessionmaker(bind=engine)()
|
||||
entity = session.query(Site).filter(Site.site_key == Site.INITIAL_SITE_KEY_XID).first()
|
||||
session.close()
|
||||
return entity
|
||||
|
||||
|
||||
class Instance(Base):
    __tablename__ = "instance"

    DEFAULT_INSTANCE_REF = '10000000-0000-0000-0000-000000000001'
    DEFAULT_TOKEN_EXPIRE_DELTA = 86_400  # 1 day
    DEFAULT_LEASE_EXPIRE_DELTA = 7_776_000  # 90 days
    DEFAULT_LEASE_RENEWAL_PERIOD = 0.15
    DEFAULT_CLIENT_TOKEN_EXPIRE_DELTA = 378_432_000  # 12 years
    # 1 day = 86400 (min. in production setup, max 90 days), 1 hour = 3600

    instance_ref = Column(CHAR(length=36), primary_key=True, unique=True, index=True)  # uuid4, INSTANCE_REF
    site_key = Column(CHAR(length=36), ForeignKey(Site.site_key, ondelete='CASCADE'), nullable=False, index=True)  # uuid4
    private_key = Column(BLOB(length=2048), nullable=False)
    public_key = Column(BLOB(length=512), nullable=False)
    token_expire_delta = Column(INT(), nullable=False, default=DEFAULT_TOKEN_EXPIRE_DELTA, comment='in seconds')
    lease_expire_delta = Column(INT(), nullable=False, default=DEFAULT_LEASE_EXPIRE_DELTA, comment='in seconds')
    lease_renewal_period = Column(FLOAT(precision=2), nullable=False, default=DEFAULT_LEASE_RENEWAL_PERIOD)
    client_token_expire_delta = Column(INT(), nullable=False, default=DEFAULT_CLIENT_TOKEN_EXPIRE_DELTA, comment='in seconds')

    __origin = relationship(Site, foreign_keys=[site_key])

    def __str__(self):
        return f'INSTANCE_REF: {self.instance_ref} (SITE_KEY_XID: {self.site_key})'

    @staticmethod
    def create_statement(engine: Engine):
        from sqlalchemy.schema import CreateTable
        return CreateTable(Instance.__table__).compile(engine)

    @staticmethod
    def create_or_update(engine: Engine, instance: "Instance"):
        session = sessionmaker(bind=engine)()
        entity = session.query(Instance).filter(Instance.instance_ref == instance.instance_ref).first()
        if entity is None:
            session.add(instance)
        else:
            x = dict(
                site_key=instance.site_key,
                private_key=instance.private_key,
                public_key=instance.public_key,
                token_expire_delta=instance.token_expire_delta,
                lease_expire_delta=instance.lease_expire_delta,
                lease_renewal_period=instance.lease_renewal_period,
                client_token_expire_delta=instance.client_token_expire_delta,
            )
            session.execute(update(Instance).where(Instance.instance_ref == instance.instance_ref).values(**x))
        session.commit()
        session.flush()
        session.close()

    # todo: validate on startup that "lease_expire_delta" is between 1 day and 90 days

    @staticmethod
    def get_default_instance(engine: Engine) -> "Instance":
        session = sessionmaker(bind=engine)()
        site = Site.get_default_site(engine)
        entity = session.query(Instance).filter(Instance.site_key == site.site_key).first()
        session.close()
        return entity

    def get_token_expire_delta(self) -> "dateutil.relativedelta.relativedelta":
        return relativedelta(seconds=self.token_expire_delta)

    def get_lease_expire_delta(self) -> "dateutil.relativedelta.relativedelta":
        return relativedelta(seconds=self.lease_expire_delta)

    def get_lease_renewal_delta(self) -> "datetime.timedelta":
        return timedelta(seconds=self.lease_expire_delta)

    def get_client_token_expire_delta(self) -> "dateutil.relativedelta.relativedelta":
        return relativedelta(seconds=self.client_token_expire_delta)
    def __get_private_key(self) -> "RsaKey":
        return parse_key(self.private_key)

    def get_public_key(self) -> "RsaKey":
        return parse_key(self.public_key)

    def get_jwt_encode_key(self) -> "jose.jwk":
        from jose import jwk
        from jose.constants import ALGORITHMS
        return jwk.construct(self.__get_private_key().export_key().decode('utf-8'), algorithm=ALGORITHMS.RS256)

    def get_jwt_decode_key(self) -> "jose.jwk":
        from jose import jwk
        from jose.constants import ALGORITHMS
        return jwk.construct(self.get_public_key().export_key().decode('utf-8'), algorithm=ALGORITHMS.RS256)

    def get_private_key_str(self, encoding: str = 'utf-8') -> str:
        return self.private_key.decode(encoding)

    def get_public_key_str(self, encoding: str = 'utf-8') -> str:
        return self.public_key.decode(encoding)
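The two `get_jwt_*_key()` helpers above wrap python-jose key construction. A minimal sketch of how they can be used to sign and verify a token; the `engine` and the `origin_ref` claim are illustrative assumptions, not part of this diff:

```
from jose import jwt
from jose.constants import ALGORITHMS

instance = Instance.get_default_instance(engine)  # assumes an initialized engine, see init() further down

token = jwt.encode({'origin_ref': 'example'}, key=instance.get_jwt_encode_key(), algorithm=ALGORITHMS.RS256)
claims = jwt.decode(token, key=instance.get_jwt_decode_key(), algorithms=ALGORITHMS.RS256)
assert claims['origin_ref'] == 'example'
```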
class Origin(Base):
    __tablename__ = "origin"

    origin_ref = Column(CHAR(length=36), primary_key=True, unique=True, index=True)  # uuid4

    # service_instance_xid = Column(CHAR(length=36), nullable=False, index=True)  # uuid4 # not necessary, we only support one service_instance_xid ('INSTANCE_REF')
    hostname = Column(VARCHAR(length=256), nullable=True)
    guest_driver_version = Column(VARCHAR(length=10), nullable=True)
@@ -70,18 +195,24 @@ class Origin(Base):
class Lease(Base):
    __tablename__ = "lease"

    instance_ref = Column(CHAR(length=36), ForeignKey(Instance.instance_ref, ondelete='CASCADE'), nullable=False, index=True)  # uuid4
    lease_ref = Column(CHAR(length=36), primary_key=True, nullable=False, index=True)  # uuid4

    origin_ref = Column(CHAR(length=36), ForeignKey(Origin.origin_ref, ondelete='CASCADE'), nullable=False, index=True)  # uuid4
    # scope_ref = Column(CHAR(length=36), nullable=False, index=True)  # uuid4 # not necessary, we only support one scope_ref ('ALLOTMENT_REF')
    lease_created = Column(DATETIME(), nullable=False)
    lease_expires = Column(DATETIME(), nullable=False)
    lease_updated = Column(DATETIME(), nullable=False)

    __instance = relationship(Instance, foreign_keys=[instance_ref])
    __origin = relationship(Origin, foreign_keys=[origin_ref])

    def __repr__(self):
        return f'Lease(origin_ref={self.origin_ref}, lease_ref={self.lease_ref}, expires={self.lease_expires})'

-    def serialize(self, renewal_period: float, renewal_delta: timedelta) -> dict:
+    def serialize(self) -> dict:
+        renewal_period = self.__instance.lease_renewal_period
+        renewal_delta = self.__instance.get_lease_renewal_delta()

        lease_renewal = int(Lease.calculate_renewal(renewal_period, renewal_delta).total_seconds())
        lease_renewal = self.lease_updated + relativedelta(seconds=lease_renewal)
@@ -160,6 +291,14 @@ class Lease(Base):
        session.close()
        return deletions

    @staticmethod
    def delete_expired(engine: Engine) -> int:
        session = sessionmaker(bind=engine)()
        deletions = session.query(Lease).filter(Lease.lease_expires <= datetime.utcnow()).delete()
        session.commit()
        session.close()
        return deletions

    @staticmethod
    def calculate_renewal(renewal_period: float, delta: timedelta) -> timedelta:
        """
@@ -183,38 +322,104 @@ class Lease(Base):
        return renew
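For a quick sanity check of the renewal math (a sketch only, assuming `calculate_renewal()` scales the lease duration by the renewal period, which is how `serialize()` above uses it):

```
from datetime import timedelta

# With the Instance defaults (renewal period 0.15, lease length 90 days),
# a lease would come up for renewal after roughly 13.5 days.
renew = Lease.calculate_renewal(Instance.DEFAULT_LEASE_RENEWAL_PERIOD, timedelta(days=90))
print(renew.total_seconds())  # 1_166_400.0 under the assumption above (13 days, 12 hours)
```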
def init_default_site(session: Session):
    from uuid import uuid4
    from app.util import generate_key

    private_key = generate_key()
    public_key = private_key.public_key()

    site = Site(
        site_key=Site.INITIAL_SITE_KEY_XID,
        name=Site.INITIAL_SITE_NAME
    )
    session.add(site)
    session.commit()

    instance = Instance(
        instance_ref=Instance.DEFAULT_INSTANCE_REF,
        site_key=site.site_key,
        private_key=private_key.export_key(),
        public_key=public_key.export_key(),
    )
    session.add(instance)
    session.commit()
def init(engine: Engine):
-    tables = [Origin, Lease]
+    tables = [Site, Instance, Origin, Lease]
    db = inspect(engine)
    session = sessionmaker(bind=engine)()
    for table in tables:
-        if not db.dialect.has_table(engine.connect(), table.__tablename__):
+        exists = db.dialect.has_table(engine.connect(), table.__tablename__)
+        logger.info(f'> Table "{table.__tablename__:<16}" exists: {exists}')
+        if not exists:
            session.execute(text(str(table.create_statement(engine))))
            session.commit()

    # create default site
    cnt = session.query(Site).count()
    if cnt == 0:
        init_default_site(session)

    session.flush()
    session.close()
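For orientation, `init()` and `migrate()` are wired together at startup roughly like this (the SQLite URL is only an example; the test setup further down reads it from the `DATABASE` environment variable):

```
from sqlalchemy import create_engine

engine = create_engine('sqlite:///db.sqlite')  # example URL
init(engine)     # create missing tables and bootstrap the default Site/Instance
migrate(engine)  # apply schema/env-based migrations on top
```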
def migrate(engine: Engine):
+    from os import getenv as env
+    from os.path import join, dirname, isfile
+    from util import load_key

    db = inspect(engine)

-    def upgrade_1_0_to_1_1():
-        x = db.dialect.get_columns(engine.connect(), Lease.__tablename__)
-        x = next(_ for _ in x if _['name'] == 'origin_ref')
-        if x['primary_key'] > 0:
-            print('Found old database schema with "origin_ref" as primary-key in "lease" table. Dropping table!')
-            print('  Your leases are recreated on next renewal!')
-            print('  If an error message appears on the client, you can ignore it.')
-            Lease.__table__.drop(bind=engine)
-            init(engine)
+    # todo: add update guide to use 1.LATEST to 2.0
+    def upgrade_1_x_to_2_0():
+        site = Site.get_default_site(engine)
+        logger.info(site)
+        instance = Instance.get_default_instance(engine)
+        logger.info(instance)

-    # def upgrade_1_2_to_1_3():
-    #     x = db.dialect.get_columns(engine.connect(), Lease.__tablename__)
-    #     x = next((_ for _ in x if _['name'] == 'scope_ref'), None)
-    #     if x is None:
-    #         Lease.scope_ref.compile()
-    #         column_name = Lease.scope_ref.name
-    #         column_type = Lease.scope_ref.type.compile(engine.dialect)
-    #         engine.execute(f'ALTER TABLE "{Lease.__tablename__}" ADD COLUMN "{column_name}" {column_type}')
+        # SITE_KEY_XID
+        if (site_key := env('SITE_KEY_XID', None)) is not None:
+            site.site_key = str(site_key)

-    upgrade_1_0_to_1_1()
-    # upgrade_1_2_to_1_3()
+        # INSTANCE_REF
+        if (instance_ref := env('INSTANCE_REF', None)) is not None:
+            instance.instance_ref = str(instance_ref)

+        # ALLOTMENT_REF
+        if (allotment_ref := env('ALLOTMENT_REF', None)) is not None:
+            pass  # todo

+        # INSTANCE_KEY_RSA, INSTANCE_KEY_PUB
+        default_instance_private_key_path = str(join(dirname(__file__), 'cert/instance.private.pem'))
+        if (instance_private_key := env('INSTANCE_KEY_RSA', None)) is not None:
+            instance.private_key = load_key(str(instance_private_key))
+        elif isfile(default_instance_private_key_path):
+            instance.private_key = load_key(default_instance_private_key_path)
+        default_instance_public_key_path = str(join(dirname(__file__), 'cert/instance.public.pem'))
+        if (instance_public_key := env('INSTANCE_KEY_PUB', None)) is not None:
+            instance.public_key = load_key(str(instance_public_key))
+        elif isfile(default_instance_public_key_path):
+            instance.public_key = load_key(default_instance_public_key_path)

+        # TOKEN_EXPIRE_DELTA
+        if (token_expire_delta := env('TOKEN_EXPIRE_DAYS', None)) not in (None, 0):
+            instance.token_expire_delta = int(token_expire_delta) * 86_400
+        if (token_expire_delta := env('TOKEN_EXPIRE_HOURS', None)) not in (None, 0):
+            instance.token_expire_delta = int(token_expire_delta) * 3_600

+        # LEASE_EXPIRE_DELTA, LEASE_RENEWAL_DELTA
+        if (lease_expire_delta := env('LEASE_EXPIRE_DAYS', None)) not in (None, 0):
+            instance.lease_expire_delta = int(lease_expire_delta) * 86_400
+        if (lease_expire_delta := env('LEASE_EXPIRE_HOURS', None)) not in (None, 0):
+            instance.lease_expire_delta = int(lease_expire_delta) * 3_600

+        # LEASE_RENEWAL_PERIOD
+        if (lease_renewal_period := env('LEASE_RENEWAL_PERIOD', None)) is not None:
+            instance.lease_renewal_period = float(lease_renewal_period)

+        # todo: update site, instance

+    upgrade_1_x_to_2_0()
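`upgrade_1_x_to_2_0()` folds the old environment-variable configuration into the new `Instance` row. A small sketch of the intended mapping; the values are examples only:

```
import os

# Set before migrate() runs; each value ends up on the default Instance:
os.environ['SITE_KEY_XID'] = '00000000-0000-0000-0000-000000000000'  # -> site.site_key
os.environ['TOKEN_EXPIRE_DAYS'] = '1'        # -> instance.token_expire_delta = 86_400
os.environ['LEASE_EXPIRE_DAYS'] = '90'       # -> instance.lease_expire_delta = 7_776_000
os.environ['LEASE_RENEWAL_PERIOD'] = '0.15'  # -> instance.lease_renewal_period = 0.15
```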
12  app/util.py
@@ -16,6 +16,18 @@ def load_key(filename) -> "RsaKey":
    return RSA.import_key(extern_key=load_file(filename), passphrase=None)


def parse_key(content: bytes) -> "RsaKey":
    try:
        # Crypto | Cryptodome on Debian
        from Crypto.PublicKey import RSA
        from Crypto.PublicKey.RSA import RsaKey
    except ModuleNotFoundError:
        from Cryptodome.PublicKey import RSA
        from Cryptodome.PublicKey.RSA import RsaKey

    return RSA.import_key(extern_key=content, passphrase=None)


def generate_key() -> "RsaKey":
    try:
        # Crypto | Cryptodome on Debian
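A minimal sketch of how the new `parse_key()` pairs with the existing `generate_key()` helper, e.g. for the key bytes stored on an `Instance`; the assertion is illustrative:

```
key = generate_key()                    # fresh RSA key, as created for the default Instance
restored = parse_key(key.export_key())  # bytes -> RsaKey, the format stored in Instance.private_key
assert restored.n == key.n              # same modulus, so the round-trip preserved the key
```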
requirements.txt
@@ -1,8 +1,8 @@
-fastapi==0.89.1
-uvicorn[standard]==0.20.0
+fastapi==0.97.0
+uvicorn[standard]==0.22.0
python-jose==3.3.0
-pycryptodome==3.17
+pycryptodome==3.18.0
python-dateutil==2.8.2
-sqlalchemy==2.0.0
-markdown==3.4.1
-python-dotenv==0.21.1
+sqlalchemy==2.0.16
+markdown==3.4.3
+python-dotenv==1.0.0
35  test/main.py
@@ -1,14 +1,15 @@
from os import getenv as env
from base64 import b64encode as b64enc
from hashlib import sha256
from calendar import timegm
from datetime import datetime
from os.path import dirname, join
-from uuid import uuid4, UUID
+from uuid import UUID, uuid4

from dateutil.relativedelta import relativedelta
-from jose import jwt, jwk
+from jose import jwt
from jose.constants import ALGORITHMS
from starlette.testclient import TestClient
from sqlalchemy import create_engine
import sys

# add relative path to use packages as they were in the app/ dir
@@ -16,20 +17,23 @@ sys.path.append('../')
sys.path.append('../app')

from app import main
-from app.util import load_key
+from app.orm import init as db_init, migrate, Site, Instance

-client = TestClient(main.app)

ORIGIN_REF, ALLOTMENT_REF, SECRET = str(uuid4()), '20000000-0000-0000-0000-000000000001', 'HelloWorld'

# INSTANCE_KEY_RSA = generate_key()
# INSTANCE_KEY_PUB = INSTANCE_KEY_RSA.public_key()

+# fastapi setup
+client = TestClient(main.app)

-INSTANCE_KEY_RSA = load_key(str(join(dirname(__file__), '../app/cert/instance.private.pem')))
-INSTANCE_KEY_PUB = load_key(str(join(dirname(__file__), '../app/cert/instance.public.pem')))
+# database setup
+db = create_engine(str(env('DATABASE', 'sqlite:///db.sqlite')))
+db_init(db), migrate(db)

-jwt_encode_key = jwk.construct(INSTANCE_KEY_RSA.export_key().decode('utf-8'), algorithm=ALGORITHMS.RS256)
-jwt_decode_key = jwk.construct(INSTANCE_KEY_PUB.export_key().decode('utf-8'), algorithm=ALGORITHMS.RS256)
+# test vars
+DEFAULT_SITE, DEFAULT_INSTANCE = Site.get_default_site(db), Instance.get_default_instance(db)
+
+SITE_KEY = DEFAULT_SITE.site_key
+jwt_encode_key, jwt_decode_key = DEFAULT_INSTANCE.get_jwt_encode_key(), DEFAULT_INSTANCE.get_jwt_decode_key()


def __bearer_token(origin_ref: str) -> str:
@@ -38,6 +42,12 @@ def __bearer_token(origin_ref: str) -> str:
    return token


def test_initial_default_site_and_instance():
    default_site, default_instance = Site.get_default_site(db), Instance.get_default_instance(db)
    assert default_site.site_key == Site.INITIAL_SITE_KEY_XID
    assert default_instance.instance_ref == Instance.DEFAULT_INSTANCE_REF


def test_index():
    response = client.get('/')
    assert response.status_code == 200
@@ -153,8 +163,7 @@ def test_auth_v1_token():
        "kid": "00000000-0000-0000-0000-000000000000"
    }
    payload = {
-        "auth_code": jwt.encode(payload, key=jwt_encode_key, headers={'kid': payload.get('kid')},
-                                algorithm=ALGORITHMS.RS256),
+        "auth_code": jwt.encode(payload, key=jwt_encode_key, headers={'kid': payload.get('kid')}, algorithm=ALGORITHMS.RS256),
        "code_verifier": SECRET,
    }
version.env
@@ -1 +0,0 @@
-VERSION=1.3.5