54 Commits

Author SHA1 Message Date
Oscar Krause
699dbf6fac Merge branch '1-parsing-issue-in-mal-formatted-mac_address_list' into 'main'
Resolve "Parsing issue in mal formatted "mac_address_list""

Closes #1

See merge request oscar.krause/fastapi-dls!40
2024-11-21 09:18:22 +01:00
Oscar Krause
317699ff58 code styling 2024-11-21 08:51:39 +01:00
Oscar Krause
55446f7d9c fixes 2024-11-21 08:51:39 +01:00
Oscar Krause
88c78efcd9 fixes 2024-11-21 08:51:39 +01:00
Oscar Krause
fb3ac4291f code styling 2024-11-21 08:51:39 +01:00
Oscar Krause
15f14cac11 implemented "SUPPORT_MALFORMED_JSON" variable 2024-11-21 08:51:39 +01:00
Oscar Krause
018d7c34fc fixes 2024-11-21 08:51:39 +01:00
Oscar Krause
1aee423120 fixes 2024-11-21 08:51:39 +01:00
Oscar Krause
a6b2f2a942 fixed json payload 2024-11-21 08:51:39 +01:00
Oscar Krause
e33024db86 fixed variable names
ref. oscar.krause/fastapi-dls#1
2024-11-21 08:51:39 +01:00
Oscar Krause
4ad15f0849 fix malformed json on auth
ref. oscar.krause/fastapi-dls#1
2024-11-21 08:51:39 +01:00
Oscar Krause
7bad0359af updated ci pipeline to match current eol supported systems 2024-11-21 08:44:14 +01:00
Oscar Krause
59a7c9f15a Merge branch 'dev' into 'main'
Dev

See merge request oscar.krause/fastapi-dls!38
2024-11-13 16:11:40 +01:00
Oscar Krause
bc6d692f0a added "delete_expired" method for leases 2024-11-13 15:03:37 +01:00
Oscar Krause
63c37c6334 fixed timezone in json response 2024-11-13 15:03:12 +01:00
Oscar Krause
fa2c06972e sql query improvements 2024-11-13 15:01:33 +01:00
Oscar Krause
e4e6387b2a ci improvements 2024-11-13 14:58:55 +01:00
Oscar Krause
f2be9dca8d Merge branch 'dev' into 'main'
requirements.txt updated

See merge request oscar.krause/fastapi-dls!36
2024-11-13 14:09:54 +01:00
Oscar Krause
52dd425583 fixes 2024-11-13 13:41:07 +01:00
Oscar Krause
286399d79a fixed test matrix 2024-11-13 10:48:11 +01:00
Oscar Krause
4ab1a2ed22 added requirements for ubuntu 24.10 2024-11-13 10:28:08 +01:00
Oscar Krause
459c0e21af debugging 2024-11-13 10:27:52 +01:00
Oscar Krause
98ef64211b typings 2024-11-13 09:09:00 +01:00
Oscar Krause
0b4bb65546 added python3-pip to test 2024-11-13 08:55:00 +01:00
Oscar Krause
47624f5019 Dockerfile - updated db dependencies 2024-11-13 08:37:07 +01:00
Oscar Krause
2b9d7821c0 improved gitlab test matrix 2024-11-13 08:33:28 +01:00
Oscar Krause
45f5108717 requirements.txt updated 2024-11-13 08:25:40 +01:00
Oscar Krause
a7fe8b867e Merge branch 'dev' into 'main'
added way to include driver version in api

See merge request oscar.krause/fastapi-dls!35
2024-10-24 13:28:08 +02:00
Oscar Krause
78214df9cc updated to python 3.12 2024-10-24 10:44:31 +02:00
Oscar Krause
4245d5a582 requirements.txt updated 2024-10-24 08:09:30 +02:00
Oscar Krause
9b5a387169 updated support matrix 2024-10-24 08:09:24 +02:00
Oscar Krause
9377d5ce28 requirements.txt updated 2024-10-08 14:33:44 +02:00
Oscar Krause
7489307db8 README.md updated 2024-08-09 13:15:16 +02:00
Oscar Krause
d41314e81d requirements.txt updated 2024-08-09 13:14:53 +02:00
Oscar Krause
a1123d5451 updated support matrix (removed EOL) 2024-07-24 05:35:22 +02:00
Oscar Krause
93cf719454 updated support matrix 2024-07-24 05:35:09 +02:00
Oscar Krause
0dc8f6c582 refactorings 2024-07-11 05:49:13 +02:00
Oscar Krause
4b0219b85a updated to new vgpu page
ref. https://docs.nvidia.com/vgpu/index.html
2024-07-11 05:49:00 +02:00
Oscar Krause
8edbb25c16 README updated 2024-06-27 08:49:31 +02:00
Oscar Krause
49a24f0b68 README updated 2024-06-27 08:47:35 +02:00
Oscar Krause
8af3c8e2b3 README updated 2024-06-27 08:47:04 +02:00
Oscar Krause
3c321a202c README updated 2024-06-27 08:45:25 +02:00
Oscar Krause
1b7d8bc0dc README reorganized 2024-06-27 08:37:21 +02:00
Oscar Krause
23ccea538f README reorganized 2024-06-27 08:32:15 +02:00
Oscar Krause
6a54c05fbb Merge branch 'dev' into 'main'
Dev

See merge request oscar.krause/fastapi-dls!34
2024-06-18 13:56:30 +02:00
Oscar Krause
006d3a1833 Merge branch 'dev' into 'main'
Added Ubuntu 24.04 support & updated requirements

See merge request oscar.krause/fastapi-dls!33
2024-05-10 10:52:00 +02:00
Oscar Krause
42fe066e1a Merge branch 'dev' into 'main'
Dev

See merge request oscar.krause/fastapi-dls!32
2024-04-18 07:38:31 +02:00
Oscar Krause
ef542ec821 Merge branch 'dev' into 'main'
Dev

See merge request oscar.krause/fastapi-dls!31
2024-04-09 10:28:57 +02:00
Oscar Krause
5b39598487 Merge branch 'dev' into 'main'
Dev

See merge request oscar.krause/fastapi-dls!30
2024-02-27 08:20:43 +01:00
Oscar Krause
65de4d0534 Merge branch 'dev' into 'main'
Dev

See merge request oscar.krause/fastapi-dls!29
2023-10-16 10:27:49 +02:00
Oscar Krause
58ffa752f3 Merge branch 'dev' into 'main'
Dev

See merge request oscar.krause/fastapi-dls!28
2023-07-10 19:11:28 +02:00
Oscar Krause
fd4fa84dc5 fixed docker image name (gitlab registry) 2023-07-04 19:39:06 +02:00
Oscar Krause
5ff3295658 fixed deploy docker 2023-07-04 18:58:13 +02:00
Oscar Krause
ca38ebe3fd Merge branch 'dev' into 'main'
Multiarch to DockerHub

See merge request oscar.krause/fastapi-dls!27
2023-07-04 18:47:45 +02:00
14 changed files with 297 additions and 497 deletions

View File

@@ -1,10 +0,0 @@
# https://packages.ubuntu.com
fastapi==0.91.0
uvicorn[standard]==0.15.0
python-jose[pycryptodome]==3.3.0
pycryptodome==3.11.0
python-dateutil==2.8.2
sqlalchemy==1.4.46
markdown==3.4.3
python-dotenv==0.21.0
jinja2==3.1.2

View File

@@ -1,10 +0,0 @@
# https://packages.ubuntu.com
fastapi==0.101.0
uvicorn[standard]==0.23.2
python-jose[pycryptodome]==3.3.0
pycryptodome==3.11.0
python-dateutil==2.8.2
sqlalchemy==1.4.47
markdown==3.4.4
python-dotenv==1.0.0
jinja2==3.1.2

View File

@@ -0,0 +1,10 @@
# https://packages.ubuntu.com
fastapi==0.110.3
uvicorn[standard]==0.30.3
python-jose[pycryptodome]==3.3.0
pycryptodome==3.20.0
python-dateutil==2.9.0
sqlalchemy==2.0.32
markdown==3.6
python-dotenv==1.0.1
jinja2==3.1.3

View File

@@ -48,6 +48,7 @@ package() {
install -Dm755 "$srcdir/$pkgname/app/main.py" "$pkgdir/opt/$pkgname/main.py"
install -Dm755 "$srcdir/$pkgname/app/orm.py" "$pkgdir/opt/$pkgname/orm.py"
install -Dm755 "$srcdir/$pkgname/app/util.py" "$pkgdir/opt/$pkgname/util.py"
install -Dm755 "$srcdir/$pkgname/app/middleware.py" "$pkgdir/opt/$pkgname/middleware.py"
install -Dm644 "$srcdir/$pkgname.default" "$pkgdir/etc/default/$pkgname"
install -Dm644 "$srcdir/$pkgname.service" "$pkgdir/usr/lib/systemd/system/$pkgname.service"
install -Dm644 "$srcdir/$pkgname.tmpfiles" "$pkgdir/usr/lib/tmpfiles.d/$pkgname.conf"

View File

@@ -20,6 +20,7 @@ build:docker:
changes:
- app/**/*
- Dockerfile
- requirements.txt
- if: $CI_PIPELINE_SOURCE == 'merge_request_event'
tags: [ docker ]
before_script:
@@ -141,14 +142,19 @@ test:
DATABASE: sqlite:///../app/db.sqlite
parallel:
matrix:
- IMAGE: [ 'python:3.11-slim-bookworm', 'python:3.12-slim-bullseye' ]
REQUIREMENTS:
- requirements.txt
- .DEBIAN/requirements-bookworm-12.txt
- .DEBIAN/requirements-ubuntu-23.10.txt
- .DEBIAN/requirements-ubuntu-24.04.txt
- IMAGE: [ 'python:3.12-slim-bookworm' ]
REQUIREMENTS: [ 'requirements.txt' ]
- IMAGE: [ 'debian:bookworm' ] # EOL: June 06, 2026
REQUIREMENTS: [ '.DEBIAN/requirements-bookworm-12.txt' ]
- IMAGE: [ 'ubuntu:24.04' ] # EOL: April 2036
REQUIREMENTS: [ '.DEBIAN/requirements-ubuntu-24.04.txt' ]
- IMAGE: [ 'ubuntu:24.10' ]
REQUIREMENTS: [ '.DEBIAN/requirements-ubuntu-24.10.txt' ]
before_script:
- apt-get update && apt-get install -y python3-dev gcc
- apt-get update && apt-get install -y python3-dev python3-pip python3-venv gcc
- python3 -m venv venv
- source venv/bin/activate
- pip install --upgrade pip
- pip install -r $REQUIREMENTS
- pip install pytest httpx
- mkdir -p app/cert
@@ -162,7 +168,7 @@ test:
dotenv: version.env
junit: ['**/report.xml']
.test:linux:
.test:apt:
stage: test
rules:
- if: $CI_COMMIT_BRANCH && $CI_COMMIT_BRANCH != $CI_DEFAULT_BRANCH
@@ -201,15 +207,15 @@ test:
- apt-get purge -qq -y fastapi-dls
- apt-get autoremove -qq -y && apt-get clean -qq
test:debian:
extends: .test:linux
test:apt:debian:
extends: .test:apt
image: debian:bookworm-slim
test:ubuntu:
extends: .test:linux
test:apt:ubuntu:
extends: .test:apt
image: ubuntu:24.04
test:archlinux:
test:pacman:archlinux:
image: archlinux:base
rules:
- if: $CI_COMMIT_BRANCH && $CI_COMMIT_BRANCH != $CI_DEFAULT_BRANCH
@@ -250,7 +256,7 @@ semgrep-sast:
test_coverage:
# extends: test
image: python:3.11-slim-bookworm
image: python:3.12-slim-bookworm
allow_failure: true
stage: test
rules:
@@ -297,13 +303,17 @@ gemnasium-python-dependency_scanning:
deploy:docker:
extends: .deploy
image: docker:dind
stage: deploy
tags: [ docker ]
before_script:
- echo "Building docker image for commit $CI_COMMIT_SHA with version $CI_COMMIT_REF_NAME"
- docker buildx inspect
- docker buildx create --use
script:
- echo "========== GitLab-Registry =========="
- docker login -u $CI_REGISTRY_USER -p $CI_REGISTRY_PASSWORD $CI_REGISTRY
- IMAGE=$CI_REGISTRY/$CI_PROJECT_PATH/$CI_COMMIT_REF_NAME
- IMAGE=$CI_REGISTRY/$CI_PROJECT_PATH
- docker buildx build --progress=plain --platform $DOCKER_BUILDX_PLATFORM --build-arg VERSION=$CI_COMMIT_REF_NAME --build-arg COMMIT=$CI_COMMIT_SHA --tag $IMAGE:$CI_COMMIT_REF_NAME --push .
- docker buildx build --progress=plain --platform $DOCKER_BUILDX_PLATFORM --build-arg VERSION=$CI_COMMIT_REF_NAME --build-arg COMMIT=$CI_COMMIT_SHA --tag $IMAGE:latest --push .
- echo "========== Docker-Hub =========="

View File

@@ -1,4 +1,4 @@
FROM python:3.11-alpine
FROM python:3.12-alpine
ARG VERSION
ARG COMMIT=""
@@ -10,7 +10,7 @@ RUN apk update \
&& apk add --no-cache --virtual build-deps gcc g++ python3-dev musl-dev pkgconfig \
&& apk add --no-cache curl postgresql postgresql-dev mariadb-dev sqlite-dev \
&& pip install --no-cache-dir --upgrade uvicorn \
&& pip install --no-cache-dir psycopg2==2.9.9 mysqlclient==2.2.4 pysqlite3==0.5.2 \
&& pip install --no-cache-dir psycopg2==2.9.10 mysqlclient==2.2.6 pysqlite3==0.5.4 \
&& pip install --no-cache-dir -r /tmp/requirements.txt \
&& apk del build-deps

135
README.md
View File

@@ -2,7 +2,8 @@
Minimal Delegated License Service (DLS).
Compatibility tested with official NLS 2.0.1, 2.1.0, 3.1.0. For Driver compatibility see [here](#setup-client).
Compatibility tested with official NLS 2.0.1, 2.1.0, 3.1.0, 3.3.1. For Driver compatibility
see [compatibility matrix](#vgpu-software-compatibility-matrix).
This service can be used without internet connection.
Only the clients need a connection to this service on configured port.
@@ -42,6 +43,9 @@ Tested with Ubuntu 22.10 (EOL!) (from Proxmox templates), actually its consuming
- Make sure your timezone is set correctly on your fastapi-dls server and your client
This guide does not show how to install vGPU host drivers! Look at the official documentation packed with the driver
releases.
## Docker
Docker-Images are available here for Intel (x86), AMD (amd64) and ARM (arm64):
@@ -326,11 +330,11 @@ Packages are available here:
Successful tested with:
- Debian 12 (Bookworm) (EOL: tba.)
- Ubuntu 22.10 (Kinetic Kudu) (EOL: July 20, 2023)
- Ubuntu 23.04 (Lunar Lobster) (EOL: January 2024)
- Ubuntu 23.10 (Mantic Minotaur) (EOL: July 2024)
- Ubuntu 24.04 (Noble Numbat) (EOL: April 2036)
- **Debian 12 (Bookworm)** (EOL: June 06, 2026)
- *Ubuntu 22.10 (Kinetic Kudu)* (EOL: July 20, 2023)
- *Ubuntu 23.04 (Lunar Lobster)* (EOL: January 2024)
- *Ubuntu 23.10 (Mantic Minotaur)* (EOL: July 2024)
- **Ubuntu 24.04 (Noble Numbat)** (EOL: April 2036)
Not working with:
@@ -406,21 +410,22 @@ After first success you have to replace `--issue` with `--renew`.
# Configuration
| Variable | Default | Usage |
|------------------------|----------------------------------------|------------------------------------------------------------------------------------------------------|
| `DEBUG` | `false` | Toggles `fastapi` debug mode |
| `DLS_URL` | `localhost` | Used in client-token to tell guest driver where dls instance is reachable |
| `DLS_PORT` | `443` | Used in client-token to tell guest driver where dls instance is reachable |
| `TOKEN_EXPIRE_DAYS`    | `1`                                    | Client auth-token validity (used to authenticate the client against the api, **not `.tok` file!**)    |
| `LEASE_EXPIRE_DAYS` | `90` | Lease time in days |
| `LEASE_RENEWAL_PERIOD` | `0.15` | The percentage of the lease period that must elapse before a licensed client can renew a license \*1 |
| `DATABASE` | `sqlite:///db.sqlite` | See [official SQLAlchemy docs](https://docs.sqlalchemy.org/en/14/core/engines.html) |
| `CORS_ORIGINS` | `https://{DLS_URL}` | Sets `Access-Control-Allow-Origin` header (comma separated string) \*2 |
| `SITE_KEY_XID` | `00000000-0000-0000-0000-000000000000` | Site identification uuid |
| `INSTANCE_REF` | `10000000-0000-0000-0000-000000000001` | Instance identification uuid |
| `ALLOTMENT_REF` | `20000000-0000-0000-0000-000000000001` | Allotment identification uuid |
| `INSTANCE_KEY_RSA`     | `<app-dir>/cert/instance.private.pem`  | Site-wide private RSA key for signing JWTs \*3 |
| `INSTANCE_KEY_PUB` | `<app-dir>/cert/instance.public.pem` | Site-wide public key \*3 |
| Variable | Default | Usage |
|--------------------------|----------------------------------------|-------------------------------------------------------------------------------------------------------------------------------------|
| `DEBUG` | `false` | Toggles `fastapi` debug mode |
| `DLS_URL` | `localhost` | Used in client-token to tell guest driver where dls instance is reachable |
| `DLS_PORT` | `443` | Used in client-token to tell guest driver where dls instance is reachable |
| `TOKEN_EXPIRE_DAYS`      | `1`                                    | Client auth-token validity (used to authenticate the client against the api, **not `.tok` file!**)                                   |
| `LEASE_EXPIRE_DAYS` | `90` | Lease time in days |
| `LEASE_RENEWAL_PERIOD` | `0.15` | The percentage of the lease period that must elapse before a licensed client can renew a license \*1 |
| `DATABASE` | `sqlite:///db.sqlite` | See [official SQLAlchemy docs](https://docs.sqlalchemy.org/en/14/core/engines.html) |
| `CORS_ORIGINS` | `https://{DLS_URL}` | Sets `Access-Control-Allow-Origin` header (comma separated string) \*2 |
| `SITE_KEY_XID` | `00000000-0000-0000-0000-000000000000` | Site identification uuid |
| `INSTANCE_REF` | `10000000-0000-0000-0000-000000000001` | Instance identification uuid |
| `ALLOTMENT_REF` | `20000000-0000-0000-0000-000000000001` | Allotment identification uuid |
| `INSTANCE_KEY_RSA`       | `<app-dir>/cert/instance.private.pem`  | Site-wide private RSA key for signing JWTs \*3 |
| `INSTANCE_KEY_PUB` | `<app-dir>/cert/instance.public.pem` | Site-wide public key \*3 |
| `SUPPORT_MALFORMED_JSON` | `false` | Support parsing of a malformed "mac_address_list" ([Issue](https://git.collinwebdesigns.de/oscar.krause/fastapi-dls/-/issues/1))  |
\*1 For example, if the lease period is one day and the renewal period is 20%, the client attempts to renew its license
every 4.8 hours. If network connectivity is lost, the loss of connectivity is detected during license renewal and the
@@ -434,32 +439,8 @@ client has 19.2 hours in which to re-establish connectivity before its license e
**The token file has to be copied! It's not enough to C&P file contents, because there can be special characters.**
Successfully tested with this package versions:
| vGPU Software | Driver Branch | Linux vGPU Manager | Linux Driver | Windows Driver | Release Date | EOL Date |
|:-------------:|:-------------:|--------------------|--------------|----------------|--------------:|--------------:|
| `17.2` | R550 | `550.90.05` | `550.90.07` | `552.55` | June 2024 | February 2025 |
| `17.1` | R550 | `550.54.16` | `550.54.15` | `551.78` | March 2024 | |
| `17.0` | R550 | `550.54.10` | `550.54.14` | `551.61` | February 2024 | |
| `16.6` | R535 | `535.183.04` | `535.183.01` | `538.67` | June 2024 | July 2026 |
| `16.5` | R535 | `535.161.05` | `535.161.08` | `538.46` | February 2024 | |
| `16.4` | R535 | `535.161.05` | `535.161.07` | `538.33` | February 2024 | |
| `16.3` | R535 | `535.154.02` | `535.154.05` | `538.15` | January 2024 | |
| `16.2` | R535 | `535.129.03` | `535.129.03` | `537.70` | October 2023 | |
| `16.1` | R535 | `535.104.06` | `535.104.05` | `537.13` | August 2023 | |
| `16.0` | R535 | `535.54.06` | `535.54.03` | `536.22` | July 2023 | |
| `15.4` | R525 | `525.147.01` | `525.147.05` | `529.19` | June 2023 | October 2023 |
| `15.3` | R525 | `525.125.03` | `525.125.06` | `529.11` | June 2023 | |
| `15.2` | R525 | `525.105.14` | `525.105.17` | `528.89` | March 2023 | |
| `15.1` | R525 | `525.85.07` | `525.85.05` | `528.24` | January 2023 | |
| `15.0` | R525 | `525.60.12` | `525.60.13` | `527.41` | December 2022 | |
| `14.4` | R510 | `510.108.03` | `510.108.03` | `514.08` | December 2022 | February 2023 |
| `14.3` | R510 | `510.108.03` | `510.108.03` | `513.91` | November 2022 | |
- https://docs.nvidia.com/grid/index.html
- https://docs.nvidia.com/grid/gpus-supported-by-vgpu.html
*To get the latest drivers, visit Nvidia or search in Discord-Channel `GPU Unlocking` (Server-ID: `829786927829745685`) on channel `licensing` `biggerthanshit`
This guide does not show how to install vGPU guest drivers! Look at the official documentation packed with the driver
releases.
## Linux
@@ -535,33 +516,32 @@ Done. For more information check [troubleshoot section](#troubleshoot).
8. Set schedule to `At First Array Start Only`
9. Click on Apply
# Endpoints
# API Endpoints
<details>
<summary>show</summary>
### `GET /`
**`GET /`**
Redirect to `/-/readme`.
### `GET /-/health`
**`GET /-/health`**
Status endpoint, used for *healthcheck*.
### `GET /-/config`
**`GET /-/config`**
Shows current runtime environment variables and their values.
### `GET /-/readme`
**`GET /-/readme`**
HTML rendered README.md.
### `GET /-/manage`
**`GET /-/manage`**
Shows a very basic UI to delete origins or leases.
### `GET /-/origins?leases=false`
**`GET /-/origins?leases=false`**
List registered origins.
@@ -569,11 +549,11 @@ List registered origins.
|-----------------|---------|--------------------------------------|
| `leases` | `false` | Include referenced leases per origin |
### `DELETE /-/origins`
**`DELETE /-/origins`**
Deletes all origins and their leases.
### `GET /-/leases?origin=false`
**`GET /-/leases?origin=false`**
List current leases.
@@ -581,15 +561,15 @@ List current leases.
|-----------------|---------|-------------------------------------|
| `origin` | `false` | Include referenced origin per lease |
### `DELETE /-/lease/{lease_ref}`
**`DELETE /-/lease/{lease_ref}`**
Deletes a lease.
### `GET /-/client-token`
**`GET /-/client-token`**
Generate client token, (see [installation](#installation)).
### Others
**Others**
There are many other internal api endpoints for handling authentication and lease process.
</details>
@@ -616,9 +596,9 @@ Logs are available in `C:\Users\Public\Documents\Nvidia\LoggingLog.NVDisplay.Con
## Linux
### `uvicorn.error:Invalid HTTP request received.`
### Invalid HTTP request
This message can be ignored.
This error message: `uvicorn.error:Invalid HTTP request received.` can be ignored.
- Ref. https://github.com/encode/uvicorn/issues/441
@@ -744,11 +724,40 @@ The error message can safely be ignored (since we have no license limitation :P)
</details>
# vGPU Software Compatibility Matrix
Successfully tested with this package versions.
| vGPU Software | Driver Branch | Linux vGPU Manager | Linux Driver | Windows Driver | Release Date | EOL Date |
|:-------------:|:-------------:|--------------------|--------------|----------------|--------------:|--------------:|
| `17.4` | R550 | `550.127.06` | `550.127.05` | `553.24` | October 2024 | February 2025 |
| `17.3` | R550 | `550.90.05` | `550.90.07` | `552.74` | July 2024 | |
| `17.2` | R550 | `550.90.05` | `550.90.07` | `552.55` | June 2024 | |
| `17.1` | R550 | `550.54.16` | `550.54.15` | `551.78` | March 2024 | |
| `17.0` | R550 | `550.54.10` | `550.54.14` | `551.61` | February 2024 | |
| `16.8` | R535 | `535.216.01` | `535.216.01` | `538.95` | October 2024 | July 2026 |
| `16.7` | R535 | `535.183.04` | `535.183.06` | `538.78` | July 2024 | |
| `16.6` | R535 | `535.183.04` | `535.183.01` | `538.67` | June 2024 | |
| `16.5` | R535 | `535.161.05` | `535.161.08` | `538.46` | February 2024 | |
| `16.4` | R535 | `535.161.05` | `535.161.07` | `538.33` | February 2024 | |
| `16.3` | R535 | `535.154.02` | `535.154.05` | `538.15` | January 2024 | |
| `16.2` | R535 | `535.129.03` | `535.129.03` | `537.70` | October 2023 | |
| `16.1` | R535 | `535.104.06` | `535.104.05` | `537.13` | August 2023 | |
| `16.0` | R535 | `535.54.06` | `535.54.03` | `536.22` | July 2023 | |
| `15.4` | R525 | `525.147.01` | `525.147.05` | `529.19` | June 2023 | December 2023 |
| `14.4` | R510 | `510.108.03` | `510.108.03` | `514.08` | December 2022 | February 2023 |
- https://docs.nvidia.com/grid/index.html
- https://docs.nvidia.com/grid/gpus-supported-by-vgpu.html
*To get the latest drivers, visit Nvidia or search in Discord-Channel `GPU Unlocking` (Server-ID: `829786927829745685`)
on channel `licensing`
# Credits
Thanks to vGPU community and all who uses this project and report bugs.
Special thanks to
Special thanks to:
- @samicrusader who created build file for **ArchLinux**
- @cyrus who wrote the section for **openSUSE**

View File

@@ -2,7 +2,7 @@ import logging
from base64 import b64encode as b64enc
from calendar import timegm
from contextlib import asynccontextmanager
from datetime import datetime
from datetime import datetime, timedelta
from hashlib import sha256
from json import loads as json_loads
from os import getenv as env
@@ -13,15 +13,15 @@ from dateutil.relativedelta import relativedelta
from dotenv import load_dotenv
from fastapi import FastAPI
from fastapi.requests import Request
from jose import jws, jwt, JWTError
from jose import jws, jwk, jwt, JWTError
from jose.constants import ALGORITHMS
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from starlette.middleware.cors import CORSMiddleware
from starlette.responses import StreamingResponse, JSONResponse as JSONr, HTMLResponse as HTMLr, Response, \
RedirectResponse
from starlette.responses import StreamingResponse, JSONResponse as JSONr, HTMLResponse as HTMLr, Response, RedirectResponse
from orm import init as db_init, migrate, Site, Instance, Origin, Lease
from orm import Origin, Lease, init as db_init, migrate
from util import load_key, load_file
# Load variables
load_dotenv('../version.env')
@@ -39,34 +39,46 @@ db_init(db), migrate(db)
# Load DLS variables (all prefixed with "INSTANCE_*" is used as "SERVICE_INSTANCE_*" or "SI_*" in official dls service)
DLS_URL = str(env('DLS_URL', 'localhost'))
DLS_PORT = int(env('DLS_PORT', '443'))
SITE_KEY_XID = str(env('SITE_KEY_XID', '00000000-0000-0000-0000-000000000000'))
INSTANCE_REF = str(env('INSTANCE_REF', '10000000-0000-0000-0000-000000000001'))
ALLOTMENT_REF = str(env('ALLOTMENT_REF', '20000000-0000-0000-0000-000000000001'))
INSTANCE_KEY_RSA = load_key(str(env('INSTANCE_KEY_RSA', join(dirname(__file__), 'cert/instance.private.pem'))))
INSTANCE_KEY_PUB = load_key(str(env('INSTANCE_KEY_PUB', join(dirname(__file__), 'cert/instance.public.pem'))))
TOKEN_EXPIRE_DELTA = relativedelta(days=int(env('TOKEN_EXPIRE_DAYS', 1)), hours=int(env('TOKEN_EXPIRE_HOURS', 0)))
LEASE_EXPIRE_DELTA = relativedelta(days=int(env('LEASE_EXPIRE_DAYS', 90)), hours=int(env('LEASE_EXPIRE_HOURS', 0)))
LEASE_RENEWAL_PERIOD = float(env('LEASE_RENEWAL_PERIOD', 0.15))
LEASE_RENEWAL_DELTA = timedelta(days=int(env('LEASE_EXPIRE_DAYS', 90)), hours=int(env('LEASE_EXPIRE_HOURS', 0)))
CLIENT_TOKEN_EXPIRE_DELTA = relativedelta(years=12)
CORS_ORIGINS = str(env('CORS_ORIGINS', '')).split(',') if (env('CORS_ORIGINS')) else [f'https://{DLS_URL}']
ALLOTMENT_REF = str(env('ALLOTMENT_REF', '20000000-0000-0000-0000-000000000001')) # todo
jwt_encode_key = jwk.construct(INSTANCE_KEY_RSA.export_key().decode('utf-8'), algorithm=ALGORITHMS.RS256)
jwt_decode_key = jwk.construct(INSTANCE_KEY_PUB.export_key().decode('utf-8'), algorithm=ALGORITHMS.RS256)
# Logging
LOG_LEVEL = logging.DEBUG if DEBUG else logging.INFO
logging.basicConfig(format='[{levelname:^7}] [{module:^15}] {message}', style='{')
logger = logging.getLogger(__name__)
logger.setLevel(LOG_LEVEL)
logging.getLogger('util').setLevel(LOG_LEVEL)
logging.getLogger('NV').setLevel(LOG_LEVEL)
# FastAPI
@asynccontextmanager
async def lifespan(_: FastAPI):
# on startup
default_instance = Instance.get_default_instance(db)
lease_renewal_period = default_instance.lease_renewal_period
lease_renewal_delta = default_instance.get_lease_renewal_delta()
client_token_expire_delta = default_instance.get_client_token_expire_delta()
logger.info(f'''
Using timezone: {str(TZ)}. Make sure this is correct and match your clients!
Your clients will renew their license every {str(Lease.calculate_renewal(lease_renewal_period, lease_renewal_delta))}.
If the renewal fails, the license is valid for {str(lease_renewal_delta)}.
Your clients renew their license every {str(Lease.calculate_renewal(LEASE_RENEWAL_PERIOD, LEASE_RENEWAL_DELTA))}.
If the renewal fails, the license is {str(LEASE_RENEWAL_DELTA)} valid.
Your client-token file (.tok) is valid for {str(client_token_expire_delta)}.
Your client-token file (.tok) is valid for {str(CLIENT_TOKEN_EXPIRE_DELTA)}.
''')
logger.info(f'Debug is {"enabled" if DEBUG else "disabled"}.')
validate_settings()
yield
# on shutdown
@@ -84,35 +96,20 @@ app.add_middleware(
allow_methods=['*'],
allow_headers=['*'],
)
if bool(env('SUPPORT_MALFORMED_JSON', False)):
from middleware import PatchMalformedJsonMiddleware
# Logging
LOG_LEVEL = logging.DEBUG if DEBUG else logging.INFO
logging.basicConfig(format='[{levelname:^7}] [{module:^15}] {message}', style='{')
logger = logging.getLogger(__name__)
logger.setLevel(LOG_LEVEL)
logging.getLogger('util').setLevel(LOG_LEVEL)
logging.getLogger('NV').setLevel(LOG_LEVEL)
logger.info(f'Enabled "PatchMalformedJsonMiddleware"!')
app.add_middleware(PatchMalformedJsonMiddleware, enabled=True)
# Helper
def __get_token(request: Request, jwt_decode_key: "jose.jwt") -> dict:
def __get_token(request: Request) -> dict:
authorization_header = request.headers.get('authorization')
token = authorization_header.split(' ')[1]
return jwt.decode(token=token, key=jwt_decode_key, algorithms=ALGORITHMS.RS256, options={'verify_aud': False})
def validate_settings():
session = sessionmaker(bind=db)()
lease_expire_delta_min, lease_expire_delta_max = 86_400, 7_776_000
for instance in session.query(Instance).all():
lease_expire_delta = instance.lease_expire_delta
if lease_expire_delta < 86_400 or lease_expire_delta > 7_776_000:
logging.warning(f'> [ instance ]: {instance.instance_ref}: "lease_expire_delta" should be between {lease_expire_delta_min} and {lease_expire_delta_max}')
session.close()
# Endpoints
@app.get('/', summary='Index')
@@ -132,20 +129,18 @@ async def _health():
@app.get('/-/config', summary='* Config', description='returns environment variables.')
async def _config():
default_site, default_instance = Site.get_default_site(db), Instance.get_default_instance(db)
return JSONr({
'VERSION': str(VERSION),
'COMMIT': str(COMMIT),
'DEBUG': str(DEBUG),
'DLS_URL': str(DLS_URL),
'DLS_PORT': str(DLS_PORT),
'SITE_KEY_XID': str(default_site.site_key),
'INSTANCE_REF': str(default_instance.instance_ref),
'SITE_KEY_XID': str(SITE_KEY_XID),
'INSTANCE_REF': str(INSTANCE_REF),
'ALLOTMENT_REF': [str(ALLOTMENT_REF)],
'TOKEN_EXPIRE_DELTA': str(default_instance.get_token_expire_delta()),
'LEASE_EXPIRE_DELTA': str(default_instance.get_lease_expire_delta()),
'LEASE_RENEWAL_PERIOD': str(default_instance.lease_renewal_period),
'TOKEN_EXPIRE_DELTA': str(TOKEN_EXPIRE_DELTA),
'LEASE_EXPIRE_DELTA': str(LEASE_EXPIRE_DELTA),
'LEASE_RENEWAL_PERIOD': str(LEASE_RENEWAL_PERIOD),
'CORS_ORIGINS': str(CORS_ORIGINS),
'TZ': str(TZ),
})
@@ -154,7 +149,6 @@ async def _config():
@app.get('/-/readme', summary='* Readme')
async def _readme():
from markdown import markdown
from util import load_file
content = load_file(join(dirname(__file__), '../README.md')).decode('utf-8')
return HTMLr(markdown(text=content, extensions=['tables', 'fenced_code', 'md_in_html', 'nl2br', 'toc']))
@@ -204,7 +198,8 @@ async def _origins(request: Request, leases: bool = False):
for origin in session.query(Origin).all():
x = origin.serialize()
if leases:
x['leases'] = list(map(lambda _: _.serialize(), Lease.find_by_origin_ref(db, origin.origin_ref)))
serialize = dict(renewal_period=LEASE_RENEWAL_PERIOD, renewal_delta=LEASE_RENEWAL_DELTA)
x['leases'] = list(map(lambda _: _.serialize(**serialize), Lease.find_by_origin_ref(db, origin.origin_ref)))
response.append(x)
session.close()
return JSONr(response)
@@ -221,7 +216,8 @@ async def _leases(request: Request, origin: bool = False):
session = sessionmaker(bind=db)()
response = []
for lease in session.query(Lease).all():
x = lease.serialize()
serialize = dict(renewal_period=LEASE_RENEWAL_PERIOD, renewal_delta=LEASE_RENEWAL_DELTA)
x = lease.serialize(**serialize)
if origin:
lease_origin = session.query(Origin).filter(Origin.origin_ref == lease.origin_ref).first()
if lease_origin is not None:
@@ -248,13 +244,7 @@ async def _lease_delete(request: Request, lease_ref: str):
@app.get('/-/client-token', summary='* Client-Token', description='creates a new messenger token for this service instance')
async def _client_token():
cur_time = datetime.utcnow()
default_instance = Instance.get_default_instance(db)
public_key = default_instance.get_public_key()
# todo: implemented request parameter to support different instances
jwt_encode_key = default_instance.get_jwt_encode_key()
exp_time = cur_time + default_instance.get_client_token_expire_delta()
exp_time = cur_time + CLIENT_TOKEN_EXPIRE_DELTA
payload = {
"jti": str(uuid4()),
@@ -267,7 +257,7 @@ async def _client_token():
"scope_ref_list": [ALLOTMENT_REF],
"fulfillment_class_ref_list": [],
"service_instance_configuration": {
"nls_service_instance_ref": default_instance.instance_ref,
"nls_service_instance_ref": INSTANCE_REF,
"svc_port_set_list": [
{
"idx": 0,
@@ -279,10 +269,10 @@ async def _client_token():
},
"service_instance_public_key_configuration": {
"service_instance_public_key_me": {
"mod": hex(public_key.public_key().n)[2:],
"exp": int(public_key.public_key().e),
"mod": hex(INSTANCE_KEY_PUB.public_key().n)[2:],
"exp": int(INSTANCE_KEY_PUB.public_key().e),
},
"service_instance_public_key_pem": public_key.export_key().decode('utf-8'),
"service_instance_public_key_pem": INSTANCE_KEY_PUB.export_key().decode('utf-8'),
"key_retention_mode": "LATEST_ONLY"
},
}
@@ -364,16 +354,13 @@ async def auth_v1_code(request: Request):
delta = relativedelta(minutes=15)
expires = cur_time + delta
default_site = Site.get_default_site(db)
jwt_encode_key = Instance.get_default_instance(db).get_jwt_encode_key()
payload = {
'iat': timegm(cur_time.timetuple()),
'exp': timegm(expires.timetuple()),
'challenge': j.get('code_challenge'),
'origin_ref': j.get('origin_ref'),
'key_ref': default_site.site_key,
'kid': default_site.site_key,
'key_ref': SITE_KEY_XID,
'kid': SITE_KEY_XID
}
auth_code = jws.sign(payload, key=jwt_encode_key, headers={'kid': payload.get('kid')}, algorithm=ALGORITHMS.RS256)
@@ -393,11 +380,8 @@ async def auth_v1_code(request: Request):
async def auth_v1_token(request: Request):
j, cur_time = json_loads((await request.body()).decode('utf-8')), datetime.utcnow()
default_site, default_instance = Site.get_default_site(db), Instance.get_default_instance(db)
jwt_encode_key, jwt_decode_key = default_instance.get_jwt_encode_key(), default_instance.get_jwt_decode_key()
try:
payload = jwt.decode(token=j.get('auth_code'), key=jwt_decode_key, algorithms=[ALGORITHMS.RS256])
payload = jwt.decode(token=j.get('auth_code'), key=jwt_decode_key)
except JWTError as e:
return JSONr(status_code=400, content={'status': 400, 'title': 'invalid token', 'detail': str(e)})
@@ -409,7 +393,7 @@ async def auth_v1_token(request: Request):
if payload.get('challenge') != challenge:
return JSONr(status_code=401, content={'status': 401, 'detail': 'expected challenge did not match verifier'})
access_expires_on = cur_time + default_instance.get_token_expire_delta()
access_expires_on = cur_time + TOKEN_EXPIRE_DELTA
new_payload = {
'iat': timegm(cur_time.timetuple()),
@@ -418,8 +402,8 @@ async def auth_v1_token(request: Request):
'aud': 'https://cls.nvidia.org',
'exp': timegm(access_expires_on.timetuple()),
'origin_ref': origin_ref,
'key_ref': default_site.site_key,
'kid': default_site.site_key,
'key_ref': SITE_KEY_XID,
'kid': SITE_KEY_XID,
}
auth_token = jwt.encode(new_payload, key=jwt_encode_key, headers={'kid': payload.get('kid')}, algorithm=ALGORITHMS.RS256)
@@ -436,13 +420,10 @@ async def auth_v1_token(request: Request):
# venv/lib/python3.9/site-packages/nls_services_lease/test/test_lease_multi_controller.py
@app.post('/leasing/v1/lessor', description='request multiple leases (borrow) for current origin')
async def leasing_v1_lessor(request: Request):
j, cur_time = json_loads((await request.body()).decode('utf-8')), datetime.utcnow()
default_instance = Instance.get_default_instance(db)
jwt_decode_key = default_instance.get_jwt_decode_key()
j, token, cur_time = json_loads((await request.body()).decode('utf-8')), __get_token(request), datetime.utcnow()
try:
token = __get_token(request, jwt_decode_key)
token = __get_token(request)
except JWTError:
return JSONr(status_code=401, content={'status': 401, 'detail': 'token is not valid'})
@@ -456,7 +437,7 @@ async def leasing_v1_lessor(request: Request):
# return JSONr(status_code=500, detail=f'no service instances found for scopes: ["{scope_ref}"]')
lease_ref = str(uuid4())
expires = cur_time + default_instance.get_lease_expire_delta()
expires = cur_time + LEASE_EXPIRE_DELTA
lease_result_list.append({
"ordinal": 0,
# https://docs.nvidia.com/license-system/latest/nvidia-license-system-user-guide/index.html
@@ -464,13 +445,13 @@ async def leasing_v1_lessor(request: Request):
"ref": lease_ref,
"created": cur_time.isoformat(),
"expires": expires.isoformat(),
"recommended_lease_renewal": default_instance.lease_renewal_period,
"recommended_lease_renewal": LEASE_RENEWAL_PERIOD,
"offline_lease": "true",
"license_type": "CONCURRENT_COUNTED_SINGLE"
}
})
data = Lease(instance_ref=default_instance.instance_ref, origin_ref=origin_ref, lease_ref=lease_ref, lease_created=cur_time, lease_expires=expires)
data = Lease(origin_ref=origin_ref, lease_ref=lease_ref, lease_created=cur_time, lease_expires=expires)
Lease.create_or_update(db, data)
response = {
@@ -487,14 +468,7 @@ async def leasing_v1_lessor(request: Request):
# venv/lib/python3.9/site-packages/nls_dal_service_instance_dls/schema/service_instance/V1_0_21__product_mapping.sql
@app.get('/leasing/v1/lessor/leases', description='get active leases for current origin')
async def leasing_v1_lessor_lease(request: Request):
cur_time = datetime.utcnow()
jwt_decode_key = Instance.get_default_instance(db).get_jwt_decode_key()
try:
token = __get_token(request, jwt_decode_key)
except JWTError:
return JSONr(status_code=401, content={'status': 401, 'detail': 'token is not valid'})
token, cur_time = __get_token(request), datetime.utcnow()
origin_ref = token.get('origin_ref')
@@ -514,15 +488,7 @@ async def leasing_v1_lessor_lease(request: Request):
# venv/lib/python3.9/site-packages/nls_core_lease/lease_single.py
@app.put('/leasing/v1/lease/{lease_ref}', description='renew a lease')
async def leasing_v1_lease_renew(request: Request, lease_ref: str):
cur_time = datetime.utcnow()
default_instance = Instance.get_default_instance(db)
jwt_decode_key = default_instance.get_jwt_decode_key()
try:
token = __get_token(request, jwt_decode_key)
except JWTError:
return JSONr(status_code=401, content={'status': 401, 'detail': 'token is not valid'})
token, cur_time = __get_token(request), datetime.utcnow()
origin_ref = token.get('origin_ref')
logging.info(f'> [ renew ]: {origin_ref}: renew {lease_ref}')
@@ -531,11 +497,11 @@ async def leasing_v1_lease_renew(request: Request, lease_ref: str):
if entity is None:
return JSONr(status_code=404, content={'status': 404, 'detail': 'requested lease not available'})
expires = cur_time + default_instance.get_lease_expire_delta()
expires = cur_time + LEASE_EXPIRE_DELTA
response = {
"lease_ref": lease_ref,
"expires": expires.isoformat(),
"recommended_lease_renewal": default_instance.lease_renewal_period,
"recommended_lease_renewal": LEASE_RENEWAL_PERIOD,
"offline_lease": True,
"prompts": None,
"sync_timestamp": cur_time.isoformat(),
@@ -549,14 +515,7 @@ async def leasing_v1_lease_renew(request: Request, lease_ref: str):
# venv/lib/python3.9/site-packages/nls_services_lease/test/test_lease_single_controller.py
@app.delete('/leasing/v1/lease/{lease_ref}', description='release (return) a lease')
async def leasing_v1_lease_delete(request: Request, lease_ref: str):
cur_time = datetime.utcnow()
jwt_decode_key = Instance.get_default_instance(db).get_jwt_decode_key()
try:
token = __get_token(request, jwt_decode_key)
except JWTError:
return JSONr(status_code=401, content={'status': 401, 'detail': 'token is not valid'})
token, cur_time = __get_token(request), datetime.utcnow()
origin_ref = token.get('origin_ref')
logging.info(f'> [ return ]: {origin_ref}: return {lease_ref}')
@@ -582,14 +541,7 @@ async def leasing_v1_lease_delete(request: Request, lease_ref: str):
# venv/lib/python3.9/site-packages/nls_services_lease/test/test_lease_multi_controller.py
@app.delete('/leasing/v1/lessor/leases', description='release all leases')
async def leasing_v1_lessor_lease_remove(request: Request):
cur_time = datetime.utcnow()
jwt_decode_key = Instance.get_default_instance(db).get_jwt_decode_key()
try:
token = __get_token(request, jwt_decode_key)
except JWTError:
return JSONr(status_code=401, content={'status': 401, 'detail': 'token is not valid'})
token, cur_time = __get_token(request), datetime.utcnow()
origin_ref = token.get('origin_ref')
@@ -611,8 +563,6 @@ async def leasing_v1_lessor_lease_remove(request: Request):
async def leasing_v1_lessor_shutdown(request: Request):
j, cur_time = json_loads((await request.body()).decode('utf-8')), datetime.utcnow()
jwt_decode_key = Instance.get_default_instance(db).get_jwt_decode_key()
token = j.get('token')
token = jwt.decode(token=token, key=jwt_decode_key, algorithms=ALGORITHMS.RS256, options={'verify_aud': False})
origin_ref = token.get('origin_ref')

43
app/middleware.py Normal file
View File

@@ -0,0 +1,43 @@
import json
import logging
import re
from starlette.middleware.base import BaseHTTPMiddleware
from starlette.requests import Request
logger = logging.getLogger(__name__)
class PatchMalformedJsonMiddleware(BaseHTTPMiddleware):
# see oscar.krause/fastapi-dls#1
REGEX = '(\"mac_address_list\"\:\s?\[)([\w\d])'
def __init__(self, app, enabled: bool):
super().__init__(app)
self.enabled = enabled
async def dispatch(self, request: Request, call_next):
body = await request.body()
content_type = request.headers.get('Content-Type')
if self.enabled and content_type == 'application/json':
body = body.decode()
try:
json.loads(body)
except json.decoder.JSONDecodeError:
logger.warning(f'Malformed json received! Try to fix it, "PatchMalformedJsonMiddleware" is enabled.')
s = PatchMalformedJsonMiddleware.fix_json(body)
logger.debug(f'Fixed JSON: "{s}"')
s = json.loads(s) # ensure json is now valid
# set new body
request._body = json.dumps(s).encode('utf-8')
response = await call_next(request)
return response
@staticmethod
def fix_json(s: str) -> str:
s = s.replace('\t', '')
s = s.replace('\n', '')
return re.sub(PatchMalformedJsonMiddleware.REGEX, r'\1"\2', s)

View File

@@ -1,144 +1,20 @@
import logging
from datetime import datetime, timedelta
from datetime import datetime, timedelta, timezone
from dateutil.relativedelta import relativedelta
from sqlalchemy import Column, VARCHAR, CHAR, ForeignKey, DATETIME, update, and_, inspect, text, BLOB, INT, FLOAT
from sqlalchemy import Column, VARCHAR, CHAR, ForeignKey, DATETIME, update, and_, inspect, text
from sqlalchemy.engine import Engine
from sqlalchemy.orm import sessionmaker, declarative_base, Session, relationship
from sqlalchemy.orm import sessionmaker, declarative_base
from util import NV
logging.basicConfig()
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)
Base = declarative_base()
class Site(Base):
__tablename__ = "site"
INITIAL_SITE_KEY_XID = '00000000-0000-0000-0000-000000000000'
INITIAL_SITE_NAME = 'default'
site_key = Column(CHAR(length=36), primary_key=True, unique=True, index=True) # uuid4, SITE_KEY_XID
name = Column(VARCHAR(length=256), nullable=False)
def __str__(self):
return f'SITE_KEY_XID: {self.site_key}'
@staticmethod
def create_statement(engine: Engine):
from sqlalchemy.schema import CreateTable
return CreateTable(Site.__table__).compile(engine)
@staticmethod
def get_default_site(engine: Engine) -> "Site":
session = sessionmaker(bind=engine)()
entity = session.query(Site).filter(Site.site_key == Site.INITIAL_SITE_KEY_XID).first()
session.close()
return entity
class Instance(Base):
__tablename__ = "instance"
DEFAULT_INSTANCE_REF = '10000000-0000-0000-0000-000000000001'
DEFAULT_TOKEN_EXPIRE_DELTA = 86_400 # 1 day
DEFAULT_LEASE_EXPIRE_DELTA = 7_776_000 # 90 days
DEFAULT_LEASE_RENEWAL_PERIOD = 0.15
DEFAULT_CLIENT_TOKEN_EXPIRE_DELTA = 378_432_000 # 12 years
# 1 day = 86400 (min. in production setup, max 90 days), 1 hour = 3600
instance_ref = Column(CHAR(length=36), primary_key=True, unique=True, index=True) # uuid4, INSTANCE_REF
site_key = Column(CHAR(length=36), ForeignKey(Site.site_key, ondelete='CASCADE'), nullable=False, index=True) # uuid4
private_key = Column(BLOB(length=2048), nullable=False)
public_key = Column(BLOB(length=512), nullable=False)
token_expire_delta = Column(INT(), nullable=False, default=DEFAULT_TOKEN_EXPIRE_DELTA, comment='in seconds')
lease_expire_delta = Column(INT(), nullable=False, default=DEFAULT_LEASE_EXPIRE_DELTA, comment='in seconds')
lease_renewal_period = Column(FLOAT(precision=2), nullable=False, default=DEFAULT_LEASE_RENEWAL_PERIOD)
client_token_expire_delta = Column(INT(), nullable=False, default=DEFAULT_CLIENT_TOKEN_EXPIRE_DELTA, comment='in seconds')
__origin = relationship(Site, foreign_keys=[site_key])
def __str__(self):
return f'INSTANCE_REF: {self.instance_ref} (SITE_KEY_XID: {self.site_key})'
@staticmethod
def create_statement(engine: Engine):
from sqlalchemy.schema import CreateTable
return CreateTable(Instance.__table__).compile(engine)
@staticmethod
def create_or_update(engine: Engine, instance: "Instance"):
session = sessionmaker(bind=engine)()
entity = session.query(Instance).filter(Instance.instance_ref == instance.instance_ref).first()
if entity is None:
session.add(instance)
else:
x = dict(
site_key=instance.site_key,
private_key=instance.private_key,
public_key=instance.public_key,
token_expire_delta=instance.token_expire_delta,
lease_expire_delta=instance.lease_expire_delta,
lease_renewal_period=instance.lease_renewal_period,
client_token_expire_delta=instance.client_token_expire_delta,
)
session.execute(update(Instance).where(Instance.instance_ref == instance.instance_ref).values(**x))
session.commit()
session.flush()
session.close()
# todo: validate on startup that "lease_expire_delta" is between 1 day and 90 days
@staticmethod
def get_default_instance(engine: Engine) -> "Instance":
session = sessionmaker(bind=engine)()
site = Site.get_default_site(engine)
entity = session.query(Instance).filter(Instance.site_key == site.site_key).first()
session.close()
return entity
def get_token_expire_delta(self) -> "dateutil.relativedelta.relativedelta":
return relativedelta(seconds=self.token_expire_delta)
def get_lease_expire_delta(self) -> "dateutil.relativedelta.relativedelta":
return relativedelta(seconds=self.lease_expire_delta)
def get_lease_renewal_delta(self) -> "datetime.timedelta":
return timedelta(seconds=self.lease_expire_delta)
def get_client_token_expire_delta(self) -> "dateutil.relativedelta.relativedelta":
return relativedelta(seconds=self.client_token_expire_delta)
def __get_private_key(self) -> "RsaKey":
return parse_key(self.private_key)
def get_public_key(self) -> "RsaKey":
return parse_key(self.public_key)
def get_jwt_encode_key(self) -> "jose.jkw":
from jose import jwk
from jose.constants import ALGORITHMS
return jwk.construct(self.__get_private_key().export_key().decode('utf-8'), algorithm=ALGORITHMS.RS256)
def get_jwt_decode_key(self) -> "jose.jwt":
from jose import jwk
from jose.constants import ALGORITHMS
return jwk.construct(self.get_public_key().export_key().decode('utf-8'), algorithm=ALGORITHMS.RS256)
def get_private_key_str(self, encoding: str = 'utf-8') -> str:
return self.private_key.decode(encoding)
def get_public_key_str(self, encoding: str = 'utf-8') -> str:
return self.private_key.decode(encoding)
class Origin(Base):
__tablename__ = "origin"
origin_ref = Column(CHAR(length=36), primary_key=True, unique=True, index=True) # uuid4
# service_instance_xid = Column(CHAR(length=36), nullable=False, index=True) # uuid4 # not necessary, we only support one service_instance_xid ('INSTANCE_REF')
hostname = Column(VARCHAR(length=256), nullable=True)
guest_driver_version = Column(VARCHAR(length=10), nullable=True)
@@ -190,7 +66,17 @@ class Origin(Base):
if origin_refs is None:
deletions = session.query(Origin).delete()
else:
deletions = session.query(Origin).filter(Origin.origin_ref in origin_refs).delete()
deletions = session.query(Origin).filter(Origin.origin_ref.in_(origin_refs)).delete()
session.commit()
session.close()
return deletions
@staticmethod
def delete_expired(engine: Engine) -> int:
session = sessionmaker(bind=engine)()
origins = session.query(Origin).join(Lease, Origin.origin_ref == Lease.origin_ref, isouter=True).filter(Lease.lease_ref.is_(None)).all()
origin_refs = [origin.origin_ref for origin in origins]
deletions = session.query(Origin).filter(Origin.origin_ref.in_(origin_refs)).delete()
session.commit()
session.close()
return deletions
@@ -199,24 +85,18 @@ class Origin(Base):
class Lease(Base):
__tablename__ = "lease"
instance_ref = Column(CHAR(length=36), ForeignKey(Instance.instance_ref, ondelete='CASCADE'), nullable=False, index=True) # uuid4
lease_ref = Column(CHAR(length=36), primary_key=True, nullable=False, index=True) # uuid4
origin_ref = Column(CHAR(length=36), ForeignKey(Origin.origin_ref, ondelete='CASCADE'), nullable=False, index=True) # uuid4
# scope_ref = Column(CHAR(length=36), nullable=False, index=True) # uuid4 # not necessary, we only support one scope_ref ('ALLOTMENT_REF')
lease_created = Column(DATETIME(), nullable=False)
lease_expires = Column(DATETIME(), nullable=False)
lease_updated = Column(DATETIME(), nullable=False)
__instance = relationship(Instance, foreign_keys=[instance_ref])
__origin = relationship(Origin, foreign_keys=[origin_ref])
def __repr__(self):
return f'Lease(origin_ref={self.origin_ref}, lease_ref={self.lease_ref}, expires={self.lease_expires})'
def serialize(self) -> dict:
renewal_period = self.__instance.lease_renewal_period
renewal_delta = self.__instance.get_lease_renewal_delta
def serialize(self, renewal_period: float, renewal_delta: timedelta) -> dict:
lease_renewal = int(Lease.calculate_renewal(renewal_period, renewal_delta).total_seconds())
lease_renewal = self.lease_updated + relativedelta(seconds=lease_renewal)
@@ -224,10 +104,10 @@ class Lease(Base):
'lease_ref': self.lease_ref,
'origin_ref': self.origin_ref,
# 'scope_ref': self.scope_ref,
'lease_created': self.lease_created.isoformat(),
'lease_expires': self.lease_expires.isoformat(),
'lease_updated': self.lease_updated.isoformat(),
'lease_renewal': lease_renewal.isoformat(),
'lease_created': self.lease_created.replace(tzinfo=timezone.utc).isoformat(),
'lease_expires': self.lease_expires.replace(tzinfo=timezone.utc).isoformat(),
'lease_updated': self.lease_updated.replace(tzinfo=timezone.utc).isoformat(),
'lease_renewal': lease_renewal.replace(tzinfo=timezone.utc).isoformat(),
}
@staticmethod
@@ -326,110 +206,38 @@ class Lease(Base):
return renew
def init_default_site(session: Session):
from app.util import generate_key
private_key = generate_key()
public_key = private_key.public_key()
site = Site(
site_key=Site.INITIAL_SITE_KEY_XID,
name=Site.INITIAL_SITE_NAME
)
session.add(site)
session.commit()
instance = Instance(
instance_ref=Instance.DEFAULT_INSTANCE_REF,
site_key=site.site_key,
private_key=private_key.export_key(),
public_key=public_key.export_key(),
)
session.add(instance)
session.commit()
def init(engine: Engine):
tables = [Site, Instance, Origin, Lease]
tables = [Origin, Lease]
db = inspect(engine)
session = sessionmaker(bind=engine)()
for table in tables:
exists = db.dialect.has_table(engine.connect(), table.__tablename__)
logger.info(f'> Table "{table.__tablename__:<16}" exists: {exists}')
if not exists:
if not db.dialect.has_table(engine.connect(), table.__tablename__):
session.execute(text(str(table.create_statement(engine))))
session.commit()
# create default site
cnt = session.query(Site).count()
if cnt == 0:
init_default_site(session)
session.flush()
session.close()
def migrate(engine: Engine):
from os import getenv as env
from os.path import join, dirname, isfile
from util import load_key
db = inspect(engine)
# todo: add update guide to use 1.LATEST to 2.0
def upgrade_1_x_to_2_0():
site = Site.get_default_site(engine)
logger.info(site)
instance = Instance.get_default_instance(engine)
logger.info(instance)
def upgrade_1_0_to_1_1():
x = db.dialect.get_columns(engine.connect(), Lease.__tablename__)
x = next(_ for _ in x if _['name'] == 'origin_ref')
if x['primary_key'] > 0:
print('Found old database schema with "origin_ref" as primary-key in "lease" table. Dropping table!')
print(' Your leases are recreated on next renewal!')
print(' If an error message appears on the client, you can ignore it.')
Lease.__table__.drop(bind=engine)
init(engine)
# SITE_KEY_XID
if site_key := env('SITE_KEY_XID', None) is not None:
site.site_key = str(site_key)
# def upgrade_1_2_to_1_3():
# x = db.dialect.get_columns(engine.connect(), Lease.__tablename__)
# x = next((_ for _ in x if _['name'] == 'scope_ref'), None)
# if x is None:
# Lease.scope_ref.compile()
# column_name = Lease.scope_ref.name
# column_type = Lease.scope_ref.type.compile(engine.dialect)
# engine.execute(f'ALTER TABLE "{Lease.__tablename__}" ADD COLUMN "{column_name}" {column_type}')
# INSTANCE_REF
if instance_ref := env('INSTANCE_REF', None) is not None:
instance.instance_ref = str(instance_ref)
# ALLOTMENT_REF
if allotment_ref := env('ALLOTMENT_REF', None) is not None:
pass # todo
# INSTANCE_KEY_RSA, INSTANCE_KEY_PUB
default_instance_private_key_path = str(join(dirname(__file__), 'cert/instance.private.pem'))
instance_private_key = env('INSTANCE_KEY_RSA', None)
if instance_private_key is not None:
instance.private_key = load_key(str(instance_private_key))
elif isfile(default_instance_private_key_path):
instance.private_key = load_key(default_instance_private_key_path)
default_instance_public_key_path = str(join(dirname(__file__), 'cert/instance.public.pem'))
instance_public_key = env('INSTANCE_KEY_PUB', None)
if instance_public_key is not None:
instance.public_key = load_key(str(instance_public_key))
elif isfile(default_instance_public_key_path):
instance.public_key = load_key(default_instance_public_key_path)
# TOKEN_EXPIRE_DELTA
token_expire_delta = env('TOKEN_EXPIRE_DAYS', None)
if token_expire_delta not in (None, 0):
instance.token_expire_delta = token_expire_delta * 86_400
token_expire_delta = env('TOKEN_EXPIRE_HOURS', None)
if token_expire_delta not in (None, 0):
instance.token_expire_delta = token_expire_delta * 3_600
# LEASE_EXPIRE_DELTA, LEASE_RENEWAL_DELTA
lease_expire_delta = env('LEASE_EXPIRE_DAYS', None)
if lease_expire_delta not in (None, 0):
instance.lease_expire_delta = lease_expire_delta * 86_400
lease_expire_delta = env('LEASE_EXPIRE_HOURS', None)
if lease_expire_delta not in (None, 0):
instance.lease_expire_delta = lease_expire_delta * 3_600
# LEASE_RENEWAL_PERIOD
lease_renewal_period = env('LEASE_RENEWAL_PERIOD', None)
if lease_renewal_period is not None:
instance.lease_renewal_period = lease_renewal_period
# todo: update site, instance
upgrade_1_x_to_2_0()
upgrade_1_0_to_1_1()
# upgrade_1_2_to_1_3()

View File

@@ -25,18 +25,6 @@ def load_key(filename: str) -> "RsaKey":
return RSA.import_key(extern_key=load_file(filename), passphrase=None)
def parse_key(content: bytes) -> "RsaKey":
try:
# Crypto | Cryptodome on Debian
from Crypto.PublicKey import RSA
from Crypto.PublicKey.RSA import RsaKey
except ModuleNotFoundError:
from Cryptodome.PublicKey import RSA
from Cryptodome.PublicKey.RSA import RsaKey
return RSA.import_key(extern_key=content, passphrase=None)
def generate_key() -> "RsaKey":
try:
# Crypto | Cryptodome on Debian

View File

@@ -1,8 +1,8 @@
fastapi==0.111.0
uvicorn[standard]==0.29.0
fastapi==0.115.5
uvicorn[standard]==0.32.0
python-jose==3.3.0
pycryptodome==3.20.0
pycryptodome==3.21.0
python-dateutil==2.8.2
sqlalchemy==2.0.30
markdown==3.6
sqlalchemy==2.0.36
markdown==3.7
python-dotenv==1.0.1

View File

@@ -4,7 +4,7 @@ logging.basicConfig()
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)
URL = 'https://docs.nvidia.com/grid/'
URL = 'https://docs.nvidia.com/vgpu/index.html'
BRANCH_STATUS_KEY, SOFTWARE_BRANCH_KEY, = 'vGPU Branch Status', 'vGPU Software Branch'
VGPU_KEY, GRID_KEY, DRIVER_BRANCH_KEY = 'vGPU Software', 'vGPU Software', 'Driver Branch'
@@ -25,15 +25,15 @@ def __driver_versions(html: 'BeautifulSoup'):
return _
# find wrapper for "DriverVersions" and find tables
data = html.find('div', {'id': 'DriverVersions'})
tables = data.findAll('table')
for table in tables:
# parse software-branch (e.g. "vGPU software 17 Releases" and remove " Releases" for "matrix_key")
software_branch = table.parent.find_previous_sibling('button', {'class': 'accordion'}).text.strip()
data = html.find('div', {'id': 'driver-versions'})
items = data.findAll('bsp-accordion', {'class': 'Accordion-items-item'})
for item in items:
software_branch = item.find('div', {'class': 'Accordion-items-item-title'}).text.strip()
software_branch = software_branch.replace(' Releases', '')
matrix_key = software_branch.lower()
# driver version info from table-heads (ths) and table-rows (trs)
table = item.find('table')
ths, trs = table.find_all('th'), table.find_all('tr')
headers, releases = [header.text.strip() for header in ths], []
for trs in trs:
@@ -50,7 +50,7 @@ def __driver_versions(html: 'BeautifulSoup'):
def __release_branches(html: 'BeautifulSoup'):
# find wrapper for "AllReleaseBranches" and find table
data = html.find('div', {'id': 'AllReleaseBranches'})
data = html.find('div', {'id': 'all-release-branches'})
table = data.find('table')
# branch releases info from table-heads (ths) and table-rows (trs)

View File

@@ -1,39 +1,36 @@
from os import getenv as env
import sys
from base64 import b64encode as b64enc
from hashlib import sha256
from calendar import timegm
from datetime import datetime
from uuid import UUID, uuid4
from hashlib import sha256
from os.path import dirname, join
from uuid import uuid4, UUID
from dateutil.relativedelta import relativedelta
from jose import jwt
from jose import jwt, jwk
from jose.constants import ALGORITHMS
from starlette.testclient import TestClient
from sqlalchemy import create_engine
import sys
# add relative path to use packages as they were in the app/ dir
sys.path.append('../')
sys.path.append('../app')
from app import main
from app.orm import init as db_init, migrate, Site, Instance
from app.util import load_key
# main.app.add_middleware(PatchMalformedJsonMiddleware, enabled=True)
client = TestClient(main.app)
ORIGIN_REF, ALLOTMENT_REF, SECRET = str(uuid4()), '20000000-0000-0000-0000-000000000001', 'HelloWorld'
# fastapi setup
client = TestClient(main.app)
# INSTANCE_KEY_RSA = generate_key()
# INSTANCE_KEY_PUB = INSTANCE_KEY_RSA.public_key()
# database setup
db = create_engine(str(env('DATABASE', 'sqlite:///db.sqlite')))
db_init(db), migrate(db)
INSTANCE_KEY_RSA = load_key(str(join(dirname(__file__), '../app/cert/instance.private.pem')))
INSTANCE_KEY_PUB = load_key(str(join(dirname(__file__), '../app/cert/instance.public.pem')))
# test vars
DEFAULT_SITE, DEFAULT_INSTANCE = Site.get_default_site(db), Instance.get_default_instance(db)
SITE_KEY = DEFAULT_SITE.site_key
jwt_encode_key, jwt_decode_key = DEFAULT_INSTANCE.get_jwt_encode_key(), DEFAULT_INSTANCE.get_jwt_decode_key()
jwt_encode_key = jwk.construct(INSTANCE_KEY_RSA.export_key().decode('utf-8'), algorithm=ALGORITHMS.RS256)
jwt_decode_key = jwk.construct(INSTANCE_KEY_PUB.export_key().decode('utf-8'), algorithm=ALGORITHMS.RS256)
def __bearer_token(origin_ref: str) -> str:
@@ -42,12 +39,6 @@ def __bearer_token(origin_ref: str) -> str:
return token
def test_initial_default_site_and_instance():
default_site, default_instance = Site.get_default_site(db), Instance.get_default_instance(db)
assert default_site.site_key == Site.INITIAL_SITE_KEY_XID
assert default_instance.instance_ref == Instance.DEFAULT_INSTANCE_REF
def test_index():
response = client.get('/')
assert response.status_code == 200
@@ -116,6 +107,15 @@ def test_auth_v1_origin():
assert response.json().get('origin_ref') == ORIGIN_REF
def test_auth_v1_origin_malformed_json(): # see oscar.krause/fastapi-dls#1
from middleware import PatchMalformedJsonMiddleware
# test regex (temporary, until this section is merged into main.py
s = '{"environment": {"fingerprint": {"mac_address_list": [ff:ff:ff:ff:ff:ff"]}}'
replaced = PatchMalformedJsonMiddleware.fix_json(s)
assert replaced == '{"environment": {"fingerprint": {"mac_address_list": ["ff:ff:ff:ff:ff:ff"]}}'
def auth_v1_origin_update():
payload = {
"registration_pending": False,
@@ -163,7 +163,8 @@ def test_auth_v1_token():
"kid": "00000000-0000-0000-0000-000000000000"
}
payload = {
"auth_code": jwt.encode(payload, key=jwt_encode_key, headers={'kid': payload.get('kid')}, algorithm=ALGORITHMS.RS256),
"auth_code": jwt.encode(payload, key=jwt_encode_key, headers={'kid': payload.get('kid')},
algorithm=ALGORITHMS.RS256),
"code_verifier": SECRET,
}