Mirror of https://git.collinwebdesigns.de/oscar.krause/fastapi-dls.git, synced 2024-11-23 23:41:06 +03:00.
Merge branch 'refs/heads/dev' into db
# Conflicts:
#   .gitlab-ci.yml
#   Dockerfile
#   README.md
#   app/main.py
#   app/orm.py
#   requirements.txt
Commit: b0b627a3f0
.DEBIAN/requirements-ubuntu-24.04.txt (new file, 10 lines added)
@@ -0,0 +1,10 @@
+# https://packages.ubuntu.com
+fastapi==0.101.0
+uvicorn[standard]==0.27.1
+python-jose[pycryptodome]==3.3.0
+pycryptodome==3.20.0
+python-dateutil==2.8.2
+sqlalchemy==1.4.50
+markdown==3.5.2
+python-dotenv==1.0.1
+jinja2==3.1.2
.gitlab-ci.yml

@@ -126,7 +126,7 @@ build:pacman:
      - "*.pkg.tar.zst"

 test:
-  image: python:3.11-slim-bookworm
+  image: $IMAGE
   stage: test
   interruptible: true
   rules:
@@ -141,10 +141,12 @@ test:
     DATABASE: sqlite:///../app/db.sqlite
   parallel:
     matrix:
-      - REQUIREMENTS:
+      - IMAGE: [ 'python:3.11-slim-bookworm', 'python:3.12-slim-bullseye' ]
+        REQUIREMENTS:
          - requirements.txt
          - .DEBIAN/requirements-bookworm-12.txt
          - .DEBIAN/requirements-ubuntu-23.10.txt
+          - .DEBIAN/requirements-ubuntu-24.04.txt
  before_script:
    - apt-get update && apt-get install -y python3-dev gcc
    - pip install -r $REQUIREMENTS
@@ -205,7 +207,7 @@ test:debian:

 test:ubuntu:
   extends: .test:linux
-  image: ubuntu:23.10
+  image: ubuntu:24.04

 test:archlinux:
   image: archlinux:base
@@ -295,17 +297,13 @@ gemnasium-python-dependency_scanning:

 deploy:docker:
   extends: .deploy
-  image: docker:dind
   stage: deploy
-  tags: [ docker ]
   before_script:
     - echo "Building docker image for commit $CI_COMMIT_SHA with version $CI_COMMIT_REF_NAME"
-    - docker buildx inspect
-    - docker buildx create --use
   script:
     - echo "========== GitLab-Registry =========="
     - docker login -u $CI_REGISTRY_USER -p $CI_REGISTRY_PASSWORD $CI_REGISTRY
-    - IMAGE=$CI_REGISTRY/$CI_PROJECT_PATH
+    - IMAGE=$CI_REGISTRY/$CI_PROJECT_PATH/$CI_COMMIT_REF_NAME
     - docker buildx build --progress=plain --platform $DOCKER_BUILDX_PLATFORM --build-arg VERSION=$CI_COMMIT_REF_NAME --build-arg COMMIT=$CI_COMMIT_SHA --tag $IMAGE:$CI_COMMIT_REF_NAME --push .
     - docker buildx build --progress=plain --platform $DOCKER_BUILDX_PLATFORM --build-arg VERSION=$CI_COMMIT_REF_NAME --build-arg COMMIT=$CI_COMMIT_SHA --tag $IMAGE:latest --push .
     - echo "========== Docker-Hub =========="
Dockerfile

@@ -10,7 +10,7 @@ RUN apk update \
     && apk add --no-cache --virtual build-deps gcc g++ python3-dev musl-dev pkgconfig \
     && apk add --no-cache curl postgresql postgresql-dev mariadb-dev sqlite-dev \
     && pip install --no-cache-dir --upgrade uvicorn \
-    && pip install --no-cache-dir psycopg2==2.9.6 mysqlclient==2.2.0 pysqlite3==0.5.1 \
+    && pip install --no-cache-dir psycopg2==2.9.9 mysqlclient==2.2.4 pysqlite3==0.5.2 \
     && pip install --no-cache-dir -r /tmp/requirements.txt \
     && apk del build-deps

README.md (77 lines changed)
@@ -9,15 +9,24 @@ Only the clients need a connection to this service on configured port.

 **Official Links**

-- https://git.collinwebdesigns.de/oscar.krause/fastapi-dls (Private Git)
-- https://gitea.publichub.eu/oscar.krause/fastapi-dls (Public Git)
-- https://hub.docker.com/r/collinwebdesigns/fastapi-dls (Docker-Hub `collinwebdesigns/fastapi-dls:latest`)
+* https://git.collinwebdesigns.de/oscar.krause/fastapi-dls (Private Git)
+* https://gitea.publichub.eu/oscar.krause/fastapi-dls (Public Git)
+* https://hub.docker.com/r/collinwebdesigns/fastapi-dls (Docker-Hub `collinwebdesigns/fastapi-dls:latest`)

 *All other repositories are forks! (which is no bad - just for information and bug reports)*

+[Releases & Release Notes](https://git.collinwebdesigns.de/oscar.krause/fastapi-dls/-/releases)
+
+**Further Reading**
+
+* [NVIDIA vGPU Guide](https://gitlab.com/polloloco/vgpu-proxmox) - This document serves as a guide to install NVIDIA vGPU host drivers on the latest Proxmox VE version
+* [vgpu_unlock](https://github.com/DualCoder/vgpu_unlock) - Unlock vGPU functionality for consumer-grade Nvidia GPUs.
+* [vGPU_Unlock Wiki](https://docs.google.com/document/d/1pzrWJ9h-zANCtyqRgS7Vzla0Y8Ea2-5z2HEi4X75d2Q) - Guide for `vgpu_unlock`
+* [Proxmox All-In-One Installer Script](https://wvthoog.nl/proxmox-vgpu-v3/) - Also known as `proxmox-installer.sh`
+
 ---

-[[_TOC_]]
+[TOC]

 # Setup (Service)

@@ -102,7 +111,7 @@ volumes:
   dls-db:
 ```

-## Debian/Ubuntu/macOS (manual method using `git clone` and python virtual environment)
+## Debian / Ubuntu / macOS (manual method using `git clone` and python virtual environment)

 Tested on `Debian 11 (bullseye)`, `Debian 12 (bookworm)` and `macOS Ventura (13.6)`, Ubuntu may also work.
 **Please note that setup on macOS differs from Debian based systems.**
@@ -309,7 +318,7 @@ EOF
 Now you have to run `systemctl daemon-reload`. After that you can start service
 with `systemctl start fastapi-dls.service` and enable autostart with `systemctl enable fastapi-dls.service`.

-## Debian/Ubuntu (using `dpkg`)
+## Debian / Ubuntu (using `dpkg` / `apt`)

 Packages are available here:

@@ -317,10 +326,11 @@ Packages are available here:

 Successful tested with:

-- Debian 12 (Bookworm)
+- Debian 12 (Bookworm) (EOL: tba.)
 - Ubuntu 22.10 (Kinetic Kudu) (EOL: July 20, 2023)
 - Ubuntu 23.04 (Lunar Lobster) (EOL: January 2024)
 - Ubuntu 23.10 (Mantic Minotaur) (EOL: July 2024)
+- Ubuntu 24.04 (Noble Numbat) (EOL: April 2036)

 Not working with:

@@ -416,9 +426,9 @@ After first success you have to replace `--issue` with `--renew`.
 every 4.8 hours. If network connectivity is lost, the loss of connectivity is detected during license renewal and the
 client has 19.2 hours in which to re-establish connectivity before its license expires.

-\*3 Always use `https`, since guest-drivers only support secure connections!
+\*2 Always use `https`, since guest-drivers only support secure connections!

-\*4 If you recreate instance keys you need to **recreate client-token for each guest**!
+\*3 If you recreate your instance keys you need to **recreate client-token for each guest**!

 # Setup (Client)

@@ -426,26 +436,31 @@ client has 19.2 hours in which to re-establish connectivity before its license e

 Successfully tested with this package versions:

-| vGPU Suftware | Linux vGPU Manager | Linux Driver | Windows Driver | Release Date |
-|---------------|--------------------|--------------|----------------|---------------|
-| `16.3` | `535.154.02` | `535.154.05` | `538.15` | January 2024 |
-| `16.2` | `535.129.03` | `535.129.03` | `537.70` | October 2023 |
-| `16.1` | `535.104.06` | `535.104.05` | `537.13` | August 2023 |
-| `16.0` | `535.54.06` | `535.54.03` | `536.22` | July 2023 |
-| `15.3` | `525.125.03` | `525.125.06` | `529.11` | June 2023 |
-| `15.2` | `525.105.14` | `525.105.17` | `528.89` | March 2023 |
-| `15.1` | `525.85.07` | `525.85.05` | `528.24` | January 2023 |
-| `15.0` | `525.60.12` | `525.60.13` | `527.41` | December 2022 |
-| `14.4` | `510.108.03` | `510.108.03` | `514.08` | December 2022 |
-| `14.3` | `510.108.03` | `510.108.03` | `513.91` | November 2022 |
+| vGPU Suftware | Driver Branch | Linux vGPU Manager | Linux Driver | Windows Driver | Release Date | EOL Date |
+|:-------------:|:-------------:|--------------------|--------------|----------------|--------------:|--------------:|
+| `17.2` | R550 | `550.90.05` | `550.90.07` | `552.55` | June 2024 | February 2025 |
+| `17.1` | R550 | `550.54.16` | `550.54.15` | `551.78` | March 2024 | |
+| `17.0` | R550 | `550.54.10` | `550.54.14` | `551.61` | February 2024 | |
+| `16.6` | R535 | `535.183.04` | `535.183.01` | `538.67` | June 2024 | July 2026 |
+| `16.5` | R535 | `535.161.05` | `535.161.08` | `538.46` | February 2024 | |
+| `16.4` | R535 | `535.161.05` | `535.161.07` | `538.33` | February 2024 | |
+| `16.3` | R535 | `535.154.02` | `535.154.05` | `538.15` | January 2024 | |
+| `16.2` | R535 | `535.129.03` | `535.129.03` | `537.70` | October 2023 | |
+| `16.1` | R535 | `535.104.06` | `535.104.05` | `537.13` | August 2023 | |
+| `16.0` | R535 | `535.54.06` | `535.54.03` | `536.22` | July 2023 | |
+| `15.4` | R525 | `525.147.01` | `525.147.05` | `529.19` | June 2023 | October 2023 |
+| `15.3` | R525 | `525.125.03` | `525.125.06` | `529.11` | June 2023 | |
+| `15.2` | R525 | `525.105.14` | `525.105.17` | `528.89` | March 2023 | |
+| `15.1` | R525 | `525.85.07` | `525.85.05` | `528.24` | January 2023 | |
+| `15.0` | R525 | `525.60.12` | `525.60.13` | `527.41` | December 2022 | |
+| `14.4` | R510 | `510.108.03` | `510.108.03` | `514.08` | December 2022 | February 2023 |
+| `14.3` | R510 | `510.108.03` | `510.108.03` | `513.91` | November 2022 | |

 - https://docs.nvidia.com/grid/index.html
+- https://docs.nvidia.com/grid/gpus-supported-by-vgpu.html

 *To get the latest drivers, visit Nvidia or search in Discord-Channel `GPU Unlocking` (Server-ID: `829786927829745685`) on channel `licensing` `biggerthanshit`

-
-https://archive.biggerthanshit.com/NVIDIA/ (nvidia / b1gg3rth4nsh1t)
-
 ## Linux

 Download *client-token* and place it into `/etc/nvidia/ClientConfigToken`:
@@ -579,7 +594,7 @@ Generate client token, (see [installation](#installation)).
 There are many other internal api endpoints for handling authentication and lease process.
 </details>

-# Troubleshoot
+# Troubleshoot / Debug

 **Please make sure that fastapi-dls and your guests are on the same timezone!**

@@ -735,6 +750,12 @@ Thanks to vGPU community and all who uses this project and report bugs.

 Special thanks to

-- @samicrusader who created build file for ArchLinux
-- @cyrus who wrote the section for openSUSE
-- @midi who wrote the section for unRAID
+- @samicrusader who created build file for **ArchLinux**
+- @cyrus who wrote the section for **openSUSE**
+- @midi who wrote the section for **unRAID**
+- @polloloco who wrote the *[NVIDIA vGPU Guide](https://gitlab.com/polloloco/vgpu-proxmox)*
+- @DualCoder who creates the `vgpu_unlock` functionality [vgpu_unlock](https://github.com/DualCoder/vgpu_unlock)
+- Krutav Shah who wrote the [vGPU_Unlock Wiki](https://docs.google.com/document/d/1pzrWJ9h-zANCtyqRgS7Vzla0Y8Ea2-5z2HEi4X75d2Q/)
+- Wim van 't Hoog for the [Proxmox All-In-One Installer Script](https://wvthoog.nl/proxmox-vgpu-v3/)
+
+And thanks to all people who contributed to all these libraries!
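Note on the `[[_TOC_]]` → `[TOC]` change above: the `_readme()` handler in app/main.py (see below) renders this README with Python-Markdown and its `toc` extension, which only recognizes the `[TOC]` marker by default; GitLab's `[[_TOC_]]` syntax would be passed through as literal text. A minimal sketch (not part of the commit) showing the difference:

    from markdown import markdown  # pip install markdown

    extensions = ['tables', 'fenced_code', 'md_in_html', 'nl2br', 'toc']  # same list as in app/main.py

    print(markdown('[TOC]\n\n# Setup (Service)\n\n## Linux\n', extensions=extensions))  # marker replaced by a rendered table of contents
    print(markdown('[[_TOC_]]\n\n# Setup (Service)\n', extensions=extensions))          # marker left as plain paragraph text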
app/main.py (111 lines changed)
@@ -1,50 +1,80 @@
 import logging
 from base64 import b64encode as b64enc
+from calendar import timegm
+from contextlib import asynccontextmanager
+from datetime import datetime, timedelta
 from hashlib import sha256
-from uuid import uuid4
-from os.path import join, dirname
+from json import loads as json_loads
 from os import getenv as env
+from os.path import join, dirname
+from uuid import uuid4

+from dateutil.relativedelta import relativedelta
 from dotenv import load_dotenv
 from fastapi import FastAPI
 from fastapi.requests import Request
-from json import loads as json_loads
-from datetime import datetime
-from dateutil.relativedelta import relativedelta
-from calendar import timegm
-from jose import jws, jwt, JWTError
+from jose import jws, jwk, jwt, JWTError
 from jose.constants import ALGORITHMS
-from starlette.middleware.cors import CORSMiddleware
-from starlette.responses import StreamingResponse, JSONResponse as JSONr, HTMLResponse as HTMLr, Response, RedirectResponse
 from sqlalchemy import create_engine
 from sqlalchemy.orm import sessionmaker
+from starlette.middleware.cors import CORSMiddleware
+from starlette.responses import StreamingResponse, JSONResponse as JSONr, HTMLResponse as HTMLr, Response, RedirectResponse

 from orm import init as db_init, migrate, Site, Instance, Origin, Lease

+# Load variables
 load_dotenv('../version.env')

-# get local timezone
+# Get current timezone
 TZ = datetime.now().astimezone().tzinfo

-# fetch version info
+# Load basic variables
 VERSION, COMMIT, DEBUG = env('VERSION', 'unknown'), env('COMMIT', 'unknown'), bool(env('DEBUG', False))

-# fastapi setup
-config = dict(openapi_url='/-/openapi.json', docs_url=None, redoc_url=None)
-app = FastAPI(title='FastAPI-DLS', description='Minimal Delegated License Service (DLS).', version=VERSION, **config)
-
-# database setup
+# Database connection
 db = create_engine(str(env('DATABASE', 'sqlite:///db.sqlite')))
 db_init(db), migrate(db)

-# DLS setup (static)
+# Load DLS variables (all prefixed with "INSTANCE_*" is used as "SERVICE_INSTANCE_*" or "SI_*" in official dls service)
 DLS_URL = str(env('DLS_URL', 'localhost'))
 DLS_PORT = int(env('DLS_PORT', '443'))
 CORS_ORIGINS = str(env('CORS_ORIGINS', '')).split(',') if (env('CORS_ORIGINS')) else [f'https://{DLS_URL}']

 ALLOTMENT_REF = str(env('ALLOTMENT_REF', '20000000-0000-0000-0000-000000000001'))  # todo

-# fastapi middleware
+
+# FastAPI
+@asynccontextmanager
+async def lifespan(_: FastAPI):
+    # on startup
+    default_instance = Instance.get_default_instance(db)
+
+    lease_renewal_period = default_instance.lease_renewal_period
+    lease_renewal_delta = default_instance.get_lease_renewal_delta()
+    client_token_expire_delta = default_instance.get_client_token_expire_delta()
+
+    logger.info(f'''
+    Using timezone: {str(TZ)}. Make sure this is correct and match your clients!
+
+    Your clients will renew their license every {str(Lease.calculate_renewal(lease_renewal_period, lease_renewal_delta))}.
+    If the renewal fails, the license is valid for {str(lease_renewal_delta)}.
+
+    Your client-token file (.tok) is valid for {str(client_token_expire_delta)}.
+    ''')
+
+    logger.info(f'Debug is {"enabled" if DEBUG else "disabled"}.')
+
+    validate_settings()
+
+    yield
+
+    # on shutdown
+    logger.info(f'Shutting down ...')
+
+
+config = dict(openapi_url=None, docs_url=None, redoc_url=None)  # dict(openapi_url='/-/openapi.json', docs_url='/-/docs', redoc_url='/-/redoc')
+app = FastAPI(title='FastAPI-DLS', description='Minimal Delegated License Service (DLS).', version=VERSION, lifespan=lifespan, **config)
+
 app.debug = DEBUG
 app.add_middleware(
     CORSMiddleware,
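The hunk above replaces the deprecated `@app.on_event('startup')` hook (removed in the last app/main.py hunk further down) with a `lifespan` async context manager handed to the `FastAPI(...)` constructor. A minimal, self-contained sketch of the same pattern with placeholder startup/shutdown work (not the project's actual logic):

    from contextlib import asynccontextmanager

    from fastapi import FastAPI


    @asynccontextmanager
    async def lifespan(_: FastAPI):
        # everything before `yield` runs once, before the app starts serving requests
        print('loading settings, preparing database ...')  # placeholder for validate_settings() etc.
        yield
        # everything after `yield` runs once, on shutdown
        print('shutting down ...')


    app = FastAPI(lifespan=lifespan)

The old `on_event` API needed a second decorator to express shutdown work; the generator-style `lifespan` keeps both phases in one place.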
@@ -54,10 +84,20 @@ app.add_middleware(
     allow_headers=['*'],
 )

-# logging
-logging.basicConfig()
+# Logging
+LOG_LEVEL = logging.DEBUG if DEBUG else logging.INFO
+logging.basicConfig(format='[{levelname:^7}] [{module:^15}] {message}', style='{')
 logger = logging.getLogger(__name__)
-logger.setLevel(logging.DEBUG if DEBUG else logging.INFO)
+logger.setLevel(LOG_LEVEL)
+logging.getLogger('util').setLevel(LOG_LEVEL)
+logging.getLogger('NV').setLevel(LOG_LEVEL)
+
+
+# Helper
+def __get_token(request: Request) -> dict:
+    authorization_header = request.headers.get('authorization')
+    token = authorization_header.split(' ')[1]
+    return jwt.decode(token=token, key=jwt_decode_key, algorithms=ALGORITHMS.RS256, options={'verify_aud': False})


 def validate_settings():
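The new logging setup switches the root handler to `str.format`-style placeholders (`style='{'`) and pads the level and module names to fixed widths. A small illustration of the resulting output (example values, not taken from the project):

    import logging

    logging.basicConfig(format='[{levelname:^7}] [{module:^15}] {message}', style='{')
    logger = logging.getLogger(__name__)
    logger.setLevel(logging.INFO)

    logger.info('service started')
    # prints something like: [ INFO  ] [    example    ] service started  (module name depends on the file name)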
@@ -72,11 +112,7 @@ def validate_settings():
     session.close()


-def __get_token(request: Request, jwt_decode_key: "jose.jwt") -> dict:
-    authorization_header = request.headers.get('authorization')
-    token = authorization_header.split(' ')[1]
-    return jwt.decode(token=token, key=jwt_decode_key, algorithms=ALGORITHMS.RS256, options={'verify_aud': False})
+# Endpoints


 @app.get('/', summary='Index')
 async def index():
@@ -118,8 +154,7 @@ async def _config():
 async def _readme():
     from markdown import markdown
     from util import load_file
-    content = load_file('../README.md').decode('utf-8')
+    content = load_file(join(dirname(__file__), '../README.md')).decode('utf-8')
     return HTMLr(markdown(text=content, extensions=['tables', 'fenced_code', 'md_in_html', 'nl2br', 'toc']))


@@ -595,26 +630,6 @@ async def leasing_v1_lessor_shutdown(request: Request):
     return JSONr(response)


-@app.on_event('startup')
-async def app_on_startup():
-    default_instance = Instance.get_default_instance(db)
-
-    lease_renewal_period = default_instance.lease_renewal_period
-    lease_renewal_delta = default_instance.get_lease_renewal_delta()
-    client_token_expire_delta = default_instance.get_client_token_expire_delta()
-
-    logger.info(f'''
-    Using timezone: {str(TZ)}. Make sure this is correct and match your clients!
-
-    Your clients will renew their license every {str(Lease.calculate_renewal(lease_renewal_period, lease_renewal_delta))}.
-    If the renewal fails, the license is valid for {str(lease_renewal_delta)}.
-
-    Your client-token file (.tok) is valid for {str(client_token_expire_delta)}.
-    ''')
-
-    validate_settings()
-
-
 if __name__ == '__main__':
     import uvicorn

app/orm.py

@@ -1,11 +1,12 @@
 import logging
 from datetime import datetime, timedelta
+
 from dateutil.relativedelta import relativedelta
-from sqlalchemy import Column, VARCHAR, CHAR, ForeignKey, DATETIME, update, and_, inspect, text, BLOB, INT, FLOAT
+from sqlalchemy import Column, VARCHAR, CHAR, ForeignKey, DATETIME, update, and_, inspect, text
 from sqlalchemy.engine import Engine
 from sqlalchemy.orm import sessionmaker, declarative_base, Session, relationship

-from app.util import parse_key
+from util import NV

 logging.basicConfig()
 logger = logging.getLogger(__name__)
@@ -148,6 +149,8 @@ class Origin(Base):
         return f'Origin(origin_ref={self.origin_ref}, hostname={self.hostname})'

     def serialize(self) -> dict:
+        _ = NV().find(self.guest_driver_version)
+
         return {
             'origin_ref': self.origin_ref,
             # 'service_instance_xid': self.service_instance_xid,
@@ -155,6 +158,7 @@ class Origin(Base):
             'guest_driver_version': self.guest_driver_version,
             'os_platform': self.os_platform,
             'os_version': self.os_version,
+            '$driver': _ if _ is not None else None,
         }

     @staticmethod
app/util.py (60 lines changed)
@@ -1,10 +1,17 @@
-def load_file(filename) -> bytes:
+import logging
+
+logging.basicConfig()
+
+
+def load_file(filename: str) -> bytes:
+    log = logging.getLogger(f'{__name__}')
+    log.debug(f'Loading contents of file "{filename}')
     with open(filename, 'rb') as file:
         content = file.read()
     return content


-def load_key(filename) -> "RsaKey":
+def load_key(filename: str) -> "RsaKey":
     try:
         # Crypto | Cryptodome on Debian
         from Crypto.PublicKey import RSA
@@ -13,6 +20,8 @@ def load_key(filename) -> "RsaKey":
         from Cryptodome.PublicKey import RSA
         from Cryptodome.PublicKey.RSA import RsaKey

+    log = logging.getLogger(__name__)
+    log.debug(f'Importing RSA-Key from "{filename}"')
     return RSA.import_key(extern_key=load_file(filename), passphrase=None)


@@ -36,5 +45,50 @@ def generate_key() -> "RsaKey":
     except ModuleNotFoundError:
         from Cryptodome.PublicKey import RSA
         from Cryptodome.PublicKey.RSA import RsaKey
+    log = logging.getLogger(__name__)
+    log.debug(f'Generating RSA-Key')
     return RSA.generate(bits=2048)

+
+class NV:
+    __DRIVER_MATRIX_FILENAME = 'static/driver_matrix.json'
+    __DRIVER_MATRIX: None | dict = None  # https://docs.nvidia.com/grid/ => "Driver Versions"
+
+    def __init__(self):
+        self.log = logging.getLogger(self.__class__.__name__)
+
+        if NV.__DRIVER_MATRIX is None:
+            from json import load as json_load
+            try:
+                file = open(NV.__DRIVER_MATRIX_FILENAME)
+                NV.__DRIVER_MATRIX = json_load(file)
+                file.close()
+                self.log.debug(f'Successfully loaded "{NV.__DRIVER_MATRIX_FILENAME}".')
+            except Exception as e:
+                NV.__DRIVER_MATRIX = {}  # init empty dict to not try open file everytime, just when restarting app
+                # self.log.warning(f'Failed to load "{NV.__DRIVER_MATRIX_FILENAME}": {e}')
+
+    @staticmethod
+    def find(version: str) -> dict | None:
+        if NV.__DRIVER_MATRIX is None:
+            return None
+        for idx, (key, branch) in enumerate(NV.__DRIVER_MATRIX.items()):
+            for release in branch.get('$releases'):
+                linux_driver = release.get('Linux Driver')
+                windows_driver = release.get('Windows Driver')
+                if version == linux_driver or version == windows_driver:
+                    tmp = branch.copy()
+                    tmp.pop('$releases')
+
+                    is_latest = release.get('vGPU Software') == branch.get('Latest Release in Branch')
+
+                    return {
+                        'software_branch': branch.get('vGPU Software Branch'),
+                        'branch_version': release.get('vGPU Software'),
+                        'driver_branch': branch.get('Driver Branch'),
+                        'branch_status': branch.get('vGPU Branch Status'),
+                        'release_date': release.get('Release Date'),
+                        'eol': branch.get('EOL Date') if is_latest else None,
+                        'is_latest': is_latest,
+                    }
+        return None
requirements.txt

@@ -1,8 +1,8 @@
-fastapi==0.110.0
-uvicorn[standard]==0.27.1
+fastapi==0.111.0
+uvicorn[standard]==0.29.0
 python-jose==3.3.0
 pycryptodome==3.20.0
 python-dateutil==2.8.2
-sqlalchemy==2.0.27
-markdown==3.5.2
+sqlalchemy==2.0.30
+markdown==3.6
 python-dotenv==1.0.1
test/create_driver_matrix_json.py (new file, 137 lines added)
@@ -0,0 +1,137 @@
+import logging
+
+logging.basicConfig()
+logger = logging.getLogger(__name__)
+logger.setLevel(logging.INFO)
+
+URL = 'https://docs.nvidia.com/grid/'
+
+BRANCH_STATUS_KEY, SOFTWARE_BRANCH_KEY, = 'vGPU Branch Status', 'vGPU Software Branch'
+VGPU_KEY, GRID_KEY, DRIVER_BRANCH_KEY = 'vGPU Software', 'vGPU Software', 'Driver Branch'
+LINUX_VGPU_MANAGER_KEY, LINUX_DRIVER_KEY = 'Linux vGPU Manager', 'Linux Driver'
+WINDOWS_VGPU_MANAGER_KEY, WINDOWS_DRIVER_KEY = 'Windows vGPU Manager', 'Windows Driver'
+ALT_VGPU_MANAGER_KEY = 'vGPU Manager'
+RELEASE_DATE_KEY, LATEST_KEY, EOL_KEY = 'Release Date', 'Latest Release in Branch', 'EOL Date'
+JSON_RELEASES_KEY = '$releases'
+
+
+def __driver_versions(html: 'BeautifulSoup'):
+    def __strip(_: str) -> str:
+        # removes content after linebreak (e.g. "Hello\n World" to "Hello")
+        _ = _.strip()
+        tmp = _.split('\n')
+        if len(tmp) > 0:
+            return tmp[0]
+        return _
+
+    # find wrapper for "DriverVersions" and find tables
+    data = html.find('div', {'id': 'DriverVersions'})
+    tables = data.findAll('table')
+    for table in tables:
+        # parse software-branch (e.g. "vGPU software 17 Releases" and remove " Releases" for "matrix_key")
+        software_branch = table.parent.find_previous_sibling('button', {'class': 'accordion'}).text.strip()
+        software_branch = software_branch.replace(' Releases', '')
+        matrix_key = software_branch.lower()
+
+        # driver version info from table-heads (ths) and table-rows (trs)
+        ths, trs = table.find_all('th'), table.find_all('tr')
+        headers, releases = [header.text.strip() for header in ths], []
+        for trs in trs:
+            tds = trs.find_all('td')
+            if len(tds) == 0:  # skip empty
+                continue
+            # create dict with table-heads as key and cell content as value
+            x = {headers[i]: __strip(cell.text) for i, cell in enumerate(tds)}
+            releases.append(x)
+
+        # add to matrix
+        MATRIX.update({matrix_key: {JSON_RELEASES_KEY: releases}})
+
+
+def __release_branches(html: 'BeautifulSoup'):
+    # find wrapper for "AllReleaseBranches" and find table
+    data = html.find('div', {'id': 'AllReleaseBranches'})
+    table = data.find('table')
+
+    # branch releases info from table-heads (ths) and table-rows (trs)
+    ths, trs = table.find_all('th'), table.find_all('tr')
+    headers = [header.text.strip() for header in ths]
+    for trs in trs:
+        tds = trs.find_all('td')
+        if len(tds) == 0:  # skip empty
+            continue
+        # create dict with table-heads as key and cell content as value
+        x = {headers[i]: cell.text.strip() for i, cell in enumerate(tds)}
+
+        # get matrix_key
+        software_branch = x.get(SOFTWARE_BRANCH_KEY)
+        matrix_key = software_branch.lower()
+
+        # add to matrix
+        MATRIX.update({matrix_key: MATRIX.get(matrix_key) | x})
+
+
+def __debug():
+    # print table head
+    s = f'{SOFTWARE_BRANCH_KEY:^21} | {BRANCH_STATUS_KEY:^21} | {VGPU_KEY:^13} | {LINUX_VGPU_MANAGER_KEY:^21} | {LINUX_DRIVER_KEY:^21} | {WINDOWS_VGPU_MANAGER_KEY:^21} | {WINDOWS_DRIVER_KEY:^21} | {RELEASE_DATE_KEY:>21} | {EOL_KEY:>21}'
+    print(s)
+
+    # iterate over dict & format some variables to not overload table
+    for idx, (key, branch) in enumerate(MATRIX.items()):
+        branch_status = branch.get(BRANCH_STATUS_KEY)
+        branch_status = branch_status.replace('Branch ', '')
+        branch_status = branch_status.replace('Long-Term Support', 'LTS')
+        branch_status = branch_status.replace('Production', 'Prod.')
+
+        software_branch = branch.get(SOFTWARE_BRANCH_KEY).replace('NVIDIA ', '')
+        for release in branch.get(JSON_RELEASES_KEY):
+            version = release.get(VGPU_KEY, release.get(GRID_KEY, ''))
+            linux_manager = release.get(LINUX_VGPU_MANAGER_KEY, release.get(ALT_VGPU_MANAGER_KEY, ''))
+            linux_driver = release.get(LINUX_DRIVER_KEY)
+            windows_manager = release.get(WINDOWS_VGPU_MANAGER_KEY, release.get(ALT_VGPU_MANAGER_KEY, ''))
+            windows_driver = release.get(WINDOWS_DRIVER_KEY)
+            release_date = release.get(RELEASE_DATE_KEY)
+            is_latest = release.get(VGPU_KEY) == branch.get(LATEST_KEY)
+
+            version = f'{version} *' if is_latest else version
+            eol = branch.get(EOL_KEY) if is_latest else ''
+            s = f'{software_branch:^21} | {branch_status:^21} | {version:<13} | {linux_manager:<21} | {linux_driver:<21} | {windows_manager:<21} | {windows_driver:<21} | {release_date:>21} | {eol:>21}'
+            print(s)
+
+
+def __dump(filename: str):
+    import json
+
+    file = open(filename, 'w')
+    json.dump(MATRIX, file)
+    file.close()
+
+
+if __name__ == '__main__':
+    MATRIX = {}
+
+    try:
+        import httpx
+        from bs4 import BeautifulSoup
+    except Exception as e:
+        logger.error(f'Failed to import module: {e}')
+        logger.info('Run "pip install beautifulsoup4 httpx"')
+        exit(1)
+
+    r = httpx.get(URL)
+    if r.status_code != 200:
+        logger.error(f'Error loading "{URL}" with status code {r.status_code}.')
+        exit(2)
+
+    # parse html
+    soup = BeautifulSoup(r.text, features='html.parser')
+
+    # build matrix
+    __driver_versions(soup)
+    __release_branches(soup)
+
+    # debug output
+    __debug()
+
+    # dump data to file
+    __dump('../app/static/driver_matrix.json')
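A small, hypothetical sanity check that could be run after the generator has written its output (same path and keys as the script above):

    import json

    # inspect the freshly generated matrix before shipping it with the app
    with open('../app/static/driver_matrix.json') as file:
        matrix = json.load(file)

    for key, branch in matrix.items():
        releases = branch.get('$releases', [])
        print(f"{key}: driver branch {branch.get('Driver Branch')}, {len(releases)} releases")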