diff --git a/.github/workflows/dev.yml b/.github/workflows/dev.yml index 285418e0..561c2006 100644 --- a/.github/workflows/dev.yml +++ b/.github/workflows/dev.yml @@ -1,6 +1,9 @@ # This is a basic workflow to help you get started with Actions name: Build +permissions: + contents: read + packages: write # Controls when the action will run. Triggers the workflow on push or pull request # events but only for the master branch @@ -68,6 +71,22 @@ jobs: poetry run coverage run --branch -m pytest -s -v poetry run coverage xml + - uses: actions/setup-python@v6 + with: + python-version: "3.14" + - name: Test coverage for CSIT OAS Validation API + run: | + export PATH=/root/.local/bin:$PATH + sudo apt install -y nodejs npm + sudo npm install -g @stoplight/spectral-cli@6.14.2 + cd microservices/csitOasValidationApi + poetry env use python3.14 + poetry install --no-root + ./checkout-ruleset-tags.sh ruleset_tag_cache || true + export GITHUB_TAG_CACHE_PATH="$(realpath -m ./ruleset_tag_cache)" + poetry run coverage run --branch -m pytest -s -v + poetry run coverage xml + - name: SonarCloud Scan uses: sonarsource/sonarqube-scan-action@master env: @@ -93,19 +112,31 @@ jobs: - name: Skip message if: steps.check.outputs.build_needed == 'false' run: echo "No changes in gatewayApi, skipping build" - - uses: docker/build-push-action@v1 + - name: Sanitize tag name + if: steps.check.outputs.build_needed == 'true' + id: tag + run: echo "tag=$(echo '${{ github.ref_name }}' | tr '/' '-')" >> $GITHUB_OUTPUT + - name: Set up Docker Buildx + if: steps.check.outputs.build_needed == 'true' + uses: docker/setup-buildx-action@v3 + - name: Log in to GitHub Container Registry if: steps.check.outputs.build_needed == 'true' + uses: docker/login-action@v3 with: - registry: docker.pkg.github.com - username: $GITHUB_ACTOR + registry: ghcr.io + username: ${{ github.actor }} password: ${{ secrets.GITHUB_TOKEN }} - repository: bcgov/gwa-api/gwa-gateway-api - path: microservices/gatewayApi - dockerfile: 
microservices/gatewayApi/Dockerfile - tag_with_ref: true - tag_with_sha: false - add_git_labels: true + logout: false + - uses: docker/build-push-action@v5 + if: steps.check.outputs.build_needed == 'true' + with: + context: microservices/gatewayApi + file: microservices/gatewayApi/Dockerfile push: true + tags: ghcr.io/bcgov/gwa-api/gwa-gateway-api:${{ steps.tag.outputs.tag }} + labels: | + org.opencontainers.image.source=${{ github.server_url }}/${{ github.repository }} + org.opencontainers.image.revision=${{ github.sha }} gwa-scheduler: needs: sonar-scan @@ -126,19 +157,31 @@ jobs: - name: Skip message if: steps.check.outputs.build_needed == 'false' run: echo "No changes in gatewayJobScheduler, skipping build" - - uses: docker/build-push-action@v1 + - name: Sanitize tag name + if: steps.check.outputs.build_needed == 'true' + id: tag + run: echo "tag=$(echo '${{ github.ref_name }}' | tr '/' '-')" >> $GITHUB_OUTPUT + - name: Set up Docker Buildx if: steps.check.outputs.build_needed == 'true' + uses: docker/setup-buildx-action@v3 + - name: Log in to GitHub Container Registry + if: steps.check.outputs.build_needed == 'true' + uses: docker/login-action@v3 with: - registry: docker.pkg.github.com - username: $GITHUB_ACTOR + registry: ghcr.io + username: ${{ github.actor }} password: ${{ secrets.GITHUB_TOKEN }} - repository: bcgov/gwa-api/gwa-scheduler - path: microservices/gatewayJobScheduler - dockerfile: microservices/gatewayJobScheduler/Dockerfile - tag_with_ref: true - tag_with_sha: false - add_git_labels: true + logout: false + - uses: docker/build-push-action@v5 + if: steps.check.outputs.build_needed == 'true' + with: + context: microservices/gatewayJobScheduler + file: microservices/gatewayJobScheduler/Dockerfile push: true + tags: ghcr.io/bcgov/gwa-api/gwa-scheduler:${{ steps.tag.outputs.tag }} + labels: | + org.opencontainers.image.source=${{ github.server_url }}/${{ github.repository }} + org.opencontainers.image.revision=${{ github.sha }} gwa-kube-api: needs: 
sonar-scan @@ -159,19 +202,31 @@ jobs: - name: Skip message if: steps.check.outputs.build_needed == 'false' run: echo "No changes in kubeApi, skipping build" - - uses: docker/build-push-action@v1 + - name: Sanitize tag name if: steps.check.outputs.build_needed == 'true' + id: tag + run: echo "tag=$(echo '${{ github.ref_name }}' | tr '/' '-')" >> $GITHUB_OUTPUT + - name: Set up Docker Buildx + if: steps.check.outputs.build_needed == 'true' + uses: docker/setup-buildx-action@v3 + - name: Log in to GitHub Container Registry + if: steps.check.outputs.build_needed == 'true' + uses: docker/login-action@v3 with: - registry: docker.pkg.github.com - username: $GITHUB_ACTOR + registry: ghcr.io + username: ${{ github.actor }} password: ${{ secrets.GITHUB_TOKEN }} - repository: bcgov/gwa-api/gwa-kube-api - path: microservices/kubeApi - dockerfile: microservices/kubeApi/Dockerfile - tag_with_ref: true - tag_with_sha: false - add_git_labels: true + logout: false + - uses: docker/build-push-action@v5 + if: steps.check.outputs.build_needed == 'true' + with: + context: microservices/kubeApi + file: microservices/kubeApi/Dockerfile push: true + tags: ghcr.io/bcgov/gwa-api/gwa-kube-api:${{ steps.tag.outputs.tag }} + labels: | + org.opencontainers.image.source=${{ github.server_url }}/${{ github.repository }} + org.opencontainers.image.revision=${{ github.sha }} gwa-compatibility-api: needs: sonar-scan @@ -192,16 +247,73 @@ jobs: - name: Skip message if: steps.check.outputs.build_needed == 'false' run: echo "No changes in compatibilityApi, skipping build" - - uses: docker/build-push-action@v1 + - name: Sanitize tag name + if: steps.check.outputs.build_needed == 'true' + id: tag + run: echo "tag=$(echo '${{ github.ref_name }}' | tr '/' '-')" >> $GITHUB_OUTPUT + - name: Set up Docker Buildx + if: steps.check.outputs.build_needed == 'true' + uses: docker/setup-buildx-action@v3 + - name: Log in to GitHub Container Registry if: steps.check.outputs.build_needed == 'true' + uses: 
docker/login-action@v3 with: - registry: docker.pkg.github.com - username: $GITHUB_ACTOR + registry: ghcr.io + username: ${{ github.actor }} password: ${{ secrets.GITHUB_TOKEN }} - repository: bcgov/gwa-api/gwa-compatibility-api - path: microservices/compatibilityApi - dockerfile: microservices/compatibilityApi/Dockerfile - tag_with_ref: true - tag_with_sha: false - add_git_labels: true + logout: false + - uses: docker/build-push-action@v5 + if: steps.check.outputs.build_needed == 'true' + with: + context: microservices/compatibilityApi + file: microservices/compatibilityApi/Dockerfile + push: true + tags: ghcr.io/bcgov/gwa-api/gwa-compatibility-api:${{ steps.tag.outputs.tag }} + labels: | + org.opencontainers.image.source=${{ github.server_url }}/${{ github.repository }} + org.opencontainers.image.revision=${{ github.sha }} + + gwa-csit-oas-validation-api: + needs: sonar-scan + name: Docker Image for gwa-csit-oas-validation-api + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + with: + fetch-depth: 2 + - name: Check if build needed + id: check + run: | + if git diff --name-only HEAD^ HEAD | grep -q "^microservices/csitOasValidationApi/\|^.github/workflows/dev.yml"; then + echo "build_needed=true" >> $GITHUB_OUTPUT + else + echo "build_needed=false" >> $GITHUB_OUTPUT + fi + - name: Skip message + if: steps.check.outputs.build_needed == 'false' + run: echo "No changes in csitOasValidationApi, skipping build" + - name: Sanitize tag name + if: steps.check.outputs.build_needed == 'true' + id: tag + run: echo "tag=$(echo '${{ github.ref_name }}' | tr '/' '-')" >> $GITHUB_OUTPUT + - name: Set up Docker Buildx + if: steps.check.outputs.build_needed == 'true' + uses: docker/setup-buildx-action@v3 + - name: Log in to GitHub Container Registry + if: steps.check.outputs.build_needed == 'true' + uses: docker/login-action@v3 + with: + registry: ghcr.io + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + logout: false + - uses: 
docker/build-push-action@v5 + if: steps.check.outputs.build_needed == 'true' + with: + context: microservices/csitOasValidationApi + file: microservices/csitOasValidationApi/Dockerfile push: true + tags: ghcr.io/bcgov/gwa-api/gwa-csit-oas-validation-api:${{ steps.tag.outputs.tag }} + labels: | + org.opencontainers.image.source=${{ github.server_url }}/${{ github.repository }} + org.opencontainers.image.revision=${{ github.sha }} diff --git a/.github/workflows/master.yml b/.github/workflows/master.yml index a21a13ab..51a3f7fc 100644 --- a/.github/workflows/master.yml +++ b/.github/workflows/master.yml @@ -65,6 +65,22 @@ jobs: poetry run coverage run --branch -m pytest -s -v poetry run coverage xml + - uses: actions/setup-python@v6 + with: + python-version: "3.14" + - name: Test coverage for CSIT OAS Validation API + run: | + export PATH=/root/.local/bin:$PATH + sudo apt install -y nodejs npm + sudo npm install -g @stoplight/spectral-cli@6.14.2 + cd microservices/csitOasValidationApi + poetry env use python3.14 + poetry install --no-root + ./checkout-ruleset-tags.sh ruleset_tag_cache || true + export GITHUB_TAG_CACHE_PATH="$(realpath -m ./ruleset_tag_cache)" + poetry run coverage run --branch -m pytest -s -v + poetry run coverage xml + - name: SonarCloud Scan uses: sonarsource/sonarcloud-github-action@master env: @@ -81,54 +97,66 @@ jobs: echo "Got tag name ${{ steps.release.outputs.tag_name }}" echo "Got release version ${{ steps.release.outputs.version }}" - - name: Create gwa-api docker image related to the release - uses: docker/build-push-action@v1 + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + - name: Log in to GitHub Container Registry + uses: docker/login-action@v3 with: - registry: docker.pkg.github.com - username: $GITHUB_ACTOR + registry: ghcr.io + username: ${{ github.actor }} password: ${{ secrets.GITHUB_TOKEN }} - repository: bcgov/gwa-api/gwa-gateway-api - path: microservices/gatewayApi - dockerfile: 
microservices/gatewayApi/Dockerfile + logout: false + - name: Create gwa-api docker image related to the release + uses: docker/build-push-action@v5 + with: + context: microservices/gatewayApi + file: microservices/gatewayApi/Dockerfile push: true - tags: ${{ steps.release.outputs.tag_name }} - tag_with_sha: false + tags: ghcr.io/bcgov/gwa-api/gwa-gateway-api:${{ steps.release.outputs.tag_name }} + labels: | + org.opencontainers.image.source=${{ github.server_url }}/${{ github.repository }} + org.opencontainers.image.revision=${{ github.sha }} - name: Create gwa-kube-api docker image related to the release - uses: docker/build-push-action@v1 + uses: docker/build-push-action@v5 with: - registry: docker.pkg.github.com - username: $GITHUB_ACTOR - password: ${{ secrets.GITHUB_TOKEN }} - repository: bcgov/gwa-api/gwa-kube-api - path: microservices/kubeApi - dockerfile: microservices/kubeApi/Dockerfile + context: microservices/kubeApi + file: microservices/kubeApi/Dockerfile push: true - tags: ${{ steps.release.outputs.tag_name }} - tag_with_sha: false + tags: ghcr.io/bcgov/gwa-api/gwa-kube-api:${{ steps.release.outputs.tag_name }} + labels: | + org.opencontainers.image.source=${{ github.server_url }}/${{ github.repository }} + org.opencontainers.image.revision=${{ github.sha }} - name: Create gwa-scheduler docker image related to the release - uses: docker/build-push-action@v1 + uses: docker/build-push-action@v5 with: - registry: docker.pkg.github.com - username: $GITHUB_ACTOR - password: ${{ secrets.GITHUB_TOKEN }} - repository: bcgov/gwa-api/gwa-scheduler - path: microservices/gatewayJobScheduler - dockerfile: microservices/gatewayJobScheduler/Dockerfile + context: microservices/gatewayJobScheduler + file: microservices/gatewayJobScheduler/Dockerfile push: true - tags: ${{ steps.release.outputs.tag_name }} - tag_with_sha: false + tags: ghcr.io/bcgov/gwa-api/gwa-scheduler:${{ steps.release.outputs.tag_name }} + labels: | + org.opencontainers.image.source=${{ 
github.server_url }}/${{ github.repository }} + org.opencontainers.image.revision=${{ github.sha }} - name: Create gwa-compatibility-api docker image related to the release - uses: docker/build-push-action@v1 + uses: docker/build-push-action@v5 with: - registry: docker.pkg.github.com - username: $GITHUB_ACTOR - password: ${{ secrets.GITHUB_TOKEN }} - repository: bcgov/gwa-api/gwa-compatibility-api - path: microservices/compatibilityApi - dockerfile: microservices/compatibilityApi/Dockerfile + context: microservices/compatibilityApi + file: microservices/compatibilityApi/Dockerfile + push: true + tags: ghcr.io/bcgov/gwa-api/gwa-compatibility-api:${{ steps.release.outputs.tag_name }} + labels: | + org.opencontainers.image.source=${{ github.server_url }}/${{ github.repository }} + org.opencontainers.image.revision=${{ github.sha }} + + - name: Create gwa-csit-oas-validation-api docker image related to the release + uses: docker/build-push-action@v5 + with: + context: microservices/csitOasValidationApi + file: microservices/csitOasValidationApi/Dockerfile push: true - tags: ${{ steps.release.outputs.tag_name }} - tag_with_sha: false \ No newline at end of file + tags: ghcr.io/bcgov/gwa-api/gwa-csit-oas-validation-api:${{ steps.release.outputs.tag_name }} + labels: | + org.opencontainers.image.source=${{ github.server_url }}/${{ github.repository }} + org.opencontainers.image.revision=${{ github.sha }} \ No newline at end of file diff --git a/.python-version b/.python-version new file mode 100644 index 00000000..f982feb4 --- /dev/null +++ b/.python-version @@ -0,0 +1 @@ +3.14.0 diff --git a/microservices/csitOasValidationApi/.gitignore b/microservices/csitOasValidationApi/.gitignore new file mode 100644 index 00000000..e1f32efd --- /dev/null +++ b/microservices/csitOasValidationApi/.gitignore @@ -0,0 +1,139 @@ +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +build/ 
+develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. +*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +*.py,cover +.hypothesis/ +.pytest_cache/ +cover/ +ruleset_tag_cache/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 +db.sqlite3-journal + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +.pybuilder/ +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# IPython +profile_default/ +ipython_config.py + +# pyenv +# For a library or package, you might want to ignore these files since the code is +# intended to run in multiple environments; otherwise, check them in: +.python-version + +# pipenv +# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. +# However, in case of collaboration, if having platform-specific dependencies or dependencies +# having no cross-platform support, pipenv may install dependencies that don't work, or not +# install all needed dependencies. +#Pipfile.lock + +# PEP 582; used by e.g. 
github.com/David-OConnor/pyflow +__pypackages__/ + +# Celery stuff +celerybeat-schedule +celerybeat.pid + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ + +# pytype static type analyzer +.pytype/ + +# Cython debug symbols +cython_debug/ diff --git a/microservices/csitOasValidationApi/Dockerfile b/microservices/csitOasValidationApi/Dockerfile new file mode 100644 index 00000000..40550425 --- /dev/null +++ b/microservices/csitOasValidationApi/Dockerfile @@ -0,0 +1,66 @@ +# BUILD STAGE +FROM python:3.14.2-alpine3.23 AS builder + +WORKDIR /build + +# Install build dependencies +RUN apk add --no-cache build-base libffi-dev openssl curl git bash nodejs npm + +RUN python -m pip install --upgrade pip + +# Install Spectral CLI (required for validation) +RUN npm install -g @stoplight/spectral-cli@6.14.2 + +# Install Poetry +RUN cd /tmp && \ + curl -sSL https://install.python-poetry.org > get-poetry.py && \ + POETRY_HOME=/opt/poetry python get-poetry.py --version 1.8.2 && \ + cd /usr/local/bin && \ + ln -s /opt/poetry/bin/poetry && \ + poetry config virtualenvs.create false + +# Install Python dependencies +COPY pyproject.toml /tmp/ +COPY poetry.lock /tmp/ +RUN cd /tmp && poetry install --no-root --no-dev + +# Retrieve and cache the ruleset +COPY checkout-ruleset-tags.sh /build/ +RUN chmod +x /build/checkout-ruleset-tags.sh && \ + mkdir -p /build/ruleset_tag_cache && \ + /build/checkout-ruleset-tags.sh /build/ruleset_tag_cache + +# RUNTIME STAGE +FROM python:3.14.2-alpine3.23 + +WORKDIR /app + +# Install only runtime dependencies +# nodejs is needed for Spectral CLI to run +RUN apk add --no-cache libffi openssl nodejs + +# Copy Python packages from builder +COPY --from=builder 
/usr/local/lib/python3.14/site-packages /usr/local/lib/python3.14/site-packages + +# Copy Python entry point scripts (uvicorn, etc.) but exclude build tools +# First, copy all scripts, then we'll remove poetry if it exists +COPY --from=builder /usr/local/bin /usr/local/bin +RUN rm -f /usr/local/bin/poetry 2>/dev/null || true + +# Copy Spectral CLI from builder +COPY --from=builder /usr/local/lib/node_modules /usr/local/lib/node_modules +COPY --from=builder /usr/local/bin/spectral /usr/local/bin/spectral + +# Copy ruleset cache from builder +COPY --from=builder /build/ruleset_tag_cache /app/ruleset_tag_cache + +# Copy application code +COPY . /app + +RUN chmod +x /app/entrypoint.sh + +ENV GITHUB_TAG_CACHE_PATH=/app/ruleset_tag_cache + +EXPOSE 8080 + +ENTRYPOINT ["./entrypoint.sh"] diff --git a/microservices/csitOasValidationApi/README.md b/microservices/csitOasValidationApi/README.md new file mode 100644 index 00000000..06b3ded1 --- /dev/null +++ b/microservices/csitOasValidationApi/README.md @@ -0,0 +1,156 @@ +# CSIT OAS Validation API + +## Description + +This API validates OAS files. + +## Requirements. + +Python 3.14 +Spectral 6.14.2 + +### Installation + +#### Python 3.14 +This project requires Python 3.14. 
+ +Install required packages +```bash +sudo apt update +sudo apt install -y build-essential libssl-dev zlib1g-dev libbz2-dev libreadline-dev libsqlite3-dev curl git libffi-dev +``` +Install pyenv +```bash +curl https://pyenv.run | bash +``` +Add pyenv to your shell (run once): +```bash +echo 'export PYENV_ROOT="$HOME/.pyenv"' >> ~/.bashrc +echo 'export PATH="$PYENV_ROOT/bin:$PATH"' >> ~/.bashrc +echo 'eval "$(pyenv init -)"' >> ~/.bashrc +source ~/.bashrc +``` +Install Python 3.14 + +```bash +pyenv install 3.14.0 +``` + +Set it for this project + +```bash +cd microservices/csitOasValidationApi +``` + +Verify the version: + +```bash +python --version # Should show Python 3.14.0 +``` + +#### Poetry +Install Poetry +```bash +curl -sSL https://install.python-poetry.org | python3 - +poetry config keyring.enabled false +poetry env use 3.14 +poetry install +``` + +#### Node and NPM +Install Node and NPM (required by Spectral) +Check if node and npm are already installed +```bash +node --version +npm --version +``` + +If Node and NPM need to be installed +```bash +sudo apt update +sudo apt install nodejs npm +``` + +#### Spectral +Install Stoplight Spectral +Requires 6.0.0 or greater +```bash +sudo npm install -g @stoplight/spectral-cli@6.14.2 +spectral --version +``` + +#### Docker +```bash +sudo apt install docker.io +sudo groupadd docker +sudo usermod -aG docker $USER +newgrp docker +``` + +#### Docker Build and Run + +```bash +docker build --tag csitoasvalidationapi . + +docker run -ti --rm \ + -p 8080:8080 \ + -e LOG_LEVEL=DEBUG \ + csitoasvalidationapi +``` + +## Tests + +To run the server, you will need to check out the ruleset versions you +want to be available to the service in a local directory and set the GITHUB_TAG_CACHE_PATH +environment variable to the root of the directory, before starting the service. 
+ +```bash +./checkout-ruleset-tags.sh ruleset_tag_cache +``` + +```bash +export GITHUB_TAG_CACHE_PATH="$(realpath -m ./ruleset_tag_cache)" + +poetry run uvicorn csit_validation.main:app --reload --port 8080 +``` + +and open your browser at `http://localhost:8080/docs/` to see the docs. + +Testing: + +```sh +poetry run pytest -v -s --log-cli-level=DEBUG +poetry run coverage run --branch -m pytest -s +poetry run coverage xml +``` + +List all available versions (sorted newest first) +```sh +curl -s http://localhost:8080/versions | jq . +``` + +List rulesets for version v0.1.0-test +```sh +curl -s http://localhost:8080/versions/v0.1.0-test/rulesets | jq . +``` + +Validate with JSON document +```sh +curl -X POST http://localhost:8080/versions/v0.1.0-test/rulesets/basic-ruleset/validations \ +-H "Content-Type: application/json" \ +-d '{ + "openapi": "3.1.0", + "info": { + "title": "Test API", + "version": "1.0.0" + }, + "paths": { + "/users": { + "get": { + "summary": "List users" + } + } + } +}' \ +| jq . +``` \ No newline at end of file diff --git a/microservices/csitOasValidationApi/checkout-ruleset-tags.sh b/microservices/csitOasValidationApi/checkout-ruleset-tags.sh new file mode 100755 index 00000000..8f656a0b --- /dev/null +++ b/microservices/csitOasValidationApi/checkout-ruleset-tags.sh @@ -0,0 +1,68 @@ +#!/usr/bin/env bash +set -euo pipefail + +# Configuration ──────────────────────────────────────────────────────────────── +REPO_URL="https://github.com/bcgov/csit-api-governance-spectral-style-guide.git" +CACHE_BASE_DIR="${1:-./tag-cache}" +TAG_PREFIX="ruleset-" + +# ────────────────────────────────────────────────────────────────────────────── + +mkdir -p "$CACHE_BASE_DIR" +cd "$CACHE_BASE_DIR" || exit 1 + +# 1. Create a bare clone if it doesn't exist yet +BARE_REPO="bare.git" +if [ ! -d "$BARE_REPO" ]; then + echo "Creating bare clone …" + git clone --mirror "$REPO_URL" "$BARE_REPO" +fi + +# 2. 
Enter bare repo and fetch latest tags +cd "$BARE_REPO" || exit 1 +echo "Fetching latest tags …" +git fetch --tags --prune origin + +# 3. Find all matching tags (sorted by version) +mapfile -t TAGS < <(git tag -l "${TAG_PREFIX}*" --sort=version:refname) + +if [ ${#TAGS[@]} -eq 0 ]; then + echo "No tags found matching '${TAG_PREFIX}*'" + exit 1 +fi + +echo "" +echo "Found ${#TAGS[@]} tags matching '${TAG_PREFIX}*':" +printf ' - %s\n' "${TAGS[@]}" +echo "" + +# 4. Create the structure and check out each tag as a worktree +cd .. || exit 1 +mkdir -p tags + +for tag in "${TAGS[@]}"; do + target="tags/$tag" + + if [ -d "$target" ]; then + echo "Already exists: $target → skipping" + continue + fi + + echo "Checking out $tag → $target" + + # Create worktree (detached HEAD) + git -C "$BARE_REPO" worktree add --detach "../$target" "$tag" + + # Optional: show a quick summary + (cd "$target" && git --no-pager log -1 --oneline --decorate) + echo "" +done + +# Optional: list all active worktrees +echo "All worktrees:" +git -C "$BARE_REPO" worktree list + +echo "" +echo "Done." 
+echo "Cache structure created under: $(pwd)" +tree -L 2 2>/dev/null || ls -1 tags \ No newline at end of file diff --git a/microservices/csitOasValidationApi/csit_validation/apis/__init__.py b/microservices/csitOasValidationApi/csit_validation/apis/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/microservices/csitOasValidationApi/csit_validation/apis/discovery_api.py b/microservices/csitOasValidationApi/csit_validation/apis/discovery_api.py new file mode 100644 index 00000000..7c6e0d5c --- /dev/null +++ b/microservices/csitOasValidationApi/csit_validation/apis/discovery_api.py @@ -0,0 +1,68 @@ +import importlib +import pkgutil + +from csit_validation.apis.discovery_api_base import BaseDiscoveryApi +import csit_validation.impl + +from fastapi import ( + APIRouter, + HTTPException, + Path, +) + +from pydantic import Field, StrictStr +from typing_extensions import Annotated +from csit_validation.apis.errors.error_response import ErrorResponse +from csit_validation.models.ruleset_list import RulesetList +from csit_validation.models.version_list import VersionList + + +router = APIRouter() + +ns_pkg = csit_validation.impl +for _, name, _ in pkgutil.iter_modules(ns_pkg.__path__, ns_pkg.__name__ + "."): + importlib.import_module(name) + + +@router.get( + "/versions", + operation_id="listVersions", + responses={ + 200: {"model": VersionList, "description": "Successful response"}, + 500: {"model": ErrorResponse, "description": "Internal server error"}, + }, + tags=["Discovery"], + summary="List available versions of the API Governance rules", + response_model_by_alias=True, +) +async def list_versions( +) -> VersionList: + """Returns all Git tags (versions) from the csit-api-governance-spectral-style-guide repository which contain Spectral rulesets.""" + if not BaseDiscoveryApi.subclasses: + raise HTTPException(status_code=500, detail="Not implemented") + return await BaseDiscoveryApi.subclasses[0]().list_versions() + + +@router.get( + 
"/versions/{version}/rulesets", + operation_id="listRulesets", + responses={ + 200: {"model": RulesetList, "description": "Successful response"}, + 404: {"model": ErrorResponse, "description": "Version or ruleset not found"}, + 422: { + "description": "Validation Error (automatically added by FastAPI)", + "x-remove": True # ← Custom flag to mark for removal + }, + 500: {"model": ErrorResponse, "description": "Internal server error"}, + }, + tags=["Discovery"], + summary="List Spectral rulesets in a version", + response_model_by_alias=True, +) +async def list_rulesets_in_version( + version: Annotated[StrictStr, Field(description="Version (Git tag) name")] = Path(..., description="Version (Git tag) name", examples=["v1.0.0"]), +) -> RulesetList: + """Returns the list of Spectral rulesets available in the specified version.""" + if not BaseDiscoveryApi.subclasses: + raise HTTPException(status_code=500, detail="Not implemented") + return await BaseDiscoveryApi.subclasses[0]().list_rulesets_in_version(version) diff --git a/microservices/csitOasValidationApi/csit_validation/apis/discovery_api_base.py b/microservices/csitOasValidationApi/csit_validation/apis/discovery_api_base.py new file mode 100644 index 00000000..e9003218 --- /dev/null +++ b/microservices/csitOasValidationApi/csit_validation/apis/discovery_api_base.py @@ -0,0 +1,23 @@ +from typing import ClassVar, Tuple + +from csit_validation.models.ruleset_list import RulesetList +from csit_validation.models.version_list import VersionList + + +class BaseDiscoveryApi: + subclasses: ClassVar[Tuple] = () + + def __init_subclass__(cls, **kwargs): + super().__init_subclass__(**kwargs) + BaseDiscoveryApi.subclasses = BaseDiscoveryApi.subclasses + (cls,) + async def list_versions( + self, + ) -> VersionList: + ... + + + async def list_rulesets_in_version( + self, + version: str, + ) -> RulesetList: + ... 
diff --git a/microservices/csitOasValidationApi/csit_validation/apis/errors/__init__.py b/microservices/csitOasValidationApi/csit_validation/apis/errors/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/microservices/csitOasValidationApi/csit_validation/apis/errors/error_response.py b/microservices/csitOasValidationApi/csit_validation/apis/errors/error_response.py new file mode 100644 index 00000000..c64e7a98 --- /dev/null +++ b/microservices/csitOasValidationApi/csit_validation/apis/errors/error_response.py @@ -0,0 +1,98 @@ +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + + + + +from pydantic import BaseModel, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional, Annotated +try: + from typing import Self +except ImportError: + from typing_extensions import Self + +DETAILS_JSON_SCHEMA_EXTRA = { + "example": { + "correlationId": "req-abc123-xyz", + "timestamp": "2026-01-16T19:22:00Z" + } +} + +class ErrorResponse(BaseModel): + """ + Standard error response format for unexpected or server-side errors (e.g., 500 Internal Server Error, 403 Forbidden, 401 Unauthorized, etc.). This is used when a more structured Problem Details response (RFC 9457) is not appropriate or when the error is general rather than validation-specific. + """ + error: StrictStr = Field(description="A short, machine-readable error code or identifier that categorizes the type of error. This field is stable and intended for programmatic handling by clients (e.g., mapping to specific error-handling logic). Common values include 'internal_error', 'forbidden', 'unauthorized', 'rate_limit_exceeded', etc.", json_schema_extra={"example":"forbidden"}) + message: StrictStr = Field(description="A human-readable summary of the error, suitable for display to end-users or logging. 
Should be clear, concise, and avoid exposing internal technical details or sensitive information (per security best practices).", json_schema_extra={"example":"You are not authorized to access this resource"}) + details: Optional[Dict[str, Any]] = Field(default=None, description="Optional additional context or structured details about the error. This can include extra information useful for debugging (e.g., error codes from downstream systems, correlation IDs, or custom attributes). Use sparingly and avoid including sensitive data.", json_schema_extra=DETAILS_JSON_SCHEMA_EXTRA) + __properties: ClassVar[List[str]] = ["error", "message", "details"] + + model_config = { + "populate_by_name": True, + "validate_assignment": True, + "protected_namespaces": (), + "json_schema_extra" : { + "examples": [ + { + "error": "forbidden", + "message": "You are not authorized to access this resource", + "details": { + "correlationId": "req-abc123-xyz" + } + } + ] + } + } + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Self: + """Create an instance of ErrorResponse from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + _dict = self.model_dump( + by_alias=True, + exclude={ + }, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Dict) -> Self: + """Create an instance of ErrorResponse from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "error": obj.get("error"), + "message": obj.get("message"), + "details": obj.get("details") + }) + return _obj + + diff --git a/microservices/csitOasValidationApi/csit_validation/apis/errors/problem_detail_error_item.py b/microservices/csitOasValidationApi/csit_validation/apis/errors/problem_detail_error_item.py new file mode 100644 index 00000000..37dcbf36 --- /dev/null +++ b/microservices/csitOasValidationApi/csit_validation/apis/errors/problem_detail_error_item.py @@ -0,0 +1,138 @@ +from __future__ import annotations +import pprint +import json + + + + +from pydantic import BaseModel, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from csit_validation.apis.errors.problem_detail_error_location import ProblemDetailErrorLocation +try: + from typing import Self +except ImportError: + from typing_extensions import Self + +CONSTRAINTS_JSON_SCHEMA_EXTRA = { + "example": { + "minLength": 9, + "pattern": "^\\d{9}$" + } +} + +class ProblemDetailErrorItem(BaseModel): + """ + Represents a single detailed error within a Problem Details response (RFC 9457). Provides granular information about what went wrong in the request, typically used in validation, semantic, or business-rule failures. + """ + location: ProblemDetailErrorLocation = Field(description="The part of the HTTP request where the error occurred (body, query, path, header, etc.). Helps clients quickly locate the problematic input.") + code: StrictStr = Field(description="A machine-readable error code that identifies the specific type of error. Should be stable and documented for programmatic handling. 
Follows the 'application/problem+json' error code pattern from RFC 9457.", json_schema_extra={"example":"INVALID_LINE_NUMBER"}) + message: StrictStr = Field(description="A short, human-readable summary of the error suitable for display to end-users or in logs. Should be clear and concise (RFC 9457 recommends avoiding technical jargon where possible).", json_schema_extra={"example":"Line number does not exist in the specified document"}) + type: StrictStr = Field(description="A URI reference or tag that identifies the problem type (RFC 9457 'type' field). Often used to categorize errors (e.g., validation, authorization, business-rule). When using tags instead of URIs, prefix with 'tag:' is a common convention.", json_schema_extra={"example":"tag:validation-error"}) + field: Optional[StrictStr] = Field(default=None, description="The name of the specific field/property in the request that caused the error (when applicable). Useful for form-based or structured input validation.", json_schema_extra={"example":"taxYear"}) + detail: Optional[StrictStr] = Field(default=None, description="A more detailed human-readable explanation of the error, providing additional context beyond the short message (maps to RFC 9457 'detail' field). May include contextual information or suggested corrections.", json_schema_extra={"example":"Line 99999 is not present in T1 General for 2024"}) + received: Optional[StrictStr] = Field(default=None, description="The actual value received by the server that caused the error (useful for debugging and helping clients understand what was invalid).", json_schema_extra={"example":"99999"}) + pointer: Optional[StrictStr] = Field(default=None, description="JSON Pointer (RFC 6901) to the exact location of the error within the request body (e.g., '#/lineNumbers/0'). 
Highly recommended for deep/nested validation errors (aligns with RFC 9457 best practices).", json_schema_extra={"example":"#/lineNumbers/1"}) + constraints: Optional[Dict[str, Any]] = Field(default=None, description="Optional object containing validation constraint violations (e.g., minLength, pattern, enum values). Keys are constraint names, values are expected values or descriptions. Useful for schema-based validation libraries.", json_schema_extra=CONSTRAINTS_JSON_SCHEMA_EXTRA) + __properties: ClassVar[List[str]] = ["location", "code", "message", "type", "field", "detail", "received", "pointer", "constraints"] + + model_config = { + "populate_by_name": True, + "validate_assignment": True, + "protected_namespaces": (), + "json_schema_extra" : { + "examples": [ + { + "location": "body", + "code": "REQUIRED_FIELD_MISSING", + "message": "Missing required identifier", + "type": "tag:validation-error", + "pointer": "#/identifier", + "detail": "One of 'sin' or both 'fullLegalName' and 'birthDate' must be provided", + "received": "", + "field": "individual" + } + ] + } + } + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Self: + """Create an instance of ProblemDetailErrorItem from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + _dict = self.model_dump( + by_alias=True, + exclude={ + }, + exclude_none=True, + ) + # set to None if var_field (nullable) is None + # and model_fields_set contains the field + if self.var_field is None and "var_field" in self.model_fields_set: + _dict['field'] = None + + # set to None if detail (nullable) is None + # and model_fields_set contains the field + if self.detail is None and "detail" in self.model_fields_set: + _dict['detail'] = None + + # set to None if received (nullable) is None + # and model_fields_set contains the field + if self.received is None and "received" in self.model_fields_set: + _dict['received'] = None + + # set to None if pointer (nullable) is None + # and model_fields_set contains the field + if self.pointer is None and "pointer" in self.model_fields_set: + _dict['pointer'] = None + + # set to None if constraints (nullable) is None + # and model_fields_set contains the field + if self.constraints is None and "constraints" in self.model_fields_set: + _dict['constraints'] = None + + return _dict + + @classmethod + def from_dict(cls, obj: Dict) -> Self: + """Create an instance of ProblemDetailErrorItem from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "location": obj.get("location"), + "code": obj.get("code"), + "message": obj.get("message"), + "type": obj.get("type"), + "field": obj.get("field"), + "detail": obj.get("detail"), + "received": obj.get("received"), + "pointer": obj.get("pointer"), + "constraints": obj.get("constraints") + }) + return _obj + + diff --git a/microservices/csitOasValidationApi/csit_validation/apis/errors/problem_detail_error_location.py b/microservices/csitOasValidationApi/csit_validation/apis/errors/problem_detail_error_location.py new file mode 100644 index 00000000..1891e094 --- /dev/null +++ b/microservices/csitOasValidationApi/csit_validation/apis/errors/problem_detail_error_location.py @@ -0,0 +1,49 @@ 
+from __future__ import annotations +import json +from enum import Enum +from pydantic import GetJsonSchemaHandler +from pydantic.json_schema import JsonSchemaValue + + + +try: + from typing import Self +except ImportError: + from typing_extensions import Self + + +class ProblemDetailErrorLocation(str, Enum): + """ + The location on the HTTP request for which a problem has been detected. (e.g., body, query, header, path, cookie). + """ + + """ + allowed enum values + """ + BODY = 'body' + QUERY = 'query' + HEADER = 'header' + PATH = 'path' + COOKIE = 'cookie' + + @classmethod + def __get_pydantic_json_schema__( + cls, + core_schema, + handler: GetJsonSchemaHandler, + ) -> JsonSchemaValue: + + # gets {"type": "string", "enum": [...], ...} + json_schema = handler(core_schema) + + # Add (or override) examples cleanly + json_schema["examples"] = ["body"] + + return json_schema + + @classmethod + def from_json(cls, json_str: str) -> Self: + """Create an instance of ProblemDetailErrorLocation from a JSON string""" + return cls(json.loads(json_str)) + + diff --git a/microservices/csitOasValidationApi/csit_validation/apis/errors/problem_detail_response.py b/microservices/csitOasValidationApi/csit_validation/apis/errors/problem_detail_response.py new file mode 100644 index 00000000..b3c3e48e --- /dev/null +++ b/microservices/csitOasValidationApi/csit_validation/apis/errors/problem_detail_response.py @@ -0,0 +1,118 @@ +from __future__ import annotations +import pprint +import json + + + + +from pydantic import BaseModel, Field, StrictInt, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing_extensions import Annotated +from csit_validation.apis.errors.problem_detail_error_item import ProblemDetailErrorItem +try: + from typing import Self +except ImportError: + from typing_extensions import Self + +class ProblemDetailResponse(BaseModel): + """ + Canonical model for problem details as defined by RFC-9457 + """ + type: StrictStr = Field(description="A 
URI reference that identifies the problem type", json_schema_extra={"example":"tag:validation-errors"}) + title: StrictStr = Field(description="A short, human-readable summary of the problem type", json_schema_extra={"example":"Bad Request"}) + status: StrictInt = Field(description="A number indicating the HTTP status code generated for this occurrence of the problem", json_schema_extra={"example":400}) + detail: Optional[StrictStr] = Field(default=None, description="A human-readable explanation specific to this occurrence of the problem", json_schema_extra={"example":"One or more validation errors occurred"}) + errors: Annotated[List[ProblemDetailErrorItem], Field(min_length=1)] = Field(description="A list of individual error occurrences found, with details and a pointer to the location of each") + __properties: ClassVar[List[str]] = ["type", "title", "status", "detail", "errors"] + + model_config = { + "populate_by_name": True, + "validate_assignment": True, + "protected_namespaces": (), + "json_schema_extra" : { + "examples": [ + { + "type": "tag:validation-errors", + "title": "Bad Request", + "status": 400, + "detail": "Invalid line numbers provided", + "instance": "/tax-documents/query", + "errors": [ + { + "location": "body", + "code": "INVALID_LINE_NUMBER", + "message": "Line number does not exist in the document", + "type": "tag:validation-error", + "pointer": "#/lineNumbers/0", + "received": "99999" + } + ] + } + ] + } + } + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Self: + """Create an instance of ProblemDetailResponse from a JSON string""" + return 
cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + _dict = self.model_dump( + by_alias=True, + exclude={ + }, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in errors (list) + _items = [] + if self.errors: + for _item in self.errors: + if _item: + _items.append(_item.to_dict()) + _dict['errors'] = _items + # set to None if detail (nullable) is None + # and model_fields_set contains the field + if self.detail is None and "detail" in self.model_fields_set: + _dict['detail'] = None + + return _dict + + @classmethod + def from_dict(cls, obj: Dict) -> Self: + """Create an instance of ProblemDetailResponse from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "type": obj.get("type"), + "title": obj.get("title"), + "status": obj.get("status"), + "detail": obj.get("detail"), + "errors": [ProblemDetailErrorItem.from_dict(_item) for _item in obj.get("errors")] if obj.get("errors") is not None else None + }) + return _obj + + diff --git a/microservices/csitOasValidationApi/csit_validation/apis/root_api.py b/microservices/csitOasValidationApi/csit_validation/apis/root_api.py new file mode 100644 index 00000000..d0e09a7c --- /dev/null +++ b/microservices/csitOasValidationApi/csit_validation/apis/root_api.py @@ -0,0 +1,101 @@ +import importlib +import pkgutil + +from csit_validation.apis.root_api_base import BaseRootApi +import csit_validation.impl + +from fastapi import ( + APIRouter, + HTTPException, +) +from csit_validation.models.health import HealthResponse +from 
csit_validation.apis.errors.error_response import ErrorResponse + +from pydantic import StrictStr + + +router = APIRouter() + +ns_pkg = csit_validation.impl +for _, name, _ in pkgutil.iter_modules(ns_pkg.__path__, ns_pkg.__name__ + "."): + importlib.import_module(name) + + +@router.get( + "/", + responses={ + 307: {"description": "Temporary Redirect to API documentation"}, + }, + tags=["Info"], + summary="Root endpoint - redirects to API documentation", + response_model_by_alias=True, +) +async def root() -> str: + """Redirects the root URL (/) to the interactive API documentation (/docs).""" + if not BaseRootApi.subclasses: + raise HTTPException(status_code=500, detail="Not implemented") + return await BaseRootApi.subclasses[0]().root() + + +@router.get( + "/livez", + responses={ + 200: {"model": StrictStr, "description": "Successful response"}, + }, + tags=["Liveness"], + summary="Kubernetes liveness probe", + response_model_by_alias=True, +) +async def livez() -> str: + """Liveness probe - returns 200 if the FastAPI process is alive and responding.""" + if not BaseRootApi.subclasses: + raise HTTPException(status_code=500, detail="Not implemented") + return await BaseRootApi.subclasses[0]().livez() + + +@router.get( + "/readyz", + responses={ + 200: {"model": StrictStr, "description": "Successful response"}, + }, + tags=["Ready"], + summary="Kubernetes readiness probe", + response_model_by_alias=True, +) +async def readyz() -> str: + """Readiness probe - returns 200 only when the service can meaningfully serve traffic (at least one discovery implementation is loaded).""" + if not BaseRootApi.subclasses: + raise HTTPException(status_code=500, detail="Not implemented") + return await BaseRootApi.subclasses[0]().readyz() + + +@router.get( + "/health", + responses={ + 200: {"model": HealthResponse, "description": "Detailed service health status"}, + 500: {"model": ErrorResponse, "description": "No implementation available"}, + }, + tags=["Health"], + 
summary="Detailed health check endpoint", + response_model_by_alias=True, +) +async def health() -> HealthResponse: + """ + Health check endpoint returning structured service health information. + + This endpoint provides more detailed health information than the binary + /livez and /readyz probes. It is suitable for: + - External monitoring tools + - Status dashboards + - Debugging and alerting + + Returns: + HealthStatus: An object/enum indicating overall health (healthy/degraded/unhealthy) + along with optional message, components, etc. + + Raises: + HTTPException(500): If no BaseRootApi implementation is registered + """ + if not BaseRootApi.subclasses: + raise HTTPException(status_code=500, detail="Not implemented") + return await BaseRootApi.subclasses[0]().health() diff --git a/microservices/csitOasValidationApi/csit_validation/apis/root_api_base.py b/microservices/csitOasValidationApi/csit_validation/apis/root_api_base.py new file mode 100644 index 00000000..31a8ce55 --- /dev/null +++ b/microservices/csitOasValidationApi/csit_validation/apis/root_api_base.py @@ -0,0 +1,31 @@ +from typing import ClassVar, Tuple + +from csit_validation.models.health import HealthResponse + + +class BaseRootApi: + subclasses: ClassVar[Tuple] = () + + def __init_subclass__(cls, **kwargs): + super().__init_subclass__(**kwargs) + BaseRootApi.subclasses = BaseRootApi.subclasses + (cls,) + + async def root( + self, + ) -> str: + ... + + async def livez( + self, + ) -> str: + ... + + async def readyz( + self, + ) -> str: + ... + + async def health( + self, + ) -> HealthResponse: + ... 
diff --git a/microservices/csitOasValidationApi/csit_validation/apis/validation_api.py b/microservices/csitOasValidationApi/csit_validation/apis/validation_api.py new file mode 100644 index 00000000..a536740f --- /dev/null +++ b/microservices/csitOasValidationApi/csit_validation/apis/validation_api.py @@ -0,0 +1,166 @@ +import importlib +import pkgutil + +from csit_validation.apis.validation_api_base import BaseValidationApi +import csit_validation.impl + +from fastapi import ( # noqa: F401 + APIRouter, + HTTPException, + Path, + Request +) + +from pydantic import Field, StrictStr +from typing_extensions import Annotated +from csit_validation.apis.errors.error_response import ErrorResponse +from csit_validation.models.validation_response import ValidationResponse +from csit_validation.apis.errors.problem_detail_response import ProblemDetailResponse + + +router = APIRouter() + +ns_pkg = csit_validation.impl +for _, name, _ in pkgutil.iter_modules(ns_pkg.__path__, ns_pkg.__name__ + "."): + importlib.import_module(name) + +EXAMPLE_OAS_JSON = """{ + "openapi": "3.1.0", + "info": { + "title": "Pet Store API", + "version": "1.0.0", + "description": "A simple example API for managing pets" + }, + "paths": { + "/pets": { + "get": { + "summary": "List all pets", + "operationId": "listPets", + "responses": { + "200": { + "description": "A list of pets", + "content": { + "application/json": { + "schema": { + "type": "array", + "items": { + "$ref": "#/components/schemas/Pet" + } + } + } + } + } + } + } + } + }, + "components": { + "schemas": { + "Pet": { + "type": "object", + "required": ["id", "name"], + "properties": { + "id": { "type": "integer" }, + "name": { "type": "string" }, + "tag": { "type": "string" } + } + } + } + } +} +""" + +EXAMPLE_OAS_YAML = """openapi: 3.1.0 +info: + title: Pet Store API + version: 1.0.0 + description: A simple example API for managing pets +paths: + /pets: + get: + summary: List all pets + operationId: listPets + responses: + '200': + 
description: A list of pets + content: + application/json: + schema: + type: array + items: + $ref: '#/components/schemas/Pet' +components: + schemas: + Pet: + type: object + required: + - id + - name + properties: + id: + type: integer + name: + type: string + tag: + type: string +""" + +@router.post( + "/versions/{version}/rulesets/{ruleset:path}/validations", + operation_id="createValidation", + responses={ + 200: {"model": ValidationResponse, "description": "Validation completed successfully"}, + 400: {"model": ProblemDetailResponse, "description": "Invalid request (missing file, unsupported format, etc.)"}, + 404: {"model": ErrorResponse, "description": "Version or ruleset not found"}, + 422: {"model": ProblemDetailResponse, "description": "OAS document could not be parsed"}, + 500: {"model": ErrorResponse, "description": "Internal server error"}, + }, + tags=["Validation"], + summary="Validate an OpenAPI document", + response_model_by_alias=True, + # We are unable to use the annotations to generate the OpenApi request body as desired so we are providing the + # definition manually. + openapi_extra={ + "requestBody": { + "required": True, + "description": "The raw OpenAPI document to validate. 
Send as JSON or YAML with appropriate Content-Type header.", + "content": { + "application/json": { + "schema": { + "type": "string", + "format": "binary", + "description": "OpenAPI document in JSON format" + }, + "examples": { + "pet-store-json": { + "summary": "An example JSON Open API specification", + "value": EXAMPLE_OAS_JSON + } + } + }, + "application/yaml": { + "schema": { + "type": "string", + "format": "binary", + "description": "OpenAPI document in YAML format" + }, + "examples": { + "pet-store-yaml": { + "summary": "An example YAML Open API specification", + "value": EXAMPLE_OAS_YAML + } + } + } + } + }, + }, +) +async def create_validation( + version: Annotated[StrictStr, Field(description="Version (Git tag) name")] = Path(..., description="Version (Git tag) name", examples=["v1.0.0"]), + ruleset: Annotated[StrictStr, Field(description="Path to the Spectral rules file (URL-encoded if necessary)")] = Path(..., description="Path to the Spectral rules file (URL-encoded if necessary)", examples=["rulesets/basic-ruleset.yml"]), + request: Request = None, +) -> ValidationResponse: + """Creates a new validation resource by running Spectral against the uploaded OpenAPI document using the specified ruleset from the given version.""" + if not BaseValidationApi.subclasses: + raise HTTPException(status_code=500, detail="Not implemented") + return await BaseValidationApi.subclasses[0]().create_validation(version, ruleset, request) diff --git a/microservices/csitOasValidationApi/csit_validation/apis/validation_api_base.py b/microservices/csitOasValidationApi/csit_validation/apis/validation_api_base.py new file mode 100644 index 00000000..b6c9cb42 --- /dev/null +++ b/microservices/csitOasValidationApi/csit_validation/apis/validation_api_base.py @@ -0,0 +1,18 @@ +from typing import ClassVar, Tuple + +from csit_validation.models.validation_response import ValidationResponse +from fastapi import Request + +class BaseValidationApi: + subclasses: ClassVar[Tuple] = () + + 
def __init_subclass__(cls, **kwargs): + super().__init_subclass__(**kwargs) + BaseValidationApi.subclasses = BaseValidationApi.subclasses + (cls,) + async def create_validation( + self, + version: str, + ruleset: str, + request: Request + ) -> ValidationResponse: + ... diff --git a/microservices/csitOasValidationApi/csit_validation/core/config.py b/microservices/csitOasValidationApi/csit_validation/core/config.py new file mode 100644 index 00000000..9751bce2 --- /dev/null +++ b/microservices/csitOasValidationApi/csit_validation/core/config.py @@ -0,0 +1,29 @@ +from starlette.config import Config +import os +from pathlib import Path +from functools import lru_cache + +# Config will be read from environment variables and/or ".env" files. +config = Config(env_file=".env" if os.path.exists(".env") else None) + +# The VERSION_TAG_PREFIX environment variable will allow us to identify tags for specific environments +# e.g. dev-ruleset- for the dev environment, etc.. +VERSION_TAG_PREFIX = config('VERSION_TAG_PREFIX', default="ruleset-") +RULESET_DIRECTORY = config('RULESET_DIRECTORY', default="spectral") + +# ── Lazy computation for GITHUB_TAG_CACHE_PATH ─────────────────────────────── + +@lru_cache(maxsize=1) +def get_github_tag_cache_path() -> Path: + """ + Lazily returns the cache path. 
+ - Reads the config only when first called + - Uses default if not set + - Result is cached for subsequent calls (performance + consistency) + """ + + return config('GITHUB_TAG_CACHE_PATH', default="csit-spectral-cache") + + +# Public name that can be used like before (but now lazy) +GITHUB_TAG_CACHE_PATH = get_github_tag_cache_path diff --git a/microservices/csitOasValidationApi/csit_validation/impl/__init__.py b/microservices/csitOasValidationApi/csit_validation/impl/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/microservices/csitOasValidationApi/csit_validation/impl/discovery_impl.py b/microservices/csitOasValidationApi/csit_validation/impl/discovery_impl.py new file mode 100644 index 00000000..b38e96b7 --- /dev/null +++ b/microservices/csitOasValidationApi/csit_validation/impl/discovery_impl.py @@ -0,0 +1,98 @@ +# csit_validation/impl/discovery_impl.py + +import logging + +from fastapi import HTTPException +from semver import Version +from pathlib import Path + +from csit_validation.apis.discovery_api_base import BaseDiscoveryApi +from csit_validation.models.version_list import VersionList +from csit_validation.models.ruleset_list import RulesetList +from csit_validation.services.cached_rulesets_service import CachedRulesetsService +from csit_validation.core.config import ( + GITHUB_TAG_CACHE_PATH, + VERSION_TAG_PREFIX, + RULESET_DIRECTORY, +) + +logger = logging.getLogger(__name__) + + +class DiscoveryApiImpl(BaseDiscoveryApi): + """ + The Discovery API endpoints support the listing of published versions of the CSIT API Governance Rules + and the rulesets available in each version. + + The primary purpose of these endpoints is to allow the versions and rulesets to be discovered so they can be + used as parameters to the OAS Validaton requests implemented by the Validation API. + + See the GitHubRulesetService for details on how the versions and rules sets are discovered. 
+ """ + + def __init__(self): + + self.github_tag_cache_path = GITHUB_TAG_CACHE_PATH + self.version_tag_prefix = VERSION_TAG_PREFIX + self.ruleset_dir = RULESET_DIRECTORY + self.tag_prefix = "ruleset-" + + self.gh = CachedRulesetsService( + self.github_tag_cache_path, + self.version_tag_prefix, + self.ruleset_dir, + ) + + async def list_versions(self) -> VersionList: + """List all git tags that start with 'ruleset-' followed by a valid semantic version, + returning only the version part (prefix removed) sorted newest first.""" + + valid_tags = self.gh.get_valid_version_tags + + # Sort descending by semantic version (parse without prefix) + sorted_tags = sorted( + valid_tags.keys(), + key=self.parse_semver, + reverse=True # newest first + ) + + return VersionList(versions=sorted_tags) + + def parse_semver(self, tag: str) -> Version: + """Strip common 'v' prefix and parse safely.""" + clean = tag.lstrip('v') # removes leading v/V if present + return Version.parse(clean) + + async def list_rulesets_in_version( + self, + version: str + ) -> RulesetList: + """List all ruleset files available under the given version/tag.""" + + # Get the full prefixed tag name + version_to_tag_map = self.gh.get_valid_version_tags + prefixed_tag = version_to_tag_map.get(version) + logger.debug(f"Found prefixed tag: '{prefixed_tag}'") + + # If not found (get returns None), raise 404 + if prefixed_tag is None: + raise HTTPException( + status_code=404, + detail=f"Version '{version}' not found" + ) + + # Fetch the actual ruleset files + rulesets = await self.gh.get_ruleset_files_in_tag(prefixed_tag) + + file_paths = list(rulesets.keys()) + + sorted_paths = sorted( + file_paths, + key=lambda p: (Path(p).parent.as_posix() or '', Path(p).name) + ) + + # Return successful response (even if empty) + return RulesetList( + version=version, + rulesets=sorted_paths + ) \ No newline at end of file diff --git a/microservices/csitOasValidationApi/csit_validation/impl/root_impl.py 
b/microservices/csitOasValidationApi/csit_validation/impl/root_impl.py new file mode 100644 index 00000000..0d120053 --- /dev/null +++ b/microservices/csitOasValidationApi/csit_validation/impl/root_impl.py @@ -0,0 +1,143 @@ +# csit_validation/impl/root_impl.py + +import logging +import shutil +import asyncio +from datetime import datetime +from fastapi import ( + HTTPException, + status +) +from fastapi.responses import RedirectResponse + +from csit_validation.apis.root_api_base import BaseRootApi +from csit_validation.models.health import ( + HealthStatus, + HealthResponse, +) + +logger = logging.getLogger(__name__) + + +class RootApiImpl(BaseRootApi): + """ + Abstract base class for root/info/health endpoints implementations. + + Concrete subclasses should be placed in csit_validation.impl.* + and will be auto-discovered via pkgutil. + """ + + async def root(self) -> str: + """Redirects the root URL (/) to the interactive API documentation (/docs).""" + logger.debug(" str: + """ + Default liveness check: just confirm the process is responding. + + This should be extremely cheap — no I/O, no external dependencies. + Override only if you have very lightweight in-memory state to verify. + """ + logger.debug(" str: + """ + Default readiness check: confirm at least one implementation is loaded. + """ + logger.debug(" HealthResponse: + """ + Detailed health check that verifies Stoplight Spectral is installed and functional. + + Checks: + - Presence of 'spectral' CLI in PATH + - Successful execution of 'spectral --version' + + Returns HealthResponse with component-level detail for "spectral". + """ + logger.debug("= 6.0.0 (breaking change in severity format).\n" + "Please upgrade Spectral CLI." 
+ ) + else: + logger.info(f"Using Spectral CLI version {spectral_ver}") + + self.gh = CachedRulesetsService( + self.github_tag_cache_path, + self.version_tag_prefix, + self.ruleset_dir, + ) + + @log_entry_exit(logger) + def get_spectral_version(self) -> str | None: + try: + result = subprocess.run( + ["spectral", "--version"], + capture_output=True, + text=True, + timeout=10, + check=True + ) + # Output usually looks like: "6.14.2" or sometimes "@stoplight/spectral-cli/6.14.2 linux-x64 node-v20.17.0" + output = result.stdout.strip() + # Take first token that looks like semver + for part in output.split(): + if part.count('.') >= 2 and part.replace('.', '').isdigit(): + return part + return output # fallback - better than nothing + except Exception as e: + logger.warning(f"Could not determine Spectral version: {e}") + return None + + @log_entry_exit(logger) + async def _run_spectral_cli( + self, + document_content: bytes, + ruleset_full_path: Path, + content_type: str + ) -> Tuple[bool, list[dict], ValidationResponseSummary, float]: + + suffix = ".json" if "json" in content_type else ".yaml" + + start_time = time.perf_counter() + + with tempfile.NamedTemporaryFile(suffix=suffix, delete=False) as tmp_file: + tmp_file.write(document_content) + tmp_path = Path(tmp_file.name) + + try: + cmd = [ + "spectral", "lint", + "--format", "json", + "--ruleset", str(ruleset_full_path), + "--quiet", + str(tmp_path) + ] + + # Write output to temporary files to avoid pipe buffer size limits (64KB default on many systems) + # This ensures we can capture arbitrarily large outputs from Spectral + with tempfile.NamedTemporaryFile(suffix='.json', delete=False) as tmp_output: + output_path = Path(tmp_output.name) + with tempfile.NamedTemporaryFile(suffix='.err', delete=False) as tmp_error: + error_path = Path(tmp_error.name) + + try: + # Run spectral and redirect output to files to avoid buffer limits + with open(output_path, 'w', encoding='utf-8') as out_file, \ + open(error_path, 'w', 
encoding='utf-8') as err_file: + process = subprocess.Popen( + cmd, + stdout=out_file, + stderr=err_file, + text=True + ) + + try: + process.wait(timeout=30) + except subprocess.TimeoutExpired: + process.kill() + process.wait() + raise HTTPException(500, "Spectral validation timed out after 30 seconds") + + # Read the output files after process completes + with open(output_path, 'r', encoding='utf-8') as out_file: + stdout = out_file.read() + + with open(error_path, 'r', encoding='utf-8') as err_file: + stderr = err_file.read() + finally: + # Clean up temporary output files + output_path.unlink(missing_ok=True) + error_path.unlink(missing_ok=True) + + duration_ms = round((time.perf_counter() - start_time) * 1000) + + if process.returncode not in (0, 1): + logger.error(f"Spectral failed (code {process.returncode}):\n{stderr}") + raise HTTPException(500, "Spectral validation engine internal error") + + # Parse JSON output, handling empty or whitespace-only output + stdout_stripped = stdout.strip() if stdout else "" + if not stdout_stripped: + output = [] + else: + try: + output = json.loads(stdout_stripped) + except json.JSONDecodeError as e: + logger.error( + f"Failed to parse Spectral JSON output. " + f"Error: {e}. " + f"Output length: {len(stdout_stripped)} chars. " + f"First 500 chars: {stdout_stripped[:500]}" + ) + # Log the problematic area around the error + if e.pos is not None: + start_pos = max(0, e.pos - 200) + end_pos = min(len(stdout_stripped), e.pos + 200) + logger.error( + f"Context around error position {e.pos}: " + f"{stdout_stripped[start_pos:end_pos]}" + ) + raise HTTPException( + 500, + f"Spectral output parsing failed: {str(e)}. " + f"The output may have been truncated or malformed." 
+ ) + + mapped_results = [] + counts = {"error": 0, "warn": 0, "info": 0, "hint": 0} + + severity_map = {0: "error", 1: "warn", 2: "info", 3: "hint"} + + for item in output: + if not isinstance(item, dict): + continue + + raw = item.get("severity", 1) + + if isinstance(raw, int): + level = severity_map.get(raw, "unknown") + else: + level = str(raw).lower() + if level == "warning": + level = "warn" + + if level in counts: + counts[level] += 1 + + mapped_results.append({ + "code": item.get("code"), + "message": item.get("message", "No message"), + "severity": level, + "path": item.get("path", []), + "range": item.get("range") + }) + + # Reverse lookup for sorting (string → numeric priority) + severity_priority = {v: k for k, v in severity_map.items()} + + # After collecting all items + mapped_results.sort(key=lambda r: ( + severity_priority.get(r["severity"], 999), # numeric priority (error first) + r["code"] or "", # alphabetical by code + tuple(r["path"] or []) # stable path comparison + )) + + summary = ValidationResponseSummary( + errors=counts["error"], + warnings=counts["warn"], + infos=counts["info"], + hints=counts["hint"] + ) + + return summary.errors == 0, mapped_results, summary, duration_ms + + finally: + tmp_path.unlink(missing_ok=True) + + + @log_entry_exit(logger) + async def create_validation( + self, + version: str, + ruleset: str, + request: Request + ) -> ValidationResponse: + + version_to_tag_map = self.gh.get_valid_version_tags + prefixed_tag = version_to_tag_map.get(version) + if prefixed_tag is None: + raise HTTPException( + status_code=404, + detail=f"Version '{version}' not found" + ) + + ruleset_tuple = await self.gh.get_ruleset_tuple(prefixed_tag, ruleset) + if ruleset_tuple is None: + raise HTTPException( + status_code=404, + detail=f"Ruleset '{ruleset}' not found for Version '{version}'" + ) + + _, ruleset_rel_path = ruleset_tuple + logger.debug(f"ruleset_rel_path = {ruleset_rel_path}") + + raw_body = await request.body() + if not 
raw_body.strip(): + + return JSONResponse( + status_code=400, + content=ProblemDetailResponse( + type = "tag:validation-errors", + title = "Bad Request", + status = 400, + errors = [ + ProblemDetailErrorItem( + type = "tag:validation-error", + location = ProblemDetailErrorLocation.BODY, + code = "MISSING_BODY", + message = "Request body is required and cannot be empty" + ) + ] + ).model_dump( + mode="json", + exclude_none=True) + ) + + + content_type = request.headers.get("content-type", "").lower().split(";")[0].strip() + allowed = {"application/json", "application/yaml"} + if content_type not in allowed: + + return JSONResponse( + status_code=415, + content=ProblemDetailResponse( + type = "tag:validation-errors", + title = "Unsupported Media Type", + status = 415, + errors = [ + ProblemDetailErrorItem( + type = "tag:validation-error", + location = ProblemDetailErrorLocation.HEADER, + field = "content-type", + code = "UNSUPPORTED_MEDIA_TYPE", + message = "Only JSON and YAML are supported", + received = content_type or "missing" + ) + ] + ).model_dump( + mode="json", + exclude_none=True) + ) + + # ── Main logic ─────────────────────────────────────────────── + + logger.debug("Processing request") + + return ValidationResponse( + valid=valid, + version=version, + ruleset=ruleset, + duration_ms=duration_ms, + summary=summary, + results=results, + validated_at=datetime.now(timezone.utc) + ) \ No newline at end of file diff --git a/microservices/csitOasValidationApi/csit_validation/main.py b/microservices/csitOasValidationApi/csit_validation/main.py new file mode 100644 index 00000000..33c48748 --- /dev/null +++ b/microservices/csitOasValidationApi/csit_validation/main.py @@ -0,0 +1,89 @@ +from fastapi.openapi.utils import get_openapi +from fastapi_offline import FastAPIOffline + +from csit_validation.apis.root_api import router as RootApiRouter +from csit_validation.apis.discovery_api import router as DiscoveryApiRouter +from csit_validation.apis.validation_api import 
router as ValidationApiRouter + +tags_metadata = [ + { + "name": "Discovery", + "description": "Browse available ruleset versions and files" + }, + { + "name": "Validation", + "description": "Perform validation of OpenAPI documents" + }, +] + +# Use FastAPIOffline instead of FastAPI to serve Swagger UI assets locally +# This avoids CDN dependencies that may be blocked by firewalls +app = FastAPIOffline( + title="OAS Spectral Validation API", + description=( + "A governance API for discovering and using BCGov Spectral rulesets " + "to validate OpenAPI Specification documents.\n" + "Repository: https://github.com/bcgov/csit-api-governance-spectral-style-guide" + ), + version="0.1.0", + openapi_tags=tags_metadata +) + +app.include_router(RootApiRouter) +app.include_router(DiscoveryApiRouter) +app.include_router(ValidationApiRouter) + +def custom_openapi(): + if app.openapi_schema: + return app.openapi_schema + + # Generate the full default OpenAPI schema + openapi_schema = get_openapi( + title=app.title, + version=app.version, + description=app.description, + routes=app.routes, + tags=app.openapi_tags, + servers=app.servers, + terms_of_service=app.terms_of_service, + contact=app.contact, + license_info=app.license_info, + ) + + # Remove internal/operational endpoints from public API spec + # Health endpoints are not exposed and are for internal use only + # Root endpoint is just a redirect to /docs + internal_paths = ["/", "/livez", "/readyz", "/health"] + for path in internal_paths: + openapi_schema.get("paths", {}).pop(path, None) + + # Remove any response entries that have "x-remove": true + for path_item in openapi_schema.get("paths", {}).values(): + for operation in path_item.values(): + if isinstance(operation, dict) and "responses" in operation: + responses = operation["responses"] + # Collect status codes to remove + codes_to_remove = [ + code for code, resp in responses.items() + if isinstance(resp, dict) and resp.get("x-remove", False) + ] + # Remove them + 
for code in codes_to_remove: + del responses[code] + + # Optional: clean up unused validation schemas if they are no longer referenced + components = openapi_schema.get("components", {}) + schemas = components.get("schemas", {}) + schemas.pop("HTTPValidationError", None) + schemas.pop("ValidationError", None) + + # Remove HealthResponse schema if health endpoint is removed + # (only remove if not used elsewhere) + if "/health" in internal_paths: + schemas.pop("HealthResponse", None) + schemas.pop("HealthStatus", None) + + app.openapi_schema = openapi_schema + return app.openapi_schema + +app.openapi = custom_openapi diff --git a/microservices/csitOasValidationApi/csit_validation/models/__init__.py b/microservices/csitOasValidationApi/csit_validation/models/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/microservices/csitOasValidationApi/csit_validation/models/health.py b/microservices/csitOasValidationApi/csit_validation/models/health.py new file mode 100644 index 00000000..ab58009b --- /dev/null +++ b/microservices/csitOasValidationApi/csit_validation/models/health.py @@ -0,0 +1,107 @@ +from __future__ import annotations +import pprint +import re # noqa: F401 +import json +from datetime import datetime +from enum import Enum + +from pydantic import BaseModel, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional + + +class HealthStatus(str, Enum): + """Standardized health status values.""" + HEALTHY = "healthy" + DEGRADED = "degraded" + UNHEALTHY = "unhealthy" + + +HEALTH_DETAILS_EXAMPLE = { + "spectral": { + "status": "healthy" + }, +} + +class HealthResponse(BaseModel): + """ + Detailed health check response for monitoring, dashboards, and humans. + + Returns 200 OK even in degraded state (use /readyz for traffic-blocking readiness). + """ + status: HealthStatus = Field( + description="Overall service health status. 
Use 'healthy' when fully operational, " + "'degraded' when partially impaired but still serving (reduced capacity/SLO), " + "'unhealthy' when critical functions are broken.", + json_schema_extra={"example": "healthy"} + ) + + message: StrictStr = Field( + description="Short human-readable summary of the current health state.", + json_schema_extra={"example": "All systems operational"} + ) + + timestamp: StrictStr = Field( + description="ISO 8601 UTC timestamp when this health check was performed.", + json_schema_extra={"example": "2026-01-26T18:15:42Z"} + ) + + components: Optional[Dict[str, Dict[str, Any]]] = Field( + default=None, + description="Optional breakdown of individual component health. Each key is a component name (e.g. 'spectral', 'cache'). ", + json_schema_extra={"example": HEALTH_DETAILS_EXAMPLE} + ) + + __properties: ClassVar[List[str]] = [ + "status", "message", "timestamp", "components" + ] + + model_config = { + "populate_by_name": True, + "validate_assignment": True, + "protected_namespaces": (), + "json_schema_extra": { + "examples": [ + { + "status": "healthy", + "message": "All systems operational", + "timestamp": "2026-01-26T18:15:42Z", + "components": { + "spectral": {"status": "healthy"} + } + }, + { + "status": "degraded", + "message": "Spectral not present", + "timestamp": "2026-01-26T18:20:00Z", + "components": HEALTH_DETAILS_EXAMPLE, + } + ] + } + } + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Self: + """Create an instance from a JSON string""" + return cls.model_validate_json(json_str) + + def to_dict(self) -> Dict[str, Any]: + """Return dictionary representation using alias, excluding unset fields""" + return
self.model_dump( + by_alias=True, + exclude_none=True, + ) + + @classmethod + def from_dict(cls, obj: Dict) -> Self: + """Create an instance from a dict""" + if obj is None: + return None + return cls.model_validate(obj) \ No newline at end of file diff --git a/microservices/csitOasValidationApi/csit_validation/models/result.py b/microservices/csitOasValidationApi/csit_validation/models/result.py new file mode 100644 index 00000000..28af609b --- /dev/null +++ b/microservices/csitOasValidationApi/csit_validation/models/result.py @@ -0,0 +1,115 @@ +from __future__ import annotations +import pprint +import json + +from enum import Enum + + +from pydantic import BaseModel, GetJsonSchemaHandler, Field, StrictStr, field_validator +from pydantic.json_schema import JsonSchemaValue +from typing import Any, ClassVar, Dict, List +try: + from typing import Self +except ImportError: + from typing_extensions import Self + +class Severity(str, Enum): + error = "error" + warn = "warn" + info = "info" + hint = "hint" + + @classmethod + def __get_pydantic_json_schema__( + cls, + core_schema, + handler: GetJsonSchemaHandler, + ) -> JsonSchemaValue: + # Let Pydantic generate the base schema: {"type": "string", "enum": ["error", "warn", ...]} + json_schema = handler(core_schema) + + # Add the example(s) at the schema level (sibling to "enum") + json_schema["examples"] = ["error"] + + return json_schema + +class Result(BaseModel): + code: StrictStr = Field(description="Rule code or identifier") + message: StrictStr = Field(description="Human-readable description of the issue") + severity: Severity = Field(description="Severity level of the result") + path: List[str] = Field(description="JSONPath-like location in the document where the issue occurred") + __properties: ClassVar[List[str]] = ["code", "message", "severity", "path"] + + @field_validator('severity') + def severity_validate_enum(cls, value): + """Validates the enum""" + if value not in ('error', 'warn', 'info', 'hint',): + 
raise ValueError("must be one of enum values ('error', 'warn', 'info', 'hint')") + return value + + model_config = { + "populate_by_name": True, + "validate_assignment": True, + "protected_namespaces": (), + "json_schema_extra": { + "examples": [ + { + "code": "operation-id-camel-case", + "message": "operationId should be camelCase (starts with lowercase letter, no separators)", + "path": ["paths", "/users/{id}", "get", "operationId"], + "severity": "error" + } + ] + } + } + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Self: + """Create an instance of Result from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + _dict = self.model_dump( + by_alias=True, + exclude={}, + exclude_none=True, + ) + # No need for special handling of path — it's just List[str] + return _dict + + @classmethod + def from_dict(cls, obj: Dict) -> Self: + """Create an instance of Result from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + # Simple direct mapping — path is List[str] + _obj = cls.model_validate({ + "code": obj.get("code"), + "message": obj.get("message"), + "severity": obj.get("severity"), + "path": obj.get("path") if obj.get("path") is not None else None, + }) + return _obj \ No newline at end of file diff --git a/microservices/csitOasValidationApi/csit_validation/models/ruleset_list.py b/microservices/csitOasValidationApi/csit_validation/models/ruleset_list.py new file mode 100644 index 00000000..371b26d0 --- /dev/null +++ b/microservices/csitOasValidationApi/csit_validation/models/ruleset_list.py @@ -0,0 +1,89 @@ +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + + + + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List +try: + from typing import Self +except ImportError: + from typing_extensions import Self + +class RulesetList(BaseModel): + """ + Sorted list of Rulesets + """ + version: StrictStr + rulesets: List[StrictStr] = Field(description="Relative paths to rules files") + __properties: ClassVar[List[str]] = ["version", "rulesets"] + + model_config = { + "populate_by_name": True, + "validate_assignment": True, + "protected_namespaces": (), + "json_schema_extra" : { + "examples": [ + { + "rulesets": [ + "basic-ruleset", + "strict-ruleset" + ], + "version": "v0.1.0" + } + ] + } + } + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model 
using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Self: + """Create an instance of RulesetList from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + _dict = self.model_dump( + by_alias=True, + exclude={ + }, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Dict) -> Self: + """Create an instance of RulesetList from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "version": obj.get("version"), + "rulesets": obj.get("rulesets") + }) + return _obj + + diff --git a/microservices/csitOasValidationApi/csit_validation/models/validation_response.py b/microservices/csitOasValidationApi/csit_validation/models/validation_response.py new file mode 100644 index 00000000..820e0064 --- /dev/null +++ b/microservices/csitOasValidationApi/csit_validation/models/validation_response.py @@ -0,0 +1,130 @@ +from __future__ import annotations +import pprint +import json + + + + +from datetime import datetime +from pydantic import BaseModel, Field, StrictBool, StrictInt, StrictStr +from typing import Any, ClassVar, Dict, List +from csit_validation.models.result import Result +from csit_validation.models.validation_response_summary import ValidationResponseSummary +try: + from typing import Self +except ImportError: + from typing_extensions import Self + +class ValidationResponse(BaseModel): + """ + ValidationResponse + """ + valid: StrictBool = 
Field(description="True if no errors were found (warnings may still exist)") + version: StrictStr = Field(description="The version (tag) used for validation") + ruleset: StrictStr = Field(description="The ruleset file path used for validation") + duration_ms: StrictInt = Field(description="Validation duration in milliseconds", alias="durationMs", title="Duration Ms") + summary: ValidationResponseSummary + results: List[Result] = Field(description="Detailed validation results") + validated_at: datetime = Field(description="Timestamp when validation completed", alias="validatedAt", title="Validated At") + __properties: ClassVar[List[str]] = ["valid", "version", "ruleset", "durationMs", "summary", "results", "validatedAt"] + + model_config = { + "populate_by_name": True, + "validate_assignment": True, + "protected_namespaces": (), + "json_schema_extra" : { + "examples": [ + { + "durationMs": 5000, + "ruleset": "basic-ruleset", + "results": [ + { + "code": "operation-id-camel-case", + "message": "operationId should be camelCase (starts with lowercase letter, no separators)", + "path": [ + "paths", + "/users/{id}", + "get", + "operationId" + ], + "severity": "error" + } + ], + "summary": { + "errors": 1, + "hints": 0, + "infos": 0, + "warnings": 0 + }, + "valid": False, + "validatedAt": "2025-06-17T14:35:22.147Z", + "version": "v0.1.0" + } + ] + } + } + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Self: + """Create an instance of ValidationResponse from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation 
of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + _dict = self.model_dump( + by_alias=True, + exclude={ + }, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of summary + if self.summary: + _dict['summary'] = self.summary.to_dict() + # override the default output from pydantic by calling `to_dict()` of each item in results (list) + _items = [] + if self.results: + for _item in self.results: + if _item: + _items.append(_item.to_dict()) + _dict['results'] = _items + return _dict + + @classmethod + def from_dict(cls, obj: Dict) -> Self: + """Create an instance of ValidationResponse from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "valid": obj.get("valid"), + "version": obj.get("version"), + "ruleset": obj.get("ruleset"), + "durationMs": obj.get("durationMs"), + "summary": ValidationResponseSummary.from_dict(obj.get("summary")) if obj.get("summary") is not None else None, + "results": [Result.from_dict(_item) for _item in obj.get("results")] if obj.get("results") is not None else None, + "validatedAt": obj.get("validatedAt") + }) + return _obj + + diff --git a/microservices/csitOasValidationApi/csit_validation/models/validation_response_summary.py b/microservices/csitOasValidationApi/csit_validation/models/validation_response_summary.py new file mode 100644 index 00000000..1a829cd3 --- /dev/null +++ b/microservices/csitOasValidationApi/csit_validation/models/validation_response_summary.py @@ -0,0 +1,92 @@ +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + + + + +from pydantic import BaseModel, ConfigDict, StrictInt +from typing import Any, ClassVar, Dict, 
List, Optional +try: + from typing import Self +except ImportError: + from typing_extensions import Self + +class ValidationResponseSummary(BaseModel): + """ + Count of results by severity + """ # noqa: E501 + errors: StrictInt + warnings: StrictInt + infos: StrictInt + hints: StrictInt + __properties: ClassVar[List[str]] = ["errors", "warnings", "infos", "hints"] + + model_config = { + "populate_by_name": True, + "validate_assignment": True, + "protected_namespaces": (), + "json_schema_extra" : { + "examples": [ + { + "errors": 1, + "hints": 0, + "infos": 0, + "warnings": 0 + } + ] + } + } + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Self: + """Create an instance of ValidationResponseSummary from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + _dict = self.model_dump( + by_alias=True, + exclude={ + }, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Dict) -> Self: + """Create an instance of ValidationResponseSummary from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "errors": obj.get("errors"), + "warnings": obj.get("warnings"), + "infos": obj.get("infos"), + "hints": obj.get("hints") + }) + return _obj + + diff --git a/microservices/csitOasValidationApi/csit_validation/models/version_list.py b/microservices/csitOasValidationApi/csit_validation/models/version_list.py new file mode 100644 index 00000000..ebeeb910 --- /dev/null +++ b/microservices/csitOasValidationApi/csit_validation/models/version_list.py @@ -0,0 +1,86 @@ +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + + + + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List +try: + from typing import Self +except ImportError: + from typing_extensions import Self + +class VersionList(BaseModel): + """ + Sorted list of version names (e.g., 'v1.0.0', 'v2.3.1') + """ + versions: List[StrictStr] = Field(description="The list of version names") + __properties: ClassVar[List[str]] = ["versions"] + + model_config = { + "populate_by_name": True, + "validate_assignment": True, + "protected_namespaces": (), + "json_schema_extra" : { + "examples": [ + { + "versions": [ + "v1.0.0", + "v2.3.1" + ] + } + ] + } + } + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> 
Self: + """Create an instance of VersionList from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + _dict = self.model_dump( + by_alias=True, + exclude={ + }, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Dict) -> Self: + """Create an instance of VersionList from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "versions": obj.get("versions") + }) + return _obj + + diff --git a/microservices/csitOasValidationApi/csit_validation/services/cached_rulesets_service.py b/microservices/csitOasValidationApi/csit_validation/services/cached_rulesets_service.py new file mode 100644 index 00000000..7dad2d43 --- /dev/null +++ b/microservices/csitOasValidationApi/csit_validation/services/cached_rulesets_service.py @@ -0,0 +1,141 @@ +import re +import logging +from pathlib import Path +from functools import cached_property +from typing import Optional, Tuple, Dict +from csit_validation.util.log_decorator import log_entry_exit + +logger = logging.getLogger(__name__) + + +class CachedRulesetsService: + """ + The CachedRulesetsService is a utility class that retrieves published versions of the CSIT API Governance Rules + and the rulesets available in each version from a cached version of the CSIT API Governance Rules Git repository. + + The CSIT API Governance Rules are maintained in the https://github.com/bcgov/csit-api-governance-spectral-style-guide + repository and the versions are identified by commit tags in the repository. 
+ + The service determines the versions by obtaining a list of the repository's tags and filtering for tags that follow the format + ruleset-v<version> where <version> is a valid semantic version. + + Example tags that will be identified as versions are: + ruleset-v1.0.0 + ruleset-v1.2.3 + ruleset-v1.3.0-Beta1 + ruleset-v1.1.0 + + Tags that are not prefixed with 'ruleset-' or do not follow semantic versions will be ignored. For example: + ruleset-junk-tag + junk-tag + + + Rulesets in each version are identified as yaml or yml files within the 'spectral' directory in the root of the repository. + + For example: + spectral/basic-ruleset.yaml + spectral/sdx/ruleset.yml + """ + + @log_entry_exit(logger) + def __init__( + self, + github_tag_cache_path: Path, + version_tag_prefix: str, + ruleset_dir: str, + ): + + self.github_tag_cache_path = github_tag_cache_path + self.version_tag_prefix = version_tag_prefix + self.ruleset_dir = ruleset_dir + self.rules_file_extensions = [".yml", ".yaml"] + + self._ruleset_files_cache: Dict[str, Dict[str, str]] = {} + + @cached_property + @log_entry_exit(logger) + def get_valid_version_tags(self) -> Dict[str, str]: + """ + Returns map: version → full tag name + Discovers tags from local filesystem cache only once (cached_property) + """ + tag_dir = Path(self.github_tag_cache_path()) / "tags" + + if not tag_dir.is_dir(): + logger.warning(f"Tag cache directory not found: {tag_dir}") + return {} + + version_to_tag_map: Dict[str, str] = {} + semver_pattern = re.compile(r'^v\d+\.\d+\.\d+(-[\w\-.]+)?$') + + # Look for all directories that could represent tags + for entry in tag_dir.iterdir(): + if not entry.is_dir(): + continue + + tag_name = entry.name + + # Skip tags that don't match the expected prefix + if not tag_name.startswith(self.version_tag_prefix): + continue + + version_part = tag_name[len(self.version_tag_prefix):] + + # Validate semantic version pattern + if not semver_pattern.match(version_part): + continue + + # If we got here → valid version
tag + version_to_tag_map[version_part] = tag_name + + return version_to_tag_map + + @log_entry_exit(logger) + async def get_ruleset_files_in_tag(self, tag: str) -> Dict[str, str]: + """ + Return map of ruleset identifier → full path + Key = filename without extension (e.g. "basic-ruleset") + """ + if tag in self._ruleset_files_cache: + return self._ruleset_files_cache[tag] + + tag_dir = Path(self.github_tag_cache_path()) / "tags" / tag + + if not tag_dir.is_dir(): + logger.warning(f"Tag directory not found in cache: {tag_dir}") + return {} + + ruleset_files_map: Dict[str, str] = {} + prefix_len = len(self.ruleset_dir) + 1 # +1 for '/' + + # Find all matching files recursively + for file_path in tag_dir.rglob("*"): + if not file_path.is_file(): + continue + + # Get path relative to the tag root + relative_to_tag = file_path.relative_to(tag_dir).as_posix() + + if ( + relative_to_tag.startswith(f"{self.ruleset_dir}/") + and any(relative_to_tag.lower().endswith(ext.strip()) for ext in self.rules_file_extensions) + ): + relative_path = relative_to_tag[prefix_len:] + last_dot = relative_path.rfind('.') + key = relative_path[:last_dot] if last_dot != -1 else relative_path + # Keep the original repo-style path (not filesystem absolute) + ruleset_files_map[key] = relative_to_tag + + logger.debug(f"Found {len(ruleset_files_map)} ruleset files in tag {tag}") + self._ruleset_files_cache[tag] = ruleset_files_map + return ruleset_files_map + + @log_entry_exit(logger) + async def get_ruleset_tuple(self, tag: str, ruleset: str) -> Optional[Tuple[str, str]]: + """Returns (ruleset_name, full_path_in_repo) or None""" + + rulesets = await self.get_ruleset_files_in_tag(tag) + if ruleset in rulesets: + return (ruleset, rulesets[ruleset]) + + return None \ No newline at end of file diff --git a/microservices/csitOasValidationApi/csit_validation/util/log_decorator.py b/microservices/csitOasValidationApi/csit_validation/util/log_decorator.py new file mode 100644 index 00000000..fa1aa52b 
--- /dev/null +++ b/microservices/csitOasValidationApi/csit_validation/util/log_decorator.py @@ -0,0 +1,57 @@ +from functools import wraps +import logging +import sys +import inspect +from typing import Callable, Any, Optional + + +def log_entry_exit(logger: Optional[logging.Logger] = None) -> Callable: + """ + Decorator that logs function entry/exit for both sync and async functions. + + Args: + logger: Optional logger instance to use. + If None, uses a logger named after the current module (__name__). + """ + # Default to module-level logger if none provided + if logger is None: + logger = logging.getLogger(__name__) + + def decorator(func: Callable) -> Callable: + @wraps(func) + async def async_wrapper(*args, **kwargs) -> Any: + func_name = func.__name__ + logger.info(f"<{func_name}") + + try: + result = await func(*args, **kwargs) + logger.info(f">{func_name} {result}") + return result + + except Exception: + exc_type = type(sys.exception()).__name__ + logger.error(f">{func_name} with exception: {exc_type}") + raise + + @wraps(func) + def sync_wrapper(*args, **kwargs) -> Any: + func_name = func.__name__ + logger.info(f"<{func_name}") + + try: + result = func(*args, **kwargs) + logger.info(f">{func_name} {result}") + return result + + except Exception: + exc_type = type(sys.exception()).__name__ + logger.error(f">{func_name} with exception: {exc_type}") + raise + + # Choose appropriate wrapper based on function type + if inspect.iscoroutinefunction(func): + return async_wrapper + else: + return sync_wrapper + + return decorator \ No newline at end of file diff --git a/microservices/csitOasValidationApi/entrypoint.sh b/microservices/csitOasValidationApi/entrypoint.sh new file mode 100755 index 00000000..a7ebd885 --- /dev/null +++ b/microservices/csitOasValidationApi/entrypoint.sh @@ -0,0 +1,3 @@ +#!/bin/sh + +exec uvicorn csit_validation.main:app --host 0.0.0.0 --port 8080 \ No newline at end of file diff --git a/microservices/csitOasValidationApi/poetry.lock 
b/microservices/csitOasValidationApi/poetry.lock new file mode 100644 index 00000000..c3b77b1b --- /dev/null +++ b/microservices/csitOasValidationApi/poetry.lock @@ -0,0 +1,1080 @@ +# This file is automatically @generated by Poetry 2.2.1 and should not be changed by hand. + +[[package]] +name = "annotated-doc" +version = "0.0.4" +description = "Document parameters, class attributes, return types, and variables inline, with Annotated." +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "annotated_doc-0.0.4-py3-none-any.whl", hash = "sha256:571ac1dc6991c450b25a9c2d84a3705e2ae7a53467b5d111c24fa8baabbed320"}, + {file = "annotated_doc-0.0.4.tar.gz", hash = "sha256:fbcda96e87e9c92ad167c2e53839e57503ecfda18804ea28102353485033faa4"}, +] + +[[package]] +name = "annotated-types" +version = "0.7.0" +description = "Reusable constraint types to use with typing.Annotated" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"}, + {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, +] + +[[package]] +name = "anyio" +version = "4.12.1" +description = "High-level concurrency and networking framework on top of asyncio or Trio" +optional = false +python-versions = ">=3.9" +groups = ["main", "dev"] +files = [ + {file = "anyio-4.12.1-py3-none-any.whl", hash = "sha256:d405828884fc140aa80a3c667b8beed277f1dfedec42ba031bd6ac3db606ab6c"}, + {file = "anyio-4.12.1.tar.gz", hash = "sha256:41cfcc3a4c85d3f05c932da7c26d0201ac36f72abd4435ba90d0464a3ffed703"}, +] + +[package.dependencies] +idna = ">=2.8" + +[package.extras] +trio = ["trio (>=0.31.0) ; python_version < \"3.10\"", "trio (>=0.32.0) ; python_version >= \"3.10\""] + +[[package]] +name = "certifi" +version = "2026.1.4" +description = "Python package for providing Mozilla's CA Bundle." 
+optional = false +python-versions = ">=3.7" +groups = ["dev"] +files = [ + {file = "certifi-2026.1.4-py3-none-any.whl", hash = "sha256:9943707519e4add1115f44c2bc244f782c0249876bf51b6599fee1ffbedd685c"}, + {file = "certifi-2026.1.4.tar.gz", hash = "sha256:ac726dd470482006e014ad384921ed6438c457018f4b3d204aea4281258b2120"}, +] + +[[package]] +name = "click" +version = "8.3.1" +description = "Composable command line interface toolkit" +optional = false +python-versions = ">=3.10" +groups = ["main"] +files = [ + {file = "click-8.3.1-py3-none-any.whl", hash = "sha256:981153a64e25f12d547d3426c367a4857371575ee7ad18df2a6183ab0545b2a6"}, + {file = "click-8.3.1.tar.gz", hash = "sha256:12ff4785d337a1bb490bb7e9c2b1ee5da3112e94a8622f26a6c77f5d2fc6842a"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +groups = ["main", "dev"] +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] +markers = {main = "platform_system == \"Windows\" or sys_platform == \"win32\"", dev = "sys_platform == \"win32\""} + +[[package]] +name = "coverage" +version = "7.13.2" +description = "Code coverage measurement for Python" +optional = false +python-versions = ">=3.10" +groups = ["dev"] +files = [ + {file = "coverage-7.13.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f4af3b01763909f477ea17c962e2cca8f39b350a4e46e3a30838b2c12e31b81b"}, + {file = "coverage-7.13.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:36393bd2841fa0b59498f75466ee9bdec4f770d3254f031f23e8fd8e140ffdd2"}, + {file = 
"coverage-7.13.2-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:9cc7573518b7e2186bd229b1a0fe24a807273798832c27032c4510f47ffdb896"}, + {file = "coverage-7.13.2-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:ca9566769b69a5e216a4e176d54b9df88f29d750c5b78dbb899e379b4e14b30c"}, + {file = "coverage-7.13.2-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9c9bdea644e94fd66d75a6f7e9a97bb822371e1fe7eadae2cacd50fcbc28e4dc"}, + {file = "coverage-7.13.2-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:5bd447332ec4f45838c1ad42268ce21ca87c40deb86eabd59888859b66be22a5"}, + {file = "coverage-7.13.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:7c79ad5c28a16a1277e1187cf83ea8dafdcc689a784228a7d390f19776db7c31"}, + {file = "coverage-7.13.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:76e06ccacd1fb6ada5d076ed98a8c6f66e2e6acd3df02819e2ee29fd637b76ad"}, + {file = "coverage-7.13.2-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:49d49e9a5e9f4dc3d3dac95278a020afa6d6bdd41f63608a76fa05a719d5b66f"}, + {file = "coverage-7.13.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ed2bce0e7bfa53f7b0b01c722da289ef6ad4c18ebd52b1f93704c21f116360c8"}, + {file = "coverage-7.13.2-cp310-cp310-win32.whl", hash = "sha256:1574983178b35b9af4db4a9f7328a18a14a0a0ce76ffaa1c1bacb4cc82089a7c"}, + {file = "coverage-7.13.2-cp310-cp310-win_amd64.whl", hash = "sha256:a360a8baeb038928ceb996f5623a4cd508728f8f13e08d4e96ce161702f3dd99"}, + {file = "coverage-7.13.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:060ebf6f2c51aff5ba38e1f43a2095e087389b1c69d559fde6049a4b0001320e"}, + {file = "coverage-7.13.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c1ea8ca9db5e7469cd364552985e15911548ea5b69c48a17291f0cac70484b2e"}, + {file = "coverage-7.13.2-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = 
"sha256:b780090d15fd58f07cf2011943e25a5f0c1c894384b13a216b6c86c8a8a7c508"}, + {file = "coverage-7.13.2-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:88a800258d83acb803c38175b4495d293656d5fac48659c953c18e5f539a274b"}, + {file = "coverage-7.13.2-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6326e18e9a553e674d948536a04a80d850a5eeefe2aae2e6d7cf05d54046c01b"}, + {file = "coverage-7.13.2-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:59562de3f797979e1ff07c587e2ac36ba60ca59d16c211eceaa579c266c5022f"}, + {file = "coverage-7.13.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:27ba1ed6f66b0e2d61bfa78874dffd4f8c3a12f8e2b5410e515ab345ba7bc9c3"}, + {file = "coverage-7.13.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8be48da4d47cc68754ce643ea50b3234557cbefe47c2f120495e7bd0a2756f2b"}, + {file = "coverage-7.13.2-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:2a47a4223d3361b91176aedd9d4e05844ca67d7188456227b6bf5e436630c9a1"}, + {file = "coverage-7.13.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:c6f141b468740197d6bd38f2b26ade124363228cc3f9858bd9924ab059e00059"}, + {file = "coverage-7.13.2-cp311-cp311-win32.whl", hash = "sha256:89567798404af067604246e01a49ef907d112edf2b75ef814b1364d5ce267031"}, + {file = "coverage-7.13.2-cp311-cp311-win_amd64.whl", hash = "sha256:21dd57941804ae2ac7e921771a5e21bbf9aabec317a041d164853ad0a96ce31e"}, + {file = "coverage-7.13.2-cp311-cp311-win_arm64.whl", hash = "sha256:10758e0586c134a0bafa28f2d37dd2cdb5e4a90de25c0fc0c77dabbad46eca28"}, + {file = "coverage-7.13.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f106b2af193f965d0d3234f3f83fc35278c7fb935dfbde56ae2da3dd2c03b84d"}, + {file = "coverage-7.13.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:78f45d21dc4d5d6bd29323f0320089ef7eae16e4bef712dff79d184fa7330af3"}, + {file = 
"coverage-7.13.2-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:fae91dfecd816444c74531a9c3d6ded17a504767e97aa674d44f638107265b99"}, + {file = "coverage-7.13.2-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:264657171406c114787b441484de620e03d8f7202f113d62fcd3d9688baa3e6f"}, + {file = "coverage-7.13.2-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ae47d8dcd3ded0155afbb59c62bd8ab07ea0fd4902e1c40567439e6db9dcaf2f"}, + {file = "coverage-7.13.2-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:8a0b33e9fd838220b007ce8f299114d406c1e8edb21336af4c97a26ecfd185aa"}, + {file = "coverage-7.13.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:b3becbea7f3ce9a2d4d430f223ec15888e4deb31395840a79e916368d6004cce"}, + {file = "coverage-7.13.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:f819c727a6e6eeb8711e4ce63d78c620f69630a2e9d53bc95ca5379f57b6ba94"}, + {file = "coverage-7.13.2-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:4f7b71757a3ab19f7ba286e04c181004c1d61be921795ee8ba6970fd0ec91da5"}, + {file = "coverage-7.13.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b7fc50d2afd2e6b4f6f2f403b70103d280a8e0cb35320cbbe6debcda02a1030b"}, + {file = "coverage-7.13.2-cp312-cp312-win32.whl", hash = "sha256:292250282cf9bcf206b543d7608bda17ca6fc151f4cbae949fc7e115112fbd41"}, + {file = "coverage-7.13.2-cp312-cp312-win_amd64.whl", hash = "sha256:eeea10169fac01549a7921d27a3e517194ae254b542102267bef7a93ed38c40e"}, + {file = "coverage-7.13.2-cp312-cp312-win_arm64.whl", hash = "sha256:2a5b567f0b635b592c917f96b9a9cb3dbd4c320d03f4bf94e9084e494f2e8894"}, + {file = "coverage-7.13.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ed75de7d1217cf3b99365d110975f83af0528c849ef5180a12fd91b5064df9d6"}, + {file = "coverage-7.13.2-cp313-cp313-macosx_11_0_arm64.whl", hash = 
"sha256:97e596de8fa9bada4d88fde64a3f4d37f1b6131e4faa32bad7808abc79887ddc"}, + {file = "coverage-7.13.2-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:68c86173562ed4413345410c9480a8d64864ac5e54a5cda236748031e094229f"}, + {file = "coverage-7.13.2-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:7be4d613638d678b2b3773b8f687537b284d7074695a43fe2fbbfc0e31ceaed1"}, + {file = "coverage-7.13.2-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d7f63ce526a96acd0e16c4af8b50b64334239550402fb1607ce6a584a6d62ce9"}, + {file = "coverage-7.13.2-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:406821f37f864f968e29ac14c3fccae0fec9fdeba48327f0341decf4daf92d7c"}, + {file = "coverage-7.13.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ee68e5a4e3e5443623406b905db447dceddffee0dceb39f4e0cd9ec2a35004b5"}, + {file = "coverage-7.13.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2ee0e58cca0c17dd9c6c1cdde02bb705c7b3fbfa5f3b0b5afeda20d4ebff8ef4"}, + {file = "coverage-7.13.2-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:6e5bbb5018bf76a56aabdb64246b5288d5ae1b7d0dd4d0534fe86df2c2992d1c"}, + {file = "coverage-7.13.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a55516c68ef3e08e134e818d5e308ffa6b1337cc8b092b69b24287bf07d38e31"}, + {file = "coverage-7.13.2-cp313-cp313-win32.whl", hash = "sha256:5b20211c47a8abf4abc3319d8ce2464864fa9f30c5fcaf958a3eed92f4f1fef8"}, + {file = "coverage-7.13.2-cp313-cp313-win_amd64.whl", hash = "sha256:14f500232e521201cf031549fb1ebdfc0a40f401cf519157f76c397e586c3beb"}, + {file = "coverage-7.13.2-cp313-cp313-win_arm64.whl", hash = "sha256:9779310cb5a9778a60c899f075a8514c89fa6d10131445c2207fc893e0b14557"}, + {file = "coverage-7.13.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:e64fa5a1e41ce5df6b547cbc3d3699381c9e2c2c369c67837e716ed0f549d48e"}, + {file = 
"coverage-7.13.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:b01899e82a04085b6561eb233fd688474f57455e8ad35cd82286463ba06332b7"}, + {file = "coverage-7.13.2-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:838943bea48be0e2768b0cf7819544cdedc1bbb2f28427eabb6eb8c9eb2285d3"}, + {file = "coverage-7.13.2-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:93d1d25ec2b27e90bcfef7012992d1f5121b51161b8bffcda756a816cf13c2c3"}, + {file = "coverage-7.13.2-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:93b57142f9621b0d12349c43fc7741fe578e4bc914c1e5a54142856cfc0bf421"}, + {file = "coverage-7.13.2-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:f06799ae1bdfff7ccb8665d75f8291c69110ba9585253de254688aa8a1ccc6c5"}, + {file = "coverage-7.13.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:7f9405ab4f81d490811b1d91c7a20361135a2df4c170e7f0b747a794da5b7f23"}, + {file = "coverage-7.13.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:f9ab1d5b86f8fbc97a5b3cd6280a3fd85fef3b028689d8a2c00918f0d82c728c"}, + {file = "coverage-7.13.2-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:f674f59712d67e841525b99e5e2b595250e39b529c3bda14764e4f625a3fa01f"}, + {file = "coverage-7.13.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:c6cadac7b8ace1ba9144feb1ae3cb787a6065ba6d23ffc59a934b16406c26573"}, + {file = "coverage-7.13.2-cp313-cp313t-win32.whl", hash = "sha256:14ae4146465f8e6e6253eba0cccd57423e598a4cb925958b240c805300918343"}, + {file = "coverage-7.13.2-cp313-cp313t-win_amd64.whl", hash = "sha256:9074896edd705a05769e3de0eac0a8388484b503b68863dd06d5e473f874fd47"}, + {file = "coverage-7.13.2-cp313-cp313t-win_arm64.whl", hash = "sha256:69e526e14f3f854eda573d3cf40cffd29a1a91c684743d904c33dbdcd0e0f3e7"}, + {file = "coverage-7.13.2-cp314-cp314-macosx_10_15_x86_64.whl", hash = 
"sha256:387a825f43d680e7310e6f325b2167dd093bc8ffd933b83e9aa0983cf6e0a2ef"}, + {file = "coverage-7.13.2-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:f0d7fea9d8e5d778cd5a9e8fc38308ad688f02040e883cdc13311ef2748cb40f"}, + {file = "coverage-7.13.2-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:e080afb413be106c95c4ee96b4fffdc9e2fa56a8bbf90b5c0918e5c4449412f5"}, + {file = "coverage-7.13.2-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:a7fc042ba3c7ce25b8a9f097eb0f32a5ce1ccdb639d9eec114e26def98e1f8a4"}, + {file = "coverage-7.13.2-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d0ba505e021557f7f8173ee8cd6b926373d8653e5ff7581ae2efce1b11ef4c27"}, + {file = "coverage-7.13.2-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:7de326f80e3451bd5cc7239ab46c73ddb658fe0b7649476bc7413572d36cd548"}, + {file = "coverage-7.13.2-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:abaea04f1e7e34841d4a7b343904a3f59481f62f9df39e2cd399d69a187a9660"}, + {file = "coverage-7.13.2-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:9f93959ee0c604bccd8e0697be21de0887b1f73efcc3aa73a3ec0fd13feace92"}, + {file = "coverage-7.13.2-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:13fe81ead04e34e105bf1b3c9f9cdf32ce31736ee5d90a8d2de02b9d3e1bcb82"}, + {file = "coverage-7.13.2-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:d6d16b0f71120e365741bca2cb473ca6fe38930bc5431c5e850ba949f708f892"}, + {file = "coverage-7.13.2-cp314-cp314-win32.whl", hash = "sha256:9b2f4714bb7d99ba3790ee095b3b4ac94767e1347fe424278a0b10acb3ff04fe"}, + {file = "coverage-7.13.2-cp314-cp314-win_amd64.whl", hash = "sha256:e4121a90823a063d717a96e0a0529c727fb31ea889369a0ee3ec00ed99bf6859"}, + {file = "coverage-7.13.2-cp314-cp314-win_arm64.whl", hash = "sha256:6873f0271b4a15a33e7590f338d823f6f66f91ed147a03938d7ce26efd04eee6"}, + {file = 
"coverage-7.13.2-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:f61d349f5b7cd95c34017f1927ee379bfbe9884300d74e07cf630ccf7a610c1b"}, + {file = "coverage-7.13.2-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:a43d34ce714f4ca674c0d90beb760eb05aad906f2c47580ccee9da8fe8bfb417"}, + {file = "coverage-7.13.2-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:bff1b04cb9d4900ce5c56c4942f047dc7efe57e2608cb7c3c8936e9970ccdbee"}, + {file = "coverage-7.13.2-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:6ae99e4560963ad8e163e819e5d77d413d331fd00566c1e0856aa252303552c1"}, + {file = "coverage-7.13.2-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e79a8c7d461820257d9aa43716c4efc55366d7b292e46b5b37165be1d377405d"}, + {file = "coverage-7.13.2-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:060ee84f6a769d40c492711911a76811b4befb6fba50abb450371abb720f5bd6"}, + {file = "coverage-7.13.2-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:3bca209d001fd03ea2d978f8a4985093240a355c93078aee3f799852c23f561a"}, + {file = "coverage-7.13.2-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:6b8092aa38d72f091db61ef83cb66076f18f02da3e1a75039a4f218629600e04"}, + {file = "coverage-7.13.2-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:4a3158dc2dcce5200d91ec28cd315c999eebff355437d2765840555d765a6e5f"}, + {file = "coverage-7.13.2-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:3973f353b2d70bd9796cc12f532a05945232ccae966456c8ed7034cb96bbfd6f"}, + {file = "coverage-7.13.2-cp314-cp314t-win32.whl", hash = "sha256:79f6506a678a59d4ded048dc72f1859ebede8ec2b9a2d509ebe161f01c2879d3"}, + {file = "coverage-7.13.2-cp314-cp314t-win_amd64.whl", hash = "sha256:196bfeabdccc5a020a57d5a368c681e3a6ceb0447d153aeccc1ab4d70a5032ba"}, + {file = "coverage-7.13.2-cp314-cp314t-win_arm64.whl", hash = 
"sha256:69269ab58783e090bfbf5b916ab3d188126e22d6070bbfc93098fdd474ef937c"}, + {file = "coverage-7.13.2-py3-none-any.whl", hash = "sha256:40ce1ea1e25125556d8e76bd0b61500839a07944cc287ac21d5626f3e620cad5"}, + {file = "coverage-7.13.2.tar.gz", hash = "sha256:044c6951ec37146b72a50cc81ef02217d27d4c3640efd2640311393cbbf143d3"}, +] + +[package.extras] +toml = ["tomli ; python_full_version <= \"3.11.0a6\""] + +[[package]] +name = "fastapi" +version = "0.128.0" +description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "fastapi-0.128.0-py3-none-any.whl", hash = "sha256:aebd93f9716ee3b4f4fcfe13ffb7cf308d99c9f3ab5622d8877441072561582d"}, + {file = "fastapi-0.128.0.tar.gz", hash = "sha256:1cc179e1cef10a6be60ffe429f79b829dce99d8de32d7acb7e6c8dfdf7f2645a"}, +] + +[package.dependencies] +annotated-doc = ">=0.0.2" +pydantic = ">=2.7.0" +starlette = ">=0.40.0,<0.51.0" +typing-extensions = ">=4.8.0" + +[package.extras] +all = ["email-validator (>=2.0.0)", "fastapi-cli[standard] (>=0.0.8)", "httpx (>=0.23.0,<1.0.0)", "itsdangerous (>=1.1.0)", "jinja2 (>=3.1.5)", "orjson (>=3.2.1)", "pydantic-extra-types (>=2.0.0)", "pydantic-settings (>=2.0.0)", "python-multipart (>=0.0.18)", "pyyaml (>=5.3.1)", "ujson (>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0)", "uvicorn[standard] (>=0.12.0)"] +standard = ["email-validator (>=2.0.0)", "fastapi-cli[standard] (>=0.0.8)", "httpx (>=0.23.0,<1.0.0)", "jinja2 (>=3.1.5)", "pydantic-extra-types (>=2.0.0)", "pydantic-settings (>=2.0.0)", "python-multipart (>=0.0.18)", "uvicorn[standard] (>=0.12.0)"] +standard-no-fastapi-cloud-cli = ["email-validator (>=2.0.0)", "fastapi-cli[standard-no-fastapi-cloud-cli] (>=0.0.8)", "httpx (>=0.23.0,<1.0.0)", "jinja2 (>=3.1.5)", "pydantic-extra-types (>=2.0.0)", "pydantic-settings (>=2.0.0)", "python-multipart (>=0.0.18)", "uvicorn[standard] (>=0.12.0)"] + +[[package]] +name = 
"fastapi-offline" +version = "1.7.6" +description = "FastAPI without reliance on CDNs for docs" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "fastapi_offline-1.7.6-py3-none-any.whl", hash = "sha256:24d6851b5a94c50f669594b7ab9d1bbe3a4c0a53c4f4e9ce47798ff4591790d2"}, + {file = "fastapi_offline-1.7.6.tar.gz", hash = "sha256:c84d08584faa646932951b493106992caa79b838e454f14dd410cef9a1a5e07d"}, +] + +[package.dependencies] +fastapi = ">=0.99.0" + +[package.extras] +test = ["pytest", "requests", "starlette[full]"] + +[[package]] +name = "h11" +version = "0.16.0" +description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" +optional = false +python-versions = ">=3.8" +groups = ["main", "dev"] +files = [ + {file = "h11-0.16.0-py3-none-any.whl", hash = "sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86"}, + {file = "h11-0.16.0.tar.gz", hash = "sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1"}, +] + +[[package]] +name = "httpcore" +version = "1.0.9" +description = "A minimal low-level HTTP client." +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "httpcore-1.0.9-py3-none-any.whl", hash = "sha256:2d400746a40668fc9dec9810239072b40b4484b640a8c38fd654a024c7a1bf55"}, + {file = "httpcore-1.0.9.tar.gz", hash = "sha256:6e34463af53fd2ab5d807f399a9b45ea31c3dfa2276f15a2c3f00afff6e176e8"}, +] + +[package.dependencies] +certifi = "*" +h11 = ">=0.16" + +[package.extras] +asyncio = ["anyio (>=4.0,<5.0)"] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (==1.*)"] +trio = ["trio (>=0.22.0,<1.0)"] + +[[package]] +name = "httptools" +version = "0.7.1" +description = "A collection of framework independent HTTP protocol utils." 
+optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "httptools-0.7.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:11d01b0ff1fe02c4c32d60af61a4d613b74fad069e47e06e9067758c01e9ac78"}, + {file = "httptools-0.7.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:84d86c1e5afdc479a6fdabf570be0d3eb791df0ae727e8dbc0259ed1249998d4"}, + {file = "httptools-0.7.1-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:c8c751014e13d88d2be5f5f14fc8b89612fcfa92a9cc480f2bc1598357a23a05"}, + {file = "httptools-0.7.1-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:654968cb6b6c77e37b832a9be3d3ecabb243bbe7a0b8f65fbc5b6b04c8fcabed"}, + {file = "httptools-0.7.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:b580968316348b474b020edf3988eecd5d6eec4634ee6561e72ae3a2a0e00a8a"}, + {file = "httptools-0.7.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:d496e2f5245319da9d764296e86c5bb6fcf0cf7a8806d3d000717a889c8c0b7b"}, + {file = "httptools-0.7.1-cp310-cp310-win_amd64.whl", hash = "sha256:cbf8317bfccf0fed3b5680c559d3459cccf1abe9039bfa159e62e391c7270568"}, + {file = "httptools-0.7.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:474d3b7ab469fefcca3697a10d11a32ee2b9573250206ba1e50d5980910da657"}, + {file = "httptools-0.7.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a3c3b7366bb6c7b96bd72d0dbe7f7d5eead261361f013be5f6d9590465ea1c70"}, + {file = "httptools-0.7.1-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:379b479408b8747f47f3b253326183d7c009a3936518cdb70db58cffd369d9df"}, + {file = "httptools-0.7.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:cad6b591a682dcc6cf1397c3900527f9affef1e55a06c4547264796bbd17cf5e"}, + {file = "httptools-0.7.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = 
"sha256:eb844698d11433d2139bbeeb56499102143beb582bd6c194e3ba69c22f25c274"}, + {file = "httptools-0.7.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f65744d7a8bdb4bda5e1fa23e4ba16832860606fcc09d674d56e425e991539ec"}, + {file = "httptools-0.7.1-cp311-cp311-win_amd64.whl", hash = "sha256:135fbe974b3718eada677229312e97f3b31f8a9c8ffa3ae6f565bf808d5b6bcb"}, + {file = "httptools-0.7.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:38e0c83a2ea9746ebbd643bdfb521b9aa4a91703e2cd705c20443405d2fd16a5"}, + {file = "httptools-0.7.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f25bbaf1235e27704f1a7b86cd3304eabc04f569c828101d94a0e605ef7205a5"}, + {file = "httptools-0.7.1-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:2c15f37ef679ab9ecc06bfc4e6e8628c32a8e4b305459de7cf6785acd57e4d03"}, + {file = "httptools-0.7.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7fe6e96090df46b36ccfaf746f03034e5ab723162bc51b0a4cf58305324036f2"}, + {file = "httptools-0.7.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:f72fdbae2dbc6e68b8239defb48e6a5937b12218e6ffc2c7846cc37befa84362"}, + {file = "httptools-0.7.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e99c7b90a29fd82fea9ef57943d501a16f3404d7b9ee81799d41639bdaae412c"}, + {file = "httptools-0.7.1-cp312-cp312-win_amd64.whl", hash = "sha256:3e14f530fefa7499334a79b0cf7e7cd2992870eb893526fb097d51b4f2d0f321"}, + {file = "httptools-0.7.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:6babce6cfa2a99545c60bfef8bee0cc0545413cb0018f617c8059a30ad985de3"}, + {file = "httptools-0.7.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:601b7628de7504077dd3dcb3791c6b8694bbd967148a6d1f01806509254fb1ca"}, + {file = "httptools-0.7.1-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:04c6c0e6c5fb0739c5b8a9eb046d298650a0ff38cf42537fc372b28dc7e4472c"}, + {file = 
"httptools-0.7.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:69d4f9705c405ae3ee83d6a12283dc9feba8cc6aaec671b412917e644ab4fa66"}, + {file = "httptools-0.7.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:44c8f4347d4b31269c8a9205d8a5ee2df5322b09bbbd30f8f862185bb6b05346"}, + {file = "httptools-0.7.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:465275d76db4d554918aba40bf1cbebe324670f3dfc979eaffaa5d108e2ed650"}, + {file = "httptools-0.7.1-cp313-cp313-win_amd64.whl", hash = "sha256:322d00c2068d125bd570f7bf78b2d367dad02b919d8581d7476d8b75b294e3e6"}, + {file = "httptools-0.7.1-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:c08fe65728b8d70b6923ce31e3956f859d5e1e8548e6f22ec520a962c6757270"}, + {file = "httptools-0.7.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:7aea2e3c3953521c3c51106ee11487a910d45586e351202474d45472db7d72d3"}, + {file = "httptools-0.7.1-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:0e68b8582f4ea9166be62926077a3334064d422cf08ab87d8b74664f8e9058e1"}, + {file = "httptools-0.7.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:df091cf961a3be783d6aebae963cc9b71e00d57fa6f149025075217bc6a55a7b"}, + {file = "httptools-0.7.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:f084813239e1eb403ddacd06a30de3d3e09a9b76e7894dcda2b22f8a726e9c60"}, + {file = "httptools-0.7.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:7347714368fb2b335e9063bc2b96f2f87a9ceffcd9758ac295f8bbcd3ffbc0ca"}, + {file = "httptools-0.7.1-cp314-cp314-win_amd64.whl", hash = "sha256:cfabda2a5bb85aa2a904ce06d974a3f30fb36cc63d7feaddec05d2050acede96"}, + {file = "httptools-0.7.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:ac50afa68945df63ec7a2707c506bd02239272288add34539a2ef527254626a4"}, + {file = "httptools-0.7.1-cp39-cp39-macosx_11_0_arm64.whl", hash = 
"sha256:de987bb4e7ac95b99b805b99e0aae0ad51ae61df4263459d36e07cf4052d8b3a"}, + {file = "httptools-0.7.1-cp39-cp39-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:d169162803a24425eb5e4d51d79cbf429fd7a491b9e570a55f495ea55b26f0bf"}, + {file = "httptools-0.7.1-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:49794f9250188a57fa73c706b46cb21a313edb00d337ca4ce1a011fe3c760b28"}, + {file = "httptools-0.7.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:aeefa0648362bb97a7d6b5ff770bfb774930a327d7f65f8208394856862de517"}, + {file = "httptools-0.7.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:0d92b10dbf0b3da4823cde6a96d18e6ae358a9daa741c71448975f6a2c339cad"}, + {file = "httptools-0.7.1-cp39-cp39-win_amd64.whl", hash = "sha256:5ddbd045cfcb073db2449563dd479057f2c2b681ebc232380e63ef15edc9c023"}, + {file = "httptools-0.7.1.tar.gz", hash = "sha256:abd72556974f8e7c74a259655924a717a2365b236c882c3f6f8a45fe94703ac9"}, +] + +[[package]] +name = "httpx" +version = "0.27.2" +description = "The next generation HTTP client." 
+optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "httpx-0.27.2-py3-none-any.whl", hash = "sha256:7bb2708e112d8fdd7829cd4243970f0c223274051cb35ee80c03301ee29a3df0"}, + {file = "httpx-0.27.2.tar.gz", hash = "sha256:f7c2be1d2f3c3c3160d441802406b206c2b76f5947b11115e6df10c6c65e66c2"}, +] + +[package.dependencies] +anyio = "*" +certifi = "*" +httpcore = "==1.*" +idna = "*" +sniffio = "*" + +[package.extras] +brotli = ["brotli ; platform_python_implementation == \"CPython\"", "brotlicffi ; platform_python_implementation != \"CPython\""] +cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (==1.*)"] +zstd = ["zstandard (>=0.18.0)"] + +[[package]] +name = "idna" +version = "3.11" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.8" +groups = ["main", "dev"] +files = [ + {file = "idna-3.11-py3-none-any.whl", hash = "sha256:771a87f49d9defaf64091e6e6fe9c18d4833f140bd19464795bc32d966ca37ea"}, + {file = "idna-3.11.tar.gz", hash = "sha256:795dafcc9c04ed0c1fb032c2aa73654d8e8c5023a7df64a53f39190ada629902"}, +] + +[package.extras] +all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"] + +[[package]] +name = "iniconfig" +version = "2.3.0" +description = "brain-dead simple config-ini parsing" +optional = false +python-versions = ">=3.10" +groups = ["dev"] +files = [ + {file = "iniconfig-2.3.0-py3-none-any.whl", hash = "sha256:f631c04d2c48c52b84d0d0549c99ff3859c98df65b3101406327ecc7d53fbf12"}, + {file = "iniconfig-2.3.0.tar.gz", hash = "sha256:c76315c77db068650d49c5b56314774a7804df16fee4402c1f19d6d15d8c4730"}, +] + +[[package]] +name = "packaging" +version = "26.0" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.8" +groups = ["main", "dev"] +files = [ + {file = "packaging-26.0-py3-none-any.whl", hash = 
"sha256:b36f1fef9334a5588b4166f8bcd26a14e521f2b55e6b9de3aaa80d3ff7a37529"}, + {file = "packaging-26.0.tar.gz", hash = "sha256:00243ae351a257117b6a241061796684b084ed1c516a08c48a3f7e147a9d80b4"}, +] + +[[package]] +name = "pluggy" +version = "1.6.0" +description = "plugin and hook calling mechanisms for python" +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746"}, + {file = "pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3"}, +] + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["coverage", "pytest", "pytest-benchmark"] + +[[package]] +name = "pydantic" +version = "2.12.5" +description = "Data validation using Python type hints" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "pydantic-2.12.5-py3-none-any.whl", hash = "sha256:e561593fccf61e8a20fc46dfc2dfe075b8be7d0188df33f221ad1f0139180f9d"}, + {file = "pydantic-2.12.5.tar.gz", hash = "sha256:4d351024c75c0f085a9febbb665ce8c0c6ec5d30e903bdb6394b7ede26aebb49"}, +] + +[package.dependencies] +annotated-types = ">=0.6.0" +pydantic-core = "2.41.5" +typing-extensions = ">=4.14.1" +typing-inspection = ">=0.4.2" + +[package.extras] +email = ["email-validator (>=2.0.0)"] +timezone = ["tzdata ; python_version >= \"3.9\" and platform_system == \"Windows\""] + +[[package]] +name = "pydantic-core" +version = "2.41.5" +description = "Core functionality for Pydantic validation and serialization" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "pydantic_core-2.41.5-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:77b63866ca88d804225eaa4af3e664c5faf3568cea95360d21f4725ab6e07146"}, + {file = "pydantic_core-2.41.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:dfa8a0c812ac681395907e71e1274819dec685fec28273a28905df579ef137e2"}, + {file = 
"pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5921a4d3ca3aee735d9fd163808f5e8dd6c6972101e4adbda9a4667908849b97"}, + {file = "pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e25c479382d26a2a41b7ebea1043564a937db462816ea07afa8a44c0866d52f9"}, + {file = "pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f547144f2966e1e16ae626d8ce72b4cfa0caedc7fa28052001c94fb2fcaa1c52"}, + {file = "pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6f52298fbd394f9ed112d56f3d11aabd0d5bd27beb3084cc3d8ad069483b8941"}, + {file = "pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:100baa204bb412b74fe285fb0f3a385256dad1d1879f0a5cb1499ed2e83d132a"}, + {file = "pydantic_core-2.41.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:05a2c8852530ad2812cb7914dc61a1125dc4e06252ee98e5638a12da6cc6fb6c"}, + {file = "pydantic_core-2.41.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:29452c56df2ed968d18d7e21f4ab0ac55e71dc59524872f6fc57dcf4a3249ed2"}, + {file = "pydantic_core-2.41.5-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:d5160812ea7a8a2ffbe233d8da666880cad0cbaf5d4de74ae15c313213d62556"}, + {file = "pydantic_core-2.41.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:df3959765b553b9440adfd3c795617c352154e497a4eaf3752555cfb5da8fc49"}, + {file = "pydantic_core-2.41.5-cp310-cp310-win32.whl", hash = "sha256:1f8d33a7f4d5a7889e60dc39856d76d09333d8a6ed0f5f1190635cbec70ec4ba"}, + {file = "pydantic_core-2.41.5-cp310-cp310-win_amd64.whl", hash = "sha256:62de39db01b8d593e45871af2af9e497295db8d73b085f6bfd0b18c83c70a8f9"}, + {file = "pydantic_core-2.41.5-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:a3a52f6156e73e7ccb0f8cced536adccb7042be67cb45f9562e12b319c119da6"}, + {file = 
"pydantic_core-2.41.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7f3bf998340c6d4b0c9a2f02d6a400e51f123b59565d74dc60d252ce888c260b"}, + {file = "pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:378bec5c66998815d224c9ca994f1e14c0c21cb95d2f52b6021cc0b2a58f2a5a"}, + {file = "pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e7b576130c69225432866fe2f4a469a85a54ade141d96fd396dffcf607b558f8"}, + {file = "pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6cb58b9c66f7e4179a2d5e0f849c48eff5c1fca560994d6eb6543abf955a149e"}, + {file = "pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:88942d3a3dff3afc8288c21e565e476fc278902ae4d6d134f1eeda118cc830b1"}, + {file = "pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f31d95a179f8d64d90f6831d71fa93290893a33148d890ba15de25642c5d075b"}, + {file = "pydantic_core-2.41.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c1df3d34aced70add6f867a8cf413e299177e0c22660cc767218373d0779487b"}, + {file = "pydantic_core-2.41.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:4009935984bd36bd2c774e13f9a09563ce8de4abaa7226f5108262fa3e637284"}, + {file = "pydantic_core-2.41.5-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:34a64bc3441dc1213096a20fe27e8e128bd3ff89921706e83c0b1ac971276594"}, + {file = "pydantic_core-2.41.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c9e19dd6e28fdcaa5a1de679aec4141f691023916427ef9bae8584f9c2fb3b0e"}, + {file = "pydantic_core-2.41.5-cp311-cp311-win32.whl", hash = "sha256:2c010c6ded393148374c0f6f0bf89d206bf3217f201faa0635dcd56bd1520f6b"}, + {file = "pydantic_core-2.41.5-cp311-cp311-win_amd64.whl", hash = "sha256:76ee27c6e9c7f16f47db7a94157112a2f3a00e958bc626e2f4ee8bec5c328fbe"}, + {file = "pydantic_core-2.41.5-cp311-cp311-win_arm64.whl", 
hash = "sha256:4bc36bbc0b7584de96561184ad7f012478987882ebf9f9c389b23f432ea3d90f"}, + {file = "pydantic_core-2.41.5-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:f41a7489d32336dbf2199c8c0a215390a751c5b014c2c1c5366e817202e9cdf7"}, + {file = "pydantic_core-2.41.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:070259a8818988b9a84a449a2a7337c7f430a22acc0859c6b110aa7212a6d9c0"}, + {file = "pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e96cea19e34778f8d59fe40775a7a574d95816eb150850a85a7a4c8f4b94ac69"}, + {file = "pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ed2e99c456e3fadd05c991f8f437ef902e00eedf34320ba2b0842bd1c3ca3a75"}, + {file = "pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:65840751b72fbfd82c3c640cff9284545342a4f1eb1586ad0636955b261b0b05"}, + {file = "pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e536c98a7626a98feb2d3eaf75944ef6f3dbee447e1f841eae16f2f0a72d8ddc"}, + {file = "pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eceb81a8d74f9267ef4081e246ffd6d129da5d87e37a77c9bde550cb04870c1c"}, + {file = "pydantic_core-2.41.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d38548150c39b74aeeb0ce8ee1d8e82696f4a4e16ddc6de7b1d8823f7de4b9b5"}, + {file = "pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c23e27686783f60290e36827f9c626e63154b82b116d7fe9adba1fda36da706c"}, + {file = "pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:482c982f814460eabe1d3bb0adfdc583387bd4691ef00b90575ca0d2b6fe2294"}, + {file = "pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:bfea2a5f0b4d8d43adf9d7b8bf019fb46fdd10a2e5cde477fbcb9d1fa08c68e1"}, + {file = "pydantic_core-2.41.5-cp312-cp312-win32.whl", hash = 
"sha256:b74557b16e390ec12dca509bce9264c3bbd128f8a2c376eaa68003d7f327276d"}, + {file = "pydantic_core-2.41.5-cp312-cp312-win_amd64.whl", hash = "sha256:1962293292865bca8e54702b08a4f26da73adc83dd1fcf26fbc875b35d81c815"}, + {file = "pydantic_core-2.41.5-cp312-cp312-win_arm64.whl", hash = "sha256:1746d4a3d9a794cacae06a5eaaccb4b8643a131d45fbc9af23e353dc0a5ba5c3"}, + {file = "pydantic_core-2.41.5-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:941103c9be18ac8daf7b7adca8228f8ed6bb7a1849020f643b3a14d15b1924d9"}, + {file = "pydantic_core-2.41.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:112e305c3314f40c93998e567879e887a3160bb8689ef3d2c04b6cc62c33ac34"}, + {file = "pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cbaad15cb0c90aa221d43c00e77bb33c93e8d36e0bf74760cd00e732d10a6a0"}, + {file = "pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:03ca43e12fab6023fc79d28ca6b39b05f794ad08ec2feccc59a339b02f2b3d33"}, + {file = "pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dc799088c08fa04e43144b164feb0c13f9a0bc40503f8df3e9fde58a3c0c101e"}, + {file = "pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:97aeba56665b4c3235a0e52b2c2f5ae9cd071b8a8310ad27bddb3f7fb30e9aa2"}, + {file = "pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:406bf18d345822d6c21366031003612b9c77b3e29ffdb0f612367352aab7d586"}, + {file = "pydantic_core-2.41.5-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b93590ae81f7010dbe380cdeab6f515902ebcbefe0b9327cc4804d74e93ae69d"}, + {file = "pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:01a3d0ab748ee531f4ea6c3e48ad9dac84ddba4b0d82291f87248f2f9de8d740"}, + {file = "pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_armv7l.whl", hash = 
"sha256:6561e94ba9dacc9c61bce40e2d6bdc3bfaa0259d3ff36ace3b1e6901936d2e3e"}, + {file = "pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:915c3d10f81bec3a74fbd4faebe8391013ba61e5a1a8d48c4455b923bdda7858"}, + {file = "pydantic_core-2.41.5-cp313-cp313-win32.whl", hash = "sha256:650ae77860b45cfa6e2cdafc42618ceafab3a2d9a3811fcfbd3bbf8ac3c40d36"}, + {file = "pydantic_core-2.41.5-cp313-cp313-win_amd64.whl", hash = "sha256:79ec52ec461e99e13791ec6508c722742ad745571f234ea6255bed38c6480f11"}, + {file = "pydantic_core-2.41.5-cp313-cp313-win_arm64.whl", hash = "sha256:3f84d5c1b4ab906093bdc1ff10484838aca54ef08de4afa9de0f5f14d69639cd"}, + {file = "pydantic_core-2.41.5-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:3f37a19d7ebcdd20b96485056ba9e8b304e27d9904d233d7b1015db320e51f0a"}, + {file = "pydantic_core-2.41.5-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:1d1d9764366c73f996edd17abb6d9d7649a7eb690006ab6adbda117717099b14"}, + {file = "pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25e1c2af0fce638d5f1988b686f3b3ea8cd7de5f244ca147c777769e798a9cd1"}, + {file = "pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:506d766a8727beef16b7adaeb8ee6217c64fc813646b424d0804d67c16eddb66"}, + {file = "pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4819fa52133c9aa3c387b3328f25c1facc356491e6135b459f1de698ff64d869"}, + {file = "pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2b761d210c9ea91feda40d25b4efe82a1707da2ef62901466a42492c028553a2"}, + {file = "pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22f0fb8c1c583a3b6f24df2470833b40207e907b90c928cc8d3594b76f874375"}, + {file = "pydantic_core-2.41.5-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:2782c870e99878c634505236d81e5443092fba820f0373997ff75f90f68cd553"}, + {file = "pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:0177272f88ab8312479336e1d777f6b124537d47f2123f89cb37e0accea97f90"}, + {file = "pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_armv7l.whl", hash = "sha256:63510af5e38f8955b8ee5687740d6ebf7c2a0886d15a6d65c32814613681bc07"}, + {file = "pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:e56ba91f47764cc14f1daacd723e3e82d1a89d783f0f5afe9c364b8bb491ccdb"}, + {file = "pydantic_core-2.41.5-cp314-cp314-win32.whl", hash = "sha256:aec5cf2fd867b4ff45b9959f8b20ea3993fc93e63c7363fe6851424c8a7e7c23"}, + {file = "pydantic_core-2.41.5-cp314-cp314-win_amd64.whl", hash = "sha256:8e7c86f27c585ef37c35e56a96363ab8de4e549a95512445b85c96d3e2f7c1bf"}, + {file = "pydantic_core-2.41.5-cp314-cp314-win_arm64.whl", hash = "sha256:e672ba74fbc2dc8eea59fb6d4aed6845e6905fc2a8afe93175d94a83ba2a01a0"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:8566def80554c3faa0e65ac30ab0932b9e3a5cd7f8323764303d468e5c37595a"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:b80aa5095cd3109962a298ce14110ae16b8c1aece8b72f9dafe81cf597ad80b3"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3006c3dd9ba34b0c094c544c6006cc79e87d8612999f1a5d43b769b89181f23c"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:72f6c8b11857a856bcfa48c86f5368439f74453563f951e473514579d44aa612"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5cb1b2f9742240e4bb26b652a5aeb840aa4b417c7748b6f8387927bc6e45e40d"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bd3d54f38609ff308209bd43acea66061494157703364ae40c951f83ba99a1a9"}, + {file = 
"pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ff4321e56e879ee8d2a879501c8e469414d948f4aba74a2d4593184eb326660"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d0d2568a8c11bf8225044aa94409e21da0cb09dcdafe9ecd10250b2baad531a9"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_aarch64.whl", hash = "sha256:a39455728aabd58ceabb03c90e12f71fd30fa69615760a075b9fec596456ccc3"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_armv7l.whl", hash = "sha256:239edca560d05757817c13dc17c50766136d21f7cd0fac50295499ae24f90fdf"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_x86_64.whl", hash = "sha256:2a5e06546e19f24c6a96a129142a75cee553cc018ffee48a460059b1185f4470"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-win32.whl", hash = "sha256:b4ececa40ac28afa90871c2cc2b9ffd2ff0bf749380fbdf57d165fd23da353aa"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-win_amd64.whl", hash = "sha256:80aa89cad80b32a912a65332f64a4450ed00966111b6615ca6816153d3585a8c"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-win_arm64.whl", hash = "sha256:35b44f37a3199f771c3eaa53051bc8a70cd7b54f333531c59e29fd4db5d15008"}, + {file = "pydantic_core-2.41.5-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:8bfeaf8735be79f225f3fefab7f941c712aaca36f1128c9d7e2352ee1aa87bdf"}, + {file = "pydantic_core-2.41.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:346285d28e4c8017da95144c7f3acd42740d637ff41946af5ce6e5e420502dd5"}, + {file = "pydantic_core-2.41.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a75dafbf87d6276ddc5b2bf6fae5254e3d0876b626eb24969a574fff9149ee5d"}, + {file = "pydantic_core-2.41.5-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7b93a4d08587e2b7e7882de461e82b6ed76d9026ce91ca7915e740ecc7855f60"}, + {file = "pydantic_core-2.41.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:e8465ab91a4bd96d36dde3263f06caa6a8a6019e4113f24dc753d79a8b3a3f82"}, + {file = "pydantic_core-2.41.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:299e0a22e7ae2b85c1a57f104538b2656e8ab1873511fd718a1c1c6f149b77b5"}, + {file = "pydantic_core-2.41.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:707625ef0983fcfb461acfaf14de2067c5942c6bb0f3b4c99158bed6fedd3cf3"}, + {file = "pydantic_core-2.41.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f41eb9797986d6ebac5e8edff36d5cef9de40def462311b3eb3eeded1431e425"}, + {file = "pydantic_core-2.41.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0384e2e1021894b1ff5a786dbf94771e2986ebe2869533874d7e43bc79c6f504"}, + {file = "pydantic_core-2.41.5-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:f0cd744688278965817fd0839c4a4116add48d23890d468bc436f78beb28abf5"}, + {file = "pydantic_core-2.41.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:753e230374206729bf0a807954bcc6c150d3743928a73faffee51ac6557a03c3"}, + {file = "pydantic_core-2.41.5-cp39-cp39-win32.whl", hash = "sha256:873e0d5b4fb9b89ef7c2d2a963ea7d02879d9da0da8d9d4933dee8ee86a8b460"}, + {file = "pydantic_core-2.41.5-cp39-cp39-win_amd64.whl", hash = "sha256:e4f4a984405e91527a0d62649ee21138f8e3d0ef103be488c1dc11a80d7f184b"}, + {file = "pydantic_core-2.41.5-graalpy311-graalpy242_311_native-macosx_10_12_x86_64.whl", hash = "sha256:b96d5f26b05d03cc60f11a7761a5ded1741da411e7fe0909e27a5e6a0cb7b034"}, + {file = "pydantic_core-2.41.5-graalpy311-graalpy242_311_native-macosx_11_0_arm64.whl", hash = "sha256:634e8609e89ceecea15e2d61bc9ac3718caaaa71963717bf3c8f38bfde64242c"}, + {file = "pydantic_core-2.41.5-graalpy311-graalpy242_311_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:93e8740d7503eb008aa2df04d3b9735f845d43ae845e6dcd2be0b55a2da43cd2"}, + {file = "pydantic_core-2.41.5-graalpy311-graalpy242_311_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:f15489ba13d61f670dcc96772e733aad1a6f9c429cc27574c6cdaed82d0146ad"}, + {file = "pydantic_core-2.41.5-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:7da7087d756b19037bc2c06edc6c170eeef3c3bafcb8f532ff17d64dc427adfd"}, + {file = "pydantic_core-2.41.5-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:aabf5777b5c8ca26f7824cb4a120a740c9588ed58df9b2d196ce92fba42ff8dc"}, + {file = "pydantic_core-2.41.5-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c007fe8a43d43b3969e8469004e9845944f1a80e6acd47c150856bb87f230c56"}, + {file = "pydantic_core-2.41.5-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:76d0819de158cd855d1cbb8fcafdf6f5cf1eb8e470abe056d5d161106e38062b"}, + {file = "pydantic_core-2.41.5-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:b5819cd790dbf0c5eb9f82c73c16b39a65dd6dd4d1439dcdea7816ec9adddab8"}, + {file = "pydantic_core-2.41.5-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:5a4e67afbc95fa5c34cf27d9089bca7fcab4e51e57278d710320a70b956d1b9a"}, + {file = "pydantic_core-2.41.5-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ece5c59f0ce7d001e017643d8d24da587ea1f74f6993467d85ae8a5ef9d4f42b"}, + {file = "pydantic_core-2.41.5-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:16f80f7abe3351f8ea6858914ddc8c77e02578544a0ebc15b4c2e1a0e813b0b2"}, + {file = "pydantic_core-2.41.5-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:33cb885e759a705b426baada1fe68cbb0a2e68e34c5d0d0289a364cf01709093"}, + {file = "pydantic_core-2.41.5-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:c8d8b4eb992936023be7dee581270af5c6e0697a8559895f527f5b7105ecd36a"}, + {file = "pydantic_core-2.41.5-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:242a206cd0318f95cd21bdacff3fcc3aab23e79bba5cac3db5a841c9ef9c6963"}, + {file = 
"pydantic_core-2.41.5-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:d3a978c4f57a597908b7e697229d996d77a6d3c94901e9edee593adada95ce1a"}, + {file = "pydantic_core-2.41.5-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:b2379fa7ed44ddecb5bfe4e48577d752db9fc10be00a6b7446e9663ba143de26"}, + {file = "pydantic_core-2.41.5-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:266fb4cbf5e3cbd0b53669a6d1b039c45e3ce651fd5442eff4d07c2cc8d66808"}, + {file = "pydantic_core-2.41.5-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58133647260ea01e4d0500089a8c4f07bd7aa6ce109682b1426394988d8aaacc"}, + {file = "pydantic_core-2.41.5-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:287dad91cfb551c363dc62899a80e9e14da1f0e2b6ebde82c806612ca2a13ef1"}, + {file = "pydantic_core-2.41.5-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:03b77d184b9eb40240ae9fd676ca364ce1085f203e1b1256f8ab9984dca80a84"}, + {file = "pydantic_core-2.41.5-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:a668ce24de96165bb239160b3d854943128f4334822900534f2fe947930e5770"}, + {file = "pydantic_core-2.41.5-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f14f8f046c14563f8eb3f45f499cc658ab8d10072961e07225e507adb700e93f"}, + {file = "pydantic_core-2.41.5-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:56121965f7a4dc965bff783d70b907ddf3d57f6eba29b6d2e5dabfaf07799c51"}, + {file = "pydantic_core-2.41.5.tar.gz", hash = "sha256:08daa51ea16ad373ffd5e7606252cc32f07bc72b28284b6bc9c6df804816476e"}, +] + +[package.dependencies] +typing-extensions = ">=4.14.1" + +[[package]] +name = "pygments" +version = "2.19.2" +description = "Pygments is a syntax highlighting package written in Python." 
+optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b"}, + {file = "pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887"}, +] + +[package.extras] +windows-terminal = ["colorama (>=0.4.6)"] + +[[package]] +name = "pytest" +version = "8.4.2" +description = "pytest: simple powerful testing with Python" +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "pytest-8.4.2-py3-none-any.whl", hash = "sha256:872f880de3fc3a5bdc88a11b39c9710c3497a547cfa9320bc3c5e62fbf272e79"}, + {file = "pytest-8.4.2.tar.gz", hash = "sha256:86c0d0b93306b961d58d62a4db4879f27fe25513d4b969df351abdddb3c30e01"}, +] + +[package.dependencies] +colorama = {version = ">=0.4", markers = "sys_platform == \"win32\""} +iniconfig = ">=1" +packaging = ">=20" +pluggy = ">=1.5,<2" +pygments = ">=2.7.2" + +[package.extras] +dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "requests", "setuptools", "xmlschema"] + +[[package]] +name = "pytest-asyncio" +version = "1.3.0" +description = "Pytest support for asyncio" +optional = false +python-versions = ">=3.10" +groups = ["dev"] +files = [ + {file = "pytest_asyncio-1.3.0-py3-none-any.whl", hash = "sha256:611e26147c7f77640e6d0a92a38ed17c3e9848063698d5c93d5aa7aa11cebff5"}, + {file = "pytest_asyncio-1.3.0.tar.gz", hash = "sha256:d7f52f36d231b80ee124cd216ffb19369aa168fc10095013c6b014a34d3ee9e5"}, +] + +[package.dependencies] +pytest = ">=8.2,<10" + +[package.extras] +docs = ["sphinx (>=5.3)", "sphinx-rtd-theme (>=1)"] +testing = ["coverage (>=6.2)", "hypothesis (>=5.7.1)"] + +[[package]] +name = "python-dotenv" +version = "1.2.1" +description = "Read key-value pairs from a .env file and set them as environment variables" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = 
"python_dotenv-1.2.1-py3-none-any.whl", hash = "sha256:b81ee9561e9ca4004139c6cbba3a238c32b03e4894671e181b671e8cb8425d61"}, + {file = "python_dotenv-1.2.1.tar.gz", hash = "sha256:42667e897e16ab0d66954af0e60a9caa94f0fd4ecf3aaf6d2d260eec1aa36ad6"}, +] + +[package.extras] +cli = ["click (>=5.0)"] + +[[package]] +name = "pyyaml" +version = "6.0.3" +description = "YAML parser and emitter for Python" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "PyYAML-6.0.3-cp38-cp38-macosx_10_13_x86_64.whl", hash = "sha256:c2514fceb77bc5e7a2f7adfaa1feb2fb311607c9cb518dbc378688ec73d8292f"}, + {file = "PyYAML-6.0.3-cp38-cp38-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9c57bb8c96f6d1808c030b1687b9b5fb476abaa47f0db9c0101f5e9f394e97f4"}, + {file = "PyYAML-6.0.3-cp38-cp38-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:efd7b85f94a6f21e4932043973a7ba2613b059c4a000551892ac9f1d11f5baf3"}, + {file = "PyYAML-6.0.3-cp38-cp38-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:22ba7cfcad58ef3ecddc7ed1db3409af68d023b7f940da23c6c2a1890976eda6"}, + {file = "PyYAML-6.0.3-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:6344df0d5755a2c9a276d4473ae6b90647e216ab4757f8426893b5dd2ac3f369"}, + {file = "PyYAML-6.0.3-cp38-cp38-win32.whl", hash = "sha256:3ff07ec89bae51176c0549bc4c63aa6202991da2d9a6129d7aef7f1407d3f295"}, + {file = "PyYAML-6.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:5cf4e27da7e3fbed4d6c3d8e797387aaad68102272f8f9752883bc32d61cb87b"}, + {file = "pyyaml-6.0.3-cp310-cp310-macosx_10_13_x86_64.whl", hash = "sha256:214ed4befebe12df36bcc8bc2b64b396ca31be9304b8f59e25c11cf94a4c033b"}, + {file = "pyyaml-6.0.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:02ea2dfa234451bbb8772601d7b8e426c2bfa197136796224e50e35a78777956"}, + {file = "pyyaml-6.0.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:b30236e45cf30d2b8e7b3e85881719e98507abed1011bf463a8fa23e9c3e98a8"}, + {file = "pyyaml-6.0.3-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:66291b10affd76d76f54fad28e22e51719ef9ba22b29e1d7d03d6777a9174198"}, + {file = "pyyaml-6.0.3-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9c7708761fccb9397fe64bbc0395abcae8c4bf7b0eac081e12b809bf47700d0b"}, + {file = "pyyaml-6.0.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:418cf3f2111bc80e0933b2cd8cd04f286338bb88bdc7bc8e6dd775ebde60b5e0"}, + {file = "pyyaml-6.0.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:5e0b74767e5f8c593e8c9b5912019159ed0533c70051e9cce3e8b6aa699fcd69"}, + {file = "pyyaml-6.0.3-cp310-cp310-win32.whl", hash = "sha256:28c8d926f98f432f88adc23edf2e6d4921ac26fb084b028c733d01868d19007e"}, + {file = "pyyaml-6.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:bdb2c67c6c1390b63c6ff89f210c8fd09d9a1217a465701eac7316313c915e4c"}, + {file = "pyyaml-6.0.3-cp311-cp311-macosx_10_13_x86_64.whl", hash = "sha256:44edc647873928551a01e7a563d7452ccdebee747728c1080d881d68af7b997e"}, + {file = "pyyaml-6.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:652cb6edd41e718550aad172851962662ff2681490a8a711af6a4d288dd96824"}, + {file = "pyyaml-6.0.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:10892704fc220243f5305762e276552a0395f7beb4dbf9b14ec8fd43b57f126c"}, + {file = "pyyaml-6.0.3-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:850774a7879607d3a6f50d36d04f00ee69e7fc816450e5f7e58d7f17f1ae5c00"}, + {file = "pyyaml-6.0.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b8bb0864c5a28024fac8a632c443c87c5aa6f215c0b126c449ae1a150412f31d"}, + {file = "pyyaml-6.0.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = 
"sha256:1d37d57ad971609cf3c53ba6a7e365e40660e3be0e5175fa9f2365a379d6095a"}, + {file = "pyyaml-6.0.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:37503bfbfc9d2c40b344d06b2199cf0e96e97957ab1c1b546fd4f87e53e5d3e4"}, + {file = "pyyaml-6.0.3-cp311-cp311-win32.whl", hash = "sha256:8098f252adfa6c80ab48096053f512f2321f0b998f98150cea9bd23d83e1467b"}, + {file = "pyyaml-6.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:9f3bfb4965eb874431221a3ff3fdcddc7e74e3b07799e0e84ca4a0f867d449bf"}, + {file = "pyyaml-6.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7f047e29dcae44602496db43be01ad42fc6f1cc0d8cd6c83d342306c32270196"}, + {file = "pyyaml-6.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:fc09d0aa354569bc501d4e787133afc08552722d3ab34836a80547331bb5d4a0"}, + {file = "pyyaml-6.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9149cad251584d5fb4981be1ecde53a1ca46c891a79788c0df828d2f166bda28"}, + {file = "pyyaml-6.0.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5fdec68f91a0c6739b380c83b951e2c72ac0197ace422360e6d5a959d8d97b2c"}, + {file = "pyyaml-6.0.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ba1cc08a7ccde2d2ec775841541641e4548226580ab850948cbfda66a1befcdc"}, + {file = "pyyaml-6.0.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8dc52c23056b9ddd46818a57b78404882310fb473d63f17b07d5c40421e47f8e"}, + {file = "pyyaml-6.0.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:41715c910c881bc081f1e8872880d3c650acf13dfa8214bad49ed4cede7c34ea"}, + {file = "pyyaml-6.0.3-cp312-cp312-win32.whl", hash = "sha256:96b533f0e99f6579b3d4d4995707cf36df9100d67e0c8303a0c55b27b5f99bc5"}, + {file = "pyyaml-6.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:5fcd34e47f6e0b794d17de1b4ff496c00986e1c83f7ab2fb8fcfe9616ff7477b"}, + {file = "pyyaml-6.0.3-cp312-cp312-win_arm64.whl", hash = 
"sha256:64386e5e707d03a7e172c0701abfb7e10f0fb753ee1d773128192742712a98fd"}, + {file = "pyyaml-6.0.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8da9669d359f02c0b91ccc01cac4a67f16afec0dac22c2ad09f46bee0697eba8"}, + {file = "pyyaml-6.0.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:2283a07e2c21a2aa78d9c4442724ec1eb15f5e42a723b99cb3d822d48f5f7ad1"}, + {file = "pyyaml-6.0.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ee2922902c45ae8ccada2c5b501ab86c36525b883eff4255313a253a3160861c"}, + {file = "pyyaml-6.0.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a33284e20b78bd4a18c8c2282d549d10bc8408a2a7ff57653c0cf0b9be0afce5"}, + {file = "pyyaml-6.0.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0f29edc409a6392443abf94b9cf89ce99889a1dd5376d94316ae5145dfedd5d6"}, + {file = "pyyaml-6.0.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f7057c9a337546edc7973c0d3ba84ddcdf0daa14533c2065749c9075001090e6"}, + {file = "pyyaml-6.0.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:eda16858a3cab07b80edaf74336ece1f986ba330fdb8ee0d6c0d68fe82bc96be"}, + {file = "pyyaml-6.0.3-cp313-cp313-win32.whl", hash = "sha256:d0eae10f8159e8fdad514efdc92d74fd8d682c933a6dd088030f3834bc8e6b26"}, + {file = "pyyaml-6.0.3-cp313-cp313-win_amd64.whl", hash = "sha256:79005a0d97d5ddabfeeea4cf676af11e647e41d81c9a7722a193022accdb6b7c"}, + {file = "pyyaml-6.0.3-cp313-cp313-win_arm64.whl", hash = "sha256:5498cd1645aa724a7c71c8f378eb29ebe23da2fc0d7a08071d89469bf1d2defb"}, + {file = "pyyaml-6.0.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:8d1fab6bb153a416f9aeb4b8763bc0f22a5586065f86f7664fc23339fc1c1fac"}, + {file = "pyyaml-6.0.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:34d5fcd24b8445fadc33f9cf348c1047101756fd760b4dacb5c3e99755703310"}, + {file = 
"pyyaml-6.0.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:501a031947e3a9025ed4405a168e6ef5ae3126c59f90ce0cd6f2bfc477be31b7"}, + {file = "pyyaml-6.0.3-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:b3bc83488de33889877a0f2543ade9f70c67d66d9ebb4ac959502e12de895788"}, + {file = "pyyaml-6.0.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c458b6d084f9b935061bc36216e8a69a7e293a2f1e68bf956dcd9e6cbcd143f5"}, + {file = "pyyaml-6.0.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:7c6610def4f163542a622a73fb39f534f8c101d690126992300bf3207eab9764"}, + {file = "pyyaml-6.0.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:5190d403f121660ce8d1d2c1bb2ef1bd05b5f68533fc5c2ea899bd15f4399b35"}, + {file = "pyyaml-6.0.3-cp314-cp314-win_amd64.whl", hash = "sha256:4a2e8cebe2ff6ab7d1050ecd59c25d4c8bd7e6f400f5f82b96557ac0abafd0ac"}, + {file = "pyyaml-6.0.3-cp314-cp314-win_arm64.whl", hash = "sha256:93dda82c9c22deb0a405ea4dc5f2d0cda384168e466364dec6255b293923b2f3"}, + {file = "pyyaml-6.0.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:02893d100e99e03eda1c8fd5c441d8c60103fd175728e23e431db1b589cf5ab3"}, + {file = "pyyaml-6.0.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:c1ff362665ae507275af2853520967820d9124984e0f7466736aea23d8611fba"}, + {file = "pyyaml-6.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6adc77889b628398debc7b65c073bcb99c4a0237b248cacaf3fe8a557563ef6c"}, + {file = "pyyaml-6.0.3-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a80cb027f6b349846a3bf6d73b5e95e782175e52f22108cfa17876aaeff93702"}, + {file = "pyyaml-6.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:00c4bdeba853cc34e7dd471f16b4114f4162dc03e6b7afcc2128711f0eca823c"}, + {file = 
"pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:66e1674c3ef6f541c35191caae2d429b967b99e02040f5ba928632d9a7f0f065"}, + {file = "pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:16249ee61e95f858e83976573de0f5b2893b3677ba71c9dd36b9cf8be9ac6d65"}, + {file = "pyyaml-6.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:4ad1906908f2f5ae4e5a8ddfce73c320c2a1429ec52eafd27138b7f1cbe341c9"}, + {file = "pyyaml-6.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:ebc55a14a21cb14062aa4162f906cd962b28e2e9ea38f9b4391244cd8de4ae0b"}, + {file = "pyyaml-6.0.3-cp39-cp39-macosx_10_13_x86_64.whl", hash = "sha256:b865addae83924361678b652338317d1bd7e79b1f4596f96b96c77a5a34b34da"}, + {file = "pyyaml-6.0.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c3355370a2c156cffb25e876646f149d5d68f5e0a3ce86a5084dd0b64a994917"}, + {file = "pyyaml-6.0.3-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3c5677e12444c15717b902a5798264fa7909e41153cdf9ef7ad571b704a63dd9"}, + {file = "pyyaml-6.0.3-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5ed875a24292240029e4483f9d4a4b8a1ae08843b9c54f43fcc11e404532a8a5"}, + {file = "pyyaml-6.0.3-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0150219816b6a1fa26fb4699fb7daa9caf09eb1999f3b70fb6e786805e80375a"}, + {file = "pyyaml-6.0.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:fa160448684b4e94d80416c0fa4aac48967a969efe22931448d853ada8baf926"}, + {file = "pyyaml-6.0.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:27c0abcb4a5dac13684a37f76e701e054692a9b2d3064b70f5e4eb54810553d7"}, + {file = "pyyaml-6.0.3-cp39-cp39-win32.whl", hash = "sha256:1ebe39cb5fc479422b83de611d14e2c0d3bb2a18bbcb01f229ab3cfbd8fee7a0"}, + {file = "pyyaml-6.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:2e71d11abed7344e42a8849600193d15b6def118602c4c176f748e4583246007"}, + {file = "pyyaml-6.0.3.tar.gz", hash = 
"sha256:d76623373421df22fb4cf8817020cbb7ef15c725b9d5e45f17e189bfc384190f"}, +] + +[[package]] +name = "respx" +version = "0.22.0" +description = "A utility for mocking out the Python HTTPX and HTTP Core libraries." +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "respx-0.22.0-py2.py3-none-any.whl", hash = "sha256:631128d4c9aba15e56903fb5f66fb1eff412ce28dd387ca3a81339e52dbd3ad0"}, + {file = "respx-0.22.0.tar.gz", hash = "sha256:3c8924caa2a50bd71aefc07aa812f2466ff489f1848c96e954a5362d17095d91"}, +] + +[package.dependencies] +httpx = ">=0.25.0" + +[[package]] +name = "semver" +version = "3.0.4" +description = "Python helper for Semantic Versioning (https://semver.org)" +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "semver-3.0.4-py3-none-any.whl", hash = "sha256:9c824d87ba7f7ab4a1890799cec8596f15c1241cb473404ea1cb0c55e4b04746"}, + {file = "semver-3.0.4.tar.gz", hash = "sha256:afc7d8c584a5ed0a11033af086e8af226a9c0b206f313e0301f8dd7b6b589602"}, +] + +[[package]] +name = "sniffio" +version = "1.3.1" +description = "Sniff out which async library your code is running under" +optional = false +python-versions = ">=3.7" +groups = ["dev"] +files = [ + {file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"}, + {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, +] + +[[package]] +name = "starlette" +version = "0.50.0" +description = "The little ASGI library that shines." 
+optional = false +python-versions = ">=3.10" +groups = ["main"] +files = [ + {file = "starlette-0.50.0-py3-none-any.whl", hash = "sha256:9e5391843ec9b6e472eed1365a78c8098cfceb7a74bfd4d6b1c0c0095efb3bca"}, + {file = "starlette-0.50.0.tar.gz", hash = "sha256:a2a17b22203254bcbc2e1f926d2d55f3f9497f769416b3190768befe598fa3ca"}, +] + +[package.dependencies] +anyio = ">=3.6.2,<5" + +[package.extras] +full = ["httpx (>=0.27.0,<0.29.0)", "itsdangerous", "jinja2", "python-multipart (>=0.0.18)", "pyyaml"] + +[[package]] +name = "typing-extensions" +version = "4.15.0" +description = "Backported and Experimental Type Hints for Python 3.9+" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548"}, + {file = "typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466"}, +] + +[[package]] +name = "typing-inspection" +version = "0.4.2" +description = "Runtime typing introspection tools" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "typing_inspection-0.4.2-py3-none-any.whl", hash = "sha256:4ed1cacbdc298c220f1bd249ed5287caa16f34d44ef4e9c3d0cbad5b521545e7"}, + {file = "typing_inspection-0.4.2.tar.gz", hash = "sha256:ba561c48a67c5958007083d386c3295464928b01faa735ab8547c5692e87f464"}, +] + +[package.dependencies] +typing-extensions = ">=4.12.0" + +[[package]] +name = "uvicorn" +version = "0.30.6" +description = "The lightning-fast ASGI server." 
+optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "uvicorn-0.30.6-py3-none-any.whl", hash = "sha256:65fd46fe3fda5bdc1b03b94eb634923ff18cd35b2f084813ea79d1f103f711b5"}, + {file = "uvicorn-0.30.6.tar.gz", hash = "sha256:4b15decdda1e72be08209e860a1e10e92439ad5b97cf44cc945fcbee66fc5788"}, +] + +[package.dependencies] +click = ">=7.0" +colorama = {version = ">=0.4", optional = true, markers = "sys_platform == \"win32\" and extra == \"standard\""} +h11 = ">=0.8" +httptools = {version = ">=0.5.0", optional = true, markers = "extra == \"standard\""} +python-dotenv = {version = ">=0.13", optional = true, markers = "extra == \"standard\""} +pyyaml = {version = ">=5.1", optional = true, markers = "extra == \"standard\""} +uvloop = {version = ">=0.14.0,<0.15.0 || >0.15.0,<0.15.1 || >0.15.1", optional = true, markers = "sys_platform != \"win32\" and sys_platform != \"cygwin\" and platform_python_implementation != \"PyPy\" and extra == \"standard\""} +watchfiles = {version = ">=0.13", optional = true, markers = "extra == \"standard\""} +websockets = {version = ">=10.4", optional = true, markers = "extra == \"standard\""} + +[package.extras] +standard = ["colorama (>=0.4) ; sys_platform == \"win32\"", "httptools (>=0.5.0)", "python-dotenv (>=0.13)", "pyyaml (>=5.1)", "uvloop (>=0.14.0,!=0.15.0,!=0.15.1) ; sys_platform != \"win32\" and sys_platform != \"cygwin\" and platform_python_implementation != \"PyPy\"", "watchfiles (>=0.13)", "websockets (>=10.4)"] + +[[package]] +name = "uvloop" +version = "0.22.1" +description = "Fast implementation of asyncio event loop on top of libuv" +optional = false +python-versions = ">=3.8.1" +groups = ["main"] +markers = "sys_platform != \"win32\" and sys_platform != \"cygwin\" and platform_python_implementation != \"PyPy\"" +files = [ + {file = "uvloop-0.22.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ef6f0d4cc8a9fa1f6a910230cd53545d9a14479311e87e3cb225495952eb672c"}, + {file = 
"uvloop-0.22.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7cd375a12b71d33d46af85a3343b35d98e8116134ba404bd657b3b1d15988792"}, + {file = "uvloop-0.22.1-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ac33ed96229b7790eb729702751c0e93ac5bc3bcf52ae9eccbff30da09194b86"}, + {file = "uvloop-0.22.1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:481c990a7abe2c6f4fc3d98781cc9426ebd7f03a9aaa7eb03d3bfc68ac2a46bd"}, + {file = "uvloop-0.22.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a592b043a47ad17911add5fbd087c76716d7c9ccc1d64ec9249ceafd735f03c2"}, + {file = "uvloop-0.22.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:1489cf791aa7b6e8c8be1c5a080bae3a672791fcb4e9e12249b05862a2ca9cec"}, + {file = "uvloop-0.22.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:c60ebcd36f7b240b30788554b6f0782454826a0ed765d8430652621b5de674b9"}, + {file = "uvloop-0.22.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3b7f102bf3cb1995cfeaee9321105e8f5da76fdb104cdad8986f85461a1b7b77"}, + {file = "uvloop-0.22.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:53c85520781d84a4b8b230e24a5af5b0778efdb39142b424990ff1ef7c48ba21"}, + {file = "uvloop-0.22.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:56a2d1fae65fd82197cb8c53c367310b3eabe1bbb9fb5a04d28e3e3520e4f702"}, + {file = "uvloop-0.22.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:40631b049d5972c6755b06d0bfe8233b1bd9a8a6392d9d1c45c10b6f9e9b2733"}, + {file = "uvloop-0.22.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:535cc37b3a04f6cd2c1ef65fa1d370c9a35b6695df735fcff5427323f2cd5473"}, + {file = "uvloop-0.22.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:fe94b4564e865d968414598eea1a6de60adba0c040ba4ed05ac1300de402cd42"}, + {file = 
"uvloop-0.22.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:51eb9bd88391483410daad430813d982010f9c9c89512321f5b60e2cddbdddd6"}, + {file = "uvloop-0.22.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:700e674a166ca5778255e0e1dc4e9d79ab2acc57b9171b79e65feba7184b3370"}, + {file = "uvloop-0.22.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7b5b1ac819a3f946d3b2ee07f09149578ae76066d70b44df3fa990add49a82e4"}, + {file = "uvloop-0.22.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e047cc068570bac9866237739607d1313b9253c3051ad84738cbb095be0537b2"}, + {file = "uvloop-0.22.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:512fec6815e2dd45161054592441ef76c830eddaad55c8aa30952e6fe1ed07c0"}, + {file = "uvloop-0.22.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:561577354eb94200d75aca23fbde86ee11be36b00e52a4eaf8f50fb0c86b7705"}, + {file = "uvloop-0.22.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:1cdf5192ab3e674ca26da2eada35b288d2fa49fdd0f357a19f0e7c4e7d5077c8"}, + {file = "uvloop-0.22.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6e2ea3d6190a2968f4a14a23019d3b16870dd2190cd69c8180f7c632d21de68d"}, + {file = "uvloop-0.22.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0530a5fbad9c9e4ee3f2b33b148c6a64d47bbad8000ea63704fa8260f4cf728e"}, + {file = "uvloop-0.22.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:bc5ef13bbc10b5335792360623cc378d52d7e62c2de64660616478c32cd0598e"}, + {file = "uvloop-0.22.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:1f38ec5e3f18c8a10ded09742f7fb8de0108796eb673f30ce7762ce1b8550cad"}, + {file = "uvloop-0.22.1-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:3879b88423ec7e97cd4eba2a443aa26ed4e59b45e6b76aabf13fe2f27023a142"}, + {file = 
"uvloop-0.22.1-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:4baa86acedf1d62115c1dc6ad1e17134476688f08c6efd8a2ab076e815665c74"}, + {file = "uvloop-0.22.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:297c27d8003520596236bdb2335e6b3f649480bd09e00d1e3a99144b691d2a35"}, + {file = "uvloop-0.22.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c1955d5a1dd43198244d47664a5858082a3239766a839b2102a269aaff7a4e25"}, + {file = "uvloop-0.22.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:b31dc2fccbd42adc73bc4e7cdbae4fc5086cf378979e53ca5d0301838c5682c6"}, + {file = "uvloop-0.22.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:93f617675b2d03af4e72a5333ef89450dfaa5321303ede6e67ba9c9d26878079"}, + {file = "uvloop-0.22.1-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:37554f70528f60cad66945b885eb01f1bb514f132d92b6eeed1c90fd54ed6289"}, + {file = "uvloop-0.22.1-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:b76324e2dc033a0b2f435f33eb88ff9913c156ef78e153fb210e03c13da746b3"}, + {file = "uvloop-0.22.1-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:badb4d8e58ee08dad957002027830d5c3b06aea446a6a3744483c2b3b745345c"}, + {file = "uvloop-0.22.1-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b91328c72635f6f9e0282e4a57da7470c7350ab1c9f48546c0f2866205349d21"}, + {file = "uvloop-0.22.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:daf620c2995d193449393d6c62131b3fbd40a63bf7b307a1527856ace637fe88"}, + {file = "uvloop-0.22.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:6cde23eeda1a25c75b2e07d39970f3374105d5eafbaab2a4482be82f272d5a5e"}, + {file = "uvloop-0.22.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:80eee091fe128e425177fbd82f8635769e2f32ec9daf6468286ec57ec0313efa"}, + {file = 
"uvloop-0.22.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:017bd46f9e7b78e81606329d07141d3da446f8798c6baeec124260e22c262772"}, + {file = "uvloop-0.22.1-cp38-cp38-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c3e5c6727a57cb6558592a95019e504f605d1c54eb86463ee9f7a2dbd411c820"}, + {file = "uvloop-0.22.1-cp38-cp38-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:57df59d8b48feb0e613d9b1f5e57b7532e97cbaf0d61f7aa9aa32221e84bc4b6"}, + {file = "uvloop-0.22.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:55502bc2c653ed2e9692e8c55cb95b397d33f9f2911e929dc97c4d6b26d04242"}, + {file = "uvloop-0.22.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:4a968a72422a097b09042d5fa2c5c590251ad484acf910a651b4b620acd7f193"}, + {file = "uvloop-0.22.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:b45649628d816c030dba3c80f8e2689bab1c89518ed10d426036cdc47874dfc4"}, + {file = "uvloop-0.22.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ea721dd3203b809039fcc2983f14608dae82b212288b346e0bfe46ec2fab0b7c"}, + {file = "uvloop-0.22.1-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0ae676de143db2b2f60a9696d7eca5bb9d0dd6cc3ac3dad59a8ae7e95f9e1b54"}, + {file = "uvloop-0.22.1-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:17d4e97258b0172dfa107b89aa1eeba3016f4b1974ce85ca3ef6a66b35cbf659"}, + {file = "uvloop-0.22.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:05e4b5f86e621cf3927631789999e697e58f0d2d32675b67d9ca9eb0bca55743"}, + {file = "uvloop-0.22.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:286322a90bea1f9422a470d5d2ad82d38080be0a29c4dd9b3e6384320a4d11e7"}, + {file = "uvloop-0.22.1.tar.gz", hash = "sha256:6c84bae345b9147082b17371e3dd5d42775bddce91f885499017f4607fdaf39f"}, +] + +[package.extras] +dev = ["Cython (>=3.0,<4.0)", "setuptools (>=60)"] +docs = ["Sphinx (>=4.1.2,<4.2.0)", 
"sphinx_rtd_theme (>=0.5.2,<0.6.0)", "sphinxcontrib-asyncio (>=0.3.0,<0.4.0)"] +test = ["aiohttp (>=3.10.5)", "flake8 (>=6.1,<7.0)", "mypy (>=0.800)", "psutil", "pyOpenSSL (>=25.3.0,<25.4.0)", "pycodestyle (>=2.11.0,<2.12.0)"] + +[[package]] +name = "watchfiles" +version = "1.1.1" +description = "Simple, modern and high performance file watching and code reload in python." +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "watchfiles-1.1.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:eef58232d32daf2ac67f42dea51a2c80f0d03379075d44a587051e63cc2e368c"}, + {file = "watchfiles-1.1.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:03fa0f5237118a0c5e496185cafa92878568b652a2e9a9382a5151b1a0380a43"}, + {file = "watchfiles-1.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8ca65483439f9c791897f7db49202301deb6e15fe9f8fe2fed555bf986d10c31"}, + {file = "watchfiles-1.1.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f0ab1c1af0cb38e3f598244c17919fb1a84d1629cc08355b0074b6d7f53138ac"}, + {file = "watchfiles-1.1.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3bc570d6c01c206c46deb6e935a260be44f186a2f05179f52f7fcd2be086a94d"}, + {file = "watchfiles-1.1.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e84087b432b6ac94778de547e08611266f1f8ffad28c0ee4c82e028b0fc5966d"}, + {file = "watchfiles-1.1.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:620bae625f4cb18427b1bb1a2d9426dc0dd5a5ba74c7c2cdb9de405f7b129863"}, + {file = "watchfiles-1.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:544364b2b51a9b0c7000a4b4b02f90e9423d97fbbf7e06689236443ebcad81ab"}, + {file = "watchfiles-1.1.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:bbe1ef33d45bc71cf21364df962af171f96ecaeca06bd9e3d0b583efb12aec82"}, + {file = "watchfiles-1.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash 
= "sha256:1a0bb430adb19ef49389e1ad368450193a90038b5b752f4ac089ec6942c4dff4"}, + {file = "watchfiles-1.1.1-cp310-cp310-win32.whl", hash = "sha256:3f6d37644155fb5beca5378feb8c1708d5783145f2a0f1c4d5a061a210254844"}, + {file = "watchfiles-1.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:a36d8efe0f290835fd0f33da35042a1bb5dc0e83cbc092dcf69bce442579e88e"}, + {file = "watchfiles-1.1.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:f57b396167a2565a4e8b5e56a5a1c537571733992b226f4f1197d79e94cf0ae5"}, + {file = "watchfiles-1.1.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:421e29339983e1bebc281fab40d812742268ad057db4aee8c4d2bce0af43b741"}, + {file = "watchfiles-1.1.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6e43d39a741e972bab5d8100b5cdacf69db64e34eb19b6e9af162bccf63c5cc6"}, + {file = "watchfiles-1.1.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f537afb3276d12814082a2e9b242bdcf416c2e8fd9f799a737990a1dbe906e5b"}, + {file = "watchfiles-1.1.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b2cd9e04277e756a2e2d2543d65d1e2166d6fd4c9b183f8808634fda23f17b14"}, + {file = "watchfiles-1.1.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5f3f58818dc0b07f7d9aa7fe9eb1037aecb9700e63e1f6acfed13e9fef648f5d"}, + {file = "watchfiles-1.1.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9bb9f66367023ae783551042d31b1d7fd422e8289eedd91f26754a66f44d5cff"}, + {file = "watchfiles-1.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aebfd0861a83e6c3d1110b78ad54704486555246e542be3e2bb94195eabb2606"}, + {file = "watchfiles-1.1.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:5fac835b4ab3c6487b5dbad78c4b3724e26bcc468e886f8ba8cc4306f68f6701"}, + {file = "watchfiles-1.1.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:399600947b170270e80134ac854e21b3ccdefa11a9529a3decc1327088180f10"}, + {file = 
"watchfiles-1.1.1-cp311-cp311-win32.whl", hash = "sha256:de6da501c883f58ad50db3a32ad397b09ad29865b5f26f64c24d3e3281685849"}, + {file = "watchfiles-1.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:35c53bd62a0b885bf653ebf6b700d1bf05debb78ad9292cf2a942b23513dc4c4"}, + {file = "watchfiles-1.1.1-cp311-cp311-win_arm64.whl", hash = "sha256:57ca5281a8b5e27593cb7d82c2ac927ad88a96ed406aa446f6344e4328208e9e"}, + {file = "watchfiles-1.1.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:8c89f9f2f740a6b7dcc753140dd5e1ab9215966f7a3530d0c0705c83b401bd7d"}, + {file = "watchfiles-1.1.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:bd404be08018c37350f0d6e34676bd1e2889990117a2b90070b3007f172d0610"}, + {file = "watchfiles-1.1.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8526e8f916bb5b9a0a777c8317c23ce65de259422bba5b31325a6fa6029d33af"}, + {file = "watchfiles-1.1.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2edc3553362b1c38d9f06242416a5d8e9fe235c204a4072e988ce2e5bb1f69f6"}, + {file = "watchfiles-1.1.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:30f7da3fb3f2844259cba4720c3fc7138eb0f7b659c38f3bfa65084c7fc7abce"}, + {file = "watchfiles-1.1.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f8979280bdafff686ba5e4d8f97840f929a87ed9cdf133cbbd42f7766774d2aa"}, + {file = "watchfiles-1.1.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dcc5c24523771db3a294c77d94771abcfcb82a0e0ee8efd910c37c59ec1b31bb"}, + {file = "watchfiles-1.1.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1db5d7ae38ff20153d542460752ff397fcf5c96090c1230803713cf3147a6803"}, + {file = "watchfiles-1.1.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:28475ddbde92df1874b6c5c8aaeb24ad5be47a11f87cde5a28ef3835932e3e94"}, + {file = "watchfiles-1.1.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = 
"sha256:36193ed342f5b9842edd3532729a2ad55c4160ffcfa3700e0d54be496b70dd43"}, + {file = "watchfiles-1.1.1-cp312-cp312-win32.whl", hash = "sha256:859e43a1951717cc8de7f4c77674a6d389b106361585951d9e69572823f311d9"}, + {file = "watchfiles-1.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:91d4c9a823a8c987cce8fa2690923b069966dabb196dd8d137ea2cede885fde9"}, + {file = "watchfiles-1.1.1-cp312-cp312-win_arm64.whl", hash = "sha256:a625815d4a2bdca61953dbba5a39d60164451ef34c88d751f6c368c3ea73d404"}, + {file = "watchfiles-1.1.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:130e4876309e8686a5e37dba7d5e9bc77e6ed908266996ca26572437a5271e18"}, + {file = "watchfiles-1.1.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5f3bde70f157f84ece3765b42b4a52c6ac1a50334903c6eaf765362f6ccca88a"}, + {file = "watchfiles-1.1.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:14e0b1fe858430fc0251737ef3824c54027bedb8c37c38114488b8e131cf8219"}, + {file = "watchfiles-1.1.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f27db948078f3823a6bb3b465180db8ebecf26dd5dae6f6180bd87383b6b4428"}, + {file = "watchfiles-1.1.1-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:059098c3a429f62fc98e8ec62b982230ef2c8df68c79e826e37b895bc359a9c0"}, + {file = "watchfiles-1.1.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bfb5862016acc9b869bb57284e6cb35fdf8e22fe59f7548858e2f971d045f150"}, + {file = "watchfiles-1.1.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:319b27255aacd9923b8a276bb14d21a5f7ff82564c744235fc5eae58d95422ae"}, + {file = "watchfiles-1.1.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c755367e51db90e75b19454b680903631d41f9e3607fbd941d296a020c2d752d"}, + {file = "watchfiles-1.1.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:c22c776292a23bfc7237a98f791b9ad3144b02116ff10d820829ce62dff46d0b"}, + {file = 
"watchfiles-1.1.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:3a476189be23c3686bc2f4321dd501cb329c0a0469e77b7b534ee10129ae6374"}, + {file = "watchfiles-1.1.1-cp313-cp313-win32.whl", hash = "sha256:bf0a91bfb5574a2f7fc223cf95eeea79abfefa404bf1ea5e339c0c1560ae99a0"}, + {file = "watchfiles-1.1.1-cp313-cp313-win_amd64.whl", hash = "sha256:52e06553899e11e8074503c8e716d574adeeb7e68913115c4b3653c53f9bae42"}, + {file = "watchfiles-1.1.1-cp313-cp313-win_arm64.whl", hash = "sha256:ac3cc5759570cd02662b15fbcd9d917f7ecd47efe0d6b40474eafd246f91ea18"}, + {file = "watchfiles-1.1.1-cp313-cp313t-macosx_10_12_x86_64.whl", hash = "sha256:563b116874a9a7ce6f96f87cd0b94f7faf92d08d0021e837796f0a14318ef8da"}, + {file = "watchfiles-1.1.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:3ad9fe1dae4ab4212d8c91e80b832425e24f421703b5a42ef2e4a1e215aff051"}, + {file = "watchfiles-1.1.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ce70f96a46b894b36eba678f153f052967a0d06d5b5a19b336ab0dbbd029f73e"}, + {file = "watchfiles-1.1.1-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:cb467c999c2eff23a6417e58d75e5828716f42ed8289fe6b77a7e5a91036ca70"}, + {file = "watchfiles-1.1.1-cp313-cp313t-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:836398932192dae4146c8f6f737d74baeac8b70ce14831a239bdb1ca882fc261"}, + {file = "watchfiles-1.1.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:743185e7372b7bc7c389e1badcc606931a827112fbbd37f14c537320fca08620"}, + {file = "watchfiles-1.1.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:afaeff7696e0ad9f02cbb8f56365ff4686ab205fcf9c4c5b6fdfaaa16549dd04"}, + {file = "watchfiles-1.1.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3f7eb7da0eb23aa2ba036d4f616d46906013a68caf61b7fdbe42fc8b25132e77"}, + {file = "watchfiles-1.1.1-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = 
"sha256:831a62658609f0e5c64178211c942ace999517f5770fe9436be4c2faeba0c0ef"}, + {file = "watchfiles-1.1.1-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:f9a2ae5c91cecc9edd47e041a930490c31c3afb1f5e6d71de3dc671bfaca02bf"}, + {file = "watchfiles-1.1.1-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:d1715143123baeeaeadec0528bb7441103979a1d5f6fd0e1f915383fea7ea6d5"}, + {file = "watchfiles-1.1.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:39574d6370c4579d7f5d0ad940ce5b20db0e4117444e39b6d8f99db5676c52fd"}, + {file = "watchfiles-1.1.1-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7365b92c2e69ee952902e8f70f3ba6360d0d596d9299d55d7d386df84b6941fb"}, + {file = "watchfiles-1.1.1-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bfff9740c69c0e4ed32416f013f3c45e2ae42ccedd1167ef2d805c000b6c71a5"}, + {file = "watchfiles-1.1.1-cp314-cp314-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b27cf2eb1dda37b2089e3907d8ea92922b673c0c427886d4edc6b94d8dfe5db3"}, + {file = "watchfiles-1.1.1-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:526e86aced14a65a5b0ec50827c745597c782ff46b571dbfe46192ab9e0b3c33"}, + {file = "watchfiles-1.1.1-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:04e78dd0b6352db95507fd8cb46f39d185cf8c74e4cf1e4fbad1d3df96faf510"}, + {file = "watchfiles-1.1.1-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5c85794a4cfa094714fb9c08d4a218375b2b95b8ed1666e8677c349906246c05"}, + {file = "watchfiles-1.1.1-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:74d5012b7630714b66be7b7b7a78855ef7ad58e8650c73afc4c076a1f480a8d6"}, + {file = "watchfiles-1.1.1-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:8fbe85cb3201c7d380d3d0b90e63d520f15d6afe217165d7f98c9c649654db81"}, + {file = "watchfiles-1.1.1-cp314-cp314-win32.whl", hash = "sha256:3fa0b59c92278b5a7800d3ee7733da9d096d4aabcfabb9a928918bd276ef9b9b"}, + {file = 
"watchfiles-1.1.1-cp314-cp314-win_amd64.whl", hash = "sha256:c2047d0b6cea13b3316bdbafbfa0c4228ae593d995030fda39089d36e64fc03a"}, + {file = "watchfiles-1.1.1-cp314-cp314-win_arm64.whl", hash = "sha256:842178b126593addc05acf6fce960d28bc5fae7afbaa2c6c1b3a7b9460e5be02"}, + {file = "watchfiles-1.1.1-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:88863fbbc1a7312972f1c511f202eb30866370ebb8493aef2812b9ff28156a21"}, + {file = "watchfiles-1.1.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:55c7475190662e202c08c6c0f4d9e345a29367438cf8e8037f3155e10a88d5a5"}, + {file = "watchfiles-1.1.1-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3f53fa183d53a1d7a8852277c92b967ae99c2d4dcee2bfacff8868e6e30b15f7"}, + {file = "watchfiles-1.1.1-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6aae418a8b323732fa89721d86f39ec8f092fc2af67f4217a2b07fd3e93c6101"}, + {file = "watchfiles-1.1.1-cp314-cp314t-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f096076119da54a6080e8920cbdaac3dbee667eb91dcc5e5b78840b87415bd44"}, + {file = "watchfiles-1.1.1-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:00485f441d183717038ed2e887a7c868154f216877653121068107b227a2f64c"}, + {file = "watchfiles-1.1.1-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a55f3e9e493158d7bfdb60a1165035f1cf7d320914e7b7ea83fe22c6023b58fc"}, + {file = "watchfiles-1.1.1-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c91ed27800188c2ae96d16e3149f199d62f86c7af5f5f4d2c61a3ed8cd3666c"}, + {file = "watchfiles-1.1.1-cp314-cp314t-musllinux_1_1_aarch64.whl", hash = "sha256:311ff15a0bae3714ffb603e6ba6dbfba4065ab60865d15a6ec544133bdb21099"}, + {file = "watchfiles-1.1.1-cp314-cp314t-musllinux_1_1_x86_64.whl", hash = "sha256:a916a2932da8f8ab582f242c065f5c81bed3462849ca79ee357dd9551b0e9b01"}, + {file = "watchfiles-1.1.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = 
"sha256:c882d69f6903ef6092bedfb7be973d9319940d56b8427ab9187d1ecd73438a70"}, + {file = "watchfiles-1.1.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d6ff426a7cb54f310d51bfe83fe9f2bbe40d540c741dc974ebc30e6aa238f52e"}, + {file = "watchfiles-1.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:79ff6c6eadf2e3fc0d7786331362e6ef1e51125892c75f1004bd6b52155fb956"}, + {file = "watchfiles-1.1.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c1f5210f1b8fc91ead1283c6fd89f70e76fb07283ec738056cf34d51e9c1d62c"}, + {file = "watchfiles-1.1.1-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b9c4702f29ca48e023ffd9b7ff6b822acdf47cb1ff44cb490a3f1d5ec8987e9c"}, + {file = "watchfiles-1.1.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:acb08650863767cbc58bca4813b92df4d6c648459dcaa3d4155681962b2aa2d3"}, + {file = "watchfiles-1.1.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:08af70fd77eee58549cd69c25055dc344f918d992ff626068242259f98d598a2"}, + {file = "watchfiles-1.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c3631058c37e4a0ec440bf583bc53cdbd13e5661bb6f465bc1d88ee9a0a4d02"}, + {file = "watchfiles-1.1.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:cf57a27fb986c6243d2ee78392c503826056ffe0287e8794503b10fb51b881be"}, + {file = "watchfiles-1.1.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d7e7067c98040d646982daa1f37a33d3544138ea155536c2e0e63e07ff8a7e0f"}, + {file = "watchfiles-1.1.1-cp39-cp39-win32.whl", hash = "sha256:6c9c9262f454d1c4d8aaa7050121eb4f3aea197360553699520767daebf2180b"}, + {file = "watchfiles-1.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:74472234c8370669850e1c312490f6026d132ca2d396abfad8830b4f1c096957"}, + {file = "watchfiles-1.1.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:17ef139237dfced9da49fb7f2232c86ca9421f666d78c264c7ffca6601d154c3"}, + {file = 
"watchfiles-1.1.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:672b8adf25b1a0d35c96b5888b7b18699d27d4194bac8beeae75be4b7a3fc9b2"}, + {file = "watchfiles-1.1.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77a13aea58bc2b90173bc69f2a90de8e282648939a00a602e1dc4ee23e26b66d"}, + {file = "watchfiles-1.1.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b495de0bb386df6a12b18335a0285dda90260f51bdb505503c02bcd1ce27a8b"}, + {file = "watchfiles-1.1.1-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:db476ab59b6765134de1d4fe96a1a9c96ddf091683599be0f26147ea1b2e4b88"}, + {file = "watchfiles-1.1.1-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:89eef07eee5e9d1fda06e38822ad167a044153457e6fd997f8a858ab7564a336"}, + {file = "watchfiles-1.1.1-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ce19e06cbda693e9e7686358af9cd6f5d61312ab8b00488bc36f5aabbaf77e24"}, + {file = "watchfiles-1.1.1-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3e6f39af2eab0118338902798b5aa6664f46ff66bc0280de76fca67a7f262a49"}, + {file = "watchfiles-1.1.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:cdab464fee731e0884c35ae3588514a9bcf718d0e2c82169c1c4a85cc19c3c7f"}, + {file = "watchfiles-1.1.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:3dbd8cbadd46984f802f6d479b7e3afa86c42d13e8f0f322d669d79722c8ec34"}, + {file = "watchfiles-1.1.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5524298e3827105b61951a29c3512deb9578586abf3a7c5da4a8069df247cccc"}, + {file = "watchfiles-1.1.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b943d3668d61cfa528eb949577479d3b077fd25fb83c641235437bc0b5bc60e"}, + {file = "watchfiles-1.1.1.tar.gz", hash = "sha256:a173cb5c16c4f40ab19cecf48a534c409f7ea983ab8fed0741304a1c0a31b3f2"}, +] + +[package.dependencies] +anyio = 
">=3.0.0" + +[[package]] +name = "websockets" +version = "16.0" +description = "An implementation of the WebSocket Protocol (RFC 6455 & 7692)" +optional = false +python-versions = ">=3.10" +groups = ["main"] +files = [ + {file = "websockets-16.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:04cdd5d2d1dacbad0a7bf36ccbcd3ccd5a30ee188f2560b7a62a30d14107b31a"}, + {file = "websockets-16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8ff32bb86522a9e5e31439a58addbb0166f0204d64066fb955265c4e214160f0"}, + {file = "websockets-16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:583b7c42688636f930688d712885cf1531326ee05effd982028212ccc13e5957"}, + {file = "websockets-16.0-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:7d837379b647c0c4c2355c2499723f82f1635fd2c26510e1f587d89bc2199e72"}, + {file = "websockets-16.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:df57afc692e517a85e65b72e165356ed1df12386ecb879ad5693be08fac65dde"}, + {file = "websockets-16.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:2b9f1e0d69bc60a4a87349d50c09a037a2607918746f07de04df9e43252c77a3"}, + {file = "websockets-16.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:335c23addf3d5e6a8633f9f8eda77efad001671e80b95c491dd0924587ece0b3"}, + {file = "websockets-16.0-cp310-cp310-win32.whl", hash = "sha256:37b31c1623c6605e4c00d466c9d633f9b812ea430c11c8a278774a1fde1acfa9"}, + {file = "websockets-16.0-cp310-cp310-win_amd64.whl", hash = "sha256:8e1dab317b6e77424356e11e99a432b7cb2f3ec8c5ab4dabbcee6add48f72b35"}, + {file = "websockets-16.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:31a52addea25187bde0797a97d6fc3d2f92b6f72a9370792d65a6e84615ac8a8"}, + {file = "websockets-16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:417b28978cdccab24f46400586d128366313e8a96312e4b9362a4af504f3bbad"}, + {file = "websockets-16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:af80d74d4edfa3cb9ed973a0a5ba2b2a549371f8a741e0800cb07becdd20f23d"}, + {file = "websockets-16.0-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:08d7af67b64d29823fed316505a89b86705f2b7981c07848fb5e3ea3020c1abe"}, + {file = "websockets-16.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7be95cfb0a4dae143eaed2bcba8ac23f4892d8971311f1b06f3c6b78952ee70b"}, + {file = "websockets-16.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d6297ce39ce5c2e6feb13c1a996a2ded3b6832155fcfc920265c76f24c7cceb5"}, + {file = "websockets-16.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1c1b30e4f497b0b354057f3467f56244c603a79c0d1dafce1d16c283c25f6e64"}, + {file = "websockets-16.0-cp311-cp311-win32.whl", hash = "sha256:5f451484aeb5cafee1ccf789b1b66f535409d038c56966d6101740c1614b86c6"}, + {file = "websockets-16.0-cp311-cp311-win_amd64.whl", hash = "sha256:8d7f0659570eefb578dacde98e24fb60af35350193e4f56e11190787bee77dac"}, + {file = "websockets-16.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:71c989cbf3254fbd5e84d3bff31e4da39c43f884e64f2551d14bb3c186230f00"}, + {file = "websockets-16.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:8b6e209ffee39ff1b6d0fa7bfef6de950c60dfb91b8fcead17da4ee539121a79"}, + {file = "websockets-16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:86890e837d61574c92a97496d590968b23c2ef0aeb8a9bc9421d174cd378ae39"}, + {file = "websockets-16.0-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:9b5aca38b67492ef518a8ab76851862488a478602229112c4b0d58d63a7a4d5c"}, + {file = "websockets-16.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e0334872c0a37b606418ac52f6ab9cfd17317ac26365f7f65e203e2d0d0d359f"}, + {file = "websockets-16.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = 
"sha256:a0b31e0b424cc6b5a04b8838bbaec1688834b2383256688cf47eb97412531da1"}, + {file = "websockets-16.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:485c49116d0af10ac698623c513c1cc01c9446c058a4e61e3bf6c19dff7335a2"}, + {file = "websockets-16.0-cp312-cp312-win32.whl", hash = "sha256:eaded469f5e5b7294e2bdca0ab06becb6756ea86894a47806456089298813c89"}, + {file = "websockets-16.0-cp312-cp312-win_amd64.whl", hash = "sha256:5569417dc80977fc8c2d43a86f78e0a5a22fee17565d78621b6bb264a115d4ea"}, + {file = "websockets-16.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:878b336ac47938b474c8f982ac2f7266a540adc3fa4ad74ae96fea9823a02cc9"}, + {file = "websockets-16.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:52a0fec0e6c8d9a784c2c78276a48a2bdf099e4ccc2a4cad53b27718dbfd0230"}, + {file = "websockets-16.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:e6578ed5b6981005df1860a56e3617f14a6c307e6a71b4fff8c48fdc50f3ed2c"}, + {file = "websockets-16.0-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:95724e638f0f9c350bb1c2b0a7ad0e83d9cc0c9259f3ea94e40d7b02a2179ae5"}, + {file = "websockets-16.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c0204dc62a89dc9d50d682412c10b3542d748260d743500a85c13cd1ee4bde82"}, + {file = "websockets-16.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:52ac480f44d32970d66763115edea932f1c5b1312de36df06d6b219f6741eed8"}, + {file = "websockets-16.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6e5a82b677f8f6f59e8dfc34ec06ca6b5b48bc4fcda346acd093694cc2c24d8f"}, + {file = "websockets-16.0-cp313-cp313-win32.whl", hash = "sha256:abf050a199613f64c886ea10f38b47770a65154dc37181bfaff70c160f45315a"}, + {file = "websockets-16.0-cp313-cp313-win_amd64.whl", hash = "sha256:3425ac5cf448801335d6fdc7ae1eb22072055417a96cc6b31b3861f455fbc156"}, + {file = "websockets-16.0-cp314-cp314-macosx_10_15_universal2.whl", hash = 
"sha256:8cc451a50f2aee53042ac52d2d053d08bf89bcb31ae799cb4487587661c038a0"}, + {file = "websockets-16.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:daa3b6ff70a9241cf6c7fc9e949d41232d9d7d26fd3522b1ad2b4d62487e9904"}, + {file = "websockets-16.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:fd3cb4adb94a2a6e2b7c0d8d05cb94e6f1c81a0cf9dc2694fb65c7e8d94c42e4"}, + {file = "websockets-16.0-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:781caf5e8eee67f663126490c2f96f40906594cb86b408a703630f95550a8c3e"}, + {file = "websockets-16.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:caab51a72c51973ca21fa8a18bd8165e1a0183f1ac7066a182ff27107b71e1a4"}, + {file = "websockets-16.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:19c4dc84098e523fd63711e563077d39e90ec6702aff4b5d9e344a60cb3c0cb1"}, + {file = "websockets-16.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:a5e18a238a2b2249c9a9235466b90e96ae4795672598a58772dd806edc7ac6d3"}, + {file = "websockets-16.0-cp314-cp314-win32.whl", hash = "sha256:a069d734c4a043182729edd3e9f247c3b2a4035415a9172fd0f1b71658a320a8"}, + {file = "websockets-16.0-cp314-cp314-win_amd64.whl", hash = "sha256:c0ee0e63f23914732c6d7e0cce24915c48f3f1512ec1d079ed01fc629dab269d"}, + {file = "websockets-16.0-cp314-cp314t-macosx_10_15_universal2.whl", hash = "sha256:a35539cacc3febb22b8f4d4a99cc79b104226a756aa7400adc722e83b0d03244"}, + {file = "websockets-16.0-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:b784ca5de850f4ce93ec85d3269d24d4c82f22b7212023c974c401d4980ebc5e"}, + {file = "websockets-16.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:569d01a4e7fba956c5ae4fc988f0d4e187900f5497ce46339c996dbf24f17641"}, + {file = "websockets-16.0-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:50f23cdd8343b984957e4077839841146f67a3d31ab0d00e6b824e74c5b2f6e8"}, + {file = 
"websockets-16.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:152284a83a00c59b759697b7f9e9cddf4e3c7861dd0d964b472b70f78f89e80e"}, + {file = "websockets-16.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:bc59589ab64b0022385f429b94697348a6a234e8ce22544e3681b2e9331b5944"}, + {file = "websockets-16.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:32da954ffa2814258030e5a57bc73a3635463238e797c7375dc8091327434206"}, + {file = "websockets-16.0-cp314-cp314t-win32.whl", hash = "sha256:5a4b4cc550cb665dd8a47f868c8d04c8230f857363ad3c9caf7a0c3bf8c61ca6"}, + {file = "websockets-16.0-cp314-cp314t-win_amd64.whl", hash = "sha256:b14dc141ed6d2dde437cddb216004bcac6a1df0935d79656387bd41632ba0bbd"}, + {file = "websockets-16.0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:349f83cd6c9a415428ee1005cadb5c2c56f4389bc06a9af16103c3bc3dcc8b7d"}, + {file = "websockets-16.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:4a1aba3340a8dca8db6eb5a7986157f52eb9e436b74813764241981ca4888f03"}, + {file = "websockets-16.0-pp311-pypy311_pp73-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:f4a32d1bd841d4bcbffdcb3d2ce50c09c3909fbead375ab28d0181af89fd04da"}, + {file = "websockets-16.0-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0298d07ee155e2e9fda5be8a9042200dd2e3bb0b8a38482156576f863a9d457c"}, + {file = "websockets-16.0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:a653aea902e0324b52f1613332ddf50b00c06fdaf7e92624fbf8c77c78fa5767"}, + {file = "websockets-16.0-py3-none-any.whl", hash = "sha256:1637db62fad1dc833276dded54215f2c7fa46912301a24bd94d45d46a011ceec"}, + {file = "websockets-16.0.tar.gz", hash = "sha256:5f6261a5e56e8d5c42a4497b364ea24d94d9563e8fbd44e78ac40879c60179b5"}, +] + +[metadata] +lock-version = "2.1" +python-versions = "^3.14" +content-hash = 
"16ca72f7a8883fa38de3cfaab4b14bf65e9cff26409706128bef68dd827f82aa" diff --git a/microservices/csitOasValidationApi/pyproject.toml b/microservices/csitOasValidationApi/pyproject.toml new file mode 100644 index 00000000..1eeff67c --- /dev/null +++ b/microservices/csitOasValidationApi/pyproject.toml @@ -0,0 +1,26 @@ +[tool.poetry] +name = "csit-oas-validation-api" +version = "0.1.0" +description = "OpenAPI validation service using BCGov Spectral rulesets" +authors = ["phowells "] +readme = "README.md" +package-mode = false + +[tool.poetry.dependencies] +python = "^3.14" +fastapi = "^0.128.0" +uvicorn = {extras = ["standard"], version = "^0.30.0"} +semver = "^3.0.4" +packaging = "^26.0" +fastapi-offline = "^1.7.6" + +[tool.poetry.group.dev.dependencies] +httpx = "^0.27.0" +pytest = "^8.0" +respx = "^0.22.0" +pytest-asyncio = "^1.3.0" +coverage = "^7.13.1" + +[build-system] +requires = ["poetry-core"] +build-backend = "poetry.core.masonry.api" \ No newline at end of file diff --git a/microservices/csitOasValidationApi/tests/conftest.py b/microservices/csitOasValidationApi/tests/conftest.py new file mode 100644 index 00000000..23e65638 --- /dev/null +++ b/microservices/csitOasValidationApi/tests/conftest.py @@ -0,0 +1,186 @@ +""" +Shared test fixtures and utilities for the entire test suite +""" + +import json +import logging +import os +import sys + +# Add the project root to the Python path +sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))) + +import pytest +from httpx import Request, Response +import respx + +from fastapi import FastAPI +from fastapi.testclient import TestClient + +from csit_validation.main import app as application + + +# ── Your existing app/client fixtures ─────────────────────────────────────── + +@pytest.fixture +def app() -> FastAPI: + application.dependency_overrides = {} + return application + + +@pytest.fixture +def client(app) -> TestClient: + class LoggingTestClient(TestClient): + def request(self, method, 
url, **kwargs): + # ── Log REQUEST ───────────────────────────────────────────────────── + headers = kwargs.get("headers") or {} + sorted_headers = sorted(headers.items(), key=lambda x: x[0].lower()) + + message = f"→ REQUEST:\n{method.upper()} {url}\n" + message += " Headers:\n" + message += "\n".join(f" {key}: {value}" for key, value in sorted_headers) + + # Handle request body + body_raw = kwargs.get("content") or kwargs.get("data") or kwargs.get("json") + if body_raw: + body_str = self._format_body(body_raw) + message += f"\n Body:\n{body_str}" + + http_logger.debug(message) + + # ── Send request ───────────────────────────────────────────────────── + response = super().request(method, url, **kwargs) + + # ── Log RESPONSE ───────────────────────────────────────────────────── + sorted_headers = sorted(response.headers.items(), key=lambda x: x[0].lower()) + + message = f"← RESPONSE:\n{response.status_code} {response.reason_phrase or ''}\n" + message += " Headers:\n" + message += "\n".join(f" {key}: {value}" for key, value in sorted_headers) + + # Handle response body + if response.content: + body_str = self._format_body(response.content) + message += f"\n Body:\n{body_str}" + + http_logger.debug(message) + + return response + + def _format_body(self, raw_body: any) -> str: + """Format body for logging: pretty-print JSON when possible""" + if isinstance(raw_body, (dict, list)): + # Already parsed data (e.g. json=...) 
+ try: + return " " + json.dumps(raw_body, indent=2, ensure_ascii=False).replace("\n", "\n ") + except Exception: + return f" " + + if isinstance(raw_body, (bytes, bytearray)): + try: + text = raw_body.decode("utf-8", errors="replace").strip() + except Exception: + return f" " + + # Try to parse as JSON and pretty-print + try: + data = json.loads(text) + pretty = json.dumps(data, indent=2, ensure_ascii=False) + return " " + pretty.replace("\n", "\n ") + except json.JSONDecodeError: + # Not JSON → return plain text (indented) + return " " + text.replace("\n", "\n ") + + # Fallback for other types + return f" {str(raw_body)}" + + return LoggingTestClient(app) + + +# ── HTTP request/response logging helper ───────────────────────────────────── + +http_logger = logging.getLogger("test.http.detail") +http_logger.setLevel(logging.DEBUG) + + +def log_request(request: Request): + body_str = None + if request.content: + try: + body_str = request.content.decode("utf-8") + except UnicodeDecodeError: + body_str = f"" + + # Sort headers alphabetically for deterministic output + sorted_headers = sorted(request.headers.items()) + + message = f"→ REQUEST: {request.method} {request.url}\n" + message += " Headers:\n" + message += "\n".join(f" {key}: {value}" for key, value in sorted_headers) + if body_str: + message += f"\n Body:\n {body_str}" + + http_logger.debug(message) + + +def log_response(response: Response): + body_str = None + if response.content: + try: + body_str = response.content.decode("utf-8") + except UnicodeDecodeError: + body_str = f"" + + # Sort headers alphabetically for deterministic output + sorted_headers = sorted(response.headers.items()) + + message = f"← RESPONSE: {response.status_code} {response.reason_phrase}\n" + message += " Headers:\n" + message += "\n".join(f" {key}: {value}" for key, value in sorted_headers) + if body_str: + message += f"\n Body:\n {body_str}" + + http_logger.debug(message) + + +@pytest.fixture +def enable_http_logging(): + """ + 
Fixture to enable detailed HTTP logging for all mocked routes in the test. + Use with @pytest.mark.usefixtures("enable_http_logging") + """ + original_side_effect = None + + def _wrap_side_effect(original): + async def wrapped(request, **kwargs): + resp = kwargs.get("response") or original(request, **kwargs) + log_request(request) + log_response(resp) + return resp + return wrapped + + # Apply logging to any new route created during this test + def logged_route(*args, **kwargs): + route = respx.route(*args, **kwargs) + if route._side_effect: + original_side_effect = route._side_effect + route.side_effect = _wrap_side_effect(original_side_effect) + else: + # If no side effect, wrap the default response + route.side_effect = _wrap_side_effect(lambda r: route.default_response) + return route + + original_route = respx.route + respx.route = logged_route + + yield + + # Cleanup after test + respx.route = original_route + + +# fixture to force debug logging output +@pytest.fixture +def http_debug(caplog): + caplog.set_level(logging.DEBUG, logger="test.http.detail") + logging.getLogger("httpx").setLevel(logging.DEBUG) \ No newline at end of file diff --git a/microservices/csitOasValidationApi/tests/unit/resources/github-cache/tags/junk-tag/ruleset.yaml b/microservices/csitOasValidationApi/tests/unit/resources/github-cache/tags/junk-tag/ruleset.yaml new file mode 100644 index 00000000..e69de29b diff --git a/microservices/csitOasValidationApi/tests/unit/resources/github-cache/tags/ruleset-junk-tag/ruleset.yaml b/microservices/csitOasValidationApi/tests/unit/resources/github-cache/tags/ruleset-junk-tag/ruleset.yaml new file mode 100644 index 00000000..e69de29b diff --git a/microservices/csitOasValidationApi/tests/unit/resources/github-cache/tags/ruleset-v1.0.0/ruleset.yaml b/microservices/csitOasValidationApi/tests/unit/resources/github-cache/tags/ruleset-v1.0.0/ruleset.yaml new file mode 100644 index 00000000..e69de29b diff --git 
a/microservices/csitOasValidationApi/tests/unit/resources/github-cache/tags/ruleset-v1.1.0/spectral/basic-ruleset.yaml b/microservices/csitOasValidationApi/tests/unit/resources/github-cache/tags/ruleset-v1.1.0/spectral/basic-ruleset.yaml new file mode 100644 index 00000000..e69de29b diff --git a/microservices/csitOasValidationApi/tests/unit/resources/github-cache/tags/ruleset-v1.1.0/spectral/internal/private/ruleset-a.yaml b/microservices/csitOasValidationApi/tests/unit/resources/github-cache/tags/ruleset-v1.1.0/spectral/internal/private/ruleset-a.yaml new file mode 100644 index 00000000..e69de29b diff --git a/microservices/csitOasValidationApi/tests/unit/resources/github-cache/tags/ruleset-v1.1.0/spectral/internal/private/ruleset-b.yaml b/microservices/csitOasValidationApi/tests/unit/resources/github-cache/tags/ruleset-v1.1.0/spectral/internal/private/ruleset-b.yaml new file mode 100644 index 00000000..e69de29b diff --git a/microservices/csitOasValidationApi/tests/unit/resources/github-cache/tags/ruleset-v1.1.0/spectral/internal/shared/ruleset-a.yaml b/microservices/csitOasValidationApi/tests/unit/resources/github-cache/tags/ruleset-v1.1.0/spectral/internal/shared/ruleset-a.yaml new file mode 100644 index 00000000..e69de29b diff --git a/microservices/csitOasValidationApi/tests/unit/resources/github-cache/tags/ruleset-v1.1.0/spectral/internal/shared/ruleset-b.yaml b/microservices/csitOasValidationApi/tests/unit/resources/github-cache/tags/ruleset-v1.1.0/spectral/internal/shared/ruleset-b.yaml new file mode 100644 index 00000000..e69de29b diff --git a/microservices/csitOasValidationApi/tests/unit/resources/github-cache/tags/ruleset-v1.1.0/spectral/sdx/ruleset.yaml b/microservices/csitOasValidationApi/tests/unit/resources/github-cache/tags/ruleset-v1.1.0/spectral/sdx/ruleset.yaml new file mode 100644 index 00000000..9020d8a5 --- /dev/null +++ b/microservices/csitOasValidationApi/tests/unit/resources/github-cache/tags/ruleset-v1.1.0/spectral/sdx/ruleset.yaml @@ -0,0 
+1,22 @@ +extends: spectral:oas + +rules: + # Your custom rule - MUST have given + then + operation-id-camel-case: + description: operationId should be camelCase (starts with lowercase letter, no separators) + severity: info + given: $..operationId + then: + function: pattern + functionOptions: + match: ^[a-z][a-zA-Z0-9]*$ + + # Disable noisy built-ins - short syntax is allowed + oas3-api-servers: off + info-contact: off + operation-description: off + operation-tag-defined: off + operation-success-response: off + + # Keep hint for missing info description + info-description: hint \ No newline at end of file diff --git a/microservices/csitOasValidationApi/tests/unit/resources/github-cache/tags/ruleset-v1.1.0/spectral/strict-ruleset.yaml b/microservices/csitOasValidationApi/tests/unit/resources/github-cache/tags/ruleset-v1.1.0/spectral/strict-ruleset.yaml new file mode 100644 index 00000000..e69de29b diff --git a/microservices/csitOasValidationApi/tests/unit/resources/github-cache/tags/ruleset-v1.2.3/ruleset.yaml b/microservices/csitOasValidationApi/tests/unit/resources/github-cache/tags/ruleset-v1.2.3/ruleset.yaml new file mode 100644 index 00000000..e69de29b diff --git a/microservices/csitOasValidationApi/tests/unit/resources/github-cache/tags/ruleset-v1.3.0-Beta1/ruleset.yaml b/microservices/csitOasValidationApi/tests/unit/resources/github-cache/tags/ruleset-v1.3.0-Beta1/ruleset.yaml new file mode 100644 index 00000000..e69de29b diff --git a/microservices/csitOasValidationApi/tests/unit/resources/github-cache/tags/ruleset-v1.3.0-beta1/ruleset.yaml b/microservices/csitOasValidationApi/tests/unit/resources/github-cache/tags/ruleset-v1.3.0-beta1/ruleset.yaml new file mode 100644 index 00000000..e69de29b diff --git a/microservices/csitOasValidationApi/tests/unit/resources/github-cache/tags/ruleset-vjunk-tag/ruleset.yaml b/microservices/csitOasValidationApi/tests/unit/resources/github-cache/tags/ruleset-vjunk-tag/ruleset.yaml new file mode 100644 index 
00000000..e69de29b diff --git a/microservices/csitOasValidationApi/tests/unit/resources/github-cache/tags/v1.3.0/ruleset.yaml b/microservices/csitOasValidationApi/tests/unit/resources/github-cache/tags/v1.3.0/ruleset.yaml new file mode 100644 index 00000000..e69de29b diff --git a/microservices/csitOasValidationApi/tests/unit/resources/test-oas-1.yaml b/microservices/csitOasValidationApi/tests/unit/resources/test-oas-1.yaml new file mode 100644 index 00000000..f82da3e3 --- /dev/null +++ b/microservices/csitOasValidationApi/tests/unit/resources/test-oas-1.yaml @@ -0,0 +1,52 @@ +openapi: 3.1.0 +info: + title: Sample API + version: 1.0.0 + +paths: + /users: + get: + operationId: getAllUsers + tags: + - "get" + summary: List all users + description: "" + responses: + '200': + description: OK + content: + application/json: + schema: + type: array + items: true + + post: + operationId: createNewUser-bad + tags: + - "create" + summary: Create user + requestBody: + required: true + content: + application/json: + schema: + type: object + responses: + 201: + description: Created + default: + description: Created + + /users/{id}: + get: + operationId: getUserById_bad + summary: Get user by ID + parameters: + - name: id + in: path + required: true + schema: + type: string + responses: + '200': + description: User found \ No newline at end of file diff --git a/microservices/csitOasValidationApi/tests/unit/test_discovery_api.py b/microservices/csitOasValidationApi/tests/unit/test_discovery_api.py new file mode 100644 index 00000000..e8d7dbfb --- /dev/null +++ b/microservices/csitOasValidationApi/tests/unit/test_discovery_api.py @@ -0,0 +1,135 @@ +""" +API-level unit tests for the Discovery endpoints +Tests how the service responds via HTTP under various conditions. +No direct instantiation of DiscoveryApiImpl — all via TestClient. 
+""" + +import pytest +import respx +from pathlib import Path +from fastapi.testclient import TestClient +from csit_validation.core.config import get_github_tag_cache_path +from urllib.parse import quote + +def urlquote(s: str) -> str: + """Encode everything, including / → %2F (no safe characters)""" + return quote(s, safe='') + +class TestDiscoveryApi: + + # ── Successful version list → 200 OK with sorted versions ────────────── + + @respx.mock + @pytest.mark.usefixtures("enable_http_logging", "http_debug") # ← if you want logs + def test_list_versions_success_200(self, client: TestClient, caplog, monkeypatch): + + cache_dir = Path(__file__).parent / "resources" / "github-cache" + monkeypatch.setenv("GITHUB_TAG_CACHE_PATH", str(cache_dir.resolve())) + + # Invalidate the cache so next read sees the new env value + get_github_tag_cache_path.cache_clear() + + response = client.get("/versions") + + assert response.status_code == 200 + data = response.json() + assert "versions" in data + assert data["versions"] == ["v1.3.0-beta1","v1.3.0-Beta1","v1.2.3","v1.1.0","v1.0.0"] # sorted newest first + + assert "200" in caplog.text + + # ── Empty version list → 200 OK with empty array ─────────────────────── + + @respx.mock + @pytest.mark.usefixtures("enable_http_logging", "http_debug") + def test_list_versions_empty_success_200(self, client: TestClient, caplog, monkeypatch): + + cache_dir = Path(__file__).parent / "resources" / "github-cache-empty" + monkeypatch.setenv("GITHUB_TAG_CACHE_PATH", str(cache_dir.resolve())) + + # Invalidate the cache so next read sees the new env value + get_github_tag_cache_path.cache_clear() + + response = client.get("/versions") + + assert response.status_code == 200 + data = response.json() + assert "versions" in data + assert data["versions"] == [] # empty + + assert "200" in caplog.text + + # Version (tag) not found → 404 Not Found + @respx.mock + def test_list_rulesets_version_not_found_404(self, client: TestClient, monkeypatch): + + 
cache_dir = Path(__file__).parent / "resources" / "github-cache" + monkeypatch.setenv("GITHUB_TAG_CACHE_PATH", str(cache_dir.resolve())) + + # Invalidate the cache so next read sees the new env value + get_github_tag_cache_path.cache_clear() + + version = "v1.3.0" # Does not have the prefix + + response = client.get(f"/versions/{version}/rulesets") + + assert response.status_code == 404 + data = response.json() + assert "detail" in data + assert f"Version '{version}' not found" in data["detail"] + + # Successful ruleset list → 200 OK with sorted rulesets + @respx.mock + @pytest.mark.usefixtures("enable_http_logging", "http_debug") + def test_list_rulesets_success_200(self, client: TestClient, caplog, monkeypatch): + + cache_dir = Path(__file__).parent / "resources" / "github-cache" + monkeypatch.setenv("GITHUB_TAG_CACHE_PATH", str(cache_dir.resolve())) + + # Invalidate the cache so next read sees the new env value + get_github_tag_cache_path.cache_clear() + + version = "v1.1.0" + + response = client.get(f"/versions/{urlquote(version)}/rulesets") + + assert response.status_code == 200 + data = response.json() + assert "version" in data + assert data["version"] == version + assert "rulesets" in data + assert data["rulesets"] == [ + "basic-ruleset", + "strict-ruleset", + "internal/private/ruleset-a", + "internal/private/ruleset-b", + "internal/shared/ruleset-a", + "internal/shared/ruleset-b", + "sdx/ruleset" + ] # sorted alphabetically + + assert "200" in caplog.text + + # Successful ruleset list → 200 OK with no rulesets + @respx.mock + @pytest.mark.usefixtures("enable_http_logging", "http_debug") + def test_list_rulesets_empty_success_200(self, client: TestClient, caplog, monkeypatch): + + cache_dir = Path(__file__).parent / "resources" / "github-cache" + monkeypatch.setenv("GITHUB_TAG_CACHE_PATH", str(cache_dir.resolve())) + + # Invalidate the cache so next read sees the new env value + get_github_tag_cache_path.cache_clear() + + version = "v1.0.0" + + response = 
client.get(f"/versions/{urlquote(version)}/rulesets") + + assert response.status_code == 200 + data = response.json() + assert "version" in data + assert data["version"] == version + assert "rulesets" in data + assert data["rulesets"] == [ ] + + assert "200" in caplog.text \ No newline at end of file diff --git a/microservices/csitOasValidationApi/tests/unit/test_validation_api.py b/microservices/csitOasValidationApi/tests/unit/test_validation_api.py new file mode 100644 index 00000000..5898c4e8 --- /dev/null +++ b/microservices/csitOasValidationApi/tests/unit/test_validation_api.py @@ -0,0 +1,305 @@ +""" +API-level unit tests for the Validation endpoints +Tests how the service responds via HTTP under various conditions. +No direct instantiation of ValidationApiImpl — all via TestClient. +""" + +import logging +import pytest +import respx +import textwrap +from pathlib import Path +from fastapi.testclient import TestClient +from pathlib import Path +from csit_validation.core.config import get_github_tag_cache_path +from urllib.parse import quote + +def urlquote(s: str) -> str: + """Encode everything, including / → %2F (no safe characters)""" + return quote(s, safe='') + +logger = logging.getLogger(__name__) + +class TestDiscoveryApi: + + test_version = "v1.1.0" + + test_ruleset = "sdx/ruleset" + + # ── Helper to locate resources directory ───────────────────────────────── + @pytest.fixture(scope="class") + def resources_dir(self): + # From tests/unit/ → tests/ → resources/ + return Path(__file__).parent / "resources" + + # ── Version not found → 404 Not Found ────────────── + + @respx.mock + @pytest.mark.usefixtures("enable_http_logging", "http_debug") + def test_create_validation_version_not_found_404( + self, + client: TestClient, + resources_dir, + monkeypatch, + ): + + cache_dir = Path(__file__).parent / "resources" / "github-cache" + monkeypatch.setenv("GITHUB_TAG_CACHE_PATH", str(cache_dir.resolve())) + + # Invalidate the cache so next read sees the new env 
value + get_github_tag_cache_path.cache_clear() + + invalid_version = "v1.3.0-alpha" + + # ------------------------- + # Load OpenAPI spec + # ------------------------- + spec_path = resources_dir / "test-oas-1.yaml" + if not spec_path.exists(): + pytest.fail(f"OpenAPI spec file not found at: {spec_path}") + + openapi_content = spec_path.read_text(encoding="utf-8") + + response = client.post( + f"/versions/{urlquote(invalid_version)}/rulesets/{urlquote(self.test_ruleset)}/validations", + content=openapi_content.encode("utf-8"), + headers={"Content-Type": "application/yaml"} + ) + + assert response.status_code == 404 + data = response.json() + assert "detail" in data + assert f"Version '{invalid_version}' not found" in data["detail"] + + # ── Ruleset not found → 404 Not Found ────────────── + + @respx.mock + @pytest.mark.usefixtures("enable_http_logging", "http_debug") + def test_create_validation_ruleset_not_found_404( + self, + client: TestClient, + resources_dir, + monkeypatch, + ): + + cache_dir = Path(__file__).parent / "resources" / "github-cache" + monkeypatch.setenv("GITHUB_TAG_CACHE_PATH", str(cache_dir.resolve())) + + # Invalidate the cache so next read sees the new env value + get_github_tag_cache_path.cache_clear() + + invalid_ruleset = "invalid-ruleset" + + # ------------------------- + # Load OpenAPI spec + # ------------------------- + spec_path = resources_dir / "test-oas-1.yaml" + if not spec_path.exists(): + pytest.fail(f"OpenAPI spec file not found at: {spec_path}") + + openapi_content = spec_path.read_text(encoding="utf-8") + + response = client.post( + f"/versions/{urlquote(self.test_version)}/rulesets/{urlquote(invalid_ruleset)}/validations", + content=openapi_content.encode("utf-8"), + headers={"Content-Type": "application/yaml"} + ) + + assert response.status_code == 404 + data = response.json() + assert "detail" in data + assert f"Ruleset '{invalid_ruleset}' not found for Version '{self.test_version}'" in data["detail"] + + # ── Request 
missing body → 400 Bad Request ────────────── + + @respx.mock + @pytest.mark.usefixtures("enable_http_logging", "http_debug") + def test_create_validation_missing_body_400( + self, + client: TestClient, + monkeypatch, + ): + + cache_dir = Path(__file__).parent / "resources" / "github-cache" + monkeypatch.setenv("GITHUB_TAG_CACHE_PATH", str(cache_dir.resolve())) + + # Invalidate the cache so next read sees the new env value + get_github_tag_cache_path.cache_clear() + + response = client.post(f"/versions/{urlquote(self.test_version)}/rulesets/{urlquote(self.test_ruleset)}/validations") + + assert response.status_code == 400 + problem = response.json() + + # Check required RFC 9457 fields + assert problem["type"] == "tag:validation-errors" + assert problem["title"] == "Bad Request" + assert problem["status"] == 400 + + # Check the errors array + assert "errors" in problem, "Problem detail should contain 'errors' array" + assert len(problem["errors"]) == 1, "Expected exactly one error for missing body" + + error = problem["errors"][0] + + # Validate the individual error item + assert error["location"] == "body" + assert error["code"] == "MISSING_BODY" + assert error["message"] == "Request body is required and cannot be empty" + assert error["type"] == "tag:validation-error" + + # Optional: check that no unnecessary fields are present + assert "field" not in error, "field should not be present for body-level errors" + assert "received" not in error, "received should not be present when body is missing" + + @respx.mock + @pytest.mark.usefixtures("enable_http_logging", "http_debug") + def test_create_validation_unsupported_content_type_415( + self, + client: TestClient, + monkeypatch, + ): + + cache_dir = Path(__file__).parent / "resources" / "github-cache" + monkeypatch.setenv("GITHUB_TAG_CACHE_PATH", str(cache_dir.resolve())) + + # Invalidate the cache so next read sees the new env value + get_github_tag_cache_path.cache_clear() + + # Properties file + 
properties_content = textwrap.dedent("""\ + # Database connection settings + db.host=localhost + db.port=5432 + db.name=app_production + db.user=app_user + """).strip() + + # Send as application/x-java-properties + response = client.post( + f"/versions/{urlquote(self.test_version)}/rulesets/{urlquote(self.test_ruleset)}/validations", + content=properties_content.encode("utf-8"), + headers={ + "Content-Type": "application/x-java-properties" + } + ) + + # Expect 415 Unsupported Media Type + assert response.status_code == 415 + + problem = response.json() + + # Check required RFC 9457 fields + assert problem["type"] == "tag:validation-errors" + assert problem["title"] == "Unsupported Media Type" + assert problem["status"] == 415 + + # Check the errors array + assert "errors" in problem, "Problem detail should contain 'errors' array" + assert len(problem["errors"]) == 1, "Expected exactly one error for unsupported media type" + + error = problem["errors"][0] + + # Validate the individual error item + assert error["location"] == "header" + assert error["code"] == "UNSUPPORTED_MEDIA_TYPE" + assert error["message"] == "Only JSON and YAML are supported" + assert error["type"] == "tag:validation-error" + assert error["field"] == "content-type" + assert error["received"] == "application/x-java-properties" + + # Optional: check no irrelevant fields + assert "pointer" not in error, "pointer should not be present for header errors" + + @pytest.mark.usefixtures("enable_http_logging", "http_debug") + @respx.mock + def test_create_validation_success_200( + self, + client: "TestClient", + resources_dir, + monkeypatch, + ): + + cache_dir = Path(__file__).parent / "resources" / "github-cache" + monkeypatch.setenv("GITHUB_TAG_CACHE_PATH", str(cache_dir.resolve())) + + # Invalidate the cache so next read sees the new env value + get_github_tag_cache_path.cache_clear() + + # ------------------------- + # Load OpenAPI spec + # ------------------------- + spec_path = resources_dir / 
"test-oas-1.yaml" + if not spec_path.exists(): + pytest.fail(f"OpenAPI spec file not found at: {spec_path}") + + openapi_content = spec_path.read_text(encoding="utf-8") + + # ------------------------- + # Make API request + # ------------------------- + + response = client.post( + f"/versions/{urlquote(self.test_version)}/rulesets/{urlquote(self.test_ruleset)}/validations", + content=openapi_content.encode("utf-8"), + headers={"Content-Type": "application/yaml"} + ) + + # ------------------------- + # Assertions + # ------------------------- + assert response.status_code == 200, f"Expected 200, got {response.status_code}" + + data = response.json() + + # Basic structure + assert data["valid"] is False + assert data["version"] == self.test_version + assert data["ruleset"] == self.test_ruleset + + # Summary counts - match your latest response + summary = data["summary"] + assert summary["errors"] == 1, f"Expected 1 error, got {summary['errors']}" + assert summary["warnings"] == 1, f"Expected 1 warning, got {summary['warnings']}" + assert summary["infos"] == 2, f"Expected 2 infos, got {summary['infos']}" + assert summary["hints"] == 1, f"Expected 1 hint, got {summary['hints']}" + + # Results array + results = data["results"] + assert len(results) == 5 + + index = 0 + # Result 3 - parser (error) + assert results[index]["code"] == "parser" + assert results[index]["message"] == "Mapping key must be a string scalar rather than number" + assert results[index]["severity"] == "error" + assert results[index]["path"] == ["paths", "/users", "post", "responses", "201"] + + index = index + 1 + # Result 4 - operation-tags (warn) + assert results[index]["code"] == "operation-tags" + assert results[index]["message"] == "Operation must have non-empty \"tags\" array." 
+ assert results[index]["severity"] == "warn" + assert results[index]["path"] == ["paths", "/users/{id}", "get"] + + index = index + 1 + # Result 2 - operation-id-camel-case (info) + assert results[index]["code"] == "operation-id-camel-case" + assert results[index]["message"] == "operationId should be camelCase (starts with lowercase letter, no separators)" + assert results[index]["severity"] == "info" + assert results[index]["path"] == ["paths", "/users", "post", "operationId"] + + index = index + 1 + # Result 5 - operation-id-camel-case (info) + assert results[index]["code"] == "operation-id-camel-case" + assert results[index]["message"] == "operationId should be camelCase (starts with lowercase letter, no separators)" + assert results[index]["severity"] == "info" + assert results[index]["path"] == ["paths", "/users/{id}", "get", "operationId"] + + index = index + 1 + # Result 1 - info-description (hint) + assert results[index]["code"] == "info-description" + assert results[index]["message"] == "Info \"description\" must be present and non-empty string." + assert results[index]["severity"] == "hint" + assert results[index]["path"] == ["info"] +