diff --git a/.docker-compose.env b/.docker-compose.env index 4154eca9f..beceae1b8 100644 --- a/.docker-compose.env +++ b/.docker-compose.env @@ -1,7 +1,6 @@ CELERY_BROKER_URL=amqp://guest:guest@rabbitmq:5672 DATABASE_HOST=postgres ELASTICSEARCH8_URL=https://elastic8:9200/ -# ELASTICSEARCH5_URL=http://elasticsearch:9200/ LOGIN_REDIRECT_URL=http://localhost:8003/ OSF_API_URL=http://localhost:8000 RABBITMQ_HOST=rabbitmq diff --git a/.dockerignore b/.dockerignore index 1b6f1a0ec..be5c0bfdf 100644 --- a/.dockerignore +++ b/.dockerignore @@ -1,28 +1,8 @@ **/.git/ **/*.pyc -README.md Dockerfile docker-compose.yml .dockerignore .gitignore celerybeat.pid -/static/ - -/au.*/ -/be.*/ -/br.*/ -/ca.*/ -/ch.*/ -/com.*/ -/edu.*/ -/et.*/ -/eu.*/ -/gov.*/ -/info.*/ -/io.*/ -/org.*/ -/pt.*/ -/ru.*/ -/tr.*/ -/uk.*/ -/za.*/ +.venv/ diff --git a/.github/workflows/run_tests.yml b/.github/workflows/run_tests.yml index 24ec48af4..9fc98da63 100644 --- a/.github/workflows/run_tests.yml +++ b/.github/workflows/run_tests.yml @@ -9,12 +9,35 @@ permissions: checks: write # for coveralls jobs: + lint_and_type: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + + - name: Install poetry + run: pipx install poetry + + - name: setup python + uses: actions/setup-python@v5 + with: + python-version: '3.13' + cache: 'poetry' + + - name: install dependencies + run: poetry install --with dev + + - name: flake it + run: poetry run flake8 . 
+ + - name: type-check + run: poetry run mypy trove + run_tests: strategy: fail-fast: false matrix: - python-version: ['3.10'] # TODO: 3.11, 3.12 - postgres-version: ['15', '17'] + python-version: ['3.13'] + postgres-version: ['17'] runs-on: ubuntu-latest services: postgres: @@ -30,19 +53,13 @@ jobs: ports: - 5432:5432 elasticsearch8: - image: elasticsearch:8.7.0 + image: elasticsearch:8.18.1 env: xpack.security.enabled: false node.name: singlenode cluster.initial_master_nodes: singlenode ports: - 9208:9200 - elasticsearch5: - image: elasticsearch:5.4 - env: - ES_JAVA_OPTS: "-Xms512m -Xmx512m" - ports: - - 9205:9200 rabbitmq: image: rabbitmq:management ports: @@ -54,32 +71,25 @@ jobs: - name: install non-py dependencies run: sudo apt-get update && sudo apt-get install -y libxml2-dev libxslt1-dev libpq-dev git gcc - - name: set up python${{ matrix.python-version }} + - name: Install poetry + run: pipx install poetry + + - name: setup python uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} - cache: pip - cache-dependency-path: | - requirements.txt - dev-requirements.txt - - - name: install py dependencies - run: pip install -r dev-requirements.txt + cache: 'poetry' - - name: install share - run: python setup.py develop - - - name: flake it - run: flake8 . 
+ - name: install despondencies + run: poetry install --with dev - name: run tests run: | - coverage run -m pytest --create-db -x - coverage xml -o _shtrove_coverage.xml + poetry run coverage run -m pytest --create-db -x + poetry run coverage xml -o _shtrove_coverage.xml env: DATABASE_PASSWORD: postgres ELASTICSEARCH8_URL: http://localhost:9208/ - # ELASTICSEARCH5_URL: http://localhost:9205/ - name: coveralls uses: coverallsapp/github-action@v2 diff --git a/Dockerfile b/Dockerfile index 1d79b5beb..87c4a9d52 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,4 +1,4 @@ -FROM python:3.10-slim-bullseye as app +FROM python:3.13-slim-bullseye AS app RUN apt-get update \ && apt-get install -y \ @@ -22,38 +22,58 @@ RUN apt-get update \ RUN update-ca-certificates +# set working directory once, use relative paths from "./" RUN mkdir -p /code WORKDIR /code -RUN pip install -U pip -RUN pip install uwsgi==2.0.21 +### +# python dependencies -COPY ./requirements.txt /code/requirements.txt -COPY ./constraints.txt /code/constraints.txt +# note: installs dependencies on the system, roundabouts `/usr/local/lib/python3.13/site-packages/` -RUN pip install --no-cache-dir -c /code/constraints.txt -r /code/requirements.txt +ENV POETRY_NO_INTERACTION=1 \ + POETRY_VIRTUALENVS_OPTIONS_ALWAYS_COPY=1 \ + POETRY_VIRTUALENVS_CREATE=0 \ + POETRY_VIRTUALENVS_IN_PROJECT=0 \ + POETRY_CACHE_DIR=/tmp/poetry-cache \ + POETRY_HOME=/tmp/poetry-venv -RUN apt-get remove -y \ - gcc \ - zlib1g-dev +RUN python -m venv $POETRY_HOME + +RUN $POETRY_HOME/bin/pip install poetry==2.1.3 + +COPY pyproject.toml poetry.lock ./ + +RUN $POETRY_HOME/bin/poetry install --compile --no-root + +COPY ./ ./ -COPY ./ /code/ +RUN $POETRY_HOME/bin/poetry install --compile --only-root RUN python manage.py collectstatic --noinput ARG GIT_TAG= ARG GIT_COMMIT= -ENV VERSION ${GIT_TAG} -ENV GIT_COMMIT ${GIT_COMMIT} - -RUN python setup.py develop +ENV VERSION=${GIT_TAG} +ENV GIT_COMMIT=${GIT_COMMIT} CMD ["python", "manage.py", "--help"] +### 
Dev +FROM app AS dev + +RUN $POETRY_HOME/bin/poetry install --compile --only dev + ### Dist FROM app AS dist -### Dev -FROM app AS dev +RUN $POETRY_HOME/bin/poetry install --compile --only deploy -RUN pip install --no-cache-dir -c /code/constraints.txt -r /code/dev-requirements.txt +# remove packages needed only for install +RUN apt-get remove -y \ + gcc \ + zlib1g-dev \ + && apt-get clean \ + && apt-get autoremove -y \ + && rm -rf /var/lib/apt/lists/* \ + && rm -rf /tmp/poetry-* diff --git a/dev-requirements.txt b/dev-requirements.txt deleted file mode 100644 index 070ac2960..000000000 --- a/dev-requirements.txt +++ /dev/null @@ -1,13 +0,0 @@ --r requirements.txt - -behave==1.2.6 -coveralls==3.1.0 -django-debug-toolbar==3.2.1 -factory-boy==3.2.0 -faker==8.10.0 -flake8==5.0.4 -httpretty==1.1.3 -pytest-benchmark==3.4.1 -pytest==6.2.4 -pytest-django==4.4.0 -jedi==0.18.0 diff --git a/docker-compose.yml b/docker-compose.yml index 62091170d..b158d2e12 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -32,7 +32,7 @@ services: #################### elastic8_setup: - image: docker.elastic.co/elasticsearch/elasticsearch:8.7.0 + image: docker.elastic.co/elasticsearch/elasticsearch:8.18.3 volumes: - elastic8_cert_vol:/usr/share/elasticsearch/config/certs user: "0" @@ -89,7 +89,7 @@ services: depends_on: elastic8_setup: condition: service_healthy - image: docker.elastic.co/elasticsearch/elasticsearch:8.7.0 + image: docker.elastic.co/elasticsearch/elasticsearch:8.18.3 ports: - 9208:9200 volumes: @@ -126,21 +126,12 @@ services: - share_network postgres: - image: postgres:10 - command: - - /bin/bash - - -c - - echo "$$POSTGRES_INITDB" > /docker-entrypoint-initdb.d/commands.sh && - chmod +x /docker-entrypoint-initdb.d/commands.sh && - /docker-entrypoint.sh postgres + image: postgres:15.4 ports: - 5433:5432 environment: POSTGRES_DB: share POSTGRES_HOST_AUTH_METHOD: trust - POSTGRES_INITDB: | - sed -i -e 's/max_connections.*/max_connections = 5000/' 
/var/lib/postgresql/data/postgresql.conf - sed -i -e 's/#log_min_duration_statement = .*/log_min_duration_statement = 0/' /var/lib/postgresql/data/postgresql.conf volumes: - "${POSTGRES_DATA_VOL:-postgres_data_vol}:/var/lib/postgresql/data/" stdin_open: true @@ -152,27 +143,28 @@ services: ################## requirements: - image: quay.io/centerforopenscience/share:develop + build: + context: . + dockerfile: Dockerfile + target: dev command: - /bin/bash - -c - - apt-get update && - apt-get install -y gcc && - pip install -r requirements.txt -r dev-requirements.txt && - (python3 -m compileall /usr/local/lib/python3.10 || true) && - rm -Rf /python3.10/* && - apt-get remove -y gcc && - cp -Rf -p /usr/local/lib/python3.10 / && - python3 setup.py develop + - $$POETRY_HOME/bin/poetry install --compile --with=dev && + rm -Rf /python3.13/* && + cp -Rf -p /usr/local/lib/python3.13 / restart: 'no' volumes: - ./:/code:cached - - share_requirements_vol:/python3.10 + - share_requirements_vol:/python3.13 networks: - share_network worker: - image: quay.io/centerforopenscience/share:develop + build: + context: . + dockerfile: Dockerfile + target: dev command: /bin/bash -c 'cp -r /elastic8_certs /elastic_certs && chown -R daemon:daemon /elastic_certs/ && /usr/local/bin/celery --app project worker --uid daemon -l INFO' depends_on: @@ -183,7 +175,7 @@ services: - indexer volumes: - ./:/code:cached - - share_requirements_vol:/usr/local/lib/python3.10 + - share_requirements_vol:/usr/local/lib/python3.13 - elastic8_cert_vol:/elastic8_certs env_file: - .docker-compose.env @@ -195,7 +187,10 @@ services: - share_network web: - image: quay.io/centerforopenscience/share:develop + build: + context: . 
+ dockerfile: Dockerfile + target: dev command: python manage.py runserver 0.0.0.0:8000 ports: - 8003:8000 @@ -205,7 +200,7 @@ services: - elastic8 volumes: - ./:/code:cached - - share_requirements_vol:/usr/local/lib/python3.10 + - share_requirements_vol:/usr/local/lib/python3.13 - elastic8_cert_vol:/elastic8_certs env_file: - .docker-compose.env @@ -217,7 +212,10 @@ services: - share_network indexer: - image: quay.io/centerforopenscience/share:develop + build: + context: . + dockerfile: Dockerfile + target: dev command: python manage.py shtrove_indexer_run depends_on: - postgres @@ -225,7 +223,7 @@ services: - elastic8 volumes: - ./:/code:cached - - share_requirements_vol:/usr/local/lib/python3.10 + - share_requirements_vol:/usr/local/lib/python3.13 - elastic8_cert_vol:/elastic8_certs env_file: - .docker-compose.env diff --git a/mypy.ini b/mypy.ini new file mode 100644 index 000000000..894c8a056 --- /dev/null +++ b/mypy.ini @@ -0,0 +1,79 @@ +[mypy] +python_version = 3.13 + +# display options +show_column_numbers = True +pretty = True + +# start with an ideal: enable strict type-checking, then loosen in module-specific config +# see https://mypy.readthedocs.io/en/stable/existing_code.html#introduce-stricter-options +strict = True +## BEGIN possible loosenings from `strict`: +# disallow_subclassing_any = False +# warn_unused_configs = False +# warn_redundant_casts = False +# warn_unused_ignores = False +# strict_equality = False +# strict_concatenate = False +# check_untyped_defs = False +# disallow_untyped_decorators = False +# disallow_any_generics = False +# disallow_untyped_calls = False +# disallow_incomplete_defs = False +# disallow_untyped_defs = False +# no_implicit_reexport = False +# warn_return_any = False +## END loosenings of `strict` + +# prefer types that can be understood by reading code in only one place +local_partial_types = True +# avoid easily-avoidable dead code +warn_unreachable = True +# prefer direct imports +implicit_reexport = False + +# 
got untyped dependencies -- this is fine +ignore_missing_imports = True +disable_error_code = import-untyped,import-not-found + + +### +# module-specific config + +## sharev2 code; largely unannotated +[mypy-share.*,api.*,project.*,osf_oauth2_adapter.*,manage] +# loosen strict: +disallow_subclassing_any = False +disallow_untyped_decorators = False +disallow_any_generics = False +disallow_untyped_calls = False +disallow_incomplete_defs = False +disallow_untyped_defs = False +warn_return_any = False +disable_error_code = var-annotated,attr-defined + +## django migrations are whatever +[mypy-*.migrations.*] +strict = False +disallow_subclassing_any = False + +## tests are looser +[mypy-tests.*] +disallow_untyped_defs = False + +## trove code aiming to someday be well-annotated (except for the django...) +[mypy-trove.*] +disallow_untyped_decorators = False +disallow_any_generics = False +warn_return_any = False + +[mypy-trove.views.*,trove.admin,trove.apps] +disallow_subclassing_any = False +disallow_untyped_defs = False +disallow_incomplete_defs = False + +[mypy-trove.models.*] +disallow_subclassing_any = False +disallow_untyped_defs = False +disallow_incomplete_defs = False +disable_error_code = var-annotated,attr-defined diff --git a/poetry.lock b/poetry.lock new file mode 100644 index 000000000..3f1f85e66 --- /dev/null +++ b/poetry.lock @@ -0,0 +1,2036 @@ +# This file is automatically @generated by Poetry 2.1.3 and should not be changed by hand. + +[[package]] +name = "amqp" +version = "5.3.1" +description = "Low-level AMQP client for Python (fork of amqplib)." 
+optional = false +python-versions = ">=3.6" +groups = ["main"] +files = [ + {file = "amqp-5.3.1-py3-none-any.whl", hash = "sha256:43b3319e1b4e7d1251833a93d672b4af1e40f3d632d479b98661a95f117880a2"}, + {file = "amqp-5.3.1.tar.gz", hash = "sha256:cddc00c725449522023bad949f70fff7b48f0b1ade74d170a6f10ab044739432"}, +] + +[package.dependencies] +vine = ">=5.0.0,<6.0.0" + +[[package]] +name = "asgiref" +version = "3.8.1" +description = "ASGI specs, helper code, and adapters" +optional = false +python-versions = ">=3.8" +groups = ["main", "dev"] +files = [ + {file = "asgiref-3.8.1-py3-none-any.whl", hash = "sha256:3e1e3ecc849832fe52ccf2cb6686b7a55f82bb1d6aee72a58826471390335e47"}, + {file = "asgiref-3.8.1.tar.gz", hash = "sha256:c343bd80a0bec947a9860adb4c432ffa7db769836c64238fc34bdc3fec84d590"}, +] + +[package.extras] +tests = ["mypy (>=0.800)", "pytest", "pytest-asyncio"] + +[[package]] +name = "bcrypt" +version = "4.3.0" +description = "Modern password hashing for your software and your servers" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "bcrypt-4.3.0-cp313-cp313t-macosx_10_12_universal2.whl", hash = "sha256:f01e060f14b6b57bbb72fc5b4a83ac21c443c9a2ee708e04a10e9192f90a6281"}, + {file = "bcrypt-4.3.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5eeac541cefd0bb887a371ef73c62c3cd78535e4887b310626036a7c0a817bb"}, + {file = "bcrypt-4.3.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:59e1aa0e2cd871b08ca146ed08445038f42ff75968c7ae50d2fdd7860ade2180"}, + {file = "bcrypt-4.3.0-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:0042b2e342e9ae3d2ed22727c1262f76cc4f345683b5c1715f0250cf4277294f"}, + {file = "bcrypt-4.3.0-cp313-cp313t-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:74a8d21a09f5e025a9a23e7c0fd2c7fe8e7503e4d356c0a2c1486ba010619f09"}, + {file = "bcrypt-4.3.0-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = 
"sha256:0142b2cb84a009f8452c8c5a33ace5e3dfec4159e7735f5afe9a4d50a8ea722d"}, + {file = "bcrypt-4.3.0-cp313-cp313t-manylinux_2_34_aarch64.whl", hash = "sha256:12fa6ce40cde3f0b899729dbd7d5e8811cb892d31b6f7d0334a1f37748b789fd"}, + {file = "bcrypt-4.3.0-cp313-cp313t-manylinux_2_34_x86_64.whl", hash = "sha256:5bd3cca1f2aa5dbcf39e2aa13dd094ea181f48959e1071265de49cc2b82525af"}, + {file = "bcrypt-4.3.0-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:335a420cfd63fc5bc27308e929bee231c15c85cc4c496610ffb17923abf7f231"}, + {file = "bcrypt-4.3.0-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:0e30e5e67aed0187a1764911af023043b4542e70a7461ad20e837e94d23e1d6c"}, + {file = "bcrypt-4.3.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:3b8d62290ebefd49ee0b3ce7500f5dbdcf13b81402c05f6dafab9a1e1b27212f"}, + {file = "bcrypt-4.3.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:2ef6630e0ec01376f59a006dc72918b1bf436c3b571b80fa1968d775fa02fe7d"}, + {file = "bcrypt-4.3.0-cp313-cp313t-win32.whl", hash = "sha256:7a4be4cbf241afee43f1c3969b9103a41b40bcb3a3f467ab19f891d9bc4642e4"}, + {file = "bcrypt-4.3.0-cp313-cp313t-win_amd64.whl", hash = "sha256:5c1949bf259a388863ced887c7861da1df681cb2388645766c89fdfd9004c669"}, + {file = "bcrypt-4.3.0-cp38-abi3-macosx_10_12_universal2.whl", hash = "sha256:f81b0ed2639568bf14749112298f9e4e2b28853dab50a8b357e31798686a036d"}, + {file = "bcrypt-4.3.0-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:864f8f19adbe13b7de11ba15d85d4a428c7e2f344bac110f667676a0ff84924b"}, + {file = "bcrypt-4.3.0-cp38-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3e36506d001e93bffe59754397572f21bb5dc7c83f54454c990c74a468cd589e"}, + {file = "bcrypt-4.3.0-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:842d08d75d9fe9fb94b18b071090220697f9f184d4547179b60734846461ed59"}, + {file = "bcrypt-4.3.0-cp38-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = 
"sha256:7c03296b85cb87db865d91da79bf63d5609284fc0cab9472fdd8367bbd830753"}, + {file = "bcrypt-4.3.0-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:62f26585e8b219cdc909b6a0069efc5e4267e25d4a3770a364ac58024f62a761"}, + {file = "bcrypt-4.3.0-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:beeefe437218a65322fbd0069eb437e7c98137e08f22c4660ac2dc795c31f8bb"}, + {file = "bcrypt-4.3.0-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:97eea7408db3a5bcce4a55d13245ab3fa566e23b4c67cd227062bb49e26c585d"}, + {file = "bcrypt-4.3.0-cp38-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:191354ebfe305e84f344c5964c7cd5f924a3bfc5d405c75ad07f232b6dffb49f"}, + {file = "bcrypt-4.3.0-cp38-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:41261d64150858eeb5ff43c753c4b216991e0ae16614a308a15d909503617732"}, + {file = "bcrypt-4.3.0-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:33752b1ba962ee793fa2b6321404bf20011fe45b9afd2a842139de3011898fef"}, + {file = "bcrypt-4.3.0-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:50e6e80a4bfd23a25f5c05b90167c19030cf9f87930f7cb2eacb99f45d1c3304"}, + {file = "bcrypt-4.3.0-cp38-abi3-win32.whl", hash = "sha256:67a561c4d9fb9465ec866177e7aebcad08fe23aaf6fbd692a6fab69088abfc51"}, + {file = "bcrypt-4.3.0-cp38-abi3-win_amd64.whl", hash = "sha256:584027857bc2843772114717a7490a37f68da563b3620f78a849bcb54dc11e62"}, + {file = "bcrypt-4.3.0-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:0d3efb1157edebfd9128e4e46e2ac1a64e0c1fe46fb023158a407c7892b0f8c3"}, + {file = "bcrypt-4.3.0-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:08bacc884fd302b611226c01014eca277d48f0a05187666bca23aac0dad6fe24"}, + {file = "bcrypt-4.3.0-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6746e6fec103fcd509b96bacdfdaa2fbde9a553245dbada284435173a6f1aef"}, + {file = "bcrypt-4.3.0-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:afe327968aaf13fc143a56a3360cb27d4ad0345e34da12c7290f1b00b8fe9a8b"}, + {file = 
"bcrypt-4.3.0-cp39-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:d9af79d322e735b1fc33404b5765108ae0ff232d4b54666d46730f8ac1a43676"}, + {file = "bcrypt-4.3.0-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:f1e3ffa1365e8702dc48c8b360fef8d7afeca482809c5e45e653af82ccd088c1"}, + {file = "bcrypt-4.3.0-cp39-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:3004df1b323d10021fda07a813fd33e0fd57bef0e9a480bb143877f6cba996fe"}, + {file = "bcrypt-4.3.0-cp39-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:531457e5c839d8caea9b589a1bcfe3756b0547d7814e9ce3d437f17da75c32b0"}, + {file = "bcrypt-4.3.0-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:17a854d9a7a476a89dcef6c8bd119ad23e0f82557afbd2c442777a16408e614f"}, + {file = "bcrypt-4.3.0-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:6fb1fd3ab08c0cbc6826a2e0447610c6f09e983a281b919ed721ad32236b8b23"}, + {file = "bcrypt-4.3.0-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:e965a9c1e9a393b8005031ff52583cedc15b7884fce7deb8b0346388837d6cfe"}, + {file = "bcrypt-4.3.0-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:79e70b8342a33b52b55d93b3a59223a844962bef479f6a0ea318ebbcadf71505"}, + {file = "bcrypt-4.3.0-cp39-abi3-win32.whl", hash = "sha256:b4d4e57f0a63fd0b358eb765063ff661328f69a04494427265950c71b992a39a"}, + {file = "bcrypt-4.3.0-cp39-abi3-win_amd64.whl", hash = "sha256:e53e074b120f2877a35cc6c736b8eb161377caae8925c17688bd46ba56daaa5b"}, + {file = "bcrypt-4.3.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c950d682f0952bafcceaf709761da0a32a942272fad381081b51096ffa46cea1"}, + {file = "bcrypt-4.3.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:107d53b5c67e0bbc3f03ebf5b030e0403d24dda980f8e244795335ba7b4a027d"}, + {file = "bcrypt-4.3.0-pp310-pypy310_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:b693dbb82b3c27a1604a3dff5bfc5418a7e6a781bb795288141e5f80cf3a3492"}, + {file = "bcrypt-4.3.0-pp310-pypy310_pp73-manylinux_2_34_x86_64.whl", hash = 
"sha256:b6354d3760fcd31994a14c89659dee887f1351a06e5dac3c1142307172a79f90"}, + {file = "bcrypt-4.3.0-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:a839320bf27d474e52ef8cb16449bb2ce0ba03ca9f44daba6d93fa1d8828e48a"}, + {file = "bcrypt-4.3.0-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:bdc6a24e754a555d7316fa4774e64c6c3997d27ed2d1964d55920c7c227bc4ce"}, + {file = "bcrypt-4.3.0-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:55a935b8e9a1d2def0626c4269db3fcd26728cbff1e84f0341465c31c4ee56d8"}, + {file = "bcrypt-4.3.0-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:57967b7a28d855313a963aaea51bf6df89f833db4320da458e5b3c5ab6d4c938"}, + {file = "bcrypt-4.3.0.tar.gz", hash = "sha256:3a3fd2204178b6d2adcf09cb4f6426ffef54762577a7c9b54c159008cb288c18"}, +] + +[package.extras] +tests = ["pytest (>=3.2.1,!=3.3.0)"] +typecheck = ["mypy"] + +[[package]] +name = "billiard" +version = "4.2.1" +description = "Python multiprocessing fork with improvements and bugfixes" +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "billiard-4.2.1-py3-none-any.whl", hash = "sha256:40b59a4ac8806ba2c2369ea98d876bc6108b051c227baffd928c644d15d8f3cb"}, + {file = "billiard-4.2.1.tar.gz", hash = "sha256:12b641b0c539073fc8d3f5b8b7be998956665c4233c7c1fcd66a7e677c4fb36f"}, +] + +[[package]] +name = "celery" +version = "5.4.0" +description = "Distributed Task Queue." 
+optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "celery-5.4.0-py3-none-any.whl", hash = "sha256:369631eb580cf8c51a82721ec538684994f8277637edde2dfc0dacd73ed97f64"}, + {file = "celery-5.4.0.tar.gz", hash = "sha256:504a19140e8d3029d5acad88330c541d4c3f64c789d85f94756762d8bca7e706"}, +] + +[package.dependencies] +billiard = ">=4.2.0,<5.0" +click = ">=8.1.2,<9.0" +click-didyoumean = ">=0.3.0" +click-plugins = ">=1.1.1" +click-repl = ">=0.2.0" +kombu = ">=5.3.4,<6.0" +python-dateutil = ">=2.8.2" +tzdata = ">=2022.7" +vine = ">=5.1.0,<6.0" + +[package.extras] +arangodb = ["pyArango (>=2.0.2)"] +auth = ["cryptography (==42.0.5)"] +azureblockblob = ["azure-storage-blob (>=12.15.0)"] +brotli = ["brotli (>=1.0.0) ; platform_python_implementation == \"CPython\"", "brotlipy (>=0.7.0) ; platform_python_implementation == \"PyPy\""] +cassandra = ["cassandra-driver (>=3.25.0,<4)"] +consul = ["python-consul2 (==0.1.5)"] +cosmosdbsql = ["pydocumentdb (==2.3.5)"] +couchbase = ["couchbase (>=3.0.0) ; platform_python_implementation != \"PyPy\" and (platform_system != \"Windows\" or python_version < \"3.10\")"] +couchdb = ["pycouchdb (==1.14.2)"] +django = ["Django (>=2.2.28)"] +dynamodb = ["boto3 (>=1.26.143)"] +elasticsearch = ["elastic-transport (<=8.13.0)", "elasticsearch (<=8.13.0)"] +eventlet = ["eventlet (>=0.32.0) ; python_version < \"3.10\""] +gcs = ["google-cloud-storage (>=2.10.0)"] +gevent = ["gevent (>=1.5.0)"] +librabbitmq = ["librabbitmq (>=2.0.0) ; python_version < \"3.11\""] +memcache = ["pylibmc (==1.6.3) ; platform_system != \"Windows\""] +mongodb = ["pymongo[srv] (>=4.0.2)"] +msgpack = ["msgpack (==1.0.8)"] +pymemcache = ["python-memcached (>=1.61)"] +pyro = ["pyro4 (==4.82) ; python_version < \"3.11\""] +pytest = ["pytest-celery[all] (>=1.0.0)"] +redis = ["redis (>=4.5.2,!=4.5.5,<6.0.0)"] +s3 = ["boto3 (>=1.26.143)"] +slmq = ["softlayer-messaging (>=1.0.3)"] +solar = ["ephem (==4.1.5) ; platform_python_implementation != \"PyPy\""] 
+sqlalchemy = ["sqlalchemy (>=1.4.48,<2.1)"] +sqs = ["boto3 (>=1.26.143)", "kombu[sqs] (>=5.3.4)", "pycurl (>=7.43.0.5) ; sys_platform != \"win32\" and platform_python_implementation == \"CPython\"", "urllib3 (>=1.26.16)"] +tblib = ["tblib (>=1.3.0) ; python_version < \"3.8.0\"", "tblib (>=1.5.0) ; python_version >= \"3.8.0\""] +yaml = ["PyYAML (>=3.10)"] +zookeeper = ["kazoo (>=1.3.1)"] +zstd = ["zstandard (==0.22.0)"] + +[[package]] +name = "certifi" +version = "2025.6.15" +description = "Python package for providing Mozilla's CA Bundle." +optional = false +python-versions = ">=3.7" +groups = ["main", "dev"] +files = [ + {file = "certifi-2025.6.15-py3-none-any.whl", hash = "sha256:2e0c7ce7cb5d8f8634ca55d2ba7e6ec2689a2fd6537d8dec1296a477a4910057"}, + {file = "certifi-2025.6.15.tar.gz", hash = "sha256:d747aa5a8b9bbbb1bb8c22bb13e22bd1f18e9796defa16bab421f7f7a317323b"}, +] + +[[package]] +name = "cffi" +version = "1.17.1" +description = "Foreign Function Interface for Python calling C code." 
+optional = false +python-versions = ">=3.8" +groups = ["main", "deploy"] +files = [ + {file = "cffi-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14"}, + {file = "cffi-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:edae79245293e15384b51f88b00613ba9f7198016a5948b5dddf4917d4d26382"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45398b671ac6d70e67da8e4224a065cec6a93541bb7aebe1b198a61b58c7b702"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad9413ccdeda48c5afdae7e4fa2192157e991ff761e7ab8fdd8926f40b160cc3"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5da5719280082ac6bd9aa7becb3938dc9f9cbd57fac7d2871717b1feb0902ab6"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bb1a08b8008b281856e5971307cc386a8e9c5b625ac297e853d36da6efe9c17"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:045d61c734659cc045141be4bae381a41d89b741f795af1dd018bfb532fd0df8"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6883e737d7d9e4899a8a695e00ec36bd4e5e4f18fabe0aca0efe0a4b44cdb13e"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6b8b4a92e1c65048ff98cfe1f735ef8f1ceb72e3d5f0c25fdb12087a23da22be"}, + {file = "cffi-1.17.1-cp310-cp310-win32.whl", hash = "sha256:c9c3d058ebabb74db66e431095118094d06abf53284d9c81f27300d0e0d8bc7c"}, + {file = "cffi-1.17.1-cp310-cp310-win_amd64.whl", hash = "sha256:0f048dcf80db46f0098ccac01132761580d28e28bc0f78ae0d58048063317e15"}, + {file = "cffi-1.17.1-cp311-cp311-macosx_10_9_x86_64.whl", 
hash = "sha256:a45e3c6913c5b87b3ff120dcdc03f6131fa0065027d0ed7ee6190736a74cd401"}, + {file = "cffi-1.17.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:30c5e0cb5ae493c04c8b42916e52ca38079f1b235c2f8ae5f4527b963c401caf"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f75c7ab1f9e4aca5414ed4d8e5c0e303a34f4421f8a0d47a4d019ceff0ab6af4"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1ed2dd2972641495a3ec98445e09766f077aee98a1c896dcb4ad0d303628e41"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:46bf43160c1a35f7ec506d254e5c890f3c03648a4dbac12d624e4490a7046cd1"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a24ed04c8ffd54b0729c07cee15a81d964e6fee0e3d4d342a27b020d22959dc6"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:610faea79c43e44c71e1ec53a554553fa22321b65fae24889706c0a84d4ad86d"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a9b15d491f3ad5d692e11f6b71f7857e7835eb677955c00cc0aefcd0669adaf6"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:de2ea4b5833625383e464549fec1bc395c1bdeeb5f25c4a3a82b5a8c756ec22f"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fc48c783f9c87e60831201f2cce7f3b2e4846bf4d8728eabe54d60700b318a0b"}, + {file = "cffi-1.17.1-cp311-cp311-win32.whl", hash = "sha256:85a950a4ac9c359340d5963966e3e0a94a676bd6245a4b55bc43949eee26a655"}, + {file = "cffi-1.17.1-cp311-cp311-win_amd64.whl", hash = "sha256:caaf0640ef5f5517f49bc275eca1406b0ffa6aa184892812030f04c2abf589a0"}, + {file = "cffi-1.17.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:805b4371bf7197c329fcb3ead37e710d1bca9da5d583f5073b799d5c5bd1eee4"}, + {file = "cffi-1.17.1-cp312-cp312-macosx_11_0_arm64.whl", 
hash = "sha256:733e99bc2df47476e3848417c5a4540522f234dfd4ef3ab7fafdf555b082ec0c"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d63afe322132c194cf832bfec0dc69a99fb9bb6bbd550f161a49e9e855cc78ff"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f79fc4fc25f1c8698ff97788206bb3c2598949bfe0fef03d299eb1b5356ada99"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b62ce867176a75d03a665bad002af8e6d54644fad99a3c70905c543130e39d93"}, + {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:386c8bf53c502fff58903061338ce4f4950cbdcb23e2902d86c0f722b786bbe3"}, + {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8"}, + {file = "cffi-1.17.1-cp312-cp312-win32.whl", hash = "sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65"}, + {file = "cffi-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903"}, + {file = "cffi-1.17.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e"}, + {file = "cffi-1.17.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3"}, + 
{file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd"}, + {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed"}, + {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9"}, + {file = "cffi-1.17.1-cp313-cp313-win32.whl", hash = "sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d"}, + {file = "cffi-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a"}, + {file = "cffi-1.17.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:636062ea65bd0195bc012fea9321aca499c0504409f413dc88af450b57ffd03b"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c7eac2ef9b63c79431bc4b25f1cd649d7f061a28808cbc6c47b534bd789ef964"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e221cf152cff04059d011ee126477f0d9588303eb57e88923578ace7baad17f9"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:31000ec67d4221a71bd3f67df918b1f88f676f1c3b535a7eb473255fdc0b83fc"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:6f17be4345073b0a7b8ea599688f692ac3ef23ce28e5df79c04de519dbc4912c"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2b1fac190ae3ebfe37b979cc1ce69c81f4e4fe5746bb401dca63a9062cdaf1"}, + {file = "cffi-1.17.1-cp38-cp38-win32.whl", hash = "sha256:7596d6620d3fa590f677e9ee430df2958d2d6d6de2feeae5b20e82c00b76fbf8"}, + {file = "cffi-1.17.1-cp38-cp38-win_amd64.whl", hash = "sha256:78122be759c3f8a014ce010908ae03364d00a1f81ab5c7f4a7a5120607ea56e1"}, + {file = "cffi-1.17.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b2ab587605f4ba0bf81dc0cb08a41bd1c0a5906bd59243d56bad7668a6fc6c16"}, + {file = "cffi-1.17.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:28b16024becceed8c6dfbc75629e27788d8a3f9030691a1dbf9821a128b22c36"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1d599671f396c4723d016dbddb72fe8e0397082b0a77a4fab8028923bec050e8"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca74b8dbe6e8e8263c0ffd60277de77dcee6c837a3d0881d8c1ead7268c9e576"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7f5baafcc48261359e14bcd6d9bff6d4b28d9103847c9e136694cb0501aef87"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98e3969bcff97cae1b2def8ba499ea3d6f31ddfdb7635374834cf89a1a08ecf0"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cdf5ce3acdfd1661132f2a9c19cac174758dc2352bfe37d98aa7512c6b7178b3"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9755e4345d1ec879e3849e62222a18c7174d65a6a92d5b346b1863912168b595"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f1e22e8c4419538cb197e4dd60acc919d7696e5ef98ee4da4e01d3f8cfa4cc5a"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = 
"sha256:c03e868a0b3bc35839ba98e74211ed2b05d2119be4e8a0f224fba9384f1fe02e"}, + {file = "cffi-1.17.1-cp39-cp39-win32.whl", hash = "sha256:e31ae45bc2e29f6b2abd0de1cc3b9d5205aa847cafaecb8af1476a609a2f6eb7"}, + {file = "cffi-1.17.1-cp39-cp39-win_amd64.whl", hash = "sha256:d016c76bdd850f3c626af19b0542c9677ba156e4ee4fccfdd7848803533ef662"}, + {file = "cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824"}, +] +markers = {main = "platform_python_implementation != \"PyPy\"", deploy = "platform_python_implementation == \"CPython\" and sys_platform == \"win32\""} + +[package.dependencies] +pycparser = "*" + +[[package]] +name = "charset-normalizer" +version = "3.4.2" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." +optional = false +python-versions = ">=3.7" +groups = ["main", "dev"] +files = [ + {file = "charset_normalizer-3.4.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7c48ed483eb946e6c04ccbe02c6b4d1d48e51944b6db70f697e089c193404941"}, + {file = "charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b2d318c11350e10662026ad0eb71bb51c7812fc8590825304ae0bdd4ac283acd"}, + {file = "charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9cbfacf36cb0ec2897ce0ebc5d08ca44213af24265bd56eca54bee7923c48fd6"}, + {file = "charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:18dd2e350387c87dabe711b86f83c9c78af772c748904d372ade190b5c7c9d4d"}, + {file = "charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8075c35cd58273fee266c58c0c9b670947c19df5fb98e7b66710e04ad4e9ff86"}, + {file = "charset_normalizer-3.4.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:5bf4545e3b962767e5c06fe1738f951f77d27967cb2caa64c28be7c4563e162c"}, + {file = "charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:7a6ab32f7210554a96cd9e33abe3ddd86732beeafc7a28e9955cdf22ffadbab0"}, + {file = "charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:b33de11b92e9f75a2b545d6e9b6f37e398d86c3e9e9653c4864eb7e89c5773ef"}, + {file = "charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:8755483f3c00d6c9a77f490c17e6ab0c8729e39e6390328e42521ef175380ae6"}, + {file = "charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:68a328e5f55ec37c57f19ebb1fdc56a248db2e3e9ad769919a58672958e8f366"}, + {file = "charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:21b2899062867b0e1fde9b724f8aecb1af14f2778d69aacd1a5a1853a597a5db"}, + {file = "charset_normalizer-3.4.2-cp310-cp310-win32.whl", hash = "sha256:e8082b26888e2f8b36a042a58307d5b917ef2b1cacab921ad3323ef91901c71a"}, + {file = "charset_normalizer-3.4.2-cp310-cp310-win_amd64.whl", hash = "sha256:f69a27e45c43520f5487f27627059b64aaf160415589230992cec34c5e18a509"}, + {file = "charset_normalizer-3.4.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:be1e352acbe3c78727a16a455126d9ff83ea2dfdcbc83148d2982305a04714c2"}, + {file = "charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa88ca0b1932e93f2d961bf3addbb2db902198dca337d88c89e1559e066e7645"}, + {file = "charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d524ba3f1581b35c03cb42beebab4a13e6cdad7b36246bd22541fa585a56cccd"}, + {file = "charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28a1005facc94196e1fb3e82a3d442a9d9110b8434fc1ded7a24a2983c9888d8"}, + {file = "charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:fdb20a30fe1175ecabed17cbf7812f7b804b8a315a25f24678bcdf120a90077f"}, + {file = "charset_normalizer-3.4.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0f5d9ed7f254402c9e7d35d2f5972c9bbea9040e99cd2861bd77dc68263277c7"}, + {file = "charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:efd387a49825780ff861998cd959767800d54f8308936b21025326de4b5a42b9"}, + {file = "charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:f0aa37f3c979cf2546b73e8222bbfa3dc07a641585340179d768068e3455e544"}, + {file = "charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:e70e990b2137b29dc5564715de1e12701815dacc1d056308e2b17e9095372a82"}, + {file = "charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:0c8c57f84ccfc871a48a47321cfa49ae1df56cd1d965a09abe84066f6853b9c0"}, + {file = "charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:6b66f92b17849b85cad91259efc341dce9c1af48e2173bf38a85c6329f1033e5"}, + {file = "charset_normalizer-3.4.2-cp311-cp311-win32.whl", hash = "sha256:daac4765328a919a805fa5e2720f3e94767abd632ae410a9062dff5412bae65a"}, + {file = "charset_normalizer-3.4.2-cp311-cp311-win_amd64.whl", hash = "sha256:e53efc7c7cee4c1e70661e2e112ca46a575f90ed9ae3fef200f2a25e954f4b28"}, + {file = "charset_normalizer-3.4.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0c29de6a1a95f24b9a1aa7aefd27d2487263f00dfd55a77719b530788f75cff7"}, + {file = "charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cddf7bd982eaa998934a91f69d182aec997c6c468898efe6679af88283b498d3"}, + {file = "charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fcbe676a55d7445b22c10967bceaaf0ee69407fbe0ece4d032b6eb8d4565982a"}, + {file = "charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:d41c4d287cfc69060fa91cae9683eacffad989f1a10811995fa309df656ec214"}, + {file = "charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4e594135de17ab3866138f496755f302b72157d115086d100c3f19370839dd3a"}, + {file = "charset_normalizer-3.4.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cf713fe9a71ef6fd5adf7a79670135081cd4431c2943864757f0fa3a65b1fafd"}, + {file = "charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a370b3e078e418187da8c3674eddb9d983ec09445c99a3a263c2011993522981"}, + {file = "charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a955b438e62efdf7e0b7b52a64dc5c3396e2634baa62471768a64bc2adb73d5c"}, + {file = "charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:7222ffd5e4de8e57e03ce2cef95a4c43c98fcb72ad86909abdfc2c17d227fc1b"}, + {file = "charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:bee093bf902e1d8fc0ac143c88902c3dfc8941f7ea1d6a8dd2bcb786d33db03d"}, + {file = "charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:dedb8adb91d11846ee08bec4c8236c8549ac721c245678282dcb06b221aab59f"}, + {file = "charset_normalizer-3.4.2-cp312-cp312-win32.whl", hash = "sha256:db4c7bf0e07fc3b7d89ac2a5880a6a8062056801b83ff56d8464b70f65482b6c"}, + {file = "charset_normalizer-3.4.2-cp312-cp312-win_amd64.whl", hash = "sha256:5a9979887252a82fefd3d3ed2a8e3b937a7a809f65dcb1e068b090e165bbe99e"}, + {file = "charset_normalizer-3.4.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:926ca93accd5d36ccdabd803392ddc3e03e6d4cd1cf17deff3b989ab8e9dbcf0"}, + {file = "charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eba9904b0f38a143592d9fc0e19e2df0fa2e41c3c3745554761c5f6447eedabf"}, + {file = "charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:3fddb7e2c84ac87ac3a947cb4e66d143ca5863ef48e4a5ecb83bd48619e4634e"}, + {file = "charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98f862da73774290f251b9df8d11161b6cf25b599a66baf087c1ffe340e9bfd1"}, + {file = "charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c9379d65defcab82d07b2a9dfbfc2e95bc8fe0ebb1b176a3190230a3ef0e07c"}, + {file = "charset_normalizer-3.4.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e635b87f01ebc977342e2697d05b56632f5f879a4f15955dfe8cef2448b51691"}, + {file = "charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:1c95a1e2902a8b722868587c0e1184ad5c55631de5afc0eb96bc4b0d738092c0"}, + {file = "charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ef8de666d6179b009dce7bcb2ad4c4a779f113f12caf8dc77f0162c29d20490b"}, + {file = "charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:32fc0341d72e0f73f80acb0a2c94216bd704f4f0bce10aedea38f30502b271ff"}, + {file = "charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:289200a18fa698949d2b39c671c2cc7a24d44096784e76614899a7ccf2574b7b"}, + {file = "charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4a476b06fbcf359ad25d34a057b7219281286ae2477cc5ff5e3f70a246971148"}, + {file = "charset_normalizer-3.4.2-cp313-cp313-win32.whl", hash = "sha256:aaeeb6a479c7667fbe1099af9617c83aaca22182d6cf8c53966491a0f1b7ffb7"}, + {file = "charset_normalizer-3.4.2-cp313-cp313-win_amd64.whl", hash = "sha256:aa6af9e7d59f9c12b33ae4e9450619cf2488e2bbe9b44030905877f0b2324980"}, + {file = "charset_normalizer-3.4.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1cad5f45b3146325bb38d6855642f6fd609c3f7cad4dbaf75549bf3b904d3184"}, + {file = 
"charset_normalizer-3.4.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b2680962a4848b3c4f155dc2ee64505a9c57186d0d56b43123b17ca3de18f0fa"}, + {file = "charset_normalizer-3.4.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:36b31da18b8890a76ec181c3cf44326bf2c48e36d393ca1b72b3f484113ea344"}, + {file = "charset_normalizer-3.4.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f4074c5a429281bf056ddd4c5d3b740ebca4d43ffffe2ef4bf4d2d05114299da"}, + {file = "charset_normalizer-3.4.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c9e36a97bee9b86ef9a1cf7bb96747eb7a15c2f22bdb5b516434b00f2a599f02"}, + {file = "charset_normalizer-3.4.2-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:1b1bde144d98e446b056ef98e59c256e9294f6b74d7af6846bf5ffdafd687a7d"}, + {file = "charset_normalizer-3.4.2-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:915f3849a011c1f593ab99092f3cecfcb4d65d8feb4a64cf1bf2d22074dc0ec4"}, + {file = "charset_normalizer-3.4.2-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:fb707f3e15060adf5b7ada797624a6c6e0138e2a26baa089df64c68ee98e040f"}, + {file = "charset_normalizer-3.4.2-cp37-cp37m-musllinux_1_2_s390x.whl", hash = "sha256:25a23ea5c7edc53e0f29bae2c44fcb5a1aa10591aae107f2a2b2583a9c5cbc64"}, + {file = "charset_normalizer-3.4.2-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:770cab594ecf99ae64c236bc9ee3439c3f46be49796e265ce0cc8bc17b10294f"}, + {file = "charset_normalizer-3.4.2-cp37-cp37m-win32.whl", hash = "sha256:6a0289e4589e8bdfef02a80478f1dfcb14f0ab696b5a00e1f4b8a14a307a3c58"}, + {file = "charset_normalizer-3.4.2-cp37-cp37m-win_amd64.whl", hash = "sha256:6fc1f5b51fa4cecaa18f2bd7a003f3dd039dd615cd69a2afd6d3b19aed6775f2"}, + {file = "charset_normalizer-3.4.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:76af085e67e56c8816c3ccf256ebd136def2ed9654525348cfa744b6802b69eb"}, + {file = 
"charset_normalizer-3.4.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e45ba65510e2647721e35323d6ef54c7974959f6081b58d4ef5d87c60c84919a"}, + {file = "charset_normalizer-3.4.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:046595208aae0120559a67693ecc65dd75d46f7bf687f159127046628178dc45"}, + {file = "charset_normalizer-3.4.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:75d10d37a47afee94919c4fab4c22b9bc2a8bf7d4f46f87363bcf0573f3ff4f5"}, + {file = "charset_normalizer-3.4.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6333b3aa5a12c26b2a4d4e7335a28f1475e0e5e17d69d55141ee3cab736f66d1"}, + {file = "charset_normalizer-3.4.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e8323a9b031aa0393768b87f04b4164a40037fb2a3c11ac06a03ffecd3618027"}, + {file = "charset_normalizer-3.4.2-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:24498ba8ed6c2e0b56d4acbf83f2d989720a93b41d712ebd4f4979660db4417b"}, + {file = "charset_normalizer-3.4.2-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:844da2b5728b5ce0e32d863af26f32b5ce61bc4273a9c720a9f3aa9df73b1455"}, + {file = "charset_normalizer-3.4.2-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:65c981bdbd3f57670af8b59777cbfae75364b483fa8a9f420f08094531d54a01"}, + {file = "charset_normalizer-3.4.2-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:3c21d4fca343c805a52c0c78edc01e3477f6dd1ad7c47653241cf2a206d4fc58"}, + {file = "charset_normalizer-3.4.2-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:dc7039885fa1baf9be153a0626e337aa7ec8bf96b0128605fb0d77788ddc1681"}, + {file = "charset_normalizer-3.4.2-cp38-cp38-win32.whl", hash = "sha256:8272b73e1c5603666618805fe821edba66892e2870058c94c53147602eab29c7"}, + {file = "charset_normalizer-3.4.2-cp38-cp38-win_amd64.whl", hash = "sha256:70f7172939fdf8790425ba31915bfbe8335030f05b9913d7ae00a87d4395620a"}, + {file = 
"charset_normalizer-3.4.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:005fa3432484527f9732ebd315da8da8001593e2cf46a3d817669f062c3d9ed4"}, + {file = "charset_normalizer-3.4.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e92fca20c46e9f5e1bb485887d074918b13543b1c2a1185e69bb8d17ab6236a7"}, + {file = "charset_normalizer-3.4.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:50bf98d5e563b83cc29471fa114366e6806bc06bc7a25fd59641e41445327836"}, + {file = "charset_normalizer-3.4.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:721c76e84fe669be19c5791da68232ca2e05ba5185575086e384352e2c309597"}, + {file = "charset_normalizer-3.4.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82d8fd25b7f4675d0c47cf95b594d4e7b158aca33b76aa63d07186e13c0e0ab7"}, + {file = "charset_normalizer-3.4.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b3daeac64d5b371dea99714f08ffc2c208522ec6b06fbc7866a450dd446f5c0f"}, + {file = "charset_normalizer-3.4.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:dccab8d5fa1ef9bfba0590ecf4d46df048d18ffe3eec01eeb73a42e0d9e7a8ba"}, + {file = "charset_normalizer-3.4.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:aaf27faa992bfee0264dc1f03f4c75e9fcdda66a519db6b957a3f826e285cf12"}, + {file = "charset_normalizer-3.4.2-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:eb30abc20df9ab0814b5a2524f23d75dcf83cde762c161917a2b4b7b55b1e518"}, + {file = "charset_normalizer-3.4.2-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:c72fbbe68c6f32f251bdc08b8611c7b3060612236e960ef848e0a517ddbe76c5"}, + {file = "charset_normalizer-3.4.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:982bb1e8b4ffda883b3d0a521e23abcd6fd17418f6d2c4118d257a10199c0ce3"}, + {file = "charset_normalizer-3.4.2-cp39-cp39-win32.whl", hash = "sha256:43e0933a0eff183ee85833f341ec567c0980dae57c464d8a508e1b2ceb336471"}, + {file 
= "charset_normalizer-3.4.2-cp39-cp39-win_amd64.whl", hash = "sha256:d11b54acf878eef558599658b0ffca78138c8c3655cf4f3a4a673c437e67732e"}, + {file = "charset_normalizer-3.4.2-py3-none-any.whl", hash = "sha256:7f56930ab0abd1c45cd15be65cc741c28b1c9a34876ce8c17a2fa107810c0af0"}, + {file = "charset_normalizer-3.4.2.tar.gz", hash = "sha256:5baececa9ecba31eff645232d59845c07aa030f0c81ee70184a90d35099a0e63"}, +] + +[[package]] +name = "click" +version = "8.2.1" +description = "Composable command line interface toolkit" +optional = false +python-versions = ">=3.10" +groups = ["main"] +files = [ + {file = "click-8.2.1-py3-none-any.whl", hash = "sha256:61a3265b914e850b85317d0b3109c7f8cd35a670f963866005d6ef1d5175a12b"}, + {file = "click-8.2.1.tar.gz", hash = "sha256:27c491cc05d968d271d5a1db13e3b5a184636d9d930f148c50b038f0d0646202"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[[package]] +name = "click-didyoumean" +version = "0.3.1" +description = "Enables git-like *did-you-mean* feature in click" +optional = false +python-versions = ">=3.6.2" +groups = ["main"] +files = [ + {file = "click_didyoumean-0.3.1-py3-none-any.whl", hash = "sha256:5c4bb6007cfea5f2fd6583a2fb6701a22a41eb98957e63d0fac41c10e7c3117c"}, + {file = "click_didyoumean-0.3.1.tar.gz", hash = "sha256:4f82fdff0dbe64ef8ab2279bd6aa3f6a99c3b28c05aa09cbfc07c9d7fbb5a463"}, +] + +[package.dependencies] +click = ">=7" + +[[package]] +name = "click-plugins" +version = "1.1.1.2" +description = "An extension module for click to enable registering CLI commands via setuptools entry-points." 
+optional = false +python-versions = "*" +groups = ["main"] +files = [ + {file = "click_plugins-1.1.1.2-py2.py3-none-any.whl", hash = "sha256:008d65743833ffc1f5417bf0e78e8d2c23aab04d9745ba817bd3e71b0feb6aa6"}, + {file = "click_plugins-1.1.1.2.tar.gz", hash = "sha256:d7af3984a99d243c131aa1a828331e7630f4a88a9741fd05c927b204bcf92261"}, +] + +[package.dependencies] +click = ">=4.0" + +[package.extras] +dev = ["coveralls", "pytest (>=3.6)", "pytest-cov", "wheel"] + +[[package]] +name = "click-repl" +version = "0.3.0" +description = "REPL plugin for Click" +optional = false +python-versions = ">=3.6" +groups = ["main"] +files = [ + {file = "click-repl-0.3.0.tar.gz", hash = "sha256:17849c23dba3d667247dc4defe1757fff98694e90fe37474f3feebb69ced26a9"}, + {file = "click_repl-0.3.0-py3-none-any.whl", hash = "sha256:fb7e06deb8da8de86180a33a9da97ac316751c094c6899382da7feeeeb51b812"}, +] + +[package.dependencies] +click = ">=7.0" +prompt-toolkit = ">=3.0.36" + +[package.extras] +testing = ["pytest (>=7.2.1)", "pytest-cov (>=4.0.0)", "tox (>=4.4.3)"] + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +groups = ["main", "dev"] +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] +markers = {main = "platform_system == \"Windows\" or sys_platform == \"win32\"", dev = "sys_platform == \"win32\""} + +[[package]] +name = "colorlog" +version = "6.9.0" +description = "Add colours to the output of Python's logging module." 
+optional = false +python-versions = ">=3.6" +groups = ["main"] +files = [ + {file = "colorlog-6.9.0-py3-none-any.whl", hash = "sha256:5906e71acd67cb07a71e779c47c4bcb45fb8c2993eebe9e5adcd6a6f1b283eff"}, + {file = "colorlog-6.9.0.tar.gz", hash = "sha256:bfba54a1b93b94f54e1f4fe48395725a3d92fd2a4af702f6bd70946bdc0c6ac2"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "sys_platform == \"win32\""} + +[package.extras] +development = ["black", "flake8", "mypy", "pytest", "types-colorama"] + +[[package]] +name = "coverage" +version = "6.5.0" +description = "Code coverage measurement for Python" +optional = false +python-versions = ">=3.7" +groups = ["dev"] +files = [ + {file = "coverage-6.5.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ef8674b0ee8cc11e2d574e3e2998aea5df5ab242e012286824ea3c6970580e53"}, + {file = "coverage-6.5.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:784f53ebc9f3fd0e2a3f6a78b2be1bd1f5575d7863e10c6e12504f240fd06660"}, + {file = "coverage-6.5.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b4a5be1748d538a710f87542f22c2cad22f80545a847ad91ce45e77417293eb4"}, + {file = "coverage-6.5.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:83516205e254a0cb77d2d7bb3632ee019d93d9f4005de31dca0a8c3667d5bc04"}, + {file = "coverage-6.5.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af4fffaffc4067232253715065e30c5a7ec6faac36f8fc8d6f64263b15f74db0"}, + {file = "coverage-6.5.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:97117225cdd992a9c2a5515db1f66b59db634f59d0679ca1fa3fe8da32749cae"}, + {file = "coverage-6.5.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a1170fa54185845505fbfa672f1c1ab175446c887cce8212c44149581cf2d466"}, + {file = "coverage-6.5.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = 
"sha256:11b990d520ea75e7ee8dcab5bc908072aaada194a794db9f6d7d5cfd19661e5a"}, + {file = "coverage-6.5.0-cp310-cp310-win32.whl", hash = "sha256:5dbec3b9095749390c09ab7c89d314727f18800060d8d24e87f01fb9cfb40b32"}, + {file = "coverage-6.5.0-cp310-cp310-win_amd64.whl", hash = "sha256:59f53f1dc5b656cafb1badd0feb428c1e7bc19b867479ff72f7a9dd9b479f10e"}, + {file = "coverage-6.5.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4a5375e28c5191ac38cca59b38edd33ef4cc914732c916f2929029b4bfb50795"}, + {file = "coverage-6.5.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4ed2820d919351f4167e52425e096af41bfabacb1857186c1ea32ff9983ed75"}, + {file = "coverage-6.5.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:33a7da4376d5977fbf0a8ed91c4dffaaa8dbf0ddbf4c8eea500a2486d8bc4d7b"}, + {file = "coverage-6.5.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8fb6cf131ac4070c9c5a3e21de0f7dc5a0fbe8bc77c9456ced896c12fcdad91"}, + {file = "coverage-6.5.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a6b7d95969b8845250586f269e81e5dfdd8ff828ddeb8567a4a2eaa7313460c4"}, + {file = "coverage-6.5.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:1ef221513e6f68b69ee9e159506d583d31aa3567e0ae84eaad9d6ec1107dddaa"}, + {file = "coverage-6.5.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cca4435eebea7962a52bdb216dec27215d0df64cf27fc1dd538415f5d2b9da6b"}, + {file = "coverage-6.5.0-cp311-cp311-win32.whl", hash = "sha256:98e8a10b7a314f454d9eff4216a9a94d143a7ee65018dd12442e898ee2310578"}, + {file = "coverage-6.5.0-cp311-cp311-win_amd64.whl", hash = "sha256:bc8ef5e043a2af066fa8cbfc6e708d58017024dc4345a1f9757b329a249f041b"}, + {file = "coverage-6.5.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4433b90fae13f86fafff0b326453dd42fc9a639a0d9e4eec4d366436d1a41b6d"}, + {file = 
"coverage-6.5.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f4f05d88d9a80ad3cac6244d36dd89a3c00abc16371769f1340101d3cb899fc3"}, + {file = "coverage-6.5.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:94e2565443291bd778421856bc975d351738963071e9b8839ca1fc08b42d4bef"}, + {file = "coverage-6.5.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:027018943386e7b942fa832372ebc120155fd970837489896099f5cfa2890f79"}, + {file = "coverage-6.5.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:255758a1e3b61db372ec2736c8e2a1fdfaf563977eedbdf131de003ca5779b7d"}, + {file = "coverage-6.5.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:851cf4ff24062c6aec510a454b2584f6e998cada52d4cb58c5e233d07172e50c"}, + {file = "coverage-6.5.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:12adf310e4aafddc58afdb04d686795f33f4d7a6fa67a7a9d4ce7d6ae24d949f"}, + {file = "coverage-6.5.0-cp37-cp37m-win32.whl", hash = "sha256:b5604380f3415ba69de87a289a2b56687faa4fe04dbee0754bfcae433489316b"}, + {file = "coverage-6.5.0-cp37-cp37m-win_amd64.whl", hash = "sha256:4a8dbc1f0fbb2ae3de73eb0bdbb914180c7abfbf258e90b311dcd4f585d44bd2"}, + {file = "coverage-6.5.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d900bb429fdfd7f511f868cedd03a6bbb142f3f9118c09b99ef8dc9bf9643c3c"}, + {file = "coverage-6.5.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2198ea6fc548de52adc826f62cb18554caedfb1d26548c1b7c88d8f7faa8f6ba"}, + {file = "coverage-6.5.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c4459b3de97b75e3bd6b7d4b7f0db13f17f504f3d13e2a7c623786289dd670e"}, + {file = "coverage-6.5.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:20c8ac5386253717e5ccc827caad43ed66fea0efe255727b1053a8154d952398"}, + {file = 
"coverage-6.5.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b07130585d54fe8dff3d97b93b0e20290de974dc8177c320aeaf23459219c0b"}, + {file = "coverage-6.5.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:dbdb91cd8c048c2b09eb17713b0c12a54fbd587d79adcebad543bc0cd9a3410b"}, + {file = "coverage-6.5.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:de3001a203182842a4630e7b8d1a2c7c07ec1b45d3084a83d5d227a3806f530f"}, + {file = "coverage-6.5.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:e07f4a4a9b41583d6eabec04f8b68076ab3cd44c20bd29332c6572dda36f372e"}, + {file = "coverage-6.5.0-cp38-cp38-win32.whl", hash = "sha256:6d4817234349a80dbf03640cec6109cd90cba068330703fa65ddf56b60223a6d"}, + {file = "coverage-6.5.0-cp38-cp38-win_amd64.whl", hash = "sha256:7ccf362abd726b0410bf8911c31fbf97f09f8f1061f8c1cf03dfc4b6372848f6"}, + {file = "coverage-6.5.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:633713d70ad6bfc49b34ead4060531658dc6dfc9b3eb7d8a716d5873377ab745"}, + {file = "coverage-6.5.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:95203854f974e07af96358c0b261f1048d8e1083f2de9b1c565e1be4a3a48cfc"}, + {file = "coverage-6.5.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b9023e237f4c02ff739581ef35969c3739445fb059b060ca51771e69101efffe"}, + {file = "coverage-6.5.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:265de0fa6778d07de30bcf4d9dc471c3dc4314a23a3c6603d356a3c9abc2dfcf"}, + {file = "coverage-6.5.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f830ed581b45b82451a40faabb89c84e1a998124ee4212d440e9c6cf70083e5"}, + {file = "coverage-6.5.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7b6be138d61e458e18d8e6ddcddd36dd96215edfe5f1168de0b1b32635839b62"}, + {file = "coverage-6.5.0-cp39-cp39-musllinux_1_1_i686.whl", hash = 
"sha256:42eafe6778551cf006a7c43153af1211c3aaab658d4d66fa5fcc021613d02518"}, + {file = "coverage-6.5.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:723e8130d4ecc8f56e9a611e73b31219595baa3bb252d539206f7bbbab6ffc1f"}, + {file = "coverage-6.5.0-cp39-cp39-win32.whl", hash = "sha256:d9ecf0829c6a62b9b573c7bb6d4dcd6ba8b6f80be9ba4fc7ed50bf4ac9aecd72"}, + {file = "coverage-6.5.0-cp39-cp39-win_amd64.whl", hash = "sha256:fc2af30ed0d5ae0b1abdb4ebdce598eafd5b35397d4d75deb341a614d333d987"}, + {file = "coverage-6.5.0-pp36.pp37.pp38-none-any.whl", hash = "sha256:1431986dac3923c5945271f169f59c45b8802a114c8f548d611f2015133df77a"}, + {file = "coverage-6.5.0.tar.gz", hash = "sha256:f642e90754ee3e06b0e7e51bce3379590e76b7f76b708e1a71ff043f87025c84"}, +] + +[package.extras] +toml = ["tomli ; python_full_version <= \"3.11.0a6\""] + +[[package]] +name = "coveralls" +version = "3.3.1" +description = "Show coverage stats online via coveralls.io" +optional = false +python-versions = ">= 3.5" +groups = ["dev"] +files = [ + {file = "coveralls-3.3.1-py2.py3-none-any.whl", hash = "sha256:f42015f31d386b351d4226389b387ae173207058832fbf5c8ec4b40e27b16026"}, + {file = "coveralls-3.3.1.tar.gz", hash = "sha256:b32a8bb5d2df585207c119d6c01567b81fba690c9c10a753bfe27a335bfc43ea"}, +] + +[package.dependencies] +coverage = ">=4.1,<6.0.dev0 || >6.1,<6.1.1 || >6.1.1,<7.0" +docopt = ">=0.6.1" +requests = ">=1.0.0" + +[package.extras] +yaml = ["PyYAML (>=3.10)"] + +[[package]] +name = "cron-descriptor" +version = "1.4.5" +description = "A Python library that converts cron expressions into human readable strings." 
+optional = false +python-versions = "*" +groups = ["main"] +files = [ + {file = "cron_descriptor-1.4.5-py3-none-any.whl", hash = "sha256:736b3ae9d1a99bc3dbfc5b55b5e6e7c12031e7ba5de716625772f8b02dcd6013"}, + {file = "cron_descriptor-1.4.5.tar.gz", hash = "sha256:f51ce4ffc1d1f2816939add8524f206c376a42c87a5fca3091ce26725b3b1bca"}, +] + +[package.extras] +dev = ["polib"] + +[[package]] +name = "cryptography" +version = "45.0.4" +description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." +optional = false +python-versions = "!=3.9.0,!=3.9.1,>=3.7" +groups = ["main"] +files = [ + {file = "cryptography-45.0.4-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:425a9a6ac2823ee6e46a76a21a4e8342d8fa5c01e08b823c1f19a8b74f096069"}, + {file = "cryptography-45.0.4-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:680806cf63baa0039b920f4976f5f31b10e772de42f16310a6839d9f21a26b0d"}, + {file = "cryptography-45.0.4-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:4ca0f52170e821bc8da6fc0cc565b7bb8ff8d90d36b5e9fdd68e8a86bdf72036"}, + {file = "cryptography-45.0.4-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:f3fe7a5ae34d5a414957cc7f457e2b92076e72938423ac64d215722f6cf49a9e"}, + {file = "cryptography-45.0.4-cp311-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:25eb4d4d3e54595dc8adebc6bbd5623588991d86591a78c2548ffb64797341e2"}, + {file = "cryptography-45.0.4-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:ce1678a2ccbe696cf3af15a75bb72ee008d7ff183c9228592ede9db467e64f1b"}, + {file = "cryptography-45.0.4-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:49fe9155ab32721b9122975e168a6760d8ce4cffe423bcd7ca269ba41b5dfac1"}, + {file = "cryptography-45.0.4-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:2882338b2a6e0bd337052e8b9007ced85c637da19ef9ecaf437744495c8c2999"}, + {file = "cryptography-45.0.4-cp311-abi3-musllinux_1_2_aarch64.whl", 
hash = "sha256:23b9c3ea30c3ed4db59e7b9619272e94891f8a3a5591d0b656a7582631ccf750"}, + {file = "cryptography-45.0.4-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:b0a97c927497e3bc36b33987abb99bf17a9a175a19af38a892dc4bbb844d7ee2"}, + {file = "cryptography-45.0.4-cp311-abi3-win32.whl", hash = "sha256:e00a6c10a5c53979d6242f123c0a97cff9f3abed7f064fc412c36dc521b5f257"}, + {file = "cryptography-45.0.4-cp311-abi3-win_amd64.whl", hash = "sha256:817ee05c6c9f7a69a16200f0c90ab26d23a87701e2a284bd15156783e46dbcc8"}, + {file = "cryptography-45.0.4-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:964bcc28d867e0f5491a564b7debb3ffdd8717928d315d12e0d7defa9e43b723"}, + {file = "cryptography-45.0.4-cp37-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:6a5bf57554e80f75a7db3d4b1dacaa2764611ae166ab42ea9a72bcdb5d577637"}, + {file = "cryptography-45.0.4-cp37-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:46cf7088bf91bdc9b26f9c55636492c1cce3e7aaf8041bbf0243f5e5325cfb2d"}, + {file = "cryptography-45.0.4-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:7bedbe4cc930fa4b100fc845ea1ea5788fcd7ae9562e669989c11618ae8d76ee"}, + {file = "cryptography-45.0.4-cp37-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:eaa3e28ea2235b33220b949c5a0d6cf79baa80eab2eb5607ca8ab7525331b9ff"}, + {file = "cryptography-45.0.4-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:7ef2dde4fa9408475038fc9aadfc1fb2676b174e68356359632e980c661ec8f6"}, + {file = "cryptography-45.0.4-cp37-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:6a3511ae33f09094185d111160fd192c67aa0a2a8d19b54d36e4c78f651dc5ad"}, + {file = "cryptography-45.0.4-cp37-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:06509dc70dd71fa56eaa138336244e2fbaf2ac164fc9b5e66828fccfd2b680d6"}, + {file = "cryptography-45.0.4-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:5f31e6b0a5a253f6aa49be67279be4a7e5a4ef259a9f33c69f7d1b1191939872"}, + {file = 
"cryptography-45.0.4-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:944e9ccf67a9594137f942d5b52c8d238b1b4e46c7a0c2891b7ae6e01e7c80a4"}, + {file = "cryptography-45.0.4-cp37-abi3-win32.whl", hash = "sha256:c22fe01e53dc65edd1945a2e6f0015e887f84ced233acecb64b4daadb32f5c97"}, + {file = "cryptography-45.0.4-cp37-abi3-win_amd64.whl", hash = "sha256:627ba1bc94f6adf0b0a2e35d87020285ead22d9f648c7e75bb64f367375f3b22"}, + {file = "cryptography-45.0.4-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a77c6fb8d76e9c9f99f2f3437c1a4ac287b34eaf40997cfab1e9bd2be175ac39"}, + {file = "cryptography-45.0.4-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:7aad98a25ed8ac917fdd8a9c1e706e5a0956e06c498be1f713b61734333a4507"}, + {file = "cryptography-45.0.4-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:3530382a43a0e524bc931f187fc69ef4c42828cf7d7f592f7f249f602b5a4ab0"}, + {file = "cryptography-45.0.4-pp310-pypy310_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:6b613164cb8425e2f8db5849ffb84892e523bf6d26deb8f9bb76ae86181fa12b"}, + {file = "cryptography-45.0.4-pp310-pypy310_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:96d4819e25bf3b685199b304a0029ce4a3caf98947ce8a066c9137cc78ad2c58"}, + {file = "cryptography-45.0.4-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:b97737a3ffbea79eebb062eb0d67d72307195035332501722a9ca86bab9e3ab2"}, + {file = "cryptography-45.0.4-pp311-pypy311_pp73-macosx_10_9_x86_64.whl", hash = "sha256:4828190fb6c4bcb6ebc6331f01fe66ae838bb3bd58e753b59d4b22eb444b996c"}, + {file = "cryptography-45.0.4-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:03dbff8411206713185b8cebe31bc5c0eb544799a50c09035733716b386e61a4"}, + {file = "cryptography-45.0.4-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:51dfbd4d26172d31150d84c19bbe06c68ea4b7f11bbc7b3a5e146b367c311349"}, + {file = "cryptography-45.0.4-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = 
"sha256:0339a692de47084969500ee455e42c58e449461e0ec845a34a6a9b9bf7df7fb8"}, + {file = "cryptography-45.0.4-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:0cf13c77d710131d33e63626bd55ae7c0efb701ebdc2b3a7952b9b23a0412862"}, + {file = "cryptography-45.0.4-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:bbc505d1dc469ac12a0a064214879eac6294038d6b24ae9f71faae1448a9608d"}, + {file = "cryptography-45.0.4.tar.gz", hash = "sha256:7405ade85c83c37682c8fe65554759800a4a8c54b2d96e0f8ad114d31b808d57"}, +] + +[package.dependencies] +cffi = {version = ">=1.14", markers = "platform_python_implementation != \"PyPy\""} + +[package.extras] +docs = ["sphinx (>=5.3.0)", "sphinx-inline-tabs ; python_full_version >= \"3.8.0\"", "sphinx-rtd-theme (>=3.0.0) ; python_full_version >= \"3.8.0\""] +docstest = ["pyenchant (>=3)", "readme-renderer (>=30.0)", "sphinxcontrib-spelling (>=7.3.1)"] +nox = ["nox (>=2024.4.15)", "nox[uv] (>=2024.3.2) ; python_full_version >= \"3.8.0\""] +pep8test = ["check-sdist ; python_full_version >= \"3.8.0\"", "click (>=8.0.1)", "mypy (>=1.4)", "ruff (>=0.3.6)"] +sdist = ["build (>=1.0.0)"] +ssh = ["bcrypt (>=3.1.5)"] +test = ["certifi (>=2024)", "cryptography-vectors (==45.0.4)", "pretend (>=0.7)", "pytest (>=7.4.0)", "pytest-benchmark (>=4.0)", "pytest-cov (>=2.10.1)", "pytest-xdist (>=3.5.0)"] +test-randomorder = ["pytest-randomly"] + +[[package]] +name = "django" +version = "5.2.3" +description = "A high-level Python web framework that encourages rapid development and clean, pragmatic design." 
+optional = false +python-versions = ">=3.10" +groups = ["main", "dev"] +files = [ + {file = "django-5.2.3-py3-none-any.whl", hash = "sha256:c517a6334e0fd940066aa9467b29401b93c37cec2e61365d663b80922542069d"}, + {file = "django-5.2.3.tar.gz", hash = "sha256:335213277666ab2c5cac44a792a6d2f3d58eb79a80c14b6b160cd4afc3b75684"}, +] + +[package.dependencies] +asgiref = ">=3.8.1" +sqlparse = ">=0.3.1" +tzdata = {version = "*", markers = "sys_platform == \"win32\""} + +[package.extras] +argon2 = ["argon2-cffi (>=19.1.0)"] +bcrypt = ["bcrypt"] + +[[package]] +name = "django-allauth" +version = "65.5.0" +description = "Integrated set of Django applications addressing authentication, registration, account management as well as 3rd party (social) account authentication." +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "django_allauth-65.5.0.tar.gz", hash = "sha256:1a564fd2f5413054559078c2b7146796b517c1e7a38c6312e9de7c9bb708325d"}, +] + +[package.dependencies] +asgiref = ">=3.8.1" +Django = ">=4.2.16" + +[package.extras] +mfa = ["fido2 (>=1.1.2)", "qrcode (>=7.0.0)"] +openid = ["python3-openid (>=3.0.8)"] +saml = ["python3-saml (>=1.15.0,<2.0.0)"] +socialaccount = ["pyjwt[crypto] (>=1.7)", "requests (>=2.0.0)", "requests-oauthlib (>=0.3.0)"] +steam = ["python3-openid (>=3.0.8)"] + +[[package]] +name = "django-celery-beat" +version = "2.8.1" +description = "Database-backed Periodic Tasks." 
+optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "django_celery_beat-2.8.1-py3-none-any.whl", hash = "sha256:da2b1c6939495c05a551717509d6e3b79444e114a027f7b77bf3727c2a39d171"}, + {file = "django_celery_beat-2.8.1.tar.gz", hash = "sha256:dfad0201c0ac50c91a34700ef8fa0a10ee098cc7f3375fe5debed79f2204f80a"}, +] + +[package.dependencies] +celery = ">=5.2.3,<6.0" +cron-descriptor = ">=1.2.32" +Django = ">=2.2,<6.0" +django-timezone-field = ">=5.0" +python-crontab = ">=2.3.4" +tzdata = "*" + +[[package]] +name = "django-cors-headers" +version = "4.7.0" +description = "django-cors-headers is a Django application for handling the server headers required for Cross-Origin Resource Sharing (CORS)." +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "django_cors_headers-4.7.0-py3-none-any.whl", hash = "sha256:f1c125dcd58479fe7a67fe2499c16ee38b81b397463cf025f0e2c42937421070"}, + {file = "django_cors_headers-4.7.0.tar.gz", hash = "sha256:6fdf31bf9c6d6448ba09ef57157db2268d515d94fc5c89a0a1028e1fc03ee52b"}, +] + +[package.dependencies] +asgiref = ">=3.6" +django = ">=4.2" + +[[package]] +name = "django-debug-toolbar" +version = "5.1.0" +description = "A configurable set of panels that display various debug information about the current request/response." 
+optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "django_debug_toolbar-5.1.0-py3-none-any.whl", hash = "sha256:c0591e338ee9603bdfce5aebf8d18ca7341fdbb69595e2b0b34869be5857180e"}, + {file = "django_debug_toolbar-5.1.0.tar.gz", hash = "sha256:8a3b9da4aeab8d384a366e20304bd939a451f0242523c5b7b402248ad474eed2"}, +] + +[package.dependencies] +django = ">=4.2.9" +sqlparse = ">=0.2" + +[[package]] +name = "django-extensions" +version = "3.2.3" +description = "Extensions for Django" +optional = false +python-versions = ">=3.6" +groups = ["main"] +files = [ + {file = "django-extensions-3.2.3.tar.gz", hash = "sha256:44d27919d04e23b3f40231c4ab7af4e61ce832ef46d610cc650d53e68328410a"}, + {file = "django_extensions-3.2.3-py3-none-any.whl", hash = "sha256:9600b7562f79a92cbf1fde6403c04fee314608fefbb595502e34383ae8203401"}, +] + +[package.dependencies] +Django = ">=3.2" + +[[package]] +name = "django-filter" +version = "25.1" +description = "Django-filter is a reusable Django application for allowing users to filter querysets dynamically." 
+optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "django_filter-25.1-py3-none-any.whl", hash = "sha256:4fa48677cf5857b9b1347fed23e355ea792464e0fe07244d1fdfb8a806215b80"}, + {file = "django_filter-25.1.tar.gz", hash = "sha256:1ec9eef48fa8da1c0ac9b411744b16c3f4c31176c867886e4c48da369c407153"}, +] + +[package.dependencies] +Django = ">=4.2" + +[[package]] +name = "django-oauth-toolkit" +version = "3.0.1" +description = "OAuth2 Provider for Django" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "django_oauth_toolkit-3.0.1-py3-none-any.whl", hash = "sha256:3ef00b062a284f2031b0732b32dc899e3bbf0eac221bbb1cffcb50b8932e55ed"}, + {file = "django_oauth_toolkit-3.0.1.tar.gz", hash = "sha256:7200e4a9fb229b145a6d808cbf0423b6d69a87f68557437733eec3c0cf71db02"}, +] + +[package.dependencies] +django = ">=4.2" +jwcrypto = ">=1.5.0" +oauthlib = ">=3.2.2" +requests = ">=2.13.0" + +[[package]] +name = "django-timezone-field" +version = "7.1" +description = "A Django app providing DB, form, and REST framework fields for zoneinfo and pytz timezone objects." +optional = false +python-versions = "<4.0,>=3.8" +groups = ["main"] +files = [ + {file = "django_timezone_field-7.1-py3-none-any.whl", hash = "sha256:93914713ed882f5bccda080eda388f7006349f25930b6122e9b07bf8db49c4b4"}, + {file = "django_timezone_field-7.1.tar.gz", hash = "sha256:b3ef409d88a2718b566fabe10ea996f2838bc72b22d3a2900c0aa905c761380c"}, +] + +[package.dependencies] +Django = ">=3.2,<6.0" + +[[package]] +name = "djangorestframework" +version = "3.16.0" +description = "Web APIs for Django, made easy." 
+optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "djangorestframework-3.16.0-py3-none-any.whl", hash = "sha256:bea7e9f6b96a8584c5224bfb2e4348dfb3f8b5e34edbecb98da258e892089361"}, + {file = "djangorestframework-3.16.0.tar.gz", hash = "sha256:f022ff46613584de994c0c6a4aebbace5fd700555fbe9d33b865ebf173eba6c9"}, +] + +[package.dependencies] +django = ">=4.2" + +[[package]] +name = "djangorestframework-jsonapi" +version = "7.1.0" +description = "A Django REST framework API adapter for the JSON:API spec." +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "djangorestframework_jsonapi-7.1.0-py2.py3-none-any.whl", hash = "sha256:506d7179b6c2d29c4b7879afb4cadf8b0bb468accd40787491875ad680b4993a"}, + {file = "djangorestframework_jsonapi-7.1.0.tar.gz", hash = "sha256:487e4fe5a0590e655cb3af947c8cee7b54be8ced14a81aac53056f2c43a92343"}, +] + +[package.dependencies] +django = ">=4.2" +djangorestframework = ">=3.14" +inflection = ">=0.5.0" + +[package.extras] +django-filter = ["django-filter (>=2.4)"] +django-polymorphic = ["django-polymorphic (>=3.0)"] +openapi = ["pyyaml (>=5.4)", "uritemplate (>=3.0.1)"] + +[[package]] +name = "docopt" +version = "0.6.2" +description = "Pythonic argument parser, that will make you smile" +optional = false +python-versions = "*" +groups = ["dev"] +files = [ + {file = "docopt-0.6.2.tar.gz", hash = "sha256:49b3a825280bd66b3aa83585ef59c4a8c82f2c8a522dbe754a8bc8d08c85c491"}, +] + +[[package]] +name = "elastic-transport" +version = "8.17.1" +description = "Transport classes and utilities shared among Python Elastic client libraries" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "elastic_transport-8.17.1-py3-none-any.whl", hash = "sha256:192718f498f1d10c5e9aa8b9cf32aed405e469a7f0e9d6a8923431dbb2c59fb8"}, + {file = "elastic_transport-8.17.1.tar.gz", hash = "sha256:5edef32ac864dca8e2f0a613ef63491ee8d6b8cfb52881fa7313ba9290cac6d2"}, +] + 
+[package.dependencies] +certifi = "*" +urllib3 = ">=1.26.2,<3" + +[package.extras] +develop = ["aiohttp", "furo", "httpx", "opentelemetry-api", "opentelemetry-sdk", "orjson", "pytest", "pytest-asyncio", "pytest-cov", "pytest-httpserver", "pytest-mock", "requests", "respx", "sphinx (>2)", "sphinx-autodoc-typehints", "trustme"] + +[[package]] +name = "elasticsearch8" +version = "8.17.2" +description = "Python client for Elasticsearch" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "elasticsearch8-8.17.2-py3-none-any.whl", hash = "sha256:91fcc455a59ebf347ccff8d110534030fd0f219585f3e1ecd09d206b0de61445"}, + {file = "elasticsearch8-8.17.2.tar.gz", hash = "sha256:8fa15a4163c527c92aa13c2320fc8c0dc399060f2638ed016ca0859f81120803"}, +] + +[package.dependencies] +elastic-transport = ">=8.15.1,<9" + +[package.extras] +async = ["aiohttp (>=3,<4)"] +dev = ["aiohttp", "black", "build", "coverage", "isort", "jinja2", "mapbox-vector-tile", "nox", "numpy", "orjson", "pandas", "pyarrow", "pytest", "pytest-asyncio", "pytest-cov", "python-dateutil", "pyyaml (>=5.4)", "requests (>=2,<3)", "simsimd", "twine", "unasync"] +docs = ["sphinx", "sphinx-autodoc-typehints", "sphinx-rtd-theme (>=2.0)"] +orjson = ["orjson (>=3)"] +pyarrow = ["pyarrow (>=1)"] +requests = ["requests (>=2.4.0,!=2.32.2,<3.0.0)"] +vectorstore-mmr = ["numpy (>=1)", "simsimd (>=3)"] + +[[package]] +name = "factory-boy" +version = "3.3.3" +description = "A versatile test fixtures replacement based on thoughtbot's factory_bot for Ruby." 
+optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "factory_boy-3.3.3-py2.py3-none-any.whl", hash = "sha256:1c39e3289f7e667c4285433f305f8d506efc2fe9c73aaea4151ebd5cdea394fc"}, + {file = "factory_boy-3.3.3.tar.gz", hash = "sha256:866862d226128dfac7f2b4160287e899daf54f2612778327dd03d0e2cb1e3d03"}, +] + +[package.dependencies] +Faker = ">=0.7.0" + +[package.extras] +dev = ["Django", "Pillow", "SQLAlchemy", "coverage", "flake8", "isort", "mongoengine", "mongomock", "mypy", "tox", "wheel (>=0.32.0)", "zest.releaser[recommended]"] +doc = ["Sphinx", "sphinx-rtd-theme", "sphinxcontrib-spelling"] + +[[package]] +name = "faker" +version = "37.1.0" +description = "Faker is a Python package that generates fake data for you." +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "faker-37.1.0-py3-none-any.whl", hash = "sha256:dc2f730be71cb770e9c715b13374d80dbcee879675121ab51f9683d262ae9a1c"}, + {file = "faker-37.1.0.tar.gz", hash = "sha256:ad9dc66a3b84888b837ca729e85299a96b58fdaef0323ed0baace93c9614af06"}, +] + +[package.dependencies] +tzdata = "*" + +[[package]] +name = "flake8" +version = "7.2.0" +description = "the modular source code checker: pep8 pyflakes and co" +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "flake8-7.2.0-py2.py3-none-any.whl", hash = "sha256:93b92ba5bdb60754a6da14fa3b93a9361fd00a59632ada61fd7b130436c40343"}, + {file = "flake8-7.2.0.tar.gz", hash = "sha256:fa558ae3f6f7dbf2b4f22663e5343b6b6023620461f8d4ff2019ef4b5ee70426"}, +] + +[package.dependencies] +mccabe = ">=0.7.0,<0.8.0" +pycodestyle = ">=2.13.0,<2.14.0" +pyflakes = ">=3.3.0,<3.4.0" + +[[package]] +name = "gevent" +version = "24.11.1" +description = "Coroutine-based network library" +optional = false +python-versions = ">=3.9" +groups = ["deploy"] +files = [ + {file = "gevent-24.11.1-cp310-cp310-macosx_11_0_universal2.whl", hash = 
"sha256:92fe5dfee4e671c74ffaa431fd7ffd0ebb4b339363d24d0d944de532409b935e"}, + {file = "gevent-24.11.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b7bfcfe08d038e1fa6de458891bca65c1ada6d145474274285822896a858c870"}, + {file = "gevent-24.11.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7398c629d43b1b6fd785db8ebd46c0a353880a6fab03d1cf9b6788e7240ee32e"}, + {file = "gevent-24.11.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d7886b63ebfb865178ab28784accd32f287d5349b3ed71094c86e4d3ca738af5"}, + {file = "gevent-24.11.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d9ca80711e6553880974898d99357fb649e062f9058418a92120ca06c18c3c59"}, + {file = "gevent-24.11.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e24181d172f50097ac8fc272c8c5b030149b630df02d1c639ee9f878a470ba2b"}, + {file = "gevent-24.11.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:1d4fadc319b13ef0a3c44d2792f7918cf1bca27cacd4d41431c22e6b46668026"}, + {file = "gevent-24.11.1-cp310-cp310-win_amd64.whl", hash = "sha256:3d882faa24f347f761f934786dde6c73aa6c9187ee710189f12dcc3a63ed4a50"}, + {file = "gevent-24.11.1-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:351d1c0e4ef2b618ace74c91b9b28b3eaa0dd45141878a964e03c7873af09f62"}, + {file = "gevent-24.11.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b5efe72e99b7243e222ba0c2c2ce9618d7d36644c166d63373af239da1036bab"}, + {file = "gevent-24.11.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9d3b249e4e1f40c598ab8393fc01ae6a3b4d51fc1adae56d9ba5b315f6b2d758"}, + {file = "gevent-24.11.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81d918e952954675f93fb39001da02113ec4d5f4921bf5a0cc29719af6824e5d"}, + {file = "gevent-24.11.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:c9c935b83d40c748b6421625465b7308d87c7b3717275acd587eef2bd1c39546"}, + {file = "gevent-24.11.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff96c5739834c9a594db0e12bf59cb3fa0e5102fc7b893972118a3166733d61c"}, + {file = "gevent-24.11.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d6c0a065e31ef04658f799215dddae8752d636de2bed61365c358f9c91e7af61"}, + {file = "gevent-24.11.1-cp311-cp311-win_amd64.whl", hash = "sha256:97e2f3999a5c0656f42065d02939d64fffaf55861f7d62b0107a08f52c984897"}, + {file = "gevent-24.11.1-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:a3d75fa387b69c751a3d7c5c3ce7092a171555126e136c1d21ecd8b50c7a6e46"}, + {file = "gevent-24.11.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:beede1d1cff0c6fafae3ab58a0c470d7526196ef4cd6cc18e7769f207f2ea4eb"}, + {file = "gevent-24.11.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:85329d556aaedced90a993226d7d1186a539c843100d393f2349b28c55131c85"}, + {file = "gevent-24.11.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:816b3883fa6842c1cf9d2786722014a0fd31b6312cca1f749890b9803000bad6"}, + {file = "gevent-24.11.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b24d800328c39456534e3bc3e1684a28747729082684634789c2f5a8febe7671"}, + {file = "gevent-24.11.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:a5f1701ce0f7832f333dd2faf624484cbac99e60656bfbb72504decd42970f0f"}, + {file = "gevent-24.11.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:d740206e69dfdfdcd34510c20adcb9777ce2cc18973b3441ab9767cd8948ca8a"}, + {file = "gevent-24.11.1-cp312-cp312-win_amd64.whl", hash = "sha256:68bee86b6e1c041a187347ef84cf03a792f0b6c7238378bf6ba4118af11feaae"}, + {file = "gevent-24.11.1-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:d618e118fdb7af1d6c1a96597a5cd6ac84a9f3732b5be8515c6a66e098d498b6"}, + {file = 
"gevent-24.11.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2142704c2adce9cd92f6600f371afb2860a446bfd0be5bd86cca5b3e12130766"}, + {file = "gevent-24.11.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:92e0d7759de2450a501effd99374256b26359e801b2d8bf3eedd3751973e87f5"}, + {file = "gevent-24.11.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ca845138965c8c56d1550499d6b923eb1a2331acfa9e13b817ad8305dde83d11"}, + {file = "gevent-24.11.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:356b73d52a227d3313f8f828025b665deada57a43d02b1cf54e5d39028dbcf8d"}, + {file = "gevent-24.11.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:58851f23c4bdb70390f10fc020c973ffcf409eb1664086792c8b1e20f25eef43"}, + {file = "gevent-24.11.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:1ea50009ecb7f1327347c37e9eb6561bdbc7de290769ee1404107b9a9cba7cf1"}, + {file = "gevent-24.11.1-cp313-cp313-win_amd64.whl", hash = "sha256:ec68e270543ecd532c4c1d70fca020f90aa5486ad49c4f3b8b2e64a66f5c9274"}, + {file = "gevent-24.11.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d9347690f4e53de2c4af74e62d6fabc940b6d4a6cad555b5a379f61e7d3f2a8e"}, + {file = "gevent-24.11.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8619d5c888cb7aebf9aec6703e410620ef5ad48cdc2d813dd606f8aa7ace675f"}, + {file = "gevent-24.11.1-cp39-cp39-win32.whl", hash = "sha256:c6b775381f805ff5faf250e3a07c0819529571d19bb2a9d474bee8c3f90d66af"}, + {file = "gevent-24.11.1-cp39-cp39-win_amd64.whl", hash = "sha256:1c3443b0ed23dcb7c36a748d42587168672953d368f2956b17fad36d43b58836"}, + {file = "gevent-24.11.1-pp310-pypy310_pp73-macosx_11_0_universal2.whl", hash = "sha256:f43f47e702d0c8e1b8b997c00f1601486f9f976f84ab704f8f11536e3fa144c9"}, + {file = "gevent-24.11.1.tar.gz", hash = "sha256:8bd1419114e9e4a3ed33a5bad766afff9a3cf765cb440a582a1b3a9bc80c1aca"}, +] + +[package.dependencies] +cffi = 
{version = ">=1.17.1", markers = "platform_python_implementation == \"CPython\" and sys_platform == \"win32\""} +greenlet = {version = ">=3.1.1", markers = "platform_python_implementation == \"CPython\""} +"zope.event" = "*" +"zope.interface" = "*" + +[package.extras] +dnspython = ["dnspython (>=1.16.0,<2.0) ; python_version < \"3.10\"", "idna ; python_version < \"3.10\""] +docs = ["furo", "repoze.sphinx.autointerface", "sphinx", "sphinxcontrib-programoutput", "zope.schema"] +monitor = ["psutil (>=5.7.0) ; sys_platform != \"win32\" or platform_python_implementation == \"CPython\""] +recommended = ["cffi (>=1.17.1) ; platform_python_implementation == \"CPython\"", "dnspython (>=1.16.0,<2.0) ; python_version < \"3.10\"", "idna ; python_version < \"3.10\"", "psutil (>=5.7.0) ; sys_platform != \"win32\" or platform_python_implementation == \"CPython\""] +test = ["cffi (>=1.17.1) ; platform_python_implementation == \"CPython\"", "coverage (>=5.0) ; sys_platform != \"win32\"", "dnspython (>=1.16.0,<2.0) ; python_version < \"3.10\"", "idna ; python_version < \"3.10\"", "objgraph", "psutil (>=5.7.0) ; sys_platform != \"win32\" or platform_python_implementation == \"CPython\"", "requests"] + +[[package]] +name = "greenlet" +version = "3.2.3" +description = "Lightweight in-process concurrent programming" +optional = false +python-versions = ">=3.9" +groups = ["deploy"] +markers = "platform_python_implementation == \"CPython\"" +files = [ + {file = "greenlet-3.2.3-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:1afd685acd5597349ee6d7a88a8bec83ce13c106ac78c196ee9dde7c04fe87be"}, + {file = "greenlet-3.2.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:761917cac215c61e9dc7324b2606107b3b292a8349bdebb31503ab4de3f559ac"}, + {file = "greenlet-3.2.3-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:a433dbc54e4a37e4fff90ef34f25a8c00aed99b06856f0119dcf09fbafa16392"}, + {file = 
"greenlet-3.2.3-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:72e77ed69312bab0434d7292316d5afd6896192ac4327d44f3d613ecb85b037c"}, + {file = "greenlet-3.2.3-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:68671180e3849b963649254a882cd544a3c75bfcd2c527346ad8bb53494444db"}, + {file = "greenlet-3.2.3-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:49c8cfb18fb419b3d08e011228ef8a25882397f3a859b9fe1436946140b6756b"}, + {file = "greenlet-3.2.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:efc6dc8a792243c31f2f5674b670b3a95d46fa1c6a912b8e310d6f542e7b0712"}, + {file = "greenlet-3.2.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:731e154aba8e757aedd0781d4b240f1225b075b4409f1bb83b05ff410582cf00"}, + {file = "greenlet-3.2.3-cp310-cp310-win_amd64.whl", hash = "sha256:96c20252c2f792defe9a115d3287e14811036d51e78b3aaddbee23b69b216302"}, + {file = "greenlet-3.2.3-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:784ae58bba89fa1fa5733d170d42486580cab9decda3484779f4759345b29822"}, + {file = "greenlet-3.2.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:0921ac4ea42a5315d3446120ad48f90c3a6b9bb93dd9b3cf4e4d84a66e42de83"}, + {file = "greenlet-3.2.3-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:d2971d93bb99e05f8c2c0c2f4aa9484a18d98c4c3bd3c62b65b7e6ae33dfcfaf"}, + {file = "greenlet-3.2.3-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:c667c0bf9d406b77a15c924ef3285e1e05250948001220368e039b6aa5b5034b"}, + {file = "greenlet-3.2.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:592c12fb1165be74592f5de0d70f82bc5ba552ac44800d632214b76089945147"}, + {file = "greenlet-3.2.3-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:29e184536ba333003540790ba29829ac14bb645514fbd7e32af331e8202a62a5"}, + {file = 
"greenlet-3.2.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:93c0bb79844a367782ec4f429d07589417052e621aa39a5ac1fb99c5aa308edc"}, + {file = "greenlet-3.2.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:751261fc5ad7b6705f5f76726567375bb2104a059454e0226e1eef6c756748ba"}, + {file = "greenlet-3.2.3-cp311-cp311-win_amd64.whl", hash = "sha256:83a8761c75312361aa2b5b903b79da97f13f556164a7dd2d5448655425bd4c34"}, + {file = "greenlet-3.2.3-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:25ad29caed5783d4bd7a85c9251c651696164622494c00802a139c00d639242d"}, + {file = "greenlet-3.2.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:88cd97bf37fe24a6710ec6a3a7799f3f81d9cd33317dcf565ff9950c83f55e0b"}, + {file = "greenlet-3.2.3-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:baeedccca94880d2f5666b4fa16fc20ef50ba1ee353ee2d7092b383a243b0b0d"}, + {file = "greenlet-3.2.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:be52af4b6292baecfa0f397f3edb3c6092ce071b499dd6fe292c9ac9f2c8f264"}, + {file = "greenlet-3.2.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:0cc73378150b8b78b0c9fe2ce56e166695e67478550769536a6742dca3651688"}, + {file = "greenlet-3.2.3-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:706d016a03e78df129f68c4c9b4c4f963f7d73534e48a24f5f5a7101ed13dbbb"}, + {file = "greenlet-3.2.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:419e60f80709510c343c57b4bb5a339d8767bf9aef9b8ce43f4f143240f88b7c"}, + {file = "greenlet-3.2.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:93d48533fade144203816783373f27a97e4193177ebaaf0fc396db19e5d61163"}, + {file = "greenlet-3.2.3-cp312-cp312-win_amd64.whl", hash = "sha256:7454d37c740bb27bdeddfc3f358f26956a07d5220818ceb467a483197d84f849"}, + {file = "greenlet-3.2.3-cp313-cp313-macosx_11_0_universal2.whl", hash = 
"sha256:500b8689aa9dd1ab26872a34084503aeddefcb438e2e7317b89b11eaea1901ad"}, + {file = "greenlet-3.2.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:a07d3472c2a93117af3b0136f246b2833fdc0b542d4a9799ae5f41c28323faef"}, + {file = "greenlet-3.2.3-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:8704b3768d2f51150626962f4b9a9e4a17d2e37c8a8d9867bbd9fa4eb938d3b3"}, + {file = "greenlet-3.2.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:5035d77a27b7c62db6cf41cf786cfe2242644a7a337a0e155c80960598baab95"}, + {file = "greenlet-3.2.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:2d8aa5423cd4a396792f6d4580f88bdc6efcb9205891c9d40d20f6e670992efb"}, + {file = "greenlet-3.2.3-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2c724620a101f8170065d7dded3f962a2aea7a7dae133a009cada42847e04a7b"}, + {file = "greenlet-3.2.3-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:873abe55f134c48e1f2a6f53f7d1419192a3d1a4e873bace00499a4e45ea6af0"}, + {file = "greenlet-3.2.3-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:024571bbce5f2c1cfff08bf3fbaa43bbc7444f580ae13b0099e95d0e6e67ed36"}, + {file = "greenlet-3.2.3-cp313-cp313-win_amd64.whl", hash = "sha256:5195fb1e75e592dd04ce79881c8a22becdfa3e6f500e7feb059b1e6fdd54d3e3"}, + {file = "greenlet-3.2.3-cp314-cp314-macosx_11_0_universal2.whl", hash = "sha256:3d04332dddb10b4a211b68111dabaee2e1a073663d117dc10247b5b1642bac86"}, + {file = "greenlet-3.2.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8186162dffde068a465deab08fc72c767196895c39db26ab1c17c0b77a6d8b97"}, + {file = "greenlet-3.2.3-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:f4bfbaa6096b1b7a200024784217defedf46a07c2eee1a498e94a1b5f8ec5728"}, + {file = "greenlet-3.2.3-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = 
"sha256:ed6cfa9200484d234d8394c70f5492f144b20d4533f69262d530a1a082f6ee9a"}, + {file = "greenlet-3.2.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:02b0df6f63cd15012bed5401b47829cfd2e97052dc89da3cfaf2c779124eb892"}, + {file = "greenlet-3.2.3-cp314-cp314-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:86c2d68e87107c1792e2e8d5399acec2487a4e993ab76c792408e59394d52141"}, + {file = "greenlet-3.2.3-cp314-cp314-win_amd64.whl", hash = "sha256:8c47aae8fbbfcf82cc13327ae802ba13c9c36753b67e760023fd116bc124a62a"}, + {file = "greenlet-3.2.3-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:42efc522c0bd75ffa11a71e09cd8a399d83fafe36db250a87cf1dacfaa15dc64"}, + {file = "greenlet-3.2.3-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d760f9bdfe79bff803bad32b4d8ffb2c1d2ce906313fc10a83976ffb73d64ca7"}, + {file = "greenlet-3.2.3-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:8324319cbd7b35b97990090808fdc99c27fe5338f87db50514959f8059999805"}, + {file = "greenlet-3.2.3-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:8c37ef5b3787567d322331d5250e44e42b58c8c713859b8a04c6065f27efbf72"}, + {file = "greenlet-3.2.3-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:ce539fb52fb774d0802175d37fcff5c723e2c7d249c65916257f0a940cee8904"}, + {file = "greenlet-3.2.3-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:003c930e0e074db83559edc8705f3a2d066d4aa8c2f198aff1e454946efd0f26"}, + {file = "greenlet-3.2.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7e70ea4384b81ef9e84192e8a77fb87573138aa5d4feee541d8014e452b434da"}, + {file = "greenlet-3.2.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:22eb5ba839c4b2156f18f76768233fe44b23a31decd9cc0d4cc8141c211fd1b4"}, + {file = "greenlet-3.2.3-cp39-cp39-win32.whl", hash = "sha256:4532f0d25df67f896d137431b13f4cdce89f7e3d4a96387a41290910df4d3a57"}, + {file = 
"greenlet-3.2.3-cp39-cp39-win_amd64.whl", hash = "sha256:aaa7aae1e7f75eaa3ae400ad98f8644bb81e1dc6ba47ce8a93d3f17274e08322"}, + {file = "greenlet-3.2.3.tar.gz", hash = "sha256:8b0dd8ae4c0d6f5e54ee55ba935eeb3d735a9b58a8a1e5b5cbab64e01a39f365"}, +] + +[package.extras] +docs = ["Sphinx", "furo"] +test = ["objgraph", "psutil"] + +[[package]] +name = "idna" +version = "3.10" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.6" +groups = ["main", "dev"] +files = [ + {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, + {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, +] + +[package.extras] +all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"] + +[[package]] +name = "inflection" +version = "0.5.1" +description = "A port of Ruby on Rails inflector to Python" +optional = false +python-versions = ">=3.5" +groups = ["main"] +files = [ + {file = "inflection-0.5.1-py2.py3-none-any.whl", hash = "sha256:f38b2b640938a4f35ade69ac3d053042959b62a0f1076a5bbaa1b9526605a8a2"}, + {file = "inflection-0.5.1.tar.gz", hash = "sha256:1a29730d366e996aaacffb2f1f1cb9593dc38e2ddd30c91250c6dde09ea9b417"}, +] + +[[package]] +name = "iniconfig" +version = "2.1.0" +description = "brain-dead simple config-ini parsing" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "iniconfig-2.1.0-py3-none-any.whl", hash = "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760"}, + {file = "iniconfig-2.1.0.tar.gz", hash = "sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7"}, +] + +[[package]] +name = "jwcrypto" +version = "1.5.6" +description = "Implementation of JOSE Web standards" +optional = false +python-versions = ">= 3.8" +groups = ["main"] +files = [ + {file = "jwcrypto-1.5.6-py3-none-any.whl", hash = 
"sha256:150d2b0ebbdb8f40b77f543fb44ffd2baeff48788be71f67f03566692fd55789"}, + {file = "jwcrypto-1.5.6.tar.gz", hash = "sha256:771a87762a0c081ae6166958a954f80848820b2ab066937dc8b8379d65b1b039"}, +] + +[package.dependencies] +cryptography = ">=3.4" +typing-extensions = ">=4.5.0" + +[[package]] +name = "kombu" +version = "5.5.0" +description = "Messaging library for Python." +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "kombu-5.5.0-py3-none-any.whl", hash = "sha256:526c6cf038c986b998639109a1eb762502f831e8da148cc928f1f95cd91eb874"}, + {file = "kombu-5.5.0.tar.gz", hash = "sha256:72e65c062e903ee1b4e8b68d348f63c02afc172eda409e3aca85867752e79c0b"}, +] + +[package.dependencies] +amqp = ">=5.1.1,<6.0.0" +tzdata = {version = "2025.1", markers = "python_version >= \"3.9\""} +vine = "5.1.0" + +[package.extras] +azureservicebus = ["azure-servicebus (>=7.10.0)"] +azurestoragequeues = ["azure-identity (>=1.12.0)", "azure-storage-queue (>=12.6.0)"] +confluentkafka = ["confluent-kafka (>=2.2.0)"] +consul = ["python-consul2 (==0.1.5)"] +gcpubsub = ["google-cloud-monitoring (>=2.16.0)", "google-cloud-pubsub (>=2.18.4)", "grpcio (==1.67.0)", "protobuf (==4.25.5)"] +librabbitmq = ["librabbitmq (>=2.0.0) ; python_version < \"3.11\""] +mongodb = ["pymongo (>=4.1.1)"] +msgpack = ["msgpack (==1.1.0)"] +pyro = ["pyro4 (==4.82)"] +qpid = ["qpid-python (>=0.26)", "qpid-tools (>=0.26)"] +redis = ["redis (>=4.5.2,!=4.5.5,!=5.0.2,<=5.2.1)"] +slmq = ["softlayer-messaging (>=1.0.3)"] +sqlalchemy = ["sqlalchemy (>=1.4.48,<2.1)"] +sqs = ["boto3 (>=1.26.143)", "urllib3 (>=1.26.16)"] +yaml = ["PyYAML (>=3.10)"] +zookeeper = ["kazoo (>=2.8.0)"] + +[[package]] +name = "lxml" +version = "5.3.0" +description = "Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API." 
+optional = false +python-versions = ">=3.6" +groups = ["main"] +files = [ + {file = "lxml-5.3.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:dd36439be765e2dde7660212b5275641edbc813e7b24668831a5c8ac91180656"}, + {file = "lxml-5.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ae5fe5c4b525aa82b8076c1a59d642c17b6e8739ecf852522c6321852178119d"}, + {file = "lxml-5.3.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:501d0d7e26b4d261fca8132854d845e4988097611ba2531408ec91cf3fd9d20a"}, + {file = "lxml-5.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb66442c2546446944437df74379e9cf9e9db353e61301d1a0e26482f43f0dd8"}, + {file = "lxml-5.3.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9e41506fec7a7f9405b14aa2d5c8abbb4dbbd09d88f9496958b6d00cb4d45330"}, + {file = "lxml-5.3.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f7d4a670107d75dfe5ad080bed6c341d18c4442f9378c9f58e5851e86eb79965"}, + {file = "lxml-5.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:41ce1f1e2c7755abfc7e759dc34d7d05fd221723ff822947132dc934d122fe22"}, + {file = "lxml-5.3.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:44264ecae91b30e5633013fb66f6ddd05c006d3e0e884f75ce0b4755b3e3847b"}, + {file = "lxml-5.3.0-cp310-cp310-manylinux_2_28_ppc64le.whl", hash = "sha256:3c174dc350d3ec52deb77f2faf05c439331d6ed5e702fc247ccb4e6b62d884b7"}, + {file = "lxml-5.3.0-cp310-cp310-manylinux_2_28_s390x.whl", hash = "sha256:2dfab5fa6a28a0b60a20638dc48e6343c02ea9933e3279ccb132f555a62323d8"}, + {file = "lxml-5.3.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:b1c8c20847b9f34e98080da785bb2336ea982e7f913eed5809e5a3c872900f32"}, + {file = "lxml-5.3.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:2c86bf781b12ba417f64f3422cfc302523ac9cd1d8ae8c0f92a1c66e56ef2e86"}, + {file = 
"lxml-5.3.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:c162b216070f280fa7da844531169be0baf9ccb17263cf5a8bf876fcd3117fa5"}, + {file = "lxml-5.3.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:36aef61a1678cb778097b4a6eeae96a69875d51d1e8f4d4b491ab3cfb54b5a03"}, + {file = "lxml-5.3.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f65e5120863c2b266dbcc927b306c5b78e502c71edf3295dfcb9501ec96e5fc7"}, + {file = "lxml-5.3.0-cp310-cp310-win32.whl", hash = "sha256:ef0c1fe22171dd7c7c27147f2e9c3e86f8bdf473fed75f16b0c2e84a5030ce80"}, + {file = "lxml-5.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:052d99051e77a4f3e8482c65014cf6372e61b0a6f4fe9edb98503bb5364cfee3"}, + {file = "lxml-5.3.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:74bcb423462233bc5d6066e4e98b0264e7c1bed7541fff2f4e34fe6b21563c8b"}, + {file = "lxml-5.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a3d819eb6f9b8677f57f9664265d0a10dd6551d227afb4af2b9cd7bdc2ccbf18"}, + {file = "lxml-5.3.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5b8f5db71b28b8c404956ddf79575ea77aa8b1538e8b2ef9ec877945b3f46442"}, + {file = "lxml-5.3.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2c3406b63232fc7e9b8783ab0b765d7c59e7c59ff96759d8ef9632fca27c7ee4"}, + {file = "lxml-5.3.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2ecdd78ab768f844c7a1d4a03595038c166b609f6395e25af9b0f3f26ae1230f"}, + {file = "lxml-5.3.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:168f2dfcfdedf611eb285efac1516c8454c8c99caf271dccda8943576b67552e"}, + {file = "lxml-5.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa617107a410245b8660028a7483b68e7914304a6d4882b5ff3d2d3eb5948d8c"}, + {file = "lxml-5.3.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:69959bd3167b993e6e710b99051265654133a98f20cec1d9b493b931942e9c16"}, + {file = 
"lxml-5.3.0-cp311-cp311-manylinux_2_28_ppc64le.whl", hash = "sha256:bd96517ef76c8654446fc3db9242d019a1bb5fe8b751ba414765d59f99210b79"}, + {file = "lxml-5.3.0-cp311-cp311-manylinux_2_28_s390x.whl", hash = "sha256:ab6dd83b970dc97c2d10bc71aa925b84788c7c05de30241b9e96f9b6d9ea3080"}, + {file = "lxml-5.3.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:eec1bb8cdbba2925bedc887bc0609a80e599c75b12d87ae42ac23fd199445654"}, + {file = "lxml-5.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6a7095eeec6f89111d03dabfe5883a1fd54da319c94e0fb104ee8f23616b572d"}, + {file = "lxml-5.3.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:6f651ebd0b21ec65dfca93aa629610a0dbc13dbc13554f19b0113da2e61a4763"}, + {file = "lxml-5.3.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:f422a209d2455c56849442ae42f25dbaaba1c6c3f501d58761c619c7836642ec"}, + {file = "lxml-5.3.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:62f7fdb0d1ed2065451f086519865b4c90aa19aed51081979ecd05a21eb4d1be"}, + {file = "lxml-5.3.0-cp311-cp311-win32.whl", hash = "sha256:c6379f35350b655fd817cd0d6cbeef7f265f3ae5fedb1caae2eb442bbeae9ab9"}, + {file = "lxml-5.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:9c52100e2c2dbb0649b90467935c4b0de5528833c76a35ea1a2691ec9f1ee7a1"}, + {file = "lxml-5.3.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:e99f5507401436fdcc85036a2e7dc2e28d962550afe1cbfc07c40e454256a859"}, + {file = "lxml-5.3.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:384aacddf2e5813a36495233b64cb96b1949da72bef933918ba5c84e06af8f0e"}, + {file = "lxml-5.3.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:874a216bf6afaf97c263b56371434e47e2c652d215788396f60477540298218f"}, + {file = "lxml-5.3.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:65ab5685d56914b9a2a34d67dd5488b83213d680b0c5d10b47f81da5a16b0b0e"}, + {file = 
"lxml-5.3.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aac0bbd3e8dd2d9c45ceb82249e8bdd3ac99131a32b4d35c8af3cc9db1657179"}, + {file = "lxml-5.3.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b369d3db3c22ed14c75ccd5af429086f166a19627e84a8fdade3f8f31426e52a"}, + {file = "lxml-5.3.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c24037349665434f375645fa9d1f5304800cec574d0310f618490c871fd902b3"}, + {file = "lxml-5.3.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:62d172f358f33a26d6b41b28c170c63886742f5b6772a42b59b4f0fa10526cb1"}, + {file = "lxml-5.3.0-cp312-cp312-manylinux_2_28_ppc64le.whl", hash = "sha256:c1f794c02903c2824fccce5b20c339a1a14b114e83b306ff11b597c5f71a1c8d"}, + {file = "lxml-5.3.0-cp312-cp312-manylinux_2_28_s390x.whl", hash = "sha256:5d6a6972b93c426ace71e0be9a6f4b2cfae9b1baed2eed2006076a746692288c"}, + {file = "lxml-5.3.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:3879cc6ce938ff4eb4900d901ed63555c778731a96365e53fadb36437a131a99"}, + {file = "lxml-5.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:74068c601baff6ff021c70f0935b0c7bc528baa8ea210c202e03757c68c5a4ff"}, + {file = "lxml-5.3.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:ecd4ad8453ac17bc7ba3868371bffb46f628161ad0eefbd0a855d2c8c32dd81a"}, + {file = "lxml-5.3.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:7e2f58095acc211eb9d8b5771bf04df9ff37d6b87618d1cbf85f92399c98dae8"}, + {file = "lxml-5.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e63601ad5cd8f860aa99d109889b5ac34de571c7ee902d6812d5d9ddcc77fa7d"}, + {file = "lxml-5.3.0-cp312-cp312-win32.whl", hash = "sha256:17e8d968d04a37c50ad9c456a286b525d78c4a1c15dd53aa46c1d8e06bf6fa30"}, + {file = "lxml-5.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:c1a69e58a6bb2de65902051d57fde951febad631a20a64572677a1052690482f"}, + {file = "lxml-5.3.0-cp313-cp313-macosx_10_13_universal2.whl", hash = 
"sha256:8c72e9563347c7395910de6a3100a4840a75a6f60e05af5e58566868d5eb2d6a"}, + {file = "lxml-5.3.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e92ce66cd919d18d14b3856906a61d3f6b6a8500e0794142338da644260595cd"}, + {file = "lxml-5.3.0-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1d04f064bebdfef9240478f7a779e8c5dc32b8b7b0b2fc6a62e39b928d428e51"}, + {file = "lxml-5.3.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c2fb570d7823c2bbaf8b419ba6e5662137f8166e364a8b2b91051a1fb40ab8b"}, + {file = "lxml-5.3.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0c120f43553ec759f8de1fee2f4794452b0946773299d44c36bfe18e83caf002"}, + {file = "lxml-5.3.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:562e7494778a69086f0312ec9689f6b6ac1c6b65670ed7d0267e49f57ffa08c4"}, + {file = "lxml-5.3.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:423b121f7e6fa514ba0c7918e56955a1d4470ed35faa03e3d9f0e3baa4c7e492"}, + {file = "lxml-5.3.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:c00f323cc00576df6165cc9d21a4c21285fa6b9989c5c39830c3903dc4303ef3"}, + {file = "lxml-5.3.0-cp313-cp313-manylinux_2_28_ppc64le.whl", hash = "sha256:1fdc9fae8dd4c763e8a31e7630afef517eab9f5d5d31a278df087f307bf601f4"}, + {file = "lxml-5.3.0-cp313-cp313-manylinux_2_28_s390x.whl", hash = "sha256:658f2aa69d31e09699705949b5fc4719cbecbd4a97f9656a232e7d6c7be1a367"}, + {file = "lxml-5.3.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:1473427aff3d66a3fa2199004c3e601e6c4500ab86696edffdbc84954c72d832"}, + {file = "lxml-5.3.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a87de7dd873bf9a792bf1e58b1c3887b9264036629a5bf2d2e6579fe8e73edff"}, + {file = "lxml-5.3.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:0d7b36afa46c97875303a94e8f3ad932bf78bace9e18e603f2085b652422edcd"}, + {file = 
"lxml-5.3.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:cf120cce539453ae086eacc0130a324e7026113510efa83ab42ef3fcfccac7fb"}, + {file = "lxml-5.3.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:df5c7333167b9674aa8ae1d4008fa4bc17a313cc490b2cca27838bbdcc6bb15b"}, + {file = "lxml-5.3.0-cp313-cp313-win32.whl", hash = "sha256:c802e1c2ed9f0c06a65bc4ed0189d000ada8049312cfeab6ca635e39c9608957"}, + {file = "lxml-5.3.0-cp313-cp313-win_amd64.whl", hash = "sha256:406246b96d552e0503e17a1006fd27edac678b3fcc9f1be71a2f94b4ff61528d"}, + {file = "lxml-5.3.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:8f0de2d390af441fe8b2c12626d103540b5d850d585b18fcada58d972b74a74e"}, + {file = "lxml-5.3.0-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1afe0a8c353746e610bd9031a630a95bcfb1a720684c3f2b36c4710a0a96528f"}, + {file = "lxml-5.3.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:56b9861a71575f5795bde89256e7467ece3d339c9b43141dbdd54544566b3b94"}, + {file = "lxml-5.3.0-cp36-cp36m-manylinux_2_28_x86_64.whl", hash = "sha256:9fb81d2824dff4f2e297a276297e9031f46d2682cafc484f49de182aa5e5df99"}, + {file = "lxml-5.3.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:2c226a06ecb8cdef28845ae976da407917542c5e6e75dcac7cc33eb04aaeb237"}, + {file = "lxml-5.3.0-cp36-cp36m-musllinux_1_2_x86_64.whl", hash = "sha256:7d3d1ca42870cdb6d0d29939630dbe48fa511c203724820fc0fd507b2fb46577"}, + {file = "lxml-5.3.0-cp36-cp36m-win32.whl", hash = "sha256:094cb601ba9f55296774c2d57ad68730daa0b13dc260e1f941b4d13678239e70"}, + {file = "lxml-5.3.0-cp36-cp36m-win_amd64.whl", hash = "sha256:eafa2c8658f4e560b098fe9fc54539f86528651f61849b22111a9b107d18910c"}, + {file = "lxml-5.3.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:cb83f8a875b3d9b458cada4f880fa498646874ba4011dc974e071a0a84a1b033"}, + {file = 
"lxml-5.3.0-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:25f1b69d41656b05885aa185f5fdf822cb01a586d1b32739633679699f220391"}, + {file = "lxml-5.3.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23e0553b8055600b3bf4a00b255ec5c92e1e4aebf8c2c09334f8368e8bd174d6"}, + {file = "lxml-5.3.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9ada35dd21dc6c039259596b358caab6b13f4db4d4a7f8665764d616daf9cc1d"}, + {file = "lxml-5.3.0-cp37-cp37m-manylinux_2_28_aarch64.whl", hash = "sha256:81b4e48da4c69313192d8c8d4311e5d818b8be1afe68ee20f6385d0e96fc9512"}, + {file = "lxml-5.3.0-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:2bc9fd5ca4729af796f9f59cd8ff160fe06a474da40aca03fcc79655ddee1a8b"}, + {file = "lxml-5.3.0-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:07da23d7ee08577760f0a71d67a861019103e4812c87e2fab26b039054594cc5"}, + {file = "lxml-5.3.0-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:ea2e2f6f801696ad7de8aec061044d6c8c0dd4037608c7cab38a9a4d316bfb11"}, + {file = "lxml-5.3.0-cp37-cp37m-win32.whl", hash = "sha256:5c54afdcbb0182d06836cc3d1be921e540be3ebdf8b8a51ee3ef987537455f84"}, + {file = "lxml-5.3.0-cp37-cp37m-win_amd64.whl", hash = "sha256:f2901429da1e645ce548bf9171784c0f74f0718c3f6150ce166be39e4dd66c3e"}, + {file = "lxml-5.3.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c56a1d43b2f9ee4786e4658c7903f05da35b923fb53c11025712562d5cc02753"}, + {file = "lxml-5.3.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ee8c39582d2652dcd516d1b879451500f8db3fe3607ce45d7c5957ab2596040"}, + {file = "lxml-5.3.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0fdf3a3059611f7585a78ee10399a15566356116a4288380921a4b598d807a22"}, + {file = "lxml-5.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:146173654d79eb1fc97498b4280c1d3e1e5d58c398fa530905c9ea50ea849b22"}, + {file = "lxml-5.3.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:0a7056921edbdd7560746f4221dca89bb7a3fe457d3d74267995253f46343f15"}, + {file = "lxml-5.3.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:9e4b47ac0f5e749cfc618efdf4726269441014ae1d5583e047b452a32e221920"}, + {file = "lxml-5.3.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:f914c03e6a31deb632e2daa881fe198461f4d06e57ac3d0e05bbcab8eae01945"}, + {file = "lxml-5.3.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:213261f168c5e1d9b7535a67e68b1f59f92398dd17a56d934550837143f79c42"}, + {file = "lxml-5.3.0-cp38-cp38-win32.whl", hash = "sha256:218c1b2e17a710e363855594230f44060e2025b05c80d1f0661258142b2add2e"}, + {file = "lxml-5.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:315f9542011b2c4e1d280e4a20ddcca1761993dda3afc7a73b01235f8641e903"}, + {file = "lxml-5.3.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:1ffc23010330c2ab67fac02781df60998ca8fe759e8efde6f8b756a20599c5de"}, + {file = "lxml-5.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2b3778cb38212f52fac9fe913017deea2fdf4eb1a4f8e4cfc6b009a13a6d3fcc"}, + {file = "lxml-5.3.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4b0c7a688944891086ba192e21c5229dea54382f4836a209ff8d0a660fac06be"}, + {file = "lxml-5.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:747a3d3e98e24597981ca0be0fd922aebd471fa99d0043a3842d00cdcad7ad6a"}, + {file = "lxml-5.3.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:86a6b24b19eaebc448dc56b87c4865527855145d851f9fc3891673ff97950540"}, + {file = "lxml-5.3.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b11a5d918a6216e521c715b02749240fb07ae5a1fefd4b7bf12f833bc8b4fe70"}, + {file = "lxml-5.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:68b87753c784d6acb8a25b05cb526c3406913c9d988d51f80adecc2b0775d6aa"}, + {file = "lxml-5.3.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:109fa6fede314cc50eed29e6e56c540075e63d922455346f11e4d7a036d2b8cf"}, + {file = "lxml-5.3.0-cp39-cp39-manylinux_2_28_ppc64le.whl", hash = "sha256:02ced472497b8362c8e902ade23e3300479f4f43e45f4105c85ef43b8db85229"}, + {file = "lxml-5.3.0-cp39-cp39-manylinux_2_28_s390x.whl", hash = "sha256:6b038cc86b285e4f9fea2ba5ee76e89f21ed1ea898e287dc277a25884f3a7dfe"}, + {file = "lxml-5.3.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:7437237c6a66b7ca341e868cda48be24b8701862757426852c9b3186de1da8a2"}, + {file = "lxml-5.3.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:7f41026c1d64043a36fda21d64c5026762d53a77043e73e94b71f0521939cc71"}, + {file = "lxml-5.3.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:482c2f67761868f0108b1743098640fbb2a28a8e15bf3f47ada9fa59d9fe08c3"}, + {file = "lxml-5.3.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:1483fd3358963cc5c1c9b122c80606a3a79ee0875bcac0204149fa09d6ff2727"}, + {file = "lxml-5.3.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:2dec2d1130a9cda5b904696cec33b2cfb451304ba9081eeda7f90f724097300a"}, + {file = "lxml-5.3.0-cp39-cp39-win32.whl", hash = "sha256:a0eabd0a81625049c5df745209dc7fcef6e2aea7793e5f003ba363610aa0a3ff"}, + {file = "lxml-5.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:89e043f1d9d341c52bf2af6d02e6adde62e0a46e6755d5eb60dc6e4f0b8aeca2"}, + {file = "lxml-5.3.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:7b1cd427cb0d5f7393c31b7496419da594fe600e6fdc4b105a54f82405e6626c"}, + {file = "lxml-5.3.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:51806cfe0279e06ed8500ce19479d757db42a30fd509940b1701be9c86a5ff9a"}, + {file = "lxml-5.3.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ee70d08fd60c9565ba8190f41a46a54096afa0eeb8f76bd66f2c25d3b1b83005"}, + {file = 
"lxml-5.3.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:8dc2c0395bea8254d8daebc76dcf8eb3a95ec2a46fa6fae5eaccee366bfe02ce"}, + {file = "lxml-5.3.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:6ba0d3dcac281aad8a0e5b14c7ed6f9fa89c8612b47939fc94f80b16e2e9bc83"}, + {file = "lxml-5.3.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:6e91cf736959057f7aac7adfc83481e03615a8e8dd5758aa1d95ea69e8931dba"}, + {file = "lxml-5.3.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:94d6c3782907b5e40e21cadf94b13b0842ac421192f26b84c45f13f3c9d5dc27"}, + {file = "lxml-5.3.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c300306673aa0f3ed5ed9372b21867690a17dba38c68c44b287437c362ce486b"}, + {file = "lxml-5.3.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78d9b952e07aed35fe2e1a7ad26e929595412db48535921c5013edc8aa4a35ce"}, + {file = "lxml-5.3.0-pp37-pypy37_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:01220dca0d066d1349bd6a1726856a78f7929f3878f7e2ee83c296c69495309e"}, + {file = "lxml-5.3.0-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:2d9b8d9177afaef80c53c0a9e30fa252ff3036fb1c6494d427c066a4ce6a282f"}, + {file = "lxml-5.3.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:20094fc3f21ea0a8669dc4c61ed7fa8263bd37d97d93b90f28fc613371e7a875"}, + {file = "lxml-5.3.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:ace2c2326a319a0bb8a8b0e5b570c764962e95818de9f259ce814ee666603f19"}, + {file = "lxml-5.3.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:92e67a0be1639c251d21e35fe74df6bcc40cba445c2cda7c4a967656733249e2"}, + {file = "lxml-5.3.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd5350b55f9fecddc51385463a4f67a5da829bc741e38cf689f38ec9023f54ab"}, + {file = "lxml-5.3.0-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = 
"sha256:4c1fefd7e3d00921c44dc9ca80a775af49698bbfd92ea84498e56acffd4c5469"}, + {file = "lxml-5.3.0-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:71a8dd38fbd2f2319136d4ae855a7078c69c9a38ae06e0c17c73fd70fc6caad8"}, + {file = "lxml-5.3.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:97acf1e1fd66ab53dacd2c35b319d7e548380c2e9e8c54525c6e76d21b1ae3b1"}, + {file = "lxml-5.3.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:68934b242c51eb02907c5b81d138cb977b2129a0a75a8f8b60b01cb8586c7b21"}, + {file = "lxml-5.3.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b710bc2b8292966b23a6a0121f7a6c51d45d2347edcc75f016ac123b8054d3f2"}, + {file = "lxml-5.3.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:18feb4b93302091b1541221196a2155aa296c363fd233814fa11e181adebc52f"}, + {file = "lxml-5.3.0-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:3eb44520c4724c2e1a57c0af33a379eee41792595023f367ba3952a2d96c2aab"}, + {file = "lxml-5.3.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:609251a0ca4770e5a8768ff902aa02bf636339c5a93f9349b48eb1f606f7f3e9"}, + {file = "lxml-5.3.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:516f491c834eb320d6c843156440fe7fc0d50b33e44387fcec5b02f0bc118a4c"}, + {file = "lxml-5.3.0.tar.gz", hash = "sha256:4e109ca30d1edec1ac60cdbe341905dc3b8f55b16855e03a54aaf59e51ec8c6f"}, +] + +[package.extras] +cssselect = ["cssselect (>=0.7)"] +html-clean = ["lxml-html-clean"] +html5 = ["html5lib"] +htmlsoup = ["BeautifulSoup4"] +source = ["Cython (>=3.0.11)"] + +[[package]] +name = "markdown2" +version = "2.5.3" +description = "A fast and complete Python implementation of Markdown" +optional = false +python-versions = "<4,>=3.9" +groups = ["main"] +files = [ + {file = "markdown2-2.5.3-py3-none-any.whl", hash = "sha256:a8ebb7e84b8519c37bf7382b3db600f1798a22c245bfd754a1f87ca8d7ea63b3"}, + {file = "markdown2-2.5.3.tar.gz", hash = 
"sha256:4d502953a4633408b0ab3ec503c5d6984d1b14307e32b325ec7d16ea57524895"}, +] + +[package.extras] +all = ["latex2mathml ; python_version >= \"3.8.1\"", "pygments (>=2.7.3)", "wavedrom"] +code-syntax-highlighting = ["pygments (>=2.7.3)"] +latex = ["latex2mathml ; python_version >= \"3.8.1\""] +wavedrom = ["wavedrom"] + +[[package]] +name = "mccabe" +version = "0.7.0" +description = "McCabe checker, plugin for flake8" +optional = false +python-versions = ">=3.6" +groups = ["dev"] +files = [ + {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"}, + {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, +] + +[[package]] +name = "mypy" +version = "1.16.1" +description = "Optional static typing for Python" +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "mypy-1.16.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b4f0fed1022a63c6fec38f28b7fc77fca47fd490445c69d0a66266c59dd0b88a"}, + {file = "mypy-1.16.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:86042bbf9f5a05ea000d3203cf87aa9d0ccf9a01f73f71c58979eb9249f46d72"}, + {file = "mypy-1.16.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ea7469ee5902c95542bea7ee545f7006508c65c8c54b06dc2c92676ce526f3ea"}, + {file = "mypy-1.16.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:352025753ef6a83cb9e7f2427319bb7875d1fdda8439d1e23de12ab164179574"}, + {file = "mypy-1.16.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ff9fa5b16e4c1364eb89a4d16bcda9987f05d39604e1e6c35378a2987c1aac2d"}, + {file = "mypy-1.16.1-cp310-cp310-win_amd64.whl", hash = "sha256:1256688e284632382f8f3b9e2123df7d279f603c561f099758e66dd6ed4e8bd6"}, + {file = "mypy-1.16.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:472e4e4c100062488ec643f6162dd0d5208e33e2f34544e1fc931372e806c0cc"}, + {file = "mypy-1.16.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ea16e2a7d2714277e349e24d19a782a663a34ed60864006e8585db08f8ad1782"}, + {file = "mypy-1.16.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:08e850ea22adc4d8a4014651575567b0318ede51e8e9fe7a68f25391af699507"}, + {file = "mypy-1.16.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:22d76a63a42619bfb90122889b903519149879ddbf2ba4251834727944c8baca"}, + {file = "mypy-1.16.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:2c7ce0662b6b9dc8f4ed86eb7a5d505ee3298c04b40ec13b30e572c0e5ae17c4"}, + {file = "mypy-1.16.1-cp311-cp311-win_amd64.whl", hash = "sha256:211287e98e05352a2e1d4e8759c5490925a7c784ddc84207f4714822f8cf99b6"}, + {file = "mypy-1.16.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:af4792433f09575d9eeca5c63d7d90ca4aeceda9d8355e136f80f8967639183d"}, + {file = "mypy-1.16.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:66df38405fd8466ce3517eda1f6640611a0b8e70895e2a9462d1d4323c5eb4b9"}, + {file = "mypy-1.16.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:44e7acddb3c48bd2713994d098729494117803616e116032af192871aed80b79"}, + {file = "mypy-1.16.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0ab5eca37b50188163fa7c1b73c685ac66c4e9bdee4a85c9adac0e91d8895e15"}, + {file = "mypy-1.16.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:dedb6229b2c9086247e21a83c309754b9058b438704ad2f6807f0d8227f6ebdd"}, + {file = "mypy-1.16.1-cp312-cp312-win_amd64.whl", hash = "sha256:1f0435cf920e287ff68af3d10a118a73f212deb2ce087619eb4e648116d1fe9b"}, + {file = "mypy-1.16.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ddc91eb318c8751c69ddb200a5937f1232ee8efb4e64e9f4bc475a33719de438"}, + {file = 
"mypy-1.16.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:87ff2c13d58bdc4bbe7dc0dedfe622c0f04e2cb2a492269f3b418df2de05c536"}, + {file = "mypy-1.16.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0a7cfb0fe29fe5a9841b7c8ee6dffb52382c45acdf68f032145b75620acfbd6f"}, + {file = "mypy-1.16.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:051e1677689c9d9578b9c7f4d206d763f9bbd95723cd1416fad50db49d52f359"}, + {file = "mypy-1.16.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:d5d2309511cc56c021b4b4e462907c2b12f669b2dbeb68300110ec27723971be"}, + {file = "mypy-1.16.1-cp313-cp313-win_amd64.whl", hash = "sha256:4f58ac32771341e38a853c5d0ec0dfe27e18e27da9cdb8bbc882d2249c71a3ee"}, + {file = "mypy-1.16.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7fc688329af6a287567f45cc1cefb9db662defeb14625213a5b7da6e692e2069"}, + {file = "mypy-1.16.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5e198ab3f55924c03ead626ff424cad1732d0d391478dfbf7bb97b34602395da"}, + {file = "mypy-1.16.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:09aa4f91ada245f0a45dbc47e548fd94e0dd5a8433e0114917dc3b526912a30c"}, + {file = "mypy-1.16.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:13c7cd5b1cb2909aa318a90fd1b7e31f17c50b242953e7dd58345b2a814f6383"}, + {file = "mypy-1.16.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:58e07fb958bc5d752a280da0e890c538f1515b79a65757bbdc54252ba82e0b40"}, + {file = "mypy-1.16.1-cp39-cp39-win_amd64.whl", hash = "sha256:f895078594d918f93337a505f8add9bd654d1a24962b4c6ed9390e12531eb31b"}, + {file = "mypy-1.16.1-py3-none-any.whl", hash = "sha256:5fc2ac4027d0ef28d6ba69a0343737a23c4d1b83672bf38d1fe237bdc0643b37"}, + {file = "mypy-1.16.1.tar.gz", hash = "sha256:6bd00a0a2094841c5e47e7374bb42b83d64c527a502e3334e1173a0c24437bab"}, +] + +[package.dependencies] 
+mypy_extensions = ">=1.0.0" +pathspec = ">=0.9.0" +typing_extensions = ">=4.6.0" + +[package.extras] +dmypy = ["psutil (>=4.0)"] +faster-cache = ["orjson"] +install-types = ["pip"] +mypyc = ["setuptools (>=50)"] +reports = ["lxml"] + +[[package]] +name = "mypy-extensions" +version = "1.1.0" +description = "Type system extensions for programs checked with the mypy type checker." +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "mypy_extensions-1.1.0-py3-none-any.whl", hash = "sha256:1be4cccdb0f2482337c4743e60421de3a356cd97508abadd57d47403e94f5505"}, + {file = "mypy_extensions-1.1.0.tar.gz", hash = "sha256:52e68efc3284861e772bbcd66823fde5ae21fd2fdb51c62a211403730b916558"}, +] + +[[package]] +name = "newrelic" +version = "10.7.0" +description = "New Relic Python Agent" +optional = false +python-versions = ">=3.7" +groups = ["deploy"] +files = [ + {file = "newrelic-10.7.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:08e959814e0b23a8f96383955cceecb6180dc66f240279c45ee8484058f96eb4"}, + {file = "newrelic-10.7.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4e12b7e88e0d78497b4e3dfca0411a76a548ee15842b9d6ef971035bbdc91693"}, + {file = "newrelic-10.7.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:4125d02c016c3b4a0f88d5ce184d78d4101531d1f76525f7f1ee750e453603f1"}, + {file = "newrelic-10.7.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:680a75e3dd37e86bf1eef2b77408dd1953c94a7b803b56e6f8c3f42164580e35"}, + {file = "newrelic-10.7.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:877b53049be9dfb3ad2845f00c57a3eb1aadeaec700f09a8f6c7fec1f6e71c2b"}, + {file = "newrelic-10.7.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f02916527900f6d209682d3dd77c964fb38ca7d84c31a293085e4a84fa35957d"}, + {file = 
"newrelic-10.7.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:683b5158203e28e46b348f9657054eb25cfb7367e21524a457235d9c5a5cc4ed"}, + {file = "newrelic-10.7.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:dac3003f22e1edd564f7d7556c84f1fb2f61140c46040befa626bdc8f69a4a89"}, + {file = "newrelic-10.7.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4fd86115079045e5a9630168ae1a48fdef7f2782c9268d1f04a7ae7716a6129d"}, + {file = "newrelic-10.7.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ecf6a0b560b10c72fd592c1dcb6ea8812e7876d6e30709b6c5184fdd4e399d62"}, + {file = "newrelic-10.7.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:30b1c668beb12b9627bac6b037f9a2e3f374e678a75c57f63566a4a7ea055e9e"}, + {file = "newrelic-10.7.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:9319828bc2b46b9a25a88a97fab1a9e05a4c9d4bed484206f59e04e2f7cbd1cd"}, + {file = "newrelic-10.7.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6855485d0de0f02271f67617c7a63b44c44f46e50763f37a016a1a117ae8af56"}, + {file = "newrelic-10.7.0-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e78adc209e24cc223ac19104616caa3b8cb789d1ed97d082472d3b7e9d04023d"}, + {file = "newrelic-10.7.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:bc1f090a115688fe7829e7aea1dcf37913a24450813908d9ce6b4eb0831cbbbf"}, + {file = "newrelic-10.7.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:cf6b8f12abf9c984a4e62b0de66d85e2c5153f367dd6d4149544d931e59bcb8d"}, + {file = "newrelic-10.7.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:64708227cf2d56f96f1d2697b23cc5be4952bbd790f0ba63164bedcdbbb457fc"}, + {file = "newrelic-10.7.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:a03832922d05530088aab9acb84bc7758cc8196305852652abb6face3c346ede"}, + {file = "newrelic-10.7.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:e97b3239159d9a178c07043e9da56e841de2b56b947070b7038ddcb93f99fba0"}, + {file = "newrelic-10.7.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:32f089e276f36b73de62c61ba7648d77de70893fe4d9a7c15f95e20f4978f461"}, + {file = "newrelic-10.7.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:83bdce11a0a5929ed5ab5db420f54224662c97fbce4fb6efbe27633ad54d30e2"}, + {file = "newrelic-10.7.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1a6b906d8098cd15639f02152a3c94c879c5a841b177b7ee2e6e13ca3a0f37cf"}, + {file = "newrelic-10.7.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:96f805812a912a8a4008b6f28e109e0d8943c80dd136980a9d3914be5e75a416"}, + {file = "newrelic-10.7.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:8d47f6041c3f28844eaa9cdf0905415fe5fc617ee6623c391532830a1205133e"}, + {file = "newrelic-10.7.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ad5b78a6997ce237185e3911d9a616de0781f600031d53ecce1edeafcca0c79"}, + {file = "newrelic-10.7.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:772a3c1b5fae12253629771cf677197be48c481c4c6ee7a6233a469dc7e37057"}, + {file = "newrelic-10.7.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:650e1818ee404ace26efb2935e6326dbcbf754fbea496710da3889e224c4bcf1"}, + {file = "newrelic-10.7.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:fd2f3d2d290555764b587d35700069581dece2158b73e865f9adc6ccbba4375b"}, + {file = "newrelic-10.7.0.tar.gz", hash = "sha256:ac9716c115ddcf54b54115391a84ed2c318ae943b4f598b4d0248cd6edb12414"}, +] + +[package.extras] +infinite-tracing = ["grpcio", "protobuf"] + +[[package]] +name = "oauthlib" +version = "3.3.1" +description = "A generic, 
spec-compliant, thorough implementation of the OAuth request-signing logic" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "oauthlib-3.3.1-py3-none-any.whl", hash = "sha256:88119c938d2b8fb88561af5f6ee0eec8cc8d552b7bb1f712743136eb7523b7a1"}, + {file = "oauthlib-3.3.1.tar.gz", hash = "sha256:0f0f8aa759826a193cf66c12ea1af1637f87b9b4622d46e866952bb022e538c9"}, +] + +[package.extras] +rsa = ["cryptography (>=3.0.0)"] +signals = ["blinker (>=1.4.0)"] +signedtoken = ["cryptography (>=3.0.0)", "pyjwt (>=2.0.0,<3)"] + +[[package]] +name = "packaging" +version = "25.0" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484"}, + {file = "packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f"}, +] + +[[package]] +name = "pathspec" +version = "0.12.1" +description = "Utility library for gitignore style pattern matching of file paths." 
+optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"}, + {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, +] + +[[package]] +name = "pluggy" +version = "1.6.0" +description = "plugin and hook calling mechanisms for python" +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746"}, + {file = "pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3"}, +] + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["coverage", "pytest", "pytest-benchmark"] + +[[package]] +name = "primitive-metadata" +version = "0.2025.1" +description = "a (simple?) toolset for authoring and gathering metadata as rdf" +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "primitive_metadata-0.2025.1-py3-none-any.whl", hash = "sha256:feaddf223c16e06f982ce3fcaf9a13674b0d82bc0fe31a23060c6f0d867c5110"}, + {file = "primitive_metadata-0.2025.1.tar.gz", hash = "sha256:ca89a1df9338254a15549e9834394a52a545522b83a925f162e0880bd80b8d47"}, +] + +[[package]] +name = "prompt-toolkit" +version = "3.0.51" +description = "Library for building powerful interactive command lines in Python" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "prompt_toolkit-3.0.51-py3-none-any.whl", hash = "sha256:52742911fde84e2d423e2f9a4cf1de7d7ac4e51958f648d9540e0fb8db077b07"}, + {file = "prompt_toolkit-3.0.51.tar.gz", hash = "sha256:931a162e3b27fc90c86f1b48bb1fb2c528c2761475e57c9c06de13311c7b54ed"}, +] + +[package.dependencies] +wcwidth = "*" + +[[package]] +name = "psycogreen" +version = "1.0.2" +description = "psycopg2 integration with coroutine libraries" 
+optional = false +python-versions = "*" +groups = ["deploy"] +files = [ + {file = "psycogreen-1.0.2.tar.gz", hash = "sha256:c429845a8a49cf2f76b71265008760bcd7c7c77d80b806db4dc81116dbcd130d"}, +] + +[[package]] +name = "psycopg2" +version = "2.9.10" +description = "psycopg2 - Python-PostgreSQL Database Adapter" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "psycopg2-2.9.10-cp310-cp310-win32.whl", hash = "sha256:5df2b672140f95adb453af93a7d669d7a7bf0a56bcd26f1502329166f4a61716"}, + {file = "psycopg2-2.9.10-cp310-cp310-win_amd64.whl", hash = "sha256:c6f7b8561225f9e711a9c47087388a97fdc948211c10a4bccbf0ba68ab7b3b5a"}, + {file = "psycopg2-2.9.10-cp311-cp311-win32.whl", hash = "sha256:47c4f9875125344f4c2b870e41b6aad585901318068acd01de93f3677a6522c2"}, + {file = "psycopg2-2.9.10-cp311-cp311-win_amd64.whl", hash = "sha256:0435034157049f6846e95103bd8f5a668788dd913a7c30162ca9503fdf542cb4"}, + {file = "psycopg2-2.9.10-cp312-cp312-win32.whl", hash = "sha256:65a63d7ab0e067e2cdb3cf266de39663203d38d6a8ed97f5ca0cb315c73fe067"}, + {file = "psycopg2-2.9.10-cp312-cp312-win_amd64.whl", hash = "sha256:4a579d6243da40a7b3182e0430493dbd55950c493d8c68f4eec0b302f6bbf20e"}, + {file = "psycopg2-2.9.10-cp313-cp313-win_amd64.whl", hash = "sha256:91fd603a2155da8d0cfcdbf8ab24a2d54bca72795b90d2a3ed2b6da8d979dee2"}, + {file = "psycopg2-2.9.10-cp39-cp39-win32.whl", hash = "sha256:9d5b3b94b79a844a986d029eee38998232451119ad653aea42bb9220a8c5066b"}, + {file = "psycopg2-2.9.10-cp39-cp39-win_amd64.whl", hash = "sha256:88138c8dedcbfa96408023ea2b0c369eda40fe5d75002c0964c78f46f11fa442"}, + {file = "psycopg2-2.9.10.tar.gz", hash = "sha256:12ec0b40b0273f95296233e8750441339298e6a572f7039da5b260e3c8b60e11"}, +] + +[[package]] +name = "py-cpuinfo" +version = "9.0.0" +description = "Get CPU info with pure Python" +optional = false +python-versions = "*" +groups = ["dev"] +files = [ + {file = "py-cpuinfo-9.0.0.tar.gz", hash = 
"sha256:3cdbbf3fac90dc6f118bfd64384f309edeadd902d7c8fb17f02ffa1fc3f49690"}, + {file = "py_cpuinfo-9.0.0-py3-none-any.whl", hash = "sha256:859625bc251f64e21f077d099d4162689c762b5d6a4c3c97553d56241c9674d5"}, +] + +[[package]] +name = "pycodestyle" +version = "2.13.0" +description = "Python style guide checker" +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "pycodestyle-2.13.0-py2.py3-none-any.whl", hash = "sha256:35863c5974a271c7a726ed228a14a4f6daf49df369d8c50cd9a6f58a5e143ba9"}, + {file = "pycodestyle-2.13.0.tar.gz", hash = "sha256:c8415bf09abe81d9c7f872502a6eee881fbe85d8763dd5b9924bb0a01d67efae"}, +] + +[[package]] +name = "pycparser" +version = "2.22" +description = "C parser in Python" +optional = false +python-versions = ">=3.8" +groups = ["main", "deploy"] +files = [ + {file = "pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"}, + {file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"}, +] +markers = {main = "platform_python_implementation != \"PyPy\"", deploy = "platform_python_implementation == \"CPython\" and sys_platform == \"win32\""} + +[[package]] +name = "pyflakes" +version = "3.3.2" +description = "passive checker of Python programs" +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "pyflakes-3.3.2-py2.py3-none-any.whl", hash = "sha256:5039c8339cbb1944045f4ee5466908906180f13cc99cc9949348d10f82a5c32a"}, + {file = "pyflakes-3.3.2.tar.gz", hash = "sha256:6dfd61d87b97fba5dcfaaf781171ac16be16453be6d816147989e7f6e6a9576b"}, +] + +[[package]] +name = "pyjwe" +version = "1.0.0" +description = "JSON Web Encryption implementation in Python" +optional = false +python-versions = "*" +groups = ["main"] +files = [ + {file = "PyJWE-1.0.0.tar.gz", hash = "sha256:e3b3de7be4fcc260e5f1a47ead9c9a9211d8ce98f9e1d88d9a7225198aa9ce47"}, +] + +[package.dependencies] +cryptography = 
">=0.9.3" + +[[package]] +name = "pyparsing" +version = "3.2.3" +description = "pyparsing module - Classes and methods to define and execute parsing grammars" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "pyparsing-3.2.3-py3-none-any.whl", hash = "sha256:a749938e02d6fd0b59b356ca504a24982314bb090c383e3cf201c95ef7e2bfcf"}, + {file = "pyparsing-3.2.3.tar.gz", hash = "sha256:b9c13f1ab8b3b542f72e28f634bad4de758ab3ce4546e4301970ad6fa77c38be"}, +] + +[package.extras] +diagrams = ["jinja2", "railroad-diagrams"] + +[[package]] +name = "pytest" +version = "8.3.5" +description = "pytest: simple powerful testing with Python" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "pytest-8.3.5-py3-none-any.whl", hash = "sha256:c69214aa47deac29fad6c2a4f590b9c4a9fdb16a403176fe154b79c0b4d4d820"}, + {file = "pytest-8.3.5.tar.gz", hash = "sha256:f4efe70cc14e511565ac476b57c279e12a855b11f48f212af1080ef2263d3845"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "sys_platform == \"win32\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=1.5,<2" + +[package.extras] +dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] + +[[package]] +name = "pytest-benchmark" +version = "5.1.0" +description = "A ``pytest`` fixture for benchmarking code. It will group the tests into rounds that are calibrated to the chosen timer." 
+optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "pytest-benchmark-5.1.0.tar.gz", hash = "sha256:9ea661cdc292e8231f7cd4c10b0319e56a2118e2c09d9f50e1b3d150d2aca105"}, + {file = "pytest_benchmark-5.1.0-py3-none-any.whl", hash = "sha256:922de2dfa3033c227c96da942d1878191afa135a29485fb942e85dff1c592c89"}, +] + +[package.dependencies] +py-cpuinfo = "*" +pytest = ">=8.1" + +[package.extras] +aspect = ["aspectlib"] +elasticsearch = ["elasticsearch"] +histogram = ["pygal", "pygaljs", "setuptools"] + +[[package]] +name = "pytest-django" +version = "4.11.1" +description = "A Django plugin for pytest." +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "pytest_django-4.11.1-py3-none-any.whl", hash = "sha256:1b63773f648aa3d8541000c26929c1ea63934be1cfa674c76436966d73fe6a10"}, + {file = "pytest_django-4.11.1.tar.gz", hash = "sha256:a949141a1ee103cb0e7a20f1451d355f83f5e4a5d07bdd4dcfdd1fd0ff227991"}, +] + +[package.dependencies] +pytest = ">=7.0.0" + +[package.extras] +docs = ["sphinx", "sphinx_rtd_theme"] +testing = ["Django", "django-configurations (>=2.0)"] + +[[package]] +name = "python-crontab" +version = "3.2.0" +description = "Python Crontab API" +optional = false +python-versions = "*" +groups = ["main"] +files = [ + {file = "python_crontab-3.2.0-py3-none-any.whl", hash = "sha256:82cb9b6a312d41ff66fd3caf3eed7115c28c195bfb50711bc2b4b9592feb9fe5"}, + {file = "python_crontab-3.2.0.tar.gz", hash = "sha256:40067d1dd39ade3460b2ad8557c7651514cd3851deffff61c5c60e1227c5c36b"}, +] + +[package.dependencies] +python-dateutil = "*" + +[package.extras] +cron-description = ["cron-descriptor"] +cron-schedule = ["croniter"] + +[[package]] +name = "python-dateutil" +version = "2.9.0.post0" +description = "Extensions to the standard Python datetime module" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +groups = ["main"] +files = [ + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = 
"sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, +] + +[package.dependencies] +six = ">=1.5" + +[[package]] +name = "rdflib" +version = "7.1.3" +description = "RDFLib is a Python library for working with RDF, a simple yet powerful language for representing information." +optional = false +python-versions = "<4.0.0,>=3.8.1" +groups = ["main"] +files = [ + {file = "rdflib-7.1.3-py3-none-any.whl", hash = "sha256:5402310a9f0f3c07d453d73fd0ad6ba35616286fe95d3670db2b725f3f539673"}, + {file = "rdflib-7.1.3.tar.gz", hash = "sha256:f3dcb4c106a8cd9e060d92f43d593d09ebc3d07adc244f4c7315856a12e383ee"}, +] + +[package.dependencies] +pyparsing = ">=2.1.0,<4" + +[package.extras] +berkeleydb = ["berkeleydb (>=18.1.0,<19.0.0)"] +html = ["html5rdf (>=1.2,<2)"] +lxml = ["lxml (>=4.3,<6.0)"] +networkx = ["networkx (>=2,<4)"] +orjson = ["orjson (>=3.9.14,<4)"] + +[[package]] +name = "requests" +version = "2.32.3" +description = "Python HTTP for Humans." 
+optional = false +python-versions = ">=3.8" +groups = ["main", "dev"] +files = [ + {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, + {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "sentry-sdk" +version = "2.24.1" +description = "Python client for Sentry (https://sentry.io)" +optional = false +python-versions = ">=3.6" +groups = ["main"] +files = [ + {file = "sentry_sdk-2.24.1-py2.py3-none-any.whl", hash = "sha256:36baa6a1128b9d98d2adc5e9b2f887eff0a6af558fc2b96ed51919042413556d"}, + {file = "sentry_sdk-2.24.1.tar.gz", hash = "sha256:8ba3c29990fa48865b908b3b9dc5ae7fa7e72407c7c9e91303e5206b32d7b8b1"}, +] + +[package.dependencies] +certifi = "*" +django = {version = ">=1.8", optional = true, markers = "extra == \"django\""} +urllib3 = ">=1.26.11" + +[package.extras] +aiohttp = ["aiohttp (>=3.5)"] +anthropic = ["anthropic (>=0.16)"] +arq = ["arq (>=0.23)"] +asyncpg = ["asyncpg (>=0.23)"] +beam = ["apache-beam (>=2.12)"] +bottle = ["bottle (>=0.12.13)"] +celery = ["celery (>=3)"] +celery-redbeat = ["celery-redbeat (>=2)"] +chalice = ["chalice (>=1.16.0)"] +clickhouse-driver = ["clickhouse-driver (>=0.2.0)"] +django = ["django (>=1.8)"] +falcon = ["falcon (>=1.4)"] +fastapi = ["fastapi (>=0.79.0)"] +flask = ["blinker (>=1.1)", "flask (>=0.11)", "markupsafe"] +grpcio = ["grpcio (>=1.21.1)", "protobuf (>=3.8.0)"] +http2 = ["httpcore[http2] (==1.*)"] +httpx = ["httpx (>=0.16.0)"] +huey = ["huey (>=2)"] +huggingface-hub = ["huggingface_hub (>=0.22)"] +langchain = ["langchain (>=0.0.210)"] +launchdarkly = ["launchdarkly-server-sdk (>=9.8.0)"] +litestar = ["litestar (>=2.0.0)"] 
+loguru = ["loguru (>=0.5)"] +openai = ["openai (>=1.0.0)", "tiktoken (>=0.3.0)"] +openfeature = ["openfeature-sdk (>=0.7.1)"] +opentelemetry = ["opentelemetry-distro (>=0.35b0)"] +opentelemetry-experimental = ["opentelemetry-distro"] +pure-eval = ["asttokens", "executing", "pure_eval"] +pymongo = ["pymongo (>=3.1)"] +pyspark = ["pyspark (>=2.4.4)"] +quart = ["blinker (>=1.1)", "quart (>=0.16.1)"] +rq = ["rq (>=0.6)"] +sanic = ["sanic (>=0.8)"] +sqlalchemy = ["sqlalchemy (>=1.2)"] +starlette = ["starlette (>=0.19.1)"] +starlite = ["starlite (>=1.48)"] +statsig = ["statsig (>=0.55.3)"] +tornado = ["tornado (>=6)"] +unleash = ["UnleashClient (>=6.0.1)"] + +[[package]] +name = "setuptools" +version = "80.9.0" +description = "Easily download, build, install, upgrade, and uninstall Python packages" +optional = false +python-versions = ">=3.9" +groups = ["deploy"] +files = [ + {file = "setuptools-80.9.0-py3-none-any.whl", hash = "sha256:062d34222ad13e0cc312a4c02d73f059e86a4acbfbdea8f8f76b28c99f306922"}, + {file = "setuptools-80.9.0.tar.gz", hash = "sha256:f36b47402ecde768dbfafc46e8e4207b4360c654f1f3bb84475f0a28628fb19c"}, +] + +[package.extras] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\"", "ruff (>=0.8.0) ; sys_platform != \"cygwin\""] +core = ["importlib_metadata (>=6) ; python_version < \"3.10\"", "jaraco.functools (>=4)", "jaraco.text (>=3.7)", "more_itertools", "more_itertools (>=8.8)", "packaging (>=24.2)", "platformdirs (>=4.2.2)", "tomli (>=2.0.1) ; python_version < \"3.11\"", "wheel (>=0.43.0)"] +cover = ["pytest-cov"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier", "towncrier (<24.7)"] +enabler = ["pytest-enabler (>=2.2)"] +test = ["build[virtualenv] 
(>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21) ; python_version >= \"3.9\" and sys_platform != \"cygwin\"", "jaraco.envs (>=2.2)", "jaraco.path (>=3.7.2)", "jaraco.test (>=5.5)", "packaging (>=24.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf ; sys_platform != \"cygwin\"", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] +type = ["importlib_metadata (>=7.0.2) ; python_version < \"3.10\"", "jaraco.develop (>=7.21) ; sys_platform != \"cygwin\"", "mypy (==1.14.*)", "pytest-mypy"] + +[[package]] +name = "six" +version = "1.17.0" +description = "Python 2 and 3 compatibility utilities" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +groups = ["main"] +files = [ + {file = "six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274"}, + {file = "six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81"}, +] + +[[package]] +name = "sqlparse" +version = "0.5.3" +description = "A non-validating SQL parser." 
+optional = false +python-versions = ">=3.8" +groups = ["main", "dev"] +files = [ + {file = "sqlparse-0.5.3-py3-none-any.whl", hash = "sha256:cf2196ed3418f3ba5de6af7e82c694a9fbdbfecccdfc72e281548517081f16ca"}, + {file = "sqlparse-0.5.3.tar.gz", hash = "sha256:09f67787f56a0b16ecdbde1bfc7f5d9c3371ca683cfeaa8e6ff60b4807ec9272"}, +] + +[package.extras] +dev = ["build", "hatch"] +doc = ["sphinx"] + +[[package]] +name = "toml" +version = "0.10.2" +description = "Python Library for Tom's Obvious, Minimal Language" +optional = false +python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" +groups = ["dev"] +files = [ + {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, + {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, +] + +[[package]] +name = "typing-extensions" +version = "4.14.0" +description = "Backported and Experimental Type Hints for Python 3.9+" +optional = false +python-versions = ">=3.9" +groups = ["main", "dev"] +files = [ + {file = "typing_extensions-4.14.0-py3-none-any.whl", hash = "sha256:a1514509136dd0b477638fc68d6a91497af5076466ad0fa6c338e44e359944af"}, + {file = "typing_extensions-4.14.0.tar.gz", hash = "sha256:8676b788e32f02ab42d9e7c61324048ae4c6d844a399eebace3d4979d75ceef4"}, +] + +[[package]] +name = "tzdata" +version = "2025.1" +description = "Provider of IANA time zone data" +optional = false +python-versions = ">=2" +groups = ["main", "dev"] +files = [ + {file = "tzdata-2025.1-py2.py3-none-any.whl", hash = "sha256:7e127113816800496f027041c570f50bcd464a020098a3b6b199517772303639"}, + {file = "tzdata-2025.1.tar.gz", hash = "sha256:24894909e88cdb28bd1636c6887801df64cb485bd593f2fd83ef29075a81d694"}, +] + +[[package]] +name = "urllib3" +version = "2.5.0" +description = "HTTP library with thread-safe connection pooling, file post, and more." 
+optional = false +python-versions = ">=3.9" +groups = ["main", "dev"] +files = [ + {file = "urllib3-2.5.0-py3-none-any.whl", hash = "sha256:e6b01673c0fa6a13e374b50871808eb3bf7046c4b125b216f6bf1cc604cff0dc"}, + {file = "urllib3-2.5.0.tar.gz", hash = "sha256:3fc47733c7e419d4bc3f6b3dc2b4f890bb743906a30d56ba4a5bfa4bbff92760"}, +] + +[package.extras] +brotli = ["brotli (>=1.0.9) ; platform_python_implementation == \"CPython\"", "brotlicffi (>=0.8.0) ; platform_python_implementation != \"CPython\""] +h2 = ["h2 (>=4,<5)"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["zstandard (>=0.18.0)"] + +[[package]] +name = "uwsgi" +version = "2.0.28" +description = "The uWSGI server" +optional = false +python-versions = "*" +groups = ["deploy"] +files = [ + {file = "uwsgi-2.0.28.tar.gz", hash = "sha256:79ca1891ef2df14508ab0471ee8c0eb94bd2d51d03f32f90c4bbe557ab1e99d0"}, +] + +[[package]] +name = "vine" +version = "5.1.0" +description = "Python promises." +optional = false +python-versions = ">=3.6" +groups = ["main"] +files = [ + {file = "vine-5.1.0-py3-none-any.whl", hash = "sha256:40fdf3c48b2cfe1c38a49e9ae2da6fda88e4794c810050a728bd7413811fb1dc"}, + {file = "vine-5.1.0.tar.gz", hash = "sha256:8b62e981d35c41049211cf62a0a1242d8c1ee9bd15bb196ce38aefd6799e61e0"}, +] + +[[package]] +name = "wcwidth" +version = "0.2.13" +description = "Measures the displayed width of unicode strings in a terminal" +optional = false +python-versions = "*" +groups = ["main"] +files = [ + {file = "wcwidth-0.2.13-py2.py3-none-any.whl", hash = "sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859"}, + {file = "wcwidth-0.2.13.tar.gz", hash = "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5"}, +] + +[[package]] +name = "zope-event" +version = "5.0" +description = "Very basic event publishing system" +optional = false +python-versions = ">=3.7" +groups = ["deploy"] +files = [ + {file = "zope.event-5.0-py3-none-any.whl", hash = 
"sha256:2832e95014f4db26c47a13fdaef84cef2f4df37e66b59d8f1f4a8f319a632c26"}, + {file = "zope.event-5.0.tar.gz", hash = "sha256:bac440d8d9891b4068e2b5a2c5e2c9765a9df762944bda6955f96bb9b91e67cd"}, +] + +[package.dependencies] +setuptools = "*" + +[package.extras] +docs = ["Sphinx"] +test = ["zope.testrunner"] + +[[package]] +name = "zope-interface" +version = "7.2" +description = "Interfaces for Python" +optional = false +python-versions = ">=3.8" +groups = ["deploy"] +files = [ + {file = "zope.interface-7.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ce290e62229964715f1011c3dbeab7a4a1e4971fd6f31324c4519464473ef9f2"}, + {file = "zope.interface-7.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:05b910a5afe03256b58ab2ba6288960a2892dfeef01336dc4be6f1b9ed02ab0a"}, + {file = "zope.interface-7.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:550f1c6588ecc368c9ce13c44a49b8d6b6f3ca7588873c679bd8fd88a1b557b6"}, + {file = "zope.interface-7.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0ef9e2f865721553c6f22a9ff97da0f0216c074bd02b25cf0d3af60ea4d6931d"}, + {file = "zope.interface-7.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:27f926f0dcb058211a3bb3e0e501c69759613b17a553788b2caeb991bed3b61d"}, + {file = "zope.interface-7.2-cp310-cp310-win_amd64.whl", hash = "sha256:144964649eba4c5e4410bb0ee290d338e78f179cdbfd15813de1a664e7649b3b"}, + {file = "zope.interface-7.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1909f52a00c8c3dcab6c4fad5d13de2285a4b3c7be063b239b8dc15ddfb73bd2"}, + {file = "zope.interface-7.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:80ecf2451596f19fd607bb09953f426588fc1e79e93f5968ecf3367550396b22"}, + {file = "zope.interface-7.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:033b3923b63474800b04cba480b70f6e6243a62208071fc148354f3f89cc01b7"}, + {file = 
"zope.interface-7.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a102424e28c6b47c67923a1f337ede4a4c2bba3965b01cf707978a801fc7442c"}, + {file = "zope.interface-7.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:25e6a61dcb184453bb00eafa733169ab6d903e46f5c2ace4ad275386f9ab327a"}, + {file = "zope.interface-7.2-cp311-cp311-win_amd64.whl", hash = "sha256:3f6771d1647b1fc543d37640b45c06b34832a943c80d1db214a37c31161a93f1"}, + {file = "zope.interface-7.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:086ee2f51eaef1e4a52bd7d3111a0404081dadae87f84c0ad4ce2649d4f708b7"}, + {file = "zope.interface-7.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:21328fcc9d5b80768bf051faa35ab98fb979080c18e6f84ab3f27ce703bce465"}, + {file = "zope.interface-7.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f6dd02ec01f4468da0f234da9d9c8545c5412fef80bc590cc51d8dd084138a89"}, + {file = "zope.interface-7.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8e7da17f53e25d1a3bde5da4601e026adc9e8071f9f6f936d0fe3fe84ace6d54"}, + {file = "zope.interface-7.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cab15ff4832580aa440dc9790b8a6128abd0b88b7ee4dd56abacbc52f212209d"}, + {file = "zope.interface-7.2-cp312-cp312-win_amd64.whl", hash = "sha256:29caad142a2355ce7cfea48725aa8bcf0067e2b5cc63fcf5cd9f97ad12d6afb5"}, + {file = "zope.interface-7.2-cp313-cp313-macosx_10_9_x86_64.whl", hash = "sha256:3e0350b51e88658d5ad126c6a57502b19d5f559f6cb0a628e3dc90442b53dd98"}, + {file = "zope.interface-7.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:15398c000c094b8855d7d74f4fdc9e73aa02d4d0d5c775acdef98cdb1119768d"}, + {file = "zope.interface-7.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:802176a9f99bd8cc276dcd3b8512808716492f6f557c11196d42e26c01a69a4c"}, + {file = "zope.interface-7.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb23f58a446a7f09db85eda09521a498e109f137b85fb278edb2e34841055398"}, + {file = "zope.interface-7.2-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a71a5b541078d0ebe373a81a3b7e71432c61d12e660f1d67896ca62d9628045b"}, + {file = "zope.interface-7.2-cp313-cp313-win_amd64.whl", hash = "sha256:4893395d5dd2ba655c38ceb13014fd65667740f09fa5bb01caa1e6284e48c0cd"}, + {file = "zope.interface-7.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d3a8ffec2a50d8ec470143ea3d15c0c52d73df882eef92de7537e8ce13475e8a"}, + {file = "zope.interface-7.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:31d06db13a30303c08d61d5fb32154be51dfcbdb8438d2374ae27b4e069aac40"}, + {file = "zope.interface-7.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e204937f67b28d2dca73ca936d3039a144a081fc47a07598d44854ea2a106239"}, + {file = "zope.interface-7.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:224b7b0314f919e751f2bca17d15aad00ddbb1eadf1cb0190fa8175edb7ede62"}, + {file = "zope.interface-7.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baf95683cde5bc7d0e12d8e7588a3eb754d7c4fa714548adcd96bdf90169f021"}, + {file = "zope.interface-7.2-cp38-cp38-win_amd64.whl", hash = "sha256:7dc5016e0133c1a1ec212fc87a4f7e7e562054549a99c73c8896fa3a9e80cbc7"}, + {file = "zope.interface-7.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7bd449c306ba006c65799ea7912adbbfed071089461a19091a228998b82b1fdb"}, + {file = "zope.interface-7.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a19a6cc9c6ce4b1e7e3d319a473cf0ee989cbbe2b39201d7c19e214d2dfb80c7"}, + {file = 
"zope.interface-7.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:72cd1790b48c16db85d51fbbd12d20949d7339ad84fd971427cf00d990c1f137"}, + {file = "zope.interface-7.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:52e446f9955195440e787596dccd1411f543743c359eeb26e9b2c02b077b0519"}, + {file = "zope.interface-7.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ad9913fd858274db8dd867012ebe544ef18d218f6f7d1e3c3e6d98000f14b75"}, + {file = "zope.interface-7.2-cp39-cp39-win_amd64.whl", hash = "sha256:1090c60116b3da3bfdd0c03406e2f14a1ff53e5771aebe33fec1edc0a350175d"}, + {file = "zope.interface-7.2.tar.gz", hash = "sha256:8b49f1a3d1ee4cdaf5b32d2e738362c7f5e40ac8b46dd7d1a65e82a4872728fe"}, +] + +[package.dependencies] +setuptools = "*" + +[package.extras] +docs = ["Sphinx", "furo", "repoze.sphinx.autointerface"] +test = ["coverage[toml]", "zope.event", "zope.testing"] +testing = ["coverage[toml]", "zope.event", "zope.testing"] + +[metadata] +lock-version = "2.1" +python-versions = ">=3.13,<3.14" +content-hash = "cb2722bceed3082c7039af5a541855a7ce39531401e843dddb0e6493b604adeb" diff --git a/project/settings.py b/project/settings.py index 19d5b02c1..a29abf4ef 100644 --- a/project/settings.py +++ b/project/settings.py @@ -15,7 +15,7 @@ from celery.schedules import crontab import jwe -from share import __version__ +from share.version import get_shtrove_version from trove.util.queryparams import parse_booly_str @@ -43,7 +43,7 @@ def split(string, delim): # SECURITY WARNING: don't run with debug turned on in production! 
DEBUG = bool(os.environ.get('DEBUG', True)) -VERSION = __version__ +VERSION = get_shtrove_version() GIT_COMMIT = os.environ.get('GIT_COMMIT', None) ALLOWED_HOSTS = [h for h in os.environ.get('ALLOWED_HOSTS', '').split(' ') if h] @@ -237,7 +237,7 @@ def split(string, delim): else VERSION ), send_default_pii=False, - request_bodies='never', + max_request_body_size='never', debug=DEBUG, integrations=[ DjangoIntegration( @@ -268,9 +268,7 @@ def split(string, delim): 'django.contrib.auth.hashers.BCryptPasswordHasher', 'django.contrib.auth.hashers.PBKDF2PasswordHasher', 'django.contrib.auth.hashers.PBKDF2SHA1PasswordHasher', - 'django.contrib.auth.hashers.SHA1PasswordHasher', 'django.contrib.auth.hashers.MD5PasswordHasher', - 'django.contrib.auth.hashers.CryptPasswordHasher', ] @@ -306,10 +304,6 @@ def split(string, delim): 'MAX_RETRIES': int(os.environ.get('ELASTICSEARCH_MAX_RETRIES', 7)), 'POST_INDEX_DELAY': int(os.environ.get('ELASTICSEARCH_POST_INDEX_DELAY', 3)), } -ELASTICSEARCH5_URL = ( - os.environ.get('ELASTICSEARCH5_URL') - or os.environ.get('ELASTICSEARCH_URL') # backcompat -) ELASTICSEARCH8_URL = os.environ.get('ELASTICSEARCH8_URL') ELASTICSEARCH8_CERT_PATH = os.environ.get('ELASTICSEARCH8_CERT_PATH') ELASTICSEARCH8_USERNAME = os.environ.get('ELASTICSEARCH8_USERNAME', 'elastic') @@ -471,41 +465,6 @@ def route_urgent_task(name, args, kwargs, options, task=None, **kw): HIDE_DEPRECATED_VIEWS = parse_booly_str(os.environ.get('HIDE_DEPRECATED_VIEWS', 'False')) -# Regulator pipeline, names of setuptools entry points -SHARE_REGULATOR_CONFIG = { - 'NODE_STEPS': [ - 'tokenize_tags', - 'whitespace', - 'normalize_agent_names', - 'cited_as', - ('normalize_iris', { - 'node_types': ['workidentifier'], - 'blocked_schemes': ['mailto'], - 'blocked_authorities': ['issn', 'orcid.org'], - }), - ('normalize_iris', { - 'node_types': ['agentidentifier'], - 'blocked_schemes': ['mailto'], - 'blocked_authorities': ['secure.gravatar.com'], - }), - ('trim_cycles', { - 'node_types': 
['abstractworkrelation', 'abstractagentrelation'], - 'relation_fields': ['subject', 'related'], - }), - ('trim_cycles', { - 'node_types': ['subject'], - 'relation_fields': ['central_synonym'], - 'delete_node': False, - }), - ], - 'GRAPH_STEPS': [ - 'deduplicate', - ], - 'VALIDATE_STEPS': [ - 'jsonld_validator', - ], -} - # API KEYS DATAVERSE_API_KEY = os.environ.get('DATAVERSE_API_KEY') PLOS_API_KEY = os.environ.get('PLOS_API_KEY') diff --git a/pyproject.toml b/pyproject.toml index 35074d936..d3a255b8b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,76 @@ +[project] +name = "shtrove" +version = "25.4.0" +description = "" +authors = [ + {name = "CenterForOpenScience", email = "share-support@cos.io"} +] +readme = "README.md" +requires-python = ">=3.13,<3.14" +dependencies = [ + "bcrypt==4.3.0", # Apache 2.0 + "celery==5.4.0", # BSD 3 Clause + "colorlog==6.9.0", # MIT + "django-allauth==65.5.0", # MIT + "django-celery-beat==2.8.1", # BSD 3 Clause + "django-cors-headers==4.7.0", # MIT + "django-extensions==3.2.3", # MIT + "django-filter==25.1", # BSD + "django-oauth-toolkit==3.0.1", # BSD + "django==5.2.*", # BSD 3 Clause + "elasticsearch8==8.17.2", # Apache 2.0 + "lxml==5.3.0", # BSD + "kombu==5.5.0", # BSD 3 Clause + "markdown2==2.5.3", # MIT + "psycopg2==2.9.10", # LGPL with exceptions or ZPL + "rdflib==7.1.3", # BSD 3 Clause + "requests==2.32.3", # Apache 2.0 + "primitive_metadata==0.2025.1", # MIT + "sentry-sdk[django]==2.24.1", # MIT + + # to be removed in near-future work: + "djangorestframework==3.16.0", # BSD + "djangorestframework-jsonapi==7.1.0", # BSD + "PyJWE==1.0.0", # Apache 2.0 +] + +[tool.poetry] +packages = [ + { include = "trove" }, +] + +# "deploy" dependency group relevant only in deployment: +[tool.poetry.group.deploy] +optional = true +[tool.poetry.group.deploy.dependencies] +uwsgi = "2.0.28" +newrelic = "10.7.0" # newrelic APM agent, Custom License +gevent = "24.11.1" # MIT +psycogreen = "1.0.2" # BSD + + +# "dev" dependency group 
relevant only for local dev: +[tool.poetry.group.dev] +optional = true +[tool.poetry.group.dev.dependencies] +coveralls = "3.3.1" +toml = "0.10.2" # until coveralls/coverage update to tomllib +django-debug-toolbar = "5.1.0" +factory-boy = "3.3.3" +faker = "37.1.0" +flake8 = "7.2.0" +pytest-benchmark = "5.1.0" +pytest = "8.3.5" +pytest-django = "4.11.1" +mypy = "1.16.1" + +### +# other stuff + [build-system] -requires = ["setuptools>=61.0"] -build-backend = "setuptools.build_meta" +requires = ["poetry-core>=2.0.0,<3.0.0"] +build-backend = "poetry.core.masonry.api" [tool.coverage.run] +source = ["."] omit = ["tests/*"] diff --git a/requirements.txt b/requirements.txt deleted file mode 100644 index 772c86475..000000000 --- a/requirements.txt +++ /dev/null @@ -1,34 +0,0 @@ -bcrypt==3.2.0 # Apache 2.0 -celery==5.4.0 # BSD 3 Clause -colorlog==5.0.1 # MIT -django-allauth==0.63.6 # MIT -django-celery-beat==2.6.0 # BSD 3 Clause -django-cors-headers==3.7.0 # MIT -django-extensions==3.1.3 # MIT -django-filter==2.4.0 # BSD -django-oauth-toolkit==1.7.1 # BSD -django==3.2.25 # BSD 3 Clause -djangorestframework==3.12.4 # BSD -elasticsearch8==8.5.2 # Apache 2.0 -lxml==4.9.1 # BSD -kombu==5.3.7 # BSD 3 Clause -markdown2==2.4.10 # MIT -newrelic==8.4.0 # newrelic APM agent, Custom License -psycopg2==2.9.5 # LGPL with exceptions or ZPL -rdflib==7.0.0 # BSD 3 Clause -requests==2.25.1 # Apache 2.0 -primitive_metadata==0.2025.1 # MIT - -# relevant only in deployment: -sentry-sdk[django]==1.22.2 # MIT -gevent==22.10.2 # MIT -psycogreen==1.0.2 # BSD - -# to be removed in future work: -docopt==0.6.2 # MIT -elasticsearch5==5.5.6 # Apache 2.0 -PyJWE==1.0.0 # Apache 2.0 - -# djangorestframework-jsonapi==4.2.1 # MIT -# Allows custom-rendered IDs, hiding null values, and including data in error responses -git+https://github.com/cos-forks/django-rest-framework-json-api.git@v4.2.1+cos0 diff --git a/setup.cfg b/setup.cfg index 168a1e8cd..05d23205a 100644 --- a/setup.cfg +++ b/setup.cfg @@ -7,9 +7,6 
@@ max-line-length = 250 ignore = E501,W503,F403,E266,F405 exclude = ./scratch/* -[behave] -paths=./tests/features - [tool:pytest] DJANGO_SETTINGS_MODULE=project.settings addopts = --benchmark-skip diff --git a/setup.py b/setup.py deleted file mode 100644 index 97414fdd2..000000000 --- a/setup.py +++ /dev/null @@ -1,110 +0,0 @@ -from setuptools import setup, find_packages -from share import __version__ - -setup( - name='share', - version=__version__, - packages=find_packages(exclude=('tests*')), - provides=[ - 'share.transformers', - 'share.harvesters' - ], - entry_points={ - 'share.transformers': [ - 'ca.lwbin = share.transformers.ca_lwbin:LWBINTransformer', - 'com.biomedcentral = share.transformers.com_biomedcentral:BioMedCentralTransformer', - 'com.dailyssrn = share.transformers.com_dailyssrn:DailySSRNTransformer', - 'com.figshare = share.transformers.com_figshare:FigshareTransformer', - 'com.figshare.v2 = share.transformers.com_figshare_v2:FigshareV2Transformer', - 'com.mendeley.data = share.transformers.com_mendeley_data:MendeleyTransformer', - 'com.peerj = share.transformers.com_peerj:PeerJTransformer', - 'com.peerj.xml = share.transformers.com_peerj_xml:PeerJXMLTransformer', - 'com.researchregistry = share.transformers.com_researchregistry:RRTransformer', - 'com.springer = share.transformers.com_springer:SpringerTransformer', - 'edu.ageconsearch = share.transformers.edu_ageconsearch:AgeconTransformer', - 'edu.gwu = share.transformers.edu_gwu:GWScholarSpaceTransformer', - 'edu.harvarddataverse = share.transformers.edu_harvarddataverse:HarvardTransformer', - 'gov.clinicaltrials = share.transformers.gov_clinicaltrials:ClinicalTrialsTransformer', - 'gov.nih = share.transformers.gov_nih:NIHTransformer', - 'gov.nsfawards = share.transformers.gov_nsfawards:NSFTransformer', - 'gov.pubmedcentral.pmc = share.transformers.gov_pubmedcentral_pmc:PMCTransformer', - 'gov.scitech = share.transformers.gov_scitech:ScitechTransformer', - 'gov.usgs = 
share.transformers.gov_usgs:USGSTransformer', - 'io.osf = share.transformers.io_osf:OSFTransformer', - 'io.osf.preprints = share.transformers.io_osf_preprints:PreprintTransformer', - 'io.osf.registrations = share.transformers.io_osf_registrations:OSFRegistrationsTransformer', - 'mods = share.transformers.mods:MODSTransformer', - 'oai_dc = share.transformers.oai:OAITransformer', - 'org.arxiv = share.transformers.org_arxiv:ArxivTransformer', - 'org.biorxiv = share.transformers.org_biorxiv:BiorxivTransformer', - 'org.biorxiv.rss = share.transformers.org_biorxiv_rss:BiorxivRSSTransformer', - 'org.biorxiv.html = share.transformers.org_biorxiv_html:BiorxivHTMLTransformer', - 'org.crossref = share.transformers.org_crossref:CrossrefTransformer', - 'org.datacite = share.transformers.org_datacite:DataciteTransformer', - 'org.dataone = share.transformers.org_dataone:DataoneTransformer', - 'org.elife = share.transformers.org_elife:ElifeTransformer', - 'org.engrxiv = share.transformers.org_engrxiv:EngrxivTransformer', - 'org.ncar = share.transformers.org_ncar:NCARTransformer', - 'org.neurovault = share.transformers.org_neurovault:NeurovaultTransformer', - 'org.plos = share.transformers.org_plos:PLoSTransformer', - 'org.psyarxiv = share.transformers.org_psyarxiv:PsyarxivTransformer', - 'org.socialscienceregistry = share.transformers.org_socialscienceregistry:SCTransformer', - 'org.socarxiv = share.transformers.org_socarxiv:SocarxivTransformer', - 'org.swbiodiversity = share.transformers.org_swbiodiversity:SWTransformer', - 'v1_push = share.transformers.v1_push:V1Transformer', - 'v2_push = share.transformers.v2_push:V2PushTransformer', - ], - 'share.harvesters': [ - 'ca.lwbin = share.harvesters.ca_lwbin:LWBINHarvester', - 'com.biomedcentral = share.harvesters.com_biomedcentral:BiomedCentralHarvester', - 'com.figshare = share.harvesters.com_figshare:FigshareHarvester', - 'com.figshare.v2 = share.harvesters.com_figshare_v2:FigshareHarvester', - 'com.mendeley.data = 
share.harvesters.com_mendeley_data:MendeleyHarvester', - 'com.peerj = share.harvesters.com_peerj:PeerJHarvester', - 'com.researchregistry = share.harvesters.com_researchregistry:ResearchRegistryHarvester', - 'com.springer = share.harvesters.com_springer:SpringerHarvester', - 'edu.ageconsearch = share.harvesters.edu_ageconsearch:AgEconHarvester', - 'edu.gwu = share.harvesters.edu_gwu:GWScholarSpaceHarvester', - 'edu.harvarddataverse = share.harvesters.edu_harvarddataverse:HarvardDataverseHarvester', - 'gov.clinicaltrials = share.harvesters.gov_clinicaltrials:ClinicalTrialsHarvester', - 'gov.doepages = share.harvesters.gov_doepages:DoepagesHarvester', - 'gov.nih = share.harvesters.gov_nih:NIHHarvester', - 'gov.nsfawards = share.harvesters.gov_nsfawards:NSFAwardsHarvester', - 'gov.scitech = share.harvesters.gov_scitech:SciTechHarvester', - 'gov.usgs = share.harvesters.gov_usgs:USGSHarvester', - 'io.osf = share.harvesters.io_osf:OSFHarvester', - 'oai = share.harvesters.oai:OAIHarvester', - 'org.arxiv = share.harvesters.org_arxiv:ArxivHarvester', - 'org.biorxiv = share.harvesters.org_biorxiv:BiorxivHarvester', - 'org.biorxiv.rss = share.harvesters.org_biorxiv_rss:BiorxivHarvester', - 'org.biorxiv.html = share.harvesters.org_biorxiv_html:BiorxivHarvester', - 'org.crossref = share.harvesters.org_crossref:CrossRefHarvester', - 'org.dataone = share.harvesters.org_dataone:DataOneHarvester', - 'org.elife = share.harvesters.org_elife:ELifeHarvester', - 'org.ncar = share.harvesters.org_ncar:NCARHarvester', - 'org.neurovault = share.harvesters.org_neurovault:NeuroVaultHarvester', - 'org.plos = share.harvesters.org_plos:PLOSHarvester', - 'org.socialscienceregistry = share.harvesters.org_socialscienceregistry:SCHarvester', - 'org.swbiodiversity = share.harvesters.org_swbiodiversity:SWHarvester', - ], - 'share.regulate.steps.node': [ - 'cited_as = share.regulate.steps.cited_as:CitedAs', - 'trim_cycles = share.regulate.steps.trim_cycles:TrimCycles', - 'block_extra_values = 
share.regulate.steps.block_extra_values:BlockExtraValues', - 'normalize_agent_names = share.regulate.steps.normalize_agent_names:NormalizeAgentNames', - 'normalize_iris = share.regulate.steps.normalize_iris:NormalizeIRIs', - 'tokenize_tags = share.regulate.steps.tokenize_tags:TokenizeTags', - 'whitespace = share.regulate.steps.whitespace:StripWhitespace', - ], - 'share.regulate.steps.graph': [ - 'deduplicate = share.regulate.steps.deduplicate:Deduplicate', - ], - 'share.regulate.steps.validate': [ - 'jsonld_validator = share.regulate.steps.validate:JSONLDValidatorStep', - ], - 'share.metadata_formats': [ - 'sharev2_elastic = share.metadata_formats.sharev2_elastic:ShareV2ElasticFormatter', - 'oai_dc = share.metadata_formats.oai_dc:OaiDcFormatter', - ], - } -) diff --git a/share/__init__.py b/share/__init__.py index 800b9684a..ea9b78354 100644 --- a/share/__init__.py +++ b/share/__init__.py @@ -1 +1 @@ -from share.version import __version__ # noqa +__all__ = () diff --git a/share/admin/__init__.py b/share/admin/__init__.py index 6c5fa04c8..ae2bcbc4b 100644 --- a/share/admin/__init__.py +++ b/share/admin/__init__.py @@ -76,7 +76,7 @@ def source_(self, obj): def enabled(self, obj): return not obj.disabled - enabled.boolean = True # type: ignore[attr-defined] + enabled.boolean = True @admin.action(description='schedule re-derive of all cards for each selected source config') def schedule_derive(self, request, queryset): diff --git a/share/admin/celery.py b/share/admin/celery.py index 3537f78fc..7e955e87f 100644 --- a/share/admin/celery.py +++ b/share/admin/celery.py @@ -39,8 +39,9 @@ def lookups(self, request, model_admin): return sorted((x, x.title()) for x in states.ALL_STATES) def queryset(self, request, queryset): - if self.value(): - return queryset.filter(status=self.value().upper()) + _value = self.value() + if _value: + return queryset.filter(status=_value.upper()) return queryset diff --git a/share/admin/util.py b/share/admin/util.py index fceb35677..54bc7d248 
100644 --- a/share/admin/util.py +++ b/share/admin/util.py @@ -1,6 +1,9 @@ -from django.contrib.admin import SimpleListFilter +from collections.abc import Callable, Sequence + +from django.contrib.admin import SimpleListFilter, ModelAdmin from django.core.paginator import Paginator from django.db import connection, transaction, OperationalError +from django.db.models import Model from django.utils.functional import cached_property from django.urls import reverse from django.utils.html import format_html @@ -46,7 +49,7 @@ def admin_link_html(linked_obj): return format_html('<a href="{}">{}</a>', url, repr(linked_obj)) -def linked_fk(field_name): +def linked_fk[T: type[ModelAdmin]](field_name: str) -> Callable[[T], T]: """Decorator that adds a link for a foreign key field """ def add_link(cls): @@ -62,11 +65,15 @@ def link(self, instance): return add_link -def linked_many(field_name, order_by=None, select_related=None, defer=None): - """Decorator that adds links for a *-to-many field - """ - def add_links(cls): - def links(self, instance): +def linked_many[T: type[ModelAdmin]]( + field_name: str, + order_by: Sequence[str] = (), + select_related: Sequence[str] = (), + defer: Sequence[str] = (), +) -> Callable[[T], T]: + """Decorator that adds links for a *-to-many field""" + def add_links(cls: T) -> T: + def links(self, instance: Model) -> str: linked_qs = getattr(instance, field_name).all() if select_related: linked_qs = linked_qs.select_related(*select_related) @@ -81,7 +88,7 @@ def links(self, instance): for obj in linked_qs )) ) - links_field = '{}_links'.format(field_name) + links_field = f'{field_name}_links' links.short_description = field_name.replace('_', ' ') setattr(cls, links_field, links) append_to_cls_property(cls, 'readonly_fields', links_field) diff --git a/share/celery.py b/share/celery.py index ff0f626c5..663ddbba9 100644 --- a/share/celery.py +++ b/share/celery.py @@ -14,9 +14,9 @@ import sentry_sdk -from share.util import chunked from share.models import 
CeleryTaskResult from share.models.sql import GroupBy +from trove.util.django import pk_chunked logger = logging.getLogger(__name__) @@ -168,7 +168,7 @@ def delete_queryset(self, queryset): try: with transaction.atomic(): # .delete loads the entire queryset and can't be sliced... Hooray - for ids in chunked(queryset.values_list('id', flat=True).iterator(), size=self.chunk_size): + for ids in pk_chunked(queryset, chunksize=self.chunk_size): num_deleted, _ = queryset.model.objects.filter(id__in=ids).delete() total_deleted += num_deleted except Exception as e: diff --git a/share/migrations/0001_squashed_0058_big_rend.py b/share/migrations/0001_squashed_0058_big_rend.py index 64b388823..bb209d1bc 100644 --- a/share/migrations/0001_squashed_0058_big_rend.py +++ b/share/migrations/0001_squashed_0058_big_rend.py @@ -149,7 +149,7 @@ class Migration(migrations.Migration): ('date_started', models.DateTimeField(blank=True, null=True)), ('date_created', models.DateTimeField(auto_now_add=True)), ('date_modified', models.DateTimeField(auto_now=True, db_index=True)), - ('share_version', models.TextField(default=share.version.get_share_version, editable=False)), + ('share_version', models.TextField(default=share.version.get_shtrove_version, editable=False)), ('source_config_version', models.PositiveIntegerField()), ('end_date', models.DateTimeField(db_index=True)), ('start_date', models.DateTimeField(db_index=True)), @@ -319,7 +319,7 @@ class Migration(migrations.Migration): ('traceback', models.TextField(blank=True, editable=False, null=True)), ('date_created', models.DateTimeField(auto_now_add=True)), ('date_modified', models.DateTimeField(auto_now=True, db_index=True)), - ('share_version', models.TextField(default=share.version.get_share_version, editable=False)), + ('share_version', models.TextField(default=share.version.get_shtrove_version, editable=False)), ], options={ 'verbose_name': 'Celery Task Result', @@ -436,7 +436,7 @@ class Migration(migrations.Migration): 
('date_started', models.DateTimeField(blank=True, null=True)), ('date_created', models.DateTimeField(auto_now_add=True)), ('date_modified', models.DateTimeField(auto_now=True, db_index=True)), - ('share_version', models.TextField(default=share.version.get_share_version, editable=False)), + ('share_version', models.TextField(default=share.version.get_shtrove_version, editable=False)), ('source_config_version', models.PositiveIntegerField()), ('transformer_version', models.PositiveIntegerField()), ('regulator_version', models.PositiveIntegerField()), diff --git a/share/models/celery.py b/share/models/celery.py index df5aba191..190fe994e 100644 --- a/share/models/celery.py +++ b/share/models/celery.py @@ -3,7 +3,7 @@ from django.db import models from share.models.fields import DateTimeAwareJSONField -from share.version import get_share_version +from share.version import get_shtrove_version ALL_STATES = sorted(states.ALL_STATES) @@ -26,7 +26,7 @@ class CeleryTaskResult(models.Model): date_created = models.DateTimeField(auto_now_add=True, editable=False) date_modified = models.DateTimeField(auto_now=True, editable=False, db_index=True) - share_version = models.TextField(default=get_share_version, editable=False) + share_version = models.TextField(default=get_shtrove_version, editable=False) class Meta: verbose_name = 'Celery Task Result' diff --git a/share/models/fields.py b/share/models/fields.py index fa85c9f9b..2c2f4991b 100644 --- a/share/models/fields.py +++ b/share/models/fields.py @@ -48,13 +48,10 @@ def __init__(self, *args, object_hook=None, **kwargs): class DateTimeAwareJSONField(models.JSONField): - def __init__(self, *args, encoder=None, decoder=None, **kwargs): - return super().__init__( - *args, - **kwargs, - encoder=DateTimeAwareJSONEncoder, - decoder=DateTimeAwareJSONDecoder, - ) + def __init__(self, *args, **kwargs): + kwargs['encoder'] = DateTimeAwareJSONEncoder + kwargs['decoder'] = DateTimeAwareJSONDecoder + return super().__init__(*args, **kwargs) 
class ShareURLField(models.TextField): @@ -69,10 +66,10 @@ def deconstruct(self): kwargs.pop('max_length', None) return name, path, args, kwargs - def formfield(self, **kwargs): + def formfield(self, **kwargs): # type: ignore[override] # As with CharField, this will cause URL validation to be performed # twice. - defaults = { + defaults: dict = { 'form_class': forms.URLField, } if self.null and self.unique: @@ -89,7 +86,7 @@ class EncryptedJSONField(models.BinaryField): """ prefix = b'jwe:::' - def get_db_prep_value(self, input_json, **kwargs): + def get_db_prep_value(self, input_json, **kwargs): # type: ignore[override] if not input_json: return None diff --git a/share/models/index_backfill.py b/share/models/index_backfill.py index 5a2e6d35f..93f18ab6a 100644 --- a/share/models/index_backfill.py +++ b/share/models/index_backfill.py @@ -160,26 +160,25 @@ def task__schedule_index_backfill(self, index_backfill_pk): _messenger = IndexMessenger(celery_app=self.app, index_strategys=[_index_strategy]) _messagetype = _index_strategy.backfill_message_type assert _messagetype in _index_strategy.supported_message_types + _target_queryset: models.QuerySet if _messagetype == MessageType.BACKFILL_INDEXCARD: - _targetid_queryset = ( + _target_queryset = ( trove_db.Indexcard.objects .exclude(source_record_suid__source_config__disabled=True) .exclude(source_record_suid__source_config__source__is_deleted=True) - .values_list('id', flat=True) ) elif _messagetype == MessageType.BACKFILL_SUID: - _targetid_queryset = ( + _target_queryset = ( db.SourceUniqueIdentifier.objects .exclude(source_config__disabled=True) .exclude(source_config__source__is_deleted=True) - .values_list('id', flat=True) ) else: raise ValueError(f'unknown backfill messagetype {_messagetype}') _chunk_size = settings.ELASTICSEARCH['CHUNK_SIZE'] _messenger.stream_message_chunks( _messagetype, - _targetid_queryset.iterator(chunk_size=_chunk_size), + _target_queryset, chunk_size=_chunk_size, urgent=False, ) diff --git 
a/share/models/source_config.py b/share/models/source_config.py index a23dfcaf9..1f45d5c66 100644 --- a/share/models/source_config.py +++ b/share/models/source_config.py @@ -1,3 +1,4 @@ +from __future__ import annotations from django.db import models @@ -9,13 +10,13 @@ __all__ = ('SourceConfig',) -class SourceConfigManager(models.Manager): +class SourceConfigManager(models.Manager['SourceConfig']): use_in_migrations = True - def get_by_natural_key(self, key): + def get_by_natural_key(self, key) -> SourceConfig: return self.get(label=key) - def get_or_create_push_config(self, user, transformer_key=None): + def get_or_create_push_config(self, user, transformer_key=None) -> SourceConfig: assert isinstance(user, ShareUser) _config_label = '.'.join(( user.username, diff --git a/share/models/source_unique_identifier.py b/share/models/source_unique_identifier.py index 05c6eb7d5..63c8e3c85 100644 --- a/share/models/source_unique_identifier.py +++ b/share/models/source_unique_identifier.py @@ -19,7 +19,7 @@ class JSONAPIMeta(BaseJSONAPIMeta): class Meta: unique_together = ('identifier', 'source_config') - def get_backcompat_sharev2_suid(self): + def get_backcompat_sharev2_suid(self) -> 'SourceUniqueIdentifier': '''get an equivalent "v2_push" suid for this suid for filling the legacy suid-based sharev2 index with consistent doc ids diff --git a/share/oaipmh/util.py b/share/oaipmh/util.py index 3a033227a..413ac0173 100644 --- a/share/oaipmh/util.py +++ b/share/oaipmh/util.py @@ -1,3 +1,6 @@ +import datetime +from typing import Any + from lxml import etree from primitive_metadata import primitive_rdf @@ -5,7 +8,7 @@ from trove.vocab.namespaces import OAI, OAI_DC -def format_datetime(dt): +def format_datetime(dt: datetime.datetime | primitive_rdf.Literal | str) -> str: """OAI-PMH has specific time format requirements -- comply. 
""" if isinstance(dt, primitive_rdf.Literal): @@ -25,7 +28,7 @@ def format_datetime(dt): } -def ns(namespace_prefix, tag_name): +def ns(namespace_prefix: str, tag_name: str) -> str: """format XML tag/attribute name with full namespace URI see https://lxml.de/tutorial.html#namespaces @@ -33,7 +36,7 @@ def ns(namespace_prefix, tag_name): return f'{{{XML_NAMESPACES[namespace_prefix]}}}{tag_name}' -def nsmap(*namespace_prefixes, default=None): +def nsmap(*namespace_prefixes: str, default: str | None = None) -> dict[str | None, str]: """build a namespace map suitable for lxml see https://lxml.de/tutorial.html#namespaces @@ -49,7 +52,7 @@ def nsmap(*namespace_prefixes, default=None): # wrapper for lxml.etree.SubElement, adds `text` kwarg for convenience -def SubEl(parent, tag_name, text=None, **kwargs): +def SubEl(parent: etree.Element, tag_name: str, text: str | None = None, **kwargs: Any) -> etree.SubElement: element = etree.SubElement(parent, tag_name, **kwargs) if isinstance(text, primitive_rdf.Literal): _language_tag = text.language diff --git a/share/search/index_messenger.py b/share/search/index_messenger.py index 34cfb9e7d..06b256fe9 100644 --- a/share/search/index_messenger.py +++ b/share/search/index_messenger.py @@ -12,7 +12,8 @@ from share.search.messages import MessagesChunk, MessageType from share.search import index_strategy - +from trove.models import Indexcard +from trove.util.django import pk_chunked logger = logging.getLogger(__name__) @@ -25,7 +26,7 @@ class IndexMessenger: 'max_retries': 30, # give up after 30 tries. 
} - def __init__(self, *, celery_app=None, index_strategys=None): + def __init__(self, *, celery_app=None, index_strategys=None) -> None: self.celery_app = ( celery.current_app if celery_app is None @@ -33,12 +34,12 @@ def __init__(self, *, celery_app=None, index_strategys=None): ) self.index_strategys = index_strategys or tuple(index_strategy.each_strategy()) - def notify_indexcard_update(self, indexcards, *, urgent=False): + def notify_indexcard_update(self, indexcards: list[Indexcard], *, urgent=False) -> None: self.send_messages_chunk( MessagesChunk( MessageType.UPDATE_INDEXCARD, [ - _indexcard.id + _indexcard.pk for _indexcard in indexcards ], ), @@ -53,7 +54,7 @@ def notify_indexcard_update(self, indexcards, *, urgent=False): urgent=urgent, ) - def notify_suid_update(self, suid_ids, *, urgent=False): + def notify_suid_update(self, suid_ids, *, urgent=False) -> None: self.send_messages_chunk( MessagesChunk(MessageType.INDEX_SUID, suid_ids), urgent=urgent, @@ -121,14 +122,14 @@ def send_messages_chunk(self, messages_chunk: MessagesChunk, *, urgent=False): def stream_message_chunks( self, message_type: MessageType, - id_stream: typing.Iterable[int], + target_queryset, *, - chunk_size, + chunk_size: int, urgent=False, ): with self._open_message_queues(message_type, urgent) as message_queues: - for messages_chunk in MessagesChunk.stream_chunks(message_type, id_stream, chunk_size): - self._put_messages_chunk(messages_chunk, message_queues) + for _pk_chunk in pk_chunked(target_queryset, chunk_size): + self._put_messages_chunk(MessagesChunk(message_type, _pk_chunk), message_queues) @contextlib.contextmanager def _open_message_queues(self, message_type, urgent): diff --git a/share/search/index_strategy/__init__.py b/share/search/index_strategy/__init__.py index 943e67f30..ff5100d35 100644 --- a/share/search/index_strategy/__init__.py +++ b/share/search/index_strategy/__init__.py @@ -5,9 +5,7 @@ from django.conf import settings from share.search.exceptions import 
IndexStrategyError -from share.models import FeatureFlag from trove.trovesearch import search_params -from .sharev2_elastic5 import Sharev2Elastic5IndexStrategy from .sharev2_elastic8 import Sharev2Elastic8IndexStrategy from .trovesearch_denorm import TrovesearchDenormIndexStrategy from ._base import IndexStrategy @@ -31,10 +29,6 @@ class _AvailableStrategies(enum.Enum): (don't import this enum directly -- access via the other functions in this module) ''' - - if settings.ELASTICSEARCH5_URL: - sharev2_elastic5 = Sharev2Elastic5IndexStrategy('sharev2_elastic5') - if settings.ELASTICSEARCH8_URL: sharev2_elastic8 = Sharev2Elastic8IndexStrategy('sharev2_elastic8') trovesearch_denorm = TrovesearchDenormIndexStrategy('trovesearch_denorm') @@ -79,11 +73,6 @@ def get_strategy( def get_strategy_for_sharev2_search(requested_name: str | None = None) -> IndexStrategy: if requested_name: _name = requested_name - elif ( - settings.ELASTICSEARCH5_URL - and not FeatureFlag.objects.flag_is_up(FeatureFlag.ELASTIC_EIGHT_DEFAULT) - ): - _name = _AvailableStrategies.sharev2_elastic5.name elif settings.ELASTICSEARCH8_URL: _name = _AvailableStrategies.sharev2_elastic8.name else: diff --git a/share/search/index_strategy/elastic8.py b/share/search/index_strategy/elastic8.py index c73af5989..ea4ea713b 100644 --- a/share/search/index_strategy/elastic8.py +++ b/share/search/index_strategy/elastic8.py @@ -105,7 +105,7 @@ def compute_strategy_checksum(cls): return ChecksumIri.digest_json( checksumalgorithm_name='sha-256', salt=cls.__name__, - raw_json=_current_json, + raw_json=_current_json, # type: ignore[arg-type] ) # abstract method from IndexStrategy @@ -125,7 +125,7 @@ def _get_elastic8_client(cls) -> elasticsearch8.Elasticsearch: should_sniff = settings.ELASTICSEARCH['SNIFF'] timeout = settings.ELASTICSEARCH['TIMEOUT'] return elasticsearch8.Elasticsearch( - settings.ELASTICSEARCH8_URL, + hosts=settings.ELASTICSEARCH8_URL, # security: ca_certs=settings.ELASTICSEARCH8_CERT_PATH, 
basic_auth=( diff --git a/share/search/index_strategy/sharev2_elastic5.py b/share/search/index_strategy/sharev2_elastic5.py deleted file mode 100644 index c8cb990dd..000000000 --- a/share/search/index_strategy/sharev2_elastic5.py +++ /dev/null @@ -1,456 +0,0 @@ -from __future__ import annotations -import functools -import json -import logging - -from django.conf import settings -import elasticsearch5 -import elasticsearch5.helpers - -from share.models import SourceUniqueIdentifier -from share.search import exceptions, messages -from share.search.index_status import IndexStatus -from share.search.index_strategy._base import IndexStrategy -from share.search.index_strategy._util import timestamp_to_readable_datetime -from share.util import IDObfuscator -from share.util.checksum_iri import ChecksumIri -from trove import models as trove_db -from trove.vocab.namespaces import SHAREv2 - - -logger = logging.getLogger(__name__) - - -def get_doc_id(suid_id): - return IDObfuscator.encode_id(suid_id, SourceUniqueIdentifier) - - -# using a static, single-index strategy to represent the existing "share_postrend_backcompat" -# search index in elastic5, with intent to put new work in elastic8+ and drop elastic5 soon. 
-# (see share.search.index_strategy.sharev2_elastic8 for this same index in elastic8) -class Sharev2Elastic5IndexStrategy(IndexStrategy): - CURRENT_STRATEGY_CHECKSUM = ChecksumIri( - checksumalgorithm_name='sha-256', - salt='Sharev2Elastic5IndexStrategy', - hexdigest='7b6620bfafd291489e2cfea7e645b8311c2485a3012e467abfee4103f7539cc4', - ) - STATIC_INDEXNAME = 'share_postrend_backcompat' - - # perpetuated optimizations from times long past - MAX_CHUNK_BYTES = 10 * 1024 ** 2 # 10 megs - - @classmethod - @functools.cache - def _get_elastic5_client(cls) -> elasticsearch5.Elasticsearch: - should_sniff = settings.ELASTICSEARCH['SNIFF'] - return elasticsearch5.Elasticsearch( - settings.ELASTICSEARCH5_URL, - retry_on_timeout=True, - timeout=settings.ELASTICSEARCH['TIMEOUT'], - # sniff before doing anything - sniff_on_start=should_sniff, - # refresh nodes after a node fails to respond - sniff_on_connection_fail=should_sniff, - # and also every 60 seconds - sniffer_timeout=60 if should_sniff else None, - ) - - @property - def es5_client(self): - return self._get_elastic5_client() # cached classmethod for shared client - - @property - def single_index(self) -> IndexStrategy.SpecificIndex: - return self.get_index(self.STATIC_INDEXNAME) - - # abstract method from IndexStrategy - @classmethod - def each_index_subname(self): - yield self.STATIC_INDEXNAME - - # override IndexStrategy - @property - def nonurgent_messagequeue_name(self): - return 'es-share-postrend-backcompat' - - # override IndexStrategy - @property - def urgent_messagequeue_name(self): - return f'{self.nonurgent_messagequeue_name}.urgent' - - # override IndexStrategy - @property - def indexname_prefix(self): - return self.STATIC_INDEXNAME - - # abstract method from IndexStrategy - def compute_strategy_checksum(self): - return ChecksumIri.digest_json( - 'sha-256', - salt=self.__class__.__name__, - raw_json={ - 'indexname': self.STATIC_INDEXNAME, - 'mappings': self._index_mappings(), - 'settings': 
self._index_settings(), - } - ) - - # abstract method from IndexStrategy - def pls_make_default_for_searching(self): - pass # the one index is the only one - - # abstract method from IndexStrategy - def pls_get_default_for_searching(self): - return self - - # abstract method from IndexStrategy - def each_existing_index(self, *args, **kwargs): - _index = self.single_index - if _index.pls_check_exists(): - yield _index - - # abstract method from IndexStrategy - def each_live_index(self, *args, **kwargs): - yield self.single_index - - # abstract method from IndexStrategy - def each_subnamed_index(self): - yield self.single_index - - # abstract method from IndexStrategy - def pls_handle_messages_chunk(self, messages_chunk): - logger.debug('got messages_chunk %s', messages_chunk) - self.assert_message_type(messages_chunk.message_type) - bulk_stream = elasticsearch5.helpers.streaming_bulk( - self.es5_client, - self._build_elastic_actions(messages_chunk), - max_chunk_bytes=self.MAX_CHUNK_BYTES, - raise_on_error=False, - ) - for (ok, response) in bulk_stream: - op_type, response_body = next(iter(response.items())) - message_target_id = self._get_message_target_id(response_body['_id']) - is_done = ok or (op_type == 'delete' and response_body.get('status') == 404) - error_text = None if is_done else str(response_body) - yield messages.IndexMessageResponse( - is_done=is_done, - index_message=messages.IndexMessage(messages_chunk.message_type, message_target_id), - status_code=response_body.get('status'), - error_text=error_text, - ) - - # abstract method from IndexStrategy - @property - def supported_message_types(self): - return { - messages.MessageType.INDEX_SUID, - messages.MessageType.BACKFILL_SUID, - } - - # abstract method from IndexStrategy - @property - def backfill_message_type(self): - return messages.MessageType.BACKFILL_SUID - - def _index_settings(self): - return { - 'analysis': { - 'filter': { - 'autocomplete_filter': { - 'type': 'edge_ngram', - 'min_gram': 1, - 
'max_gram': 20 - } - }, - 'analyzer': { - 'default': { - # same as 'standard' analyzer, plus html_strip - 'type': 'custom', - 'tokenizer': 'standard', - 'filter': ['lowercase', 'stop'], - 'char_filter': ['html_strip'] - }, - 'autocomplete': { - 'type': 'custom', - 'tokenizer': 'standard', - 'filter': [ - 'lowercase', - 'autocomplete_filter' - ] - }, - 'subject_analyzer': { - 'type': 'custom', - 'tokenizer': 'subject_tokenizer', - 'filter': [ - 'lowercase', - ] - }, - 'subject_search_analyzer': { - 'type': 'custom', - 'tokenizer': 'keyword', - 'filter': [ - 'lowercase', - ] - }, - }, - 'tokenizer': { - 'subject_tokenizer': { - 'type': 'path_hierarchy', - 'delimiter': '|', - } - } - } - } - - def _index_mappings(self): - autocomplete_field = { - 'autocomplete': { - 'type': 'string', - 'analyzer': 'autocomplete', - 'search_analyzer': 'standard', - 'include_in_all': False - } - } - exact_field = { - 'exact': { - 'type': 'keyword', - # From Elasticsearch documentation: - # The value for ignore_above is the character count, but Lucene counts bytes. 
- # If you use UTF-8 text with many non-ASCII characters, you may want to set the limit to 32766 / 3 = 10922 since UTF-8 characters may occupy at most 3 bytes - 'ignore_above': 10922 - } - } - return { - 'creativeworks': { - 'dynamic': 'strict', - 'properties': { - 'affiliations': {'type': 'text', 'fields': exact_field}, - 'contributors': {'type': 'text', 'fields': exact_field}, - 'date': {'type': 'date', 'format': 'strict_date_optional_time', 'include_in_all': False}, - 'date_created': {'type': 'date', 'format': 'strict_date_optional_time', 'include_in_all': False}, - 'date_modified': {'type': 'date', 'format': 'strict_date_optional_time', 'include_in_all': False}, - 'date_published': {'type': 'date', 'format': 'strict_date_optional_time', 'include_in_all': False}, - 'date_updated': {'type': 'date', 'format': 'strict_date_optional_time', 'include_in_all': False}, - 'description': {'type': 'text'}, - 'funders': {'type': 'text', 'fields': exact_field}, - 'hosts': {'type': 'text', 'fields': exact_field}, - 'id': {'type': 'keyword', 'include_in_all': False}, - 'identifiers': {'type': 'text', 'fields': exact_field}, - 'justification': {'type': 'text', 'include_in_all': False}, - 'language': {'type': 'keyword', 'include_in_all': False}, - 'publishers': {'type': 'text', 'fields': exact_field}, - 'registration_type': {'type': 'keyword', 'include_in_all': False}, - 'retracted': {'type': 'boolean', 'include_in_all': False}, - 'source_config': {'type': 'keyword', 'include_in_all': False}, - 'source_unique_id': {'type': 'keyword'}, - 'sources': {'type': 'keyword', 'include_in_all': False}, - 'subjects': {'type': 'text', 'include_in_all': False, 'analyzer': 'subject_analyzer', 'search_analyzer': 'subject_search_analyzer'}, - 'subject_synonyms': {'type': 'text', 'include_in_all': False, 'analyzer': 'subject_analyzer', 'search_analyzer': 'subject_search_analyzer', 'copy_to': 'subjects'}, - 'tags': {'type': 'text', 'fields': exact_field}, - 'title': {'type': 'text', 'fields': 
exact_field}, - 'type': {'type': 'keyword', 'include_in_all': False}, - 'types': {'type': 'keyword', 'include_in_all': False}, - 'withdrawn': {'type': 'boolean', 'include_in_all': False}, - 'osf_related_resource_types': {'type': 'object', 'dynamic': True, 'include_in_all': False}, - 'lists': {'type': 'object', 'dynamic': True, 'include_in_all': False}, - }, - 'dynamic_templates': [ - {'exact_field_on_lists_strings': {'path_match': 'lists.*', 'match_mapping_type': 'string', 'mapping': {'type': 'text', 'fields': exact_field}}}, - ] - }, - 'agents': { - 'dynamic': False, - 'properties': { - 'id': {'type': 'keyword', 'include_in_all': False}, - 'identifiers': {'type': 'text', 'fields': exact_field}, - 'name': {'type': 'text', 'fields': {**autocomplete_field, **exact_field}}, - 'family_name': {'type': 'text', 'include_in_all': False}, - 'given_name': {'type': 'text', 'include_in_all': False}, - 'additional_name': {'type': 'text', 'include_in_all': False}, - 'suffix': {'type': 'text', 'include_in_all': False}, - 'location': {'type': 'text', 'include_in_all': False}, - 'sources': {'type': 'keyword', 'include_in_all': False}, - 'type': {'type': 'keyword', 'include_in_all': False}, - 'types': {'type': 'keyword', 'include_in_all': False}, - } - }, - 'sources': { - 'dynamic': False, - 'properties': { - 'id': {'type': 'keyword', 'include_in_all': False}, - 'name': {'type': 'text', 'fields': {**autocomplete_field, **exact_field}}, - 'short_name': {'type': 'keyword', 'include_in_all': False}, - 'type': {'type': 'keyword', 'include_in_all': False}, - } - }, - 'tags': { - 'dynamic': False, - 'properties': { - 'id': {'type': 'keyword', 'include_in_all': False}, - 'name': {'type': 'text', 'fields': {**autocomplete_field, **exact_field}}, - 'type': {'type': 'keyword', 'include_in_all': False}, - } - }, - } - - def _get_message_target_id(self, doc_id): - return IDObfuscator.decode_id(doc_id) - - def _build_elastic_actions(self, messages_chunk): - action_template = { - '_index': 
self.STATIC_INDEXNAME, - '_type': 'creativeworks', - } - suid_ids = set(messages_chunk.target_ids_chunk) - _derived_qs = ( - trove_db.DerivedIndexcard.objects - .filter(upriver_indexcard__source_record_suid_id=suid_ids) - .filter(deriver_identifier__in=( - trove_db.ResourceIdentifier.objects - .queryset_for_iri(SHAREv2.sharev2_elastic) - )) - .select_related('upriver_indexcard') - ) - for _derived_card in _derived_qs: - _suid_id = _derived_card.upriver_indexcard.source_record_suid_id - doc_id = get_doc_id(_suid_id) - suid_ids.remove(_suid_id) - source_doc = json.loads(_derived_card.derived_text) - assert source_doc['id'] == doc_id - if source_doc.pop('is_deleted', False): - action = { - **action_template, - '_id': doc_id, - '_op_type': 'delete', - } - else: - action = { - **action_template, - '_id': doc_id, - '_op_type': 'index', - '_source': source_doc, - } - logger.debug('built action for suid_id=%s: %s', _suid_id, action) - yield action - # delete any that don't have the expected DerivedIndexcard - for leftover_suid_id in suid_ids: - logger.debug('deleting suid_id=%s', leftover_suid_id) - action = { - **action_template, - '_id': get_doc_id(leftover_suid_id), - '_op_type': 'delete', - } - yield action - - # optional method from IndexStrategy - def pls_handle_search__passthru(self, request_body=None, request_queryparams=None) -> dict: - '''the definitive sharev2-search api: passthru to elasticsearch version 5 - ''' - if request_queryparams: - request_queryparams.pop('indexStrategy', None) - try: - return self.es5_client.search( - index=self.STATIC_INDEXNAME, - body=request_body or {}, - params=request_queryparams or {}, - ) - except elasticsearch5.TransportError as error: - raise exceptions.IndexStrategyError() from error # TODO: error messaging - - class SpecificIndex(IndexStrategy.SpecificIndex): - index_strategy: Sharev2Elastic5IndexStrategy # narrow type - - # override IndexStrategy.SpecificIndex - @property - def full_index_name(self): - return 
self.index_strategy.STATIC_INDEXNAME - - # abstract method from IndexStrategy.SpecificIndex - def pls_create(self): - # check index exists (if not, create) - logger.debug('Ensuring index %s', self.full_index_name) - indices_api = self.index_strategy.es5_client.indices - if not indices_api.exists(index=self.full_index_name): - indices_api.create( - self.full_index_name, - body={ - 'settings': self.index_strategy._index_settings(), - 'mappings': self.index_strategy._index_mappings(), - }, - ) - self.pls_refresh() - logger.debug('Waiting for yellow status') - ( - self.index_strategy.es5_client.cluster - .health(wait_for_status='yellow') - ) - logger.info('Finished setting up Elasticsearch index %s', self.full_index_name) - - # abstract method from IndexStrategy.SpecificIndex - def pls_start_keeping_live(self): - pass # there is just the one index, always kept live - - # abstract method from IndexStrategy.SpecificIndex - def pls_stop_keeping_live(self): - raise exceptions.IndexStrategyError( - f'{self.__class__.__qualname__} is implemented for only one index, ' - f'"{self.full_index_name}", which is always kept live (until elasticsearch5 ' - 'support is dropped)' - ) - - # abstract method from IndexStrategy.SpecificIndex - def pls_refresh(self): - ( - self.index_strategy.es5_client.indices - .refresh(index=self.full_index_name) - ) - logger.info('Refreshed index %s', self.full_index_name) - - # abstract method from IndexStrategy.SpecificIndex - def pls_delete(self): - logger.warning(f'{self.__class__.__name__}: deleting index {self.full_index_name}') - ( - self.index_strategy.es5_client.indices - .delete(index=self.full_index_name, ignore=[400, 404]) - ) - - # abstract method from IndexStrategy.SpecificIndex - def pls_check_exists(self): - return bool( - self.index_strategy.es5_client.indices - .exists(index=self.full_index_name) - ) - - # abstract method from IndexStrategy.SpecificIndex - def pls_get_status(self) -> IndexStatus: - try: - stats = ( - 
self.index_strategy.es5_client.indices - .stats(index=self.full_index_name, metric='docs') - ) - existing_indexes = ( - self.index_strategy.es5_client.indices - .get_settings(index=self.full_index_name, name='index.creation_date') - ) - index_settings = existing_indexes[self.full_index_name] - index_stats = stats['indices'][self.full_index_name] - except (KeyError, elasticsearch5.exceptions.NotFoundError): - # not yet created - return IndexStatus( - index_subname=self.subname, - specific_indexname=self.full_index_name, - is_kept_live=False, - is_default_for_searching=False, - creation_date='', - doc_count=0, - ) - return IndexStatus( - index_subname=self.subname, - specific_indexname=self.full_index_name, - is_kept_live=True, - is_default_for_searching=True, - creation_date=timestamp_to_readable_datetime( - index_settings['settings']['index']['creation_date'], - ), - doc_count=index_stats['primaries']['docs']['count'], - ) diff --git a/share/search/index_strategy/trovesearch_denorm.py b/share/search/index_strategy/trovesearch_denorm.py index 19cea8d80..4bea2a932 100644 --- a/share/search/index_strategy/trovesearch_denorm.py +++ b/share/search/index_strategy/trovesearch_denorm.py @@ -31,12 +31,10 @@ ) from trove.trovesearch.search_params import ( CardsearchParams, - Propertypath, SearchFilter, SearchText, ValueType, ValuesearchParams, - is_globpath, ) from trove.trovesearch.search_handle import ( CardsearchHandle, @@ -46,6 +44,10 @@ ValuesearchHandle, ValuesearchResult, ) +from trove.util.propertypath import ( + is_globpath, + Propertypath, +) from trove.vocab import osfmap from trove.vocab.namespaces import OWL, RDF from . 
import _trovesearch_util as ts diff --git a/share/search/messages.py b/share/search/messages.py index ae6fbc670..d2af74901 100644 --- a/share/search/messages.py +++ b/share/search/messages.py @@ -7,7 +7,6 @@ import typing from share.search import exceptions -from share.util import chunked logger = logging.getLogger(__name__) @@ -89,16 +88,6 @@ def as_tuples(self): def timestamp(self) -> int: return time.time_ns() - @classmethod - def stream_chunks( - cls, - message_type: MessageType, - id_stream: typing.Iterable[int], - chunk_size: int, - ) -> 'typing.Iterable[MessagesChunk]': - for id_chunk in chunked(id_stream, chunk_size): - yield cls(message_type, id_chunk) - class DaemonMessage(abc.ABC): PROTOCOL_VERSION = None @@ -108,7 +97,11 @@ def compose_however(message_type: typing.Union[int, MessageType], target_id: int '''pass-thru to PreferedDaemonMessageSubclass.compose ''' assert isinstance(target_id, int) - return V3Message.compose(message_type, target_id) + return V3Message.compose(( + MessageType.from_int(message_type) + if isinstance(message_type, int) + else message_type + ), target_id) @classmethod def from_received_message(cls, kombu_message): @@ -212,7 +205,7 @@ def compose(cls, message_type: MessageType, target_id: int) -> dict: } @property - def _message_twople(self) -> (int, int): + def _message_twople(self) -> tuple[int, int]: return self.kombu_message.payload['m'] @property diff --git a/share/util/__init__.py b/share/util/__init__.py index bd9aa831a..45f535f7f 100644 --- a/share/util/__init__.py +++ b/share/util/__init__.py @@ -1,14 +1,6 @@ -from collections import OrderedDict import re -WHITESPACE_RE = r'\s+' - - -def strip_whitespace(string): - return re.sub(WHITESPACE_RE, ' ', string).strip() - - class InvalidID(Exception): def __init__(self, value, message='Invalid ID'): super().__init__(value, message) @@ -24,7 +16,7 @@ class IDObfuscator: ID_RE = re.compile(r'([0-9A-Fa-f]{2,})([0-9A-Fa-f]{3})-([0-9A-Fa-f]{3})-([0-9A-Fa-f]{3})') @classmethod - def 
encode(cls, instance): + def encode(cls, instance) -> str: return cls.encode_id(instance.id, instance._meta.model) @classmethod @@ -92,113 +84,3 @@ def get_instance_from_id(cls, model_class, id): return IDObfuscator.resolve(id) except InvalidID: return model_class.objects.get(id=id) - - -class CyclicalDependency(Exception): - pass - - -class TopologicalSorter: - """Sort a list of nodes topologically, so a node is always preceded by its dependencies. - - Params: - - `nodes`: Iterable of objects - - `dependencies`: Callable that takes a single argument (a node) and returns an iterable - of nodes that should precede it (or keys, if `key` is given) - - `key`: Callable that takes a single argument (a node) and returns a unique key. - If omitted, nodes will be compared for equality directly. - """ - - def __init__(self, nodes, dependencies, key=None): - self.__sorted = [] - self.__nodes = list(nodes) - self.__visited = set() - self.__visiting = set() - self.__dependencies = dependencies - self.__key = key - self.__node_map = {key(n): n for n in nodes} if key else None - - def sorted(self): - if not self.__nodes: - return self.__sorted - - while self.__nodes: - n = self.__nodes.pop(0) - self.__visit(n) - - return self.__sorted - - def __visit(self, node): - key = self.__key(node) if self.__key else node - if key in self.__visiting: - raise CyclicalDependency(key, self.__visiting) - - if key in self.__visited: - return - - self.__visiting.add(key) - for k in self.__dependencies(node): - if k is not None and k is not key: - self.__visit(self.__get_node(k)) - - self.__visited.add(key) - self.__sorted.append(node) - self.__visiting.remove(key) - - def __get_node(self, key): - return self.__node_map[key] if self.__node_map else key - - -class DictHashingDict: - # A wrapper around dicts that can have dicts as keys - - def __init__(self): - self.__inner = {} - - def get(self, key, *args): - return self.__inner.get(self._hash(key), *args) - - def pop(self, key, *args): - return 
self.__inner.pop(self._hash(key), *args) - - def setdefault(self, key, *args): - return self.__inner.setdefault(self._hash(key), *args) - - def __getitem__(self, key): - return self.__inner[self._hash(key)] - - def __setitem__(self, key, value): - self.__inner[self._hash(key)] = value - - def __contains__(self, key): - return self._hash(key) in self.__inner - - def _hash(self, val): - if isinstance(val, dict): - if not isinstance(val, OrderedDict): - val = tuple((k, self._hash(v)) for k, v in sorted(val.items(), key=lambda x: x[0])) - else: - val = tuple((k, self._hash(v)) for k, v in val.items()) - if isinstance(val, (list, tuple)): - val = tuple(self._hash(v) for v in val) - return val - - -def chunked(iterable, size=25, fail_fast=False): - iterable = iter(iterable) - try: - while True: - chunk = [] - for _ in range(size): - chunk.append(next(iterable)) - yield chunk - except StopIteration: - yield chunk - except Exception as e: - if not fail_fast and chunk: - yield chunk - raise e - - -def placeholders(length): - return ', '.join('%s' for _ in range(length)) diff --git a/share/util/checksum_iri.py b/share/util/checksum_iri.py index 552aeb91c..b0dafa908 100644 --- a/share/util/checksum_iri.py +++ b/share/util/checksum_iri.py @@ -1,9 +1,18 @@ +from __future__ import annotations +from collections.abc import Callable import dataclasses import hashlib import json +from typing import Self, Any, TYPE_CHECKING +if TYPE_CHECKING: + from trove.util.json import JsonValue -def _ensure_bytes(bytes_or_something) -> bytes: + +type HexdigestFn = Callable[[str | bytes, str | bytes], str] + + +def _ensure_bytes(bytes_or_something: bytes | str) -> bytes: if isinstance(bytes_or_something, bytes): return bytes_or_something if isinstance(bytes_or_something, str): @@ -11,12 +20,12 @@ def _ensure_bytes(bytes_or_something) -> bytes: raise NotImplementedError(f'how bytes? 
({bytes_or_something})') -def _builtin_checksum(hash_constructor): +def _builtin_checksum(hash_constructor: Any) -> HexdigestFn: def hexdigest_fn(salt: str | bytes, data: str | bytes) -> str: hasher = hash_constructor() hasher.update(_ensure_bytes(salt)) hasher.update(_ensure_bytes(data)) - return hasher.hexdigest() + return str(hasher.hexdigest()) return hexdigest_fn @@ -33,11 +42,11 @@ class ChecksumIri: salt: str hexdigest: str - def __str__(self): + def __str__(self) -> str: return f'urn:checksum:{self.checksumalgorithm_name}:{self.salt}:{self.hexdigest}' @classmethod - def digest(cls, checksumalgorithm_name: str, *, salt: str, data: str): + def digest(cls, checksumalgorithm_name: str, *, salt: str, data: str) -> Self: try: hexdigest_fn = CHECKSUM_ALGORITHMS[checksumalgorithm_name] except KeyError: @@ -52,7 +61,7 @@ def digest(cls, checksumalgorithm_name: str, *, salt: str, data: str): ) @classmethod - def digest_json(cls, checksumalgorithm_name, *, salt, raw_json): + def digest_json(cls, checksumalgorithm_name: str, *, salt: str, raw_json: JsonValue) -> Self: return cls.digest( checksumalgorithm_name, salt=salt, @@ -60,7 +69,7 @@ def digest_json(cls, checksumalgorithm_name, *, salt, raw_json): ) @classmethod - def from_iri(cls, checksum_iri: str): + def from_iri(cls, checksum_iri: str) -> Self: try: (urn, checksum, algorithmname, salt, hexdigest) = checksum_iri.split(':') assert (urn, checksum) == ('urn', 'checksum') diff --git a/share/version.py b/share/version.py index 191d57ff7..becab491e 100644 --- a/share/version.py +++ b/share/version.py @@ -1,5 +1,10 @@ -__version__ = '25.4.0' +import functools +import importlib.metadata -def get_share_version() -> str: - return __version__ +__all__ = ('get_shtrove_version',) + + +@functools.cache +def get_shtrove_version() -> str: + return importlib.metadata.version('shtrove') diff --git a/tests/api/test_sitebanners.py b/tests/api/test_sitebanners.py index 95bbea6fa..a905403c5 100644 --- a/tests/api/test_sitebanners.py 
+++ b/tests/api/test_sitebanners.py @@ -1,7 +1,6 @@ import pytest from share.models import SiteBanner -from share.util import IDObfuscator from tests.factories import ShareUserFactory @@ -31,7 +30,7 @@ def test_list_with_items(self, client): assert resp.status_code == 200 assert resp.json() == { 'data': [{ - 'id': IDObfuscator.encode(banner), + 'id': str(banner.id), 'type': 'SiteBanner', 'attributes': { 'color': 'info', @@ -45,12 +44,3 @@ def test_list_with_items(self, client): 'prev': None, }, } - - # def test_get_item(self, client): - # resp = client.get('/api/v2/site_banners/') - # assert resp.status_code == 200 - # assert resp.json() == { - # 'data': [], - # 'meta': { - # } - # } diff --git a/tests/api/test_sources_endpoint.py b/tests/api/test_sources_endpoint.py index 5c9cbce7c..61f78a2c5 100644 --- a/tests/api/test_sources_endpoint.py +++ b/tests/api/test_sources_endpoint.py @@ -132,15 +132,15 @@ def test_is_deleted(self, client, sources): source_ids_after = {s['id'] for s in sources_after} assert len(sources_after) == len(sources_before) - 1 - missing_ids = source_ids_before - source_ids_after - assert missing_ids == {IDObfuscator.encode(deleted_source)} + missing_ids = {int(i) for i in source_ids_before - source_ids_after} + assert missing_ids == {deleted_source.id} def test_by_id(self, client, sources): source = Source.objects.exclude(is_deleted=True).last() resp = client.get('{}{}/'.format(self.endpoint, IDObfuscator.encode(source))) assert resp.status_code == 200 - assert IDObfuscator.load(resp.json()['data']['id']) == source + assert int(resp.json()['data']['id']) == source.id assert resp.json()['data']['type'] == 'Source' assert resp.json()['data']['attributes'] == { 'name': source.name, @@ -180,8 +180,7 @@ def test_successful_post_no_home_page(self, client, source_add_user): data = flatten_write_response(resp) created_label = data['source']['longTitle'].replace(' ', '_').lower() - created_user = 
ShareUser.objects.get(pk=IDObfuscator.decode_id(data['user']['id'])) - + created_user = ShareUser.objects.get(pk=data['user']['id']) assert data['source']['longTitle'] == test_data['data']['attributes']['long_title'] assert data['source']['name'] == created_label assert data['source']['homePage'] is None @@ -225,9 +224,13 @@ def test_successful_repost_home_page(self, client, source_add_user): ) assert resp_two.status_code == 409 - data_two = flatten_write_response(resp_two) - - assert data_one == data_two + resp_two_json = resp_two.json() + if "data" in resp_two_json: + data_two = flatten_write_response(resp_two) + assert data_one == data_two + else: + if "errors" in resp_two_json: + assert resp_two_json['errors']['errors'][0]['detail'] == 'That resource already exists.' def test_successful_post_put_home_page(self, client, source_add_change_user): test_data = get_post_body(home_page='http://test.homepage.net') diff --git a/tests/share/search/conftest.py b/tests/share/search/conftest.py index 3cba6ba08..0b10d906f 100644 --- a/tests/share/search/conftest.py +++ b/tests/share/search/conftest.py @@ -6,12 +6,8 @@ @pytest.fixture def mock_elastic_clients(settings): # set elastic urls to non-empty but non-usable values - settings.ELASTICSEARCH5_URL = 'fake://bleh' settings.ELASTICSEARCH8_URL = 'fake://bluh' - with mock.patch('share.search.index_strategy.sharev2_elastic5.elasticsearch5'): - with mock.patch('share.search.index_strategy.elastic8.elasticsearch8'): - yield + with mock.patch('share.search.index_strategy.elastic8.elasticsearch8'): + yield from share.search.index_strategy.elastic8 import Elastic8IndexStrategy Elastic8IndexStrategy._get_elastic8_client.cache_clear() - from share.search.index_strategy.sharev2_elastic5 import Sharev2Elastic5IndexStrategy - Sharev2Elastic5IndexStrategy._get_elastic5_client.cache_clear() diff --git a/tests/share/search/index_strategy/test_sharev2_elastic5.py b/tests/share/search/index_strategy/test_sharev2_elastic5.py deleted file 
mode 100644 index 8d0d84e73..000000000 --- a/tests/share/search/index_strategy/test_sharev2_elastic5.py +++ /dev/null @@ -1,87 +0,0 @@ -import unittest - -from django.conf import settings -from primitive_metadata import primitive_rdf as rdf - -from share.search import messages -from share.search.index_strategy.sharev2_elastic5 import Sharev2Elastic5IndexStrategy -from tests.trove.factories import create_indexcard -from trove.vocab.namespaces import DCTERMS, SHAREv2, RDF, BLARG -from ._with_real_services import RealElasticTestCase - - -@unittest.skipUnless(settings.ELASTICSEARCH5_URL, 'missing ELASTICSEARCH5_URL setting') -class TestSharev2Elastic5(RealElasticTestCase): - # for RealElasticTestCase - def get_index_strategy(self): - index_strategy = Sharev2Elastic5IndexStrategy('test_sharev2_elastic5') - if not index_strategy.STATIC_INDEXNAME.startswith('test_'): - index_strategy.STATIC_INDEXNAME = f'test_{index_strategy.STATIC_INDEXNAME}' - return index_strategy - - def setUp(self): - super().setUp() - self.__indexcard = create_indexcard( - BLARG.hello, - { - RDF.type: {SHAREv2.CreativeWork}, - DCTERMS.title: {rdf.literal('hello', language='en')}, - }, - deriver_iris=[SHAREv2.sharev2_elastic], - ) - - def test_without_daemon(self): - _formatted_record = self._get_formatted_record() - _messages_chunk = messages.MessagesChunk( - messages.MessageType.INDEX_SUID, - [_formatted_record.suid_id], - ) - self._assert_happypath_without_daemon( - _messages_chunk, - expected_doc_count=1, - ) - - def test_with_daemon(self): - _formatted_record = self._get_formatted_record() - _messages_chunk = messages.MessagesChunk( - messages.MessageType.INDEX_SUID, - [_formatted_record.suid_id], - ) - self._assert_happypath_with_daemon( - _messages_chunk, - expected_doc_count=1, - ) - - # override RealElasticTestCase to match hacks done with assumptions - # (single index that will not be updated again before being deleted) - def _assert_happypath_until_ingest(self): - # initial - _index = 
next(self.index_strategy.each_subnamed_index()) - assert not _index.pls_check_exists() - index_status = _index.pls_get_status() - assert not index_status.creation_date - assert not index_status.is_kept_live - assert not index_status.is_default_for_searching - assert not index_status.doc_count - # create index - _index.pls_create() - assert _index.pls_check_exists() - index_status = _index.pls_get_status() - assert index_status.creation_date - assert index_status.is_kept_live # change from base class - assert index_status.is_default_for_searching # change from base class - assert not index_status.doc_count - # keep index live (with ingested updates) - self.index_strategy.pls_start_keeping_live() # now a no-op - index_status = _index.pls_get_status() - assert index_status.creation_date - assert index_status.is_kept_live - assert index_status.is_default_for_searching # change from base class - assert not index_status.doc_count - # default index for searching - self.index_strategy.pls_make_default_for_searching() # now a no-op - index_status = _index.pls_get_status() - assert index_status.creation_date - assert index_status.is_kept_live - assert index_status.is_default_for_searching - assert not index_status.doc_count diff --git a/tests/share/search/index_strategy/test_strategy_selection.py b/tests/share/search/index_strategy/test_strategy_selection.py index a017bc2ba..5f5e1df48 100644 --- a/tests/share/search/index_strategy/test_strategy_selection.py +++ b/tests/share/search/index_strategy/test_strategy_selection.py @@ -6,7 +6,6 @@ IndexStrategy, each_strategy, get_strategy, - sharev2_elastic5, sharev2_elastic8, trovesearch_denorm, parse_strategy_name, @@ -18,7 +17,6 @@ @pytest.fixture def patched_strategies(mock_elastic_clients): _strategies = [ - sharev2_elastic5.Sharev2Elastic5IndexStrategy('sharev2_elastic5'), sharev2_elastic8.Sharev2Elastic8IndexStrategy('sharev2_elastic8'), trovesearch_denorm.TrovesearchDenormIndexStrategy('trovesearch_denorm'), ] diff --git 
a/tests/trove/render/test_jsonld_renderer.py b/tests/trove/render/test_jsonld_renderer.py index 75b92f9ff..eef657f1d 100644 --- a/tests/trove/render/test_jsonld_renderer.py +++ b/tests/trove/render/test_jsonld_renderer.py @@ -26,12 +26,10 @@ class TestJsonldRenderer(_base.TroveJsonRendererTests): "@value": "2024-01-01" } ], - "foaf:primaryTopic": [ - "blarg:anItem" - ], + "foaf:primaryTopic": [{"@id": "blarg:anItem"}], "rdf:type": [ - "trove:Indexcard", - "dcat:CatalogRecord" + {"@id": "trove:Indexcard"}, + {"@id": "dcat:CatalogRecord"} ], "trove:focusIdentifier": [ { @@ -61,7 +59,7 @@ class TestJsonldRenderer(_base.TroveJsonRendererTests): } ], "blarg:hasIri": [ - "blarg:anIri" + {"@id": "blarg:anIri"} ], "blarg:hasRdfLangStringLiteral": [ { @@ -80,7 +78,7 @@ class TestJsonldRenderer(_base.TroveJsonRendererTests): "@value": "a literal of strange datatype" } ], - "rdf:type": ["blarg:aType"], + "rdf:type": [{"@id": "blarg:aType"}], }), ), } @@ -95,7 +93,7 @@ class TestJsonldSearchRenderer(_base.TrovesearchJsonRendererTests): rendered_content=json.dumps({ "@id": "blarg:aSearch", "rdf:type": [ - "trove:Cardsearch" + {"@id": "trove:Cardsearch"} ], "trove:totalResultCount": { "@type": "xsd:integer", @@ -108,7 +106,7 @@ class TestJsonldSearchRenderer(_base.TrovesearchJsonRendererTests): rendered_content=json.dumps({ "@id": "blarg:aSearchFew", "rdf:type": [ - "trove:Cardsearch" + {"@id": "trove:Cardsearch"} ], "trove:totalResultCount": { "@type": "xsd:integer", @@ -119,7 +117,7 @@ class TestJsonldSearchRenderer(_base.TrovesearchJsonRendererTests): "@list": [ { "rdf:type": [ - "trove:SearchResult" + {"@id": "trove:SearchResult"} ], "trove:indexCard": { "@id": "blarg:aCard", @@ -136,16 +134,14 @@ class TestJsonldSearchRenderer(_base.TrovesearchJsonRendererTests): } ], "foaf:primaryTopic": [ - "blarg:anItem" + {"@id": "blarg:anItem"} ], "rdf:type": [ - "trove:Indexcard", - "dcat:CatalogRecord" + {"@id": "trove:Indexcard"}, + {"@id": "dcat:CatalogRecord"} ], 
"trove:focusIdentifier": [ - { - "@value": BLARG.anItem - } + {"@value": BLARG.anItem} ], "trove:resourceMetadata": { "@id": BLARG.anItem, @@ -155,7 +151,7 @@ class TestJsonldSearchRenderer(_base.TrovesearchJsonRendererTests): }, { "rdf:type": [ - "trove:SearchResult" + {"@id": "trove:SearchResult"} ], "trove:indexCard": { "@id": "blarg:aCardd", @@ -172,11 +168,11 @@ class TestJsonldSearchRenderer(_base.TrovesearchJsonRendererTests): } ], "foaf:primaryTopic": [ - "blarg:anItemm" + {"@id": "blarg:anItemm"} ], "rdf:type": [ - "trove:Indexcard", - "dcat:CatalogRecord" + {"@id": "trove:Indexcard"}, + {"@id": "dcat:CatalogRecord"} ], "trove:focusIdentifier": [ { @@ -191,7 +187,7 @@ class TestJsonldSearchRenderer(_base.TrovesearchJsonRendererTests): }, { "rdf:type": [ - "trove:SearchResult" + {"@id": "trove:SearchResult"} ], "trove:indexCard": { "@id": "blarg:aCarddd", @@ -208,16 +204,14 @@ class TestJsonldSearchRenderer(_base.TrovesearchJsonRendererTests): } ], "foaf:primaryTopic": [ - "blarg:anItemmm" + {"@id": "blarg:anItemmm"} ], "rdf:type": [ - "trove:Indexcard", - "dcat:CatalogRecord" + {"@id": "trove:Indexcard"}, + {"@id": "dcat:CatalogRecord"} ], "trove:focusIdentifier": [ - { - "@value": BLARG.anItemmm - } + {"@value": BLARG.anItemmm} ], "trove:resourceMetadata": { "@id": BLARG.anItemmm, diff --git a/trove/admin.py b/trove/admin.py index 5ef20eac3..4df52b10c 100644 --- a/trove/admin.py +++ b/trove/admin.py @@ -1,3 +1,5 @@ +from __future__ import annotations +from typing import Any from django.contrib import admin from django.utils.html import format_html @@ -52,9 +54,9 @@ class IndexcardAdmin(admin.ModelAdmin): list_filter = ('deleted', 'source_record_suid__source_config') actions = ('_freshen_index',) - def _freshen_index(self, request, queryset): + def _freshen_index(self, queryset: list[Indexcard]) -> None: IndexMessenger().notify_indexcard_update(queryset) - _freshen_index.short_description = 'freshen indexcard in search index' + 
_freshen_index.short_description = 'freshen indexcard in search index' # type: ignore[attr-defined] @admin.register(LatestResourceDescription, site=admin_site) @@ -73,9 +75,9 @@ class LatestResourceDescriptionAdmin(admin.ModelAdmin): list_select_related = ('indexcard',) show_full_result_count = False - def rdf_as_turtle__pre(self, instance): + def rdf_as_turtle__pre(self, instance: Any) -> str: return format_html('
{}
', instance.rdf_as_turtle) - rdf_as_turtle__pre.short_description = 'rdf as turtle' + rdf_as_turtle__pre.short_description = 'rdf as turtle' # type: ignore[attr-defined] @admin.register(ArchivedResourceDescription, site=admin_site) @@ -94,9 +96,9 @@ class ArchivedResourceDescriptionAdmin(admin.ModelAdmin): list_select_related = ('indexcard',) show_full_result_count = False - def rdf_as_turtle__pre(self, instance): + def rdf_as_turtle__pre(self, instance: Any) -> str: return format_html('
{}
', instance.rdf_as_turtle) - rdf_as_turtle__pre.short_description = 'rdf as turtle' + rdf_as_turtle__pre.short_description = 'rdf as turtle' # type: ignore[attr-defined] @admin.register(SupplementaryResourceDescription, site=admin_site) @@ -116,9 +118,9 @@ class SupplementaryResourceDescriptionAdmin(admin.ModelAdmin): list_select_related = ('indexcard',) show_full_result_count = False - def rdf_as_turtle__pre(self, instance): + def rdf_as_turtle__pre(self, instance: SupplementaryResourceDescription) -> str: return format_html('
{}
', instance.rdf_as_turtle) - rdf_as_turtle__pre.short_description = 'rdf as turtle' + rdf_as_turtle__pre.short_description = 'rdf as turtle' # type: ignore[attr-defined] @admin.register(DerivedIndexcard, site=admin_site) diff --git a/trove/derive/__init__.py b/trove/derive/__init__.py index 3cdd089f7..1f7d24c13 100644 --- a/trove/derive/__init__.py +++ b/trove/derive/__init__.py @@ -1,8 +1,14 @@ +from __future__ import annotations +from typing import TYPE_CHECKING + from . import ( sharev2_elastic, osfmap_json_mini, oaidc_xml, osfmap_json, ) +if TYPE_CHECKING: + from collections.abc import Iterable + from ._base import IndexcardDeriver DERIVER_SET = ( sharev2_elastic.ShareV2ElasticDeriver, @@ -15,18 +21,20 @@ # property_label? ) -DEFAULT_DERIVER_SET = ( +DEFAULT_DERIVER_SET: tuple[type[IndexcardDeriver], ...] = ( sharev2_elastic.ShareV2ElasticDeriver, osfmap_json_mini.OsfmapJsonMiniDeriver, oaidc_xml.OaiDcXmlDeriver, ) -def get_deriver_classes(deriver_iri_filter=None): +def get_deriver_classes( + deriver_iri_filter: Iterable[str] | None = None, +) -> tuple[type[IndexcardDeriver], ...]: if deriver_iri_filter is None: return DEFAULT_DERIVER_SET - return [ + return tuple( _deriver_class for _deriver_class in DERIVER_SET if _deriver_class.deriver_iri() in deriver_iri_filter - ] + ) diff --git a/trove/derive/_base.py b/trove/derive/_base.py index bc8d8b583..ea2a11c16 100644 --- a/trove/derive/_base.py +++ b/trove/derive/_base.py @@ -1,5 +1,5 @@ import abc - +from typing import Any from primitive_metadata import primitive_rdf from trove.models.resource_description import ResourceDescription @@ -15,7 +15,7 @@ def __init__(self, upstream_description: ResourceDescription): self.focus_iri = upstream_description.focus_iri self.data = upstream_description.as_rdfdoc_with_supplements() - def q(self, pathset): + def q(self, pathset: Any) -> Any: # convenience for querying self.data on self.focus_iri return self.data.q(self.focus_iri, pathset) diff --git 
a/trove/derive/oaidc_xml.py b/trove/derive/oaidc_xml.py index f22caa4dc..610fb49fc 100644 --- a/trove/derive/oaidc_xml.py +++ b/trove/derive/oaidc_xml.py @@ -1,3 +1,4 @@ +from typing import Any from lxml import etree from primitive_metadata import primitive_rdf as rdf @@ -63,7 +64,7 @@ def should_skip(self) -> bool: return _allowed_focustype_iris.isdisjoint(_focustype_iris) # abstract method from IndexcardDeriver - def derive_card_as_text(self): + def derive_card_as_text(self) -> Any: _dc_element = self._derive_card_as_xml() return etree.tostring(_dc_element, encoding='unicode') diff --git a/trove/derive/osfmap_json.py b/trove/derive/osfmap_json.py index 4e8147483..69de39b26 100644 --- a/trove/derive/osfmap_json.py +++ b/trove/derive/osfmap_json.py @@ -1,5 +1,7 @@ +from __future__ import annotations import datetime import json +from typing import TYPE_CHECKING from primitive_metadata import primitive_rdf as rdf @@ -10,13 +12,17 @@ osfmap_json_shorthand, ) from ._base import IndexcardDeriver +if TYPE_CHECKING: + from trove.util.json import JsonValue, JsonObject class OsfmapJsonFullDeriver(IndexcardDeriver): # abstract method from IndexcardDeriver @staticmethod def deriver_iri() -> str: - return TROVE['derive/osfmap_json_full'] + _iri = TROVE['derive/osfmap_json_full'] + assert isinstance(_iri, str) + return _iri # abstract method from IndexcardDeriver @staticmethod @@ -28,7 +34,7 @@ def should_skip(self) -> bool: return False # abstract method from IndexcardDeriver - def derive_card_as_text(self): + def derive_card_as_text(self) -> str: return json.dumps( _RdfOsfmapJsonldRenderer().tripledict_as_nested_jsonld( self.data.tripledict, @@ -38,13 +44,13 @@ def derive_card_as_text(self): class _RdfOsfmapJsonldRenderer: - __nestvisiting_iris: set + __nestvisiting_iris: set[str] - def tripledict_as_nested_jsonld(self, tripledict: rdf.RdfTripleDictionary, focus_iri: str): + def tripledict_as_nested_jsonld(self, tripledict: rdf.RdfTripleDictionary, focus_iri: str) -> 
JsonObject: self.__nestvisiting_iris = set() return self.__nested_rdfobject_as_jsonld(tripledict, focus_iri) - def rdfobject_as_jsonld(self, rdfobject: rdf.RdfObject) -> dict: + def rdfobject_as_jsonld(self, rdfobject: rdf.RdfObject) -> JsonObject: if isinstance(rdfobject, frozenset): return self.twopledict_as_jsonld( rdf.twopledict_from_twopleset(rdfobject), @@ -87,7 +93,7 @@ def rdfobject_as_jsonld(self, rdfobject: rdf.RdfObject) -> dict: ]} raise trove_exceptions.UnsupportedRdfObject(rdfobject) - def twopledict_as_jsonld(self, twopledict: rdf.RdfTwopleDictionary) -> dict: + def twopledict_as_jsonld(self, twopledict: rdf.RdfTwopleDictionary) -> JsonObject: _jsonld = {} for _pred, _objectset in twopledict.items(): if _objectset: @@ -102,7 +108,7 @@ def __nested_rdfobject_as_jsonld( self, tripledict: rdf.RdfTripleDictionary, rdfobject: rdf.RdfObject, - ): + ) -> JsonObject: _yes_nest = ( isinstance(rdfobject, str) and (rdfobject not in self.__nestvisiting_iris) @@ -129,7 +135,7 @@ def __nested_rdfobject_as_jsonld( self.__nestvisiting_iris.discard(rdfobject) return _nested_obj - def _list_or_single_value(self, predicate_iri, json_list: list): + def _list_or_single_value(self, predicate_iri: str, json_list: list[JsonValue]) -> JsonValue: _only_one_object = OWL.FunctionalProperty in ( OSFMAP_THESAURUS .get(predicate_iri, {}) diff --git a/trove/derive/osfmap_json_mini.py b/trove/derive/osfmap_json_mini.py index cd4520f62..ad043e419 100644 --- a/trove/derive/osfmap_json_mini.py +++ b/trove/derive/osfmap_json_mini.py @@ -1,6 +1,11 @@ +from __future__ import annotations +from typing import TYPE_CHECKING + from trove.vocab import namespaces as ns from trove.derive.osfmap_json import OsfmapJsonFullDeriver from trove.vocab.namespaces import TROVE +if TYPE_CHECKING: + from trove.models.resource_description import ResourceDescription EXCLUDED_PREDICATE_SET = frozenset({ ns.OSFMAP.contains, @@ -8,15 +13,15 @@ class OsfmapJsonMiniDeriver(OsfmapJsonFullDeriver): - def 
__init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) + def __init__(self, upstream_description: ResourceDescription): + super().__init__(upstream_description) self.convert_tripledict() @staticmethod def deriver_iri() -> str: return TROVE['derive/osfmap_json'] - def convert_tripledict(self): + def convert_tripledict(self) -> None: self.data.tripledict = { _subj: _new_twopledict for _subj, _old_twopledict in self.data.tripledict.items() diff --git a/trove/derive/sharev2_elastic.py b/trove/derive/sharev2_elastic.py index 27c7b3a06..ab65d757e 100644 --- a/trove/derive/sharev2_elastic.py +++ b/trove/derive/sharev2_elastic.py @@ -1,6 +1,7 @@ import datetime import json import re +from typing import Union, Dict, Any, List, Tuple, Optional, Set from primitive_metadata import primitive_rdf @@ -98,7 +99,10 @@ EMPTY_VALUES = (None, '', []) # type: ignore[var-annotated] -def strip_empty_values(thing): +JSONLike = Union[Dict[str, Any], List[Any], Tuple[Any, ...], Any] + + +def strip_empty_values(thing: JSONLike) -> JSONLike: if isinstance(thing, dict): return { k: strip_empty_values(v) @@ -145,7 +149,7 @@ def should_skip(self) -> bool: return _allowed_focustype_iris.isdisjoint(_focustype_iris) # abstract method from IndexcardDeriver - def derive_card_as_text(self): + def derive_card_as_text(self) -> str: _suid = self.upstream_description.indexcard.source_record_suid try: # maintain doc id in the sharev2 index _suid = _suid.get_backcompat_sharev2_suid() @@ -204,7 +208,7 @@ def derive_card_as_text(self): sort_keys=True, ) - def _related_names(self, *predicate_iris): + def _related_names(self, *predicate_iris: Tuple[Dict[str, Any]]) -> List[None | str | Any]: _obj_iter = self.data.q( self.focus_iri, { @@ -217,7 +221,7 @@ def _related_names(self, *predicate_iris): for _obj in _obj_iter ] - def _single_date(self, *predicate_iris, focus_iri=None): + def _single_date(self, *predicate_iris: Tuple[Any], focus_iri: Optional[str] = None) -> None | str | Any: _val = 
self._single_value(*predicate_iris, focus_iri=focus_iri) if isinstance(_val, primitive_rdf.Literal): return _val.unicode_value @@ -225,10 +229,10 @@ def _single_date(self, *predicate_iris, focus_iri=None): return _val.isoformat() return _val - def _single_string(self, *predicate_iris, focus_iri=None): + def _single_string(self, *predicate_iris: Tuple[Any], focus_iri: Optional[str] = None) -> None | str | Any: return _obj_to_string_or_none(self._single_value(*predicate_iris, focus_iri=focus_iri)) - def _single_value(self, *predicate_iris, focus_iri=None): + def _single_value(self, *predicate_iris: Tuple[Any], focus_iri: Optional[str] = None) -> None | str | Any: # for sharev2 back-compat, some fields must have a single value # (tho now the corresponding rdf property may have many values) for _pred in predicate_iris: @@ -242,7 +246,7 @@ def _single_value(self, *predicate_iris, focus_iri=None): continue return None - def _string_list(self, *predicate_paths, focus_iri=None): + def _string_list(self, *predicate_paths: Tuple[Any], focus_iri: Optional[str] = None) -> List[Any]: _object_iter = self.data.q( focus_iri or self.focus_iri, predicate_paths, @@ -266,7 +270,7 @@ def _osf_related_resource_types(self) -> dict[str, bool]: for _key, _pred in _osf_artifact_types.items() } - def _related_agent_list(self, *predicate_iris, focus_iri=None): + def _related_agent_list(self, *predicate_iris: str, focus_iri: Optional[str] = None) -> List[Dict[str, Any]]: _agent_list = [] for _predicate_iri in predicate_iris: _agent_iri_iter = self.data.q( @@ -277,7 +281,7 @@ def _related_agent_list(self, *predicate_iris, focus_iri=None): _agent_list.append(self._related_agent(_predicate_iri, _agent_iri)) return _agent_list - def _related_agent(self, relation_iri, agent_iri): + def _related_agent(self, relation_iri: str, agent_iri: str) -> Dict[str, Any]: return { 'type': self._single_type(agent_iri), 'types': self._type_list(agent_iri), @@ -288,7 +292,7 @@ def _related_agent(self, 
relation_iri, agent_iri): # TODO 'order_cited': } - def _single_type_iri(self, type_iris) -> str | None: + def _single_type_iri(self, type_iris: List[Any] | Set[Any]) -> str | None | Any: # try SHAREv2 types _sharev2_type_iris = set(filter(SHAREv2.__contains__, type_iris)) if _sharev2_type_iris: @@ -309,7 +313,7 @@ def _single_type_iri(self, type_iris) -> str | None: return self._single_type_iri([SHAREv2[_typename], SHAREv2.CreativeWork]) return None - def _single_type(self, focus_iri): + def _single_type(self, focus_iri: str) -> str | None: _type_iris = set(self.data.q(focus_iri, RDF.type)) _type_iri = self._single_type_iri(_type_iris) return ( @@ -318,27 +322,27 @@ def _single_type(self, focus_iri): else None ) - def _type_list(self, focus_iri): + def _type_list(self, focus_iri: str) -> list[str]: return sorted( self._format_type_iri(_type_iri) for _type_iri in self.data.q(focus_iri, RDF.type) if _type_iri in SHAREv2 or _type_iri in OSFMAP ) - def _format_type_iri(self, iri): + def _format_type_iri(self, iri: str) -> str: if iri in SHAREv2: _typename = primitive_rdf.iri_minus_namespace(iri, namespace=SHAREv2) elif iri in OSFMAP: _typename = primitive_rdf.iri_minus_namespace(iri, namespace=OSFMAP) else: - return iri # oh well + return iri return self._format_typename(_typename) - def _format_typename(self, sharev2_typename: str): + def _format_typename(self, sharev2_typename: str) -> str: # convert from PascalCase to lower case with spaces between words return re.sub(r'\B([A-Z])', r' \1', sharev2_typename).lower() - def _work_lineage_list(self, work_iri): + def _work_lineage_list(self, work_iri: str) -> List[Optional[Dict[str, Any]]]: # expects a linear lineage (each resource only "part of" one other) _parent_iri = self._single_value(DCTERMS.isPartOf, focus_iri=work_iri) if isinstance(_parent_iri, str): @@ -350,7 +354,7 @@ def _work_lineage_list(self, work_iri): else: return [] - def _work_lineage_item(self, work_iri): + def _work_lineage_item(self, work_iri: str) 
-> Dict[str, Any]: return { 'type': self._single_type(work_iri), 'types': self._type_list(work_iri), @@ -358,7 +362,7 @@ def _work_lineage_item(self, work_iri): 'identifiers': self._string_list(DCTERMS.identifier, focus_iri=work_iri), } - def _subjects_and_synonyms(self, source_name): + def _subjects_and_synonyms(self, source_name: str) -> Tuple[List[str], List[str]]: _subjects = [] _subject_synonyms = [] # making extra osf-specific assumptions here @@ -374,7 +378,12 @@ def _subjects_and_synonyms(self, source_name): _subjects.append(_serialize_subject('bepress', _bepress_lineage)) return _subjects, _subject_synonyms - def _subject_lineage(self, subject_iri, label_predicate_iri, visiting_set=None) -> tuple[str, ...]: + def _subject_lineage( + self, + subject_iri: str, + label_predicate_iri: str, + visiting_set: Optional[Set[str]] = None + ) -> Tuple[str, ...]: _visiting_set = visiting_set or set() _visiting_set.add(subject_iri) _labeltext = next(self.data.q(subject_iri, label_predicate_iri), None) @@ -391,7 +400,7 @@ def _serialize_subject(taxonomy_name: str, subject_lineage: tuple[str, ...]) -> return '|'.join((taxonomy_name, *subject_lineage)) -def _obj_to_string_or_none(obj): +def _obj_to_string_or_none(obj: Optional[str]) -> None | str | Any: if obj is None: return None if isinstance(obj, primitive_rdf.Literal): diff --git a/trove/digestive_tract.py b/trove/digestive_tract.py index a91a9d633..0e2b7e0cb 100644 --- a/trove/digestive_tract.py +++ b/trove/digestive_tract.py @@ -11,9 +11,11 @@ import copy import datetime import logging +from typing import Iterable import celery from django.db import transaction +from django.db.models import QuerySet from primitive_metadata import primitive_rdf from share import models as share_db @@ -39,12 +41,12 @@ def ingest( focus_iri: str, record_mediatype: str, raw_record: str, - record_identifier: str = '', # default focus_iri + record_identifier: str | None = None, # default focus_iri is_supplementary: bool = False, 
expiration_date: datetime.date | None = None, # default "never" restore_deleted: bool = False, urgent: bool = False, -): +) -> None: '''ingest: shorthand for sniff + extract + (eventual) derive''' _suid = sniff( from_user=from_user, @@ -63,7 +65,7 @@ def ingest( expiration_date=expiration_date, ) for _card in _extracted_cards: - task__derive.delay(_card.id, urgent=urgent) + task__derive.delay(_card.pk, urgent=urgent) @transaction.atomic @@ -71,7 +73,7 @@ def sniff( *, # all keyword-args from_user: share_db.ShareUser, focus_iri: str, - record_identifier: str = '', + record_identifier: str | None = None, is_supplementary: bool = False, ) -> share_db.SourceUniqueIdentifier: '''sniff: get a vague sense of a metadata record without touching the record itself @@ -107,7 +109,7 @@ def sniff( _suid.focus_identifier = _focus_identifier _suid.save() else: - if _suid.focus_identifier_id != _focus_identifier.id: + if _suid.focus_identifier_id != _focus_identifier.pk: raise DigestiveError(f'suid focus_identifier should not change! suid={_suid}, focus changed from {_suid.focus_identifier} to {_focus_identifier}') return _suid @@ -134,7 +136,7 @@ def extract( if (expiration_date is not None) and (expiration_date <= datetime.date.today()): raise CannotDigestExpiredDatum(suid, expiration_date) _tripledicts_by_focus_iri = {} - _extractor = get_rdf_extractor_class(record_mediatype)(suid.source_config) + _extractor = get_rdf_extractor_class(record_mediatype)() # TODO normalize (or just validate) tripledict: # - synonymous iris should be grouped (only one as subject-key, others under owl:sameAs) # - focus should have rdf:type @@ -142,6 +144,7 @@ def extract( # - connected graph (all subject-key iris reachable from focus, or reverse for vocab terms?) 
_extracted_tripledict: primitive_rdf.RdfTripleDictionary = _extractor.extract_rdf(raw_record) if _extracted_tripledict: + assert suid.focus_identifier is not None try: _focus_iri = suid.focus_identifier.find_equivalent_iri(_extracted_tripledict) except ValueError: @@ -173,7 +176,7 @@ def extract( ) -def derive(indexcard: trove_db.Indexcard, deriver_iris=None): +def derive(indexcard: trove_db.Indexcard, deriver_iris: Iterable[str] | None = None) -> list[trove_db.DerivedIndexcard]: '''derive: build other kinds of index cards from the extracted rdf will create, update, or delete: @@ -209,7 +212,7 @@ def derive(indexcard: trove_db.Indexcard, deriver_iris=None): return _derived_list -def expel(from_user: share_db.ShareUser, record_identifier: str): +def expel(from_user: share_db.ShareUser, record_identifier: str) -> None: _suid_qs = share_db.SourceUniqueIdentifier.objects.filter( source_config__source__user=from_user, identifier=record_identifier, @@ -238,7 +241,7 @@ def expel_expired_data(today: datetime.date) -> None: ) -def _expel_supplementary_descriptions(supplementary_rdf_queryset) -> None: +def _expel_supplementary_descriptions(supplementary_rdf_queryset: QuerySet[trove_db.SupplementaryResourceDescription]) -> None: # delete expired supplementary metadata _affected_indexcards = set() for _supplement in supplementary_rdf_queryset.select_related('indexcard'): @@ -246,7 +249,7 @@ def _expel_supplementary_descriptions(supplementary_rdf_queryset) -> None: _affected_indexcards.add(_supplement.indexcard) _supplement.delete() for _indexcard in _affected_indexcards: - task__derive.delay(_indexcard.id) + task__derive.delay(_indexcard.pk) ### BEGIN celery tasks @@ -256,9 +259,9 @@ def task__derive( task: celery.Task, indexcard_id: int, deriver_iri: str | None = None, - notify_index=True, - urgent=False, -): + notify_index: bool = True, + urgent: bool = False, +) -> None: _indexcard = trove_db.Indexcard.objects.get(id=indexcard_id) derive( _indexcard, @@ -271,7 +274,7 @@ def 
task__derive( @celery.shared_task(acks_late=True) -def task__schedule_derive_for_source_config(source_config_id: int, notify_index=False): +def task__schedule_derive_for_source_config(source_config_id: int, notify_index: bool = False) -> None: _indexcard_id_qs = ( trove_db.Indexcard.objects .filter(source_record_suid__source_config_id=source_config_id) @@ -282,7 +285,7 @@ def task__schedule_derive_for_source_config(source_config_id: int, notify_index= @celery.shared_task(acks_late=True) -def task__schedule_all_for_deriver(deriver_iri: str, notify_index=False): +def task__schedule_all_for_deriver(deriver_iri: str, notify_index: bool = False) -> None: if not get_deriver_classes([deriver_iri]): raise DigestiveError(f'unknown deriver_iri: {deriver_iri}') _indexcard_id_qs = ( @@ -294,5 +297,5 @@ def task__schedule_all_for_deriver(deriver_iri: str, notify_index=False): @celery.shared_task(acks_late=True) -def task__expel_expired_data(): +def task__expel_expired_data() -> None: expel_expired_data(datetime.date.today()) diff --git a/trove/exceptions.py b/trove/exceptions.py index 37cd4bfd7..1e5a5ab73 100644 --- a/trove/exceptions.py +++ b/trove/exceptions.py @@ -7,7 +7,7 @@ class TroveError(Exception): http_status: int = http.HTTPStatus.INTERNAL_SERVER_ERROR error_location: str = '' - def __init__(self, *args): + def __init__(self, *args: object) -> None: super().__init__(*args) self.error_location = _get_nearest_code_location() diff --git a/trove/extract/__init__.py b/trove/extract/__init__.py index b2bde949e..3da9599a1 100644 --- a/trove/extract/__init__.py +++ b/trove/extract/__init__.py @@ -7,7 +7,7 @@ __all__ = ('get_rdf_extractor_class',) -def get_rdf_extractor_class(mediatype) -> type[BaseRdfExtractor]: +def get_rdf_extractor_class(mediatype: str) -> type[BaseRdfExtractor]: if mediatype == 'text/turtle': return TurtleRdfExtractor raise trove_exceptions.CannotDigestMediatype(mediatype) diff --git a/trove/extract/_base.py b/trove/extract/_base.py index 
618e688f7..58344a8f4 100644 --- a/trove/extract/_base.py +++ b/trove/extract/_base.py @@ -4,9 +4,6 @@ class BaseRdfExtractor(abc.ABC): - def __init__(self, source_config): - self.source_config = source_config - @abc.abstractmethod def extract_rdf(self, input_document: str) -> primitive_rdf.RdfTripleDictionary: raise NotImplementedError diff --git a/trove/extract/turtle.py b/trove/extract/turtle.py index 79018f70f..fb3666a94 100644 --- a/trove/extract/turtle.py +++ b/trove/extract/turtle.py @@ -4,5 +4,5 @@ class TurtleRdfExtractor(BaseRdfExtractor): - def extract_rdf(self, input_document): + def extract_rdf(self, input_document: str): # type: ignore return primitive_rdf.tripledict_from_turtle(input_document) diff --git a/trove/management/commands/migrate_rawdatum_expiration.py b/trove/management/commands/migrate_rawdatum_expiration.py deleted file mode 100644 index b0373b35f..000000000 --- a/trove/management/commands/migrate_rawdatum_expiration.py +++ /dev/null @@ -1,51 +0,0 @@ -import datetime -import time - -from django.db.models import OuterRef - -from trove.util.django import pk_chunked - -from share import models as share_db -from share.management.commands import BaseShareCommand -from trove import models as trove_db - - -class Command(BaseShareCommand): - # copy all non-null values from `RawDatum.expiration_date` to `SupplementaryIndexcardRdf.expiration_date` - # (while being overly cautious to avoid joins on `RawDatum` or `SourceUniqueIdentifier`) - # meant to be run after trove migration 0008_expiration_dates, before share.RawDatum is deleted - - def add_arguments(self, parser): - parser.add_argument('--chunk-size', type=int, default=666) - parser.add_argument('--today', type=datetime.date.fromisoformat, default=datetime.date.today()) - parser.add_argument('--continue-after', type=str, default=None) - - def handle(self, *args, chunk_size: int, today: datetime.date, continue_after, **kwargs): - _before = time.perf_counter() - _total_updated = 0 - _raw_qs = ( 
- share_db.RawDatum.objects.latest_for_each_suid() - .filter(expiration_date__gt=today) # ignore the expired (and the non-expiring) - ) - if continue_after is not None: - _raw_qs = _raw_qs.filter(pk__gt=continue_after) - for _raw_pk_chunk in pk_chunked(_raw_qs, chunk_size): - _supp_qs = trove_db.SupplementaryIndexcardRdf.objects.filter( - from_raw_datum_id__in=_raw_pk_chunk, - expiration_date__isnull=True, # avoid overwriting non-null values - ) - _updated_count = _supp_qs.update( - expiration_date=share_db.RawDatum.objects.filter( - id=OuterRef('from_raw_datum_id'), - ).values('expiration_date'), - ) - _total_updated += _updated_count - _last_pk = _raw_pk_chunk[-1] - _elapsed = time.perf_counter() - _before - self.stdout.write( - f'{_elapsed:.2f}: migrated {_updated_count} of {len(_raw_pk_chunk)} --continue-after={_last_pk}', - ) - _total_seconds = time.perf_counter() - _before - self.stdout.write( - self.style.SUCCESS(f'done! migrated {_total_updated} in {_total_seconds}s'), - ) diff --git a/trove/migrations/0011_upgrade_django_5_2.py b/trove/migrations/0011_upgrade_django_5_2.py new file mode 100644 index 000000000..720cd5d2d --- /dev/null +++ b/trove/migrations/0011_upgrade_django_5_2.py @@ -0,0 +1,49 @@ +# Generated by Django 5.2.3 on 2025-06-25 19:21 + +import django.db.models.deletion +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('trove', '0010_resource_description_rename'), + ] + + operations = [ + migrations.RenameIndex( + model_name='latestresourcedescription', + new_name='trove_lates_modifie_418889_idx', + old_name='trove_lates_modifie_c6b0b1_idx', + ), + migrations.RenameIndex( + model_name='latestresourcedescription', + new_name='trove_lates_expirat_70dd04_idx', + old_name='trove_lates_expirat_92ac89_idx', + ), + migrations.RenameIndex( + model_name='supplementaryresourcedescription', + new_name='trove_suppl_expirat_3cb612_idx', + old_name='trove_suppl_expirat_3ea6e1_idx', + ), + 
migrations.AlterField( + model_name='archivedresourcedescription', + name='indexcard', + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='%(app_label)s_%(class)s_set', to='trove.indexcard'), + ), + migrations.AlterField( + model_name='indexcard', + name='focustype_identifier_set', + field=models.ManyToManyField(related_name='+', to='trove.resourceidentifier'), + ), + migrations.AlterField( + model_name='latestresourcedescription', + name='indexcard', + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='%(app_label)s_%(class)s_set', to='trove.indexcard'), + ), + migrations.AlterField( + model_name='supplementaryresourcedescription', + name='indexcard', + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='%(app_label)s_%(class)s_set', to='trove.indexcard'), + ), + ] diff --git a/trove/models/derived_indexcard.py b/trove/models/derived_indexcard.py index 52f0d3989..c8d1d5e64 100644 --- a/trove/models/derived_indexcard.py +++ b/trove/models/derived_indexcard.py @@ -1,9 +1,12 @@ from __future__ import annotations +from typing import TYPE_CHECKING from django.db import models from primitive_metadata import primitive_rdf as rdf from trove.models.resource_identifier import ResourceIdentifier +if TYPE_CHECKING: + from trove.derive._base import IndexcardDeriver __all__ = ('DerivedIndexcard',) @@ -31,14 +34,14 @@ class Meta: ), ] - def __repr__(self): - return f'<{self.__class__.__qualname__}({self.id}, {self.upriver_indexcard.uuid}, "{self.deriver_identifier.sufficiently_unique_iri}")' + def __repr__(self) -> str: + return f'<{self.__class__.__qualname__}({self.pk}, {self.upriver_indexcard.uuid}, "{self.deriver_identifier.sufficiently_unique_iri}")' - def __str__(self): + def __str__(self) -> str: return repr(self) @property - def deriver_cls(self): + def deriver_cls(self) -> type[IndexcardDeriver]: from trove.derive import get_deriver_classes (_deriver_cls,) = 
get_deriver_classes(self.deriver_identifier.raw_iri_list) return _deriver_cls diff --git a/trove/models/indexcard.py b/trove/models/indexcard.py index ba6de67d3..38cd13a55 100644 --- a/trove/models/indexcard.py +++ b/trove/models/indexcard.py @@ -1,6 +1,7 @@ from __future__ import annotations import datetime import uuid +from typing import Any from django.db import models from django.db import transaction @@ -25,8 +26,8 @@ __all__ = ('Indexcard',) -class IndexcardManager(models.Manager): - def get_for_iri(self, iri: str): +class IndexcardManager(models.Manager['Indexcard']): + def get_for_iri(self, iri: str) -> Indexcard: _uuid = rdf.iri_minus_namespace(iri, namespace=trove_indexcard_namespace()) return self.get(uuid=_uuid) @@ -49,7 +50,7 @@ def save_indexcards_from_tripledicts( restore_deleted=restore_deleted, expiration_date=expiration_date, ) - _focus_identifier_ids = {_fid.id for _fid in _indexcard.focus_identifier_set.all()} + _focus_identifier_ids = {str(_fid.pk) for _fid in _indexcard.focus_identifier_set.all()} if not _seen_focus_identifier_ids.isdisjoint(_focus_identifier_ids): _duplicates = ( ResourceIdentifier.objects @@ -62,7 +63,7 @@ def save_indexcards_from_tripledicts( for _indexcard_to_delete in ( Indexcard.objects .filter(source_record_suid=suid) - .exclude(id__in=[_card.id for _card in _indexcards]) + .exclude(id__in=[_card.pk for _card in _indexcards]) ): _indexcard_to_delete.pls_delete() _indexcards.append(_indexcard_to_delete) @@ -84,7 +85,7 @@ def supplement_indexcards_from_tripledicts( focus_iri=_focus_iri, expiration_date=expiration_date, )) - _seen_indexcard_ids = {_card.id for _card in _indexcards} + _seen_indexcard_ids = {_card.pk for _card in _indexcards} # supplementary data seen previously on this suid (but not this time) should be deleted for _supplement_to_delete in ( SupplementaryResourceDescription.objects @@ -104,7 +105,7 @@ def save_indexcard_from_tripledict( focus_iri: str, restore_deleted: bool = False, expiration_date: 
datetime.date | None = None, - ): + ) -> Indexcard: assert not suid.is_supplementary _focus_identifier_set = ( ResourceIdentifier.objects @@ -114,7 +115,7 @@ def save_indexcard_from_tripledict( ResourceIdentifier.objects.get_or_create_for_iri(_iri) for _iri in rdf_tripledict[focus_iri].get(RDF.type, ()) ] - _indexcard = Indexcard.objects.filter( + _indexcard: Indexcard | None = Indexcard.objects.filter( source_record_suid=suid, focus_identifier_set__in=_focus_identifier_set, ).first() @@ -193,7 +194,7 @@ def latest_resource_description(self) -> LatestResourceDescription: return self.trove_latestresourcedescription_set.get() # may raise DoesNotExist @property - def archived_description_set(self): + def archived_description_set(self) -> Any: '''convenience for the "other side" of ArchivedResourceDescription.indexcard returns a RelatedManager @@ -201,17 +202,17 @@ def archived_description_set(self): return self.trove_archivedresourcedescription_set @property - def supplementary_description_set(self): + def supplementary_description_set(self) -> Any: '''convenience for the "other side" of SupplementaryResourceDescription.indexcard returns a RelatedManager ''' return self.trove_supplementaryresourcedescription_set - def get_iri(self): + def get_iri(self) -> str: return trove_indexcard_iri(self.uuid) - def pls_delete(self, *, notify_indexes=True): + def pls_delete(self, *, notify_indexes: bool = True) -> None: # do not actually delete Indexcard, just mark deleted: if self.deleted is None: self.deleted = timezone.now() @@ -231,10 +232,10 @@ def pls_delete(self, *, notify_indexes=True): from share.search.index_messenger import IndexMessenger IndexMessenger().notify_indexcard_update([self]) - def __repr__(self): + def __repr__(self) -> str: return f'<{self.__class__.__qualname__}({self.uuid}, {self.source_record_suid})' - def __str__(self): + def __str__(self) -> str: return repr(self) @transaction.atomic diff --git a/trove/models/resource_description.py 
b/trove/models/resource_description.py index d5b43ffc1..8fcbc1652 100644 --- a/trove/models/resource_description.py +++ b/trove/models/resource_description.py @@ -60,10 +60,10 @@ def as_rdfdoc_with_supplements(self) -> rdf.RdfGraph: _rdfdoc.add_tripledict(_supplement.as_rdf_tripledict()) return _rdfdoc - def __repr__(self): - return f'<{self.__class__.__qualname__}({self.id}, "{self.focus_iri}")' + def __repr__(self) -> str: + return f'<{self.__class__.__qualname__}({self.pk}, "{self.focus_iri}")' - def __str__(self): + def __str__(self) -> str: return repr(self) @@ -74,18 +74,11 @@ class Meta: models.UniqueConstraint( fields=('indexcard',), name='trove_latestindexcardrdf_uniq_indexcard', - # TODO when on django 5.2: - # name='%(app_label)s_%(class)s_uniq_indexcard', - # ...and add migration with `AlterConstraint` to rename + # note: backcompat naming -- change if/when `RenameConstraint` exists ), ] indexes = [ - models.Index( - fields=('modified',), # for OAI-PMH selective harvest - name='trove_lates_modifie_c6b0b1_idx', - # TODO when on django 5.2: - # remove explicit name, add migration with `RenameIndex` to match - ), + models.Index(fields=('modified',)), # for OAI-PMH selective harvest models.Index(fields=['expiration_date']), # for expiring ] @@ -108,9 +101,7 @@ class Meta: models.UniqueConstraint( fields=('indexcard', 'supplementary_suid'), name='trove_supplementaryindexcardrdf_uniq_supplement', - # TODO when on django 5.2: - # name='%(app_label)s_%(class)s_uniq_supplement', - # ...and add migration with `AlterConstraint` to rename + # note: backcompat naming -- change if/when `RenameConstraint` exists ), ] indexes = [ diff --git a/trove/models/resource_identifier.py b/trove/models/resource_identifier.py index 6d2fe548b..9ba177ab5 100644 --- a/trove/models/resource_identifier.py +++ b/trove/models/resource_identifier.py @@ -1,8 +1,10 @@ +from __future__ import annotations import typing from django.core.exceptions import ValidationError from 
django.contrib.postgres.fields import ArrayField from django.db import models +from django.db.models import QuerySet from django.db.models.functions import Substr, StrIndex from primitive_metadata import primitive_rdf @@ -24,7 +26,7 @@ ) -def validate_iri_scheme(iri_scheme): +def validate_iri_scheme(iri_scheme: str) -> None: '''raise a django ValidationError if not a valid iri scheme ''' if not isinstance(iri_scheme, str): @@ -33,7 +35,7 @@ def validate_iri_scheme(iri_scheme): raise ValidationError('not a valid iri scheme') -def validate_sufficiently_unique_iri(suffuniq_iri: str): +def validate_sufficiently_unique_iri(suffuniq_iri: str) -> None: '''raise a django ValidationError if not a valid "sufficiently unique iri" ''' if not isinstance(suffuniq_iri, str): @@ -51,21 +53,21 @@ def validate_sufficiently_unique_iri(suffuniq_iri: str): raise ValidationError('need more iri beyond a scheme') -class ResourceIdentifierManager(models.Manager): - def queryset_for_iri(self, iri: str): +class ResourceIdentifierManager(models.Manager["ResourceIdentifier"]): + def queryset_for_iri(self, iri: str) -> QuerySet[ResourceIdentifier]: return self.queryset_for_iris((iri,)) - def queryset_for_iris(self, iris: typing.Iterable[str]): + def queryset_for_iris(self, iris: typing.Iterable[str]) -> QuerySet[ResourceIdentifier]: # may raise if invalid _suffuniq_iris = set() for _iri in iris: _suffuniq_iris.add(get_sufficiently_unique_iri(_iri)) return self.filter(sufficiently_unique_iri__in=_suffuniq_iris) - def get_for_iri(self, iri: str) -> 'ResourceIdentifier': + def get_for_iri(self, iri: str) -> ResourceIdentifier: return self.queryset_for_iri(iri).get() # may raise ResourceIdentifier.DoesNotExist - def get_or_create_for_iri(self, iri: str) -> 'ResourceIdentifier': + def get_or_create_for_iri(self, iri: str) -> ResourceIdentifier: # may raise if invalid (_suffuniq_iri, _scheme) = get_sufficiently_unique_iri_and_scheme(iri) (_identifier, _created) = self.get_or_create( @@ -146,10 
+148,10 @@ class Meta: ), ] - def __repr__(self): - return f'<{self.__class__.__qualname__}({self.id}, "{self.sufficiently_unique_iri}")' + def __repr__(self) -> str: + return f'<{self.__class__.__qualname__}({self.pk}, "{self.sufficiently_unique_iri}")' - def __str__(self): + def __str__(self) -> str: return repr(self) def as_iri(self) -> str: @@ -176,7 +178,7 @@ def choose_a_scheme(self) -> str: _scheme = self.scheme_list[0] return _scheme - def equivalent_to_iri(self, iri: str): + def equivalent_to_iri(self, iri: str) -> bool: return (self.sufficiently_unique_iri == get_sufficiently_unique_iri(iri)) def find_equivalent_iri(self, tripledict: primitive_rdf.RdfTripleDictionary) -> str: diff --git a/trove/openapi.py b/trove/openapi.py index 12ecc80b7..0ed880583 100644 --- a/trove/openapi.py +++ b/trove/openapi.py @@ -1,11 +1,11 @@ import itertools import json -from typing import Iterable +from typing import Iterable, Generator, Any, Tuple from django.conf import settings from primitive_metadata import primitive_rdf -from share.version import __version__ +from share.version import get_shtrove_version from trove.util.randomness import shuffled from trove.vocab import mediatypes from trove.vocab.jsonapi import JSONAPI_MEMBERNAME @@ -25,7 +25,7 @@ def get_trove_openapi_json() -> str: return json.dumps(get_trove_openapi(), indent=2) -def get_trove_openapi() -> dict: +def get_trove_openapi() -> dict[str, Any]: '''generate an openapi description of the trove api following https://spec.openapis.org/oas/v3.1.0 @@ -49,7 +49,7 @@ def get_trove_openapi() -> dict: 'email': 'share-support@osf.io', }, # 'license': - 'version': __version__, + 'version': get_shtrove_version(), }, 'servers': [{ 'url': settings.SHARE_WEB_URL, @@ -65,7 +65,7 @@ def get_trove_openapi() -> dict: } -def _openapi_parameters(path_iris: Iterable[str], api_graph: primitive_rdf.RdfGraph): +def _openapi_parameters(path_iris: Iterable[str], api_graph: primitive_rdf.RdfGraph) -> Iterable[tuple[str, Any]]: 
_param_iris = set(itertools.chain(*( api_graph.q(_path_iri, TROVE.hasParameter) for _path_iri in path_iris @@ -95,7 +95,7 @@ def _openapi_parameters(path_iris: Iterable[str], api_graph: primitive_rdf.RdfGr } -def _openapi_examples(path_iris: Iterable[str], api_graph: primitive_rdf.RdfGraph): +def _openapi_examples(path_iris: Iterable[str], api_graph: primitive_rdf.RdfGraph) -> Iterable[tuple[str, Any]]: # assumes examples are blank nodes (frozenset of twoples) _examples = set(itertools.chain(*( api_graph.q(_path_iri, TROVE.example) @@ -129,10 +129,10 @@ def _openapi_examples(path_iris: Iterable[str], api_graph: primitive_rdf.RdfGrap } -def _openapi_path(path_iri: str, api_graph: primitive_rdf.RdfGraph): +def _openapi_path(path_iri: str, api_graph: primitive_rdf.RdfGraph) -> Tuple[str, Any]: # TODO: better error message on absence try: - _path = next(_text(path_iri, TROVE.iriPath, api_graph)) + _path = next(iter(_text(path_iri, TROVE.iriPath, api_graph))) except StopIteration: raise ValueError(f'could not find trove:iriPath for {path_iri}') _label = ' '.join(_text(path_iri, RDFS.label, api_graph)) @@ -166,7 +166,7 @@ def _openapi_path(path_iri: str, api_graph: primitive_rdf.RdfGraph): } -def _concept_markdown_blocks(concept_iri: str, api_graph: primitive_rdf.RdfGraph): +def _concept_markdown_blocks(concept_iri: str, api_graph: primitive_rdf.RdfGraph) -> Generator[str, None, None]: for _label in api_graph.q(concept_iri, RDFS.label): yield f'## {_label.unicode_value}' for _comment in api_graph.q(concept_iri, RDFS.comment): @@ -175,12 +175,12 @@ def _concept_markdown_blocks(concept_iri: str, api_graph: primitive_rdf.RdfGraph yield _desc.unicode_value -def _text(subj, pred, api_graph): +def _text(subj: Any, pred: Any, api_graph: primitive_rdf.RdfGraph) -> Iterable[str]: for _obj in api_graph.q(subj, pred): yield _obj.unicode_value -def _markdown_description(subj_iri: str, api_graph: primitive_rdf.RdfGraph): +def _markdown_description(subj_iri: str, api_graph: 
primitive_rdf.RdfGraph) -> str: return '\n\n'.join(( *( _description.unicode_value diff --git a/trove/render/__init__.py b/trove/render/__init__.py index 2e1350ac4..c5bf699a1 100644 --- a/trove/render/__init__.py +++ b/trove/render/__init__.py @@ -1,3 +1,5 @@ +from typing import Type + from django import http from trove import exceptions as trove_exceptions @@ -23,6 +25,10 @@ TrovesearchSimpleTsvRenderer, ) +RendersType = Type[ + BaseRenderer | RdfHtmlBrowseRenderer | RdfJsonapiRenderer | RdfTurtleRenderer | RdfJsonldRenderer | TrovesearchSimpleCsvRenderer | TrovesearchSimpleJsonRenderer | TrovesearchSimpleTsvRenderer +] + RENDERER_BY_MEDIATYPE = { _renderer_type.MEDIATYPE: _renderer_type for _renderer_type in RENDERERS diff --git a/trove/render/_base.py b/trove/render/_base.py index 48cfe1cc8..49a3a52ec 100644 --- a/trove/render/_base.py +++ b/trove/render/_base.py @@ -35,16 +35,16 @@ class BaseRenderer(abc.ABC): thesaurus_tripledict: rdf.RdfTripleDictionary = dataclasses.field(default_factory=lambda: TROVE_API_THESAURUS) @classmethod - def get_deriver_iri(cls, card_blending: bool): + def get_deriver_iri(cls, card_blending: bool) -> str | None: # override if needed return cls.INDEXCARD_DERIVER_IRI @functools.cached_property - def thesaurus(self): + def thesaurus(self) -> 'rdf.RdfGraph': return rdf.RdfGraph(self.thesaurus_tripledict) @functools.cached_property - def response_data(self): + def response_data(self) -> 'rdf.RdfGraph': return rdf.RdfGraph(self.response_tripledict) @functools.cached_property diff --git a/trove/render/_html.py b/trove/render/_html.py index 45f775880..6daa1e037 100644 --- a/trove/render/_html.py +++ b/trove/render/_html.py @@ -1,10 +1,12 @@ from __future__ import annotations +from collections.abc import Generator import contextlib import dataclasses from xml.etree.ElementTree import ( Element, SubElement, ) +from typing import Any from primitive_metadata import primitive_rdf as rdf @@ -19,7 +21,7 @@ class HtmlBuilder: _nested_elements: 
list[Element] = dataclasses.field(default_factory=list) _heading_depth: int = 0 - def __post_init__(self): + def __post_init__(self) -> None: self._nested_elements.append(self.given_root) @property @@ -34,7 +36,7 @@ def _current_element(self) -> Element: # html-building helper methods @contextlib.contextmanager - def nest_h_tag(self, **kwargs): + def nest_h_tag(self, **kwargs: Any) -> Generator[Element]: _outer_heading_depth = self._heading_depth if not _outer_heading_depth: self._heading_depth = 1 @@ -48,7 +50,7 @@ def nest_h_tag(self, **kwargs): self._heading_depth = _outer_heading_depth @contextlib.contextmanager - def nest(self, tag_name, attrs=None): + def nest(self, tag_name: str, attrs: dict | None = None) -> Generator[Element]: _attrs = {**attrs} if attrs else {} _nested_element = SubElement(self._current_element, tag_name, _attrs) self._nested_elements.append(_nested_element) @@ -58,7 +60,7 @@ def nest(self, tag_name, attrs=None): _popped_element = self._nested_elements.pop() assert _popped_element is _nested_element - def leaf(self, tag_name, *, text=None, attrs=None): + def leaf(self, tag_name: str, *, text: str | None = None, attrs: dict | None = None) -> None: _leaf_element = SubElement(self._current_element, tag_name, attrs or {}) if isinstance(text, rdf.Literal): # TODO: lang diff --git a/trove/render/_rendering.py b/trove/render/_rendering.py index 52e5f9e2c..0de9b015a 100644 --- a/trove/render/_rendering.py +++ b/trove/render/_rendering.py @@ -1,6 +1,6 @@ import abc import dataclasses -from typing import Iterator +from typing import Iterator, Generator from trove import exceptions as trove_exceptions @@ -30,7 +30,7 @@ class SimpleRendering: # implements ProtoRendering mediatype: str rendered_content: str = '' - def iter_content(self): + def iter_content(self) -> Generator[str]: yield self.rendered_content @@ -40,7 +40,7 @@ class StreamableRendering: # implements ProtoRendering content_stream: Iterator[str | bytes | memoryview] _started_already: 
bool = False - def iter_content(self): + def iter_content(self) -> Iterator[str | bytes | memoryview]: if self._started_already: raise trove_exceptions.CannotRenderStreamTwice self._started_already = True diff --git a/trove/render/_simple_trovesearch.py b/trove/render/_simple_trovesearch.py index 6e6ba6eb1..36bc36c4b 100644 --- a/trove/render/_simple_trovesearch.py +++ b/trove/render/_simple_trovesearch.py @@ -1,5 +1,7 @@ +from __future__ import annotations +from collections.abc import Generator, Iterator import json -from typing import Iterator, Any +from typing import Any, TYPE_CHECKING from primitive_metadata import primitive_rdf as rdf @@ -8,6 +10,8 @@ from trove.vocab.namespaces import TROVE, RDF from ._base import BaseRenderer from ._rendering import ProtoRendering, SimpleRendering +if TYPE_CHECKING: + from trove.util.json import JsonObject class SimpleTrovesearchRenderer(BaseRenderer): @@ -16,22 +20,22 @@ class SimpleTrovesearchRenderer(BaseRenderer): (very entangled with trove/trovesearch/trovesearch_gathering.py) ''' PASSIVE_RENDER = False # knows the properties it cares about - _page_links: set + _page_links: set[str] __already_iterated_cards = False - def simple_unicard_rendering(self, card_iri: str, osfmap_json: dict) -> str: + def simple_unicard_rendering(self, card_iri: str, osfmap_json: JsonObject) -> str: raise NotImplementedError - def simple_multicard_rendering(self, cards: Iterator[tuple[str, dict]]) -> str: + def simple_multicard_rendering(self, cards: Iterator[tuple[str, JsonObject]]) -> str: raise NotImplementedError - def unicard_rendering(self, card_iri: str, osfmap_json: dict) -> ProtoRendering: + def unicard_rendering(self, card_iri: str, osfmap_json: JsonObject) -> ProtoRendering: return SimpleRendering( # type: ignore[return-value] mediatype=self.MEDIATYPE, rendered_content=self.simple_unicard_rendering(card_iri, osfmap_json), ) - def multicard_rendering(self, card_pages: Iterator[dict[str, dict]]) -> ProtoRendering: + def 
multicard_rendering(self, card_pages: Iterator[dict[str, JsonObject]]) -> ProtoRendering: _cards = ( (_card_iri, _card_contents) for _page in card_pages @@ -53,7 +57,7 @@ def render_document(self) -> ProtoRendering: ) raise trove_exceptions.UnsupportedRdfType(_focustypes) - def _iter_card_pages(self) -> Iterator[dict[str, Any]]: + def _iter_card_pages(self) -> Generator[dict[str, JsonObject]]: assert not self.__already_iterated_cards self.__already_iterated_cards = True self._page_links = set() @@ -87,7 +91,7 @@ def _get_card_content( self, card: str | rdf.RdfBlanknode, graph: rdf.RdfGraph | None = None, - ) -> dict: + ) -> Any: if isinstance(card, str): _card_content = ( next(self.response_gathering.ask(TROVE.resourceMetadata, focus=card)) diff --git a/trove/render/html_browse.py b/trove/render/html_browse.py index 9fef803dd..1f5bffd6f 100644 --- a/trove/render/html_browse.py +++ b/trove/render/html_browse.py @@ -1,4 +1,7 @@ -from collections.abc import Iterator +from collections.abc import ( + Iterator, + Generator, +) import contextlib import dataclasses import datetime @@ -68,7 +71,7 @@ class RdfHtmlBrowseRenderer(BaseRenderer): __hb: HtmlBuilder = dataclasses.field(init=False) __last_hue_turn: float = dataclasses.field(default_factory=random.random) - def __post_init__(self): + def __post_init__(self) -> None: # TODO: lang (according to request -- also translate) self.__current_data = self.response_tripledict self.__visiting_iris = set() @@ -94,37 +97,37 @@ def simple_render_document(self) -> str: etree_tostring(self.__hb.root_element, encoding='unicode', method='html'), )) - def render_html_head(self): + def render_html_head(self) -> None: with self.__hb.nest('head'): self.__hb.leaf('link', attrs={ 'rel': 'stylesheet', 'href': staticfiles_storage.url('css/browse.css'), }) - def render_nav(self): + def render_nav(self) -> None: with self.__hb.nest('nav'): self.__alternate_mediatypes_card() if self.is_data_blended is not None: self.__blender_toggle_card() - def 
render_main(self): + def render_main(self) -> None: with self.__hb.nest('main'): for _iri in self.response_focus.iris: self.__render_subj(_iri) # TODO: show additional unvisited triples? - def render_footer(self): + def render_footer(self) -> None: with self.__hb.nest('footer'): ... - def __alternate_mediatypes_card(self): + def __alternate_mediatypes_card(self) -> None: with self.__nest_card('details'): self.__hb.leaf('summary', text=_('alternate mediatypes')) for _mediatype in shuffled((*STABLE_MEDIATYPES, *UNSTABLE_MEDIATYPES)): with self.__hb.nest('span', attrs={'class': 'Browse__literal'}): self.__mediatype_link(_mediatype) - def __blender_toggle_card(self): + def __blender_toggle_card(self) -> None: with self.__nest_card('details'): if self.is_data_blended: _header_text = _('card-blending ON') @@ -139,7 +142,7 @@ def __blender_toggle_card(self): 'href': self._queryparam_href('blendCards', _link_blend), }) - def __mediatype_link(self, mediatype: str): + def __mediatype_link(self, mediatype: str) -> None: self.__hb.leaf('a', text=mediatype, attrs={ 'href': self._queryparam_href('acceptMediatype', mediatype), }) @@ -150,7 +153,7 @@ def __mediatype_link(self, mediatype: str): with self.__hb.nest('a', attrs={'href': reverse('trove:docs')}) as _link: _link.text = _('(stable for documented use)') - def __render_subj(self, subj_iri: str, *, start_collapsed=None): + def __render_subj(self, subj_iri: str, *, start_collapsed: bool | None = None) -> None: _twopledict = self.__current_data.get(subj_iri, {}) with self.__visiting(subj_iri): with self.__nest_card('article'): @@ -184,7 +187,7 @@ def __render_subj(self, subj_iri: str, *, start_collapsed=None): self.__hb.leaf('summary', text=_('more details...')) self.__twoples(_twopledict) - def __twoples(self, twopledict: rdf.RdfTwopleDictionary): + def __twoples(self, twopledict: rdf.RdfTwopleDictionary) -> None: with self.__hb.nest('dl', {'class': 'Browse__twopleset'}): for _pred, _obj_set in shuffled(twopledict.items()): 
with self.__hb.nest('dt', attrs={'class': 'Browse__predicate'}): @@ -195,7 +198,7 @@ def __twoples(self, twopledict: rdf.RdfTwopleDictionary): for _obj in shuffled(_obj_set): self.__obj(_obj) - def __obj(self, obj: rdf.RdfObject): + def __obj(self, obj: rdf.RdfObject) -> None: if isinstance(obj, str): # iri # TODO: detect whether indexcard? if (obj in self.__current_data) and (obj not in self.__visiting_iris): @@ -220,7 +223,7 @@ def __literal( literal: rdf.Literal | str, *, is_rdf_object: bool = False, - ): + ) -> None: _lit = (literal if isinstance(literal, rdf.Literal) else rdf.literal(literal)) _markdown_iri = rdf.iri_from_mediatype('text/markdown') _is_markdown = any( @@ -241,7 +244,7 @@ def __literal( else: self.__hb.leaf('q', text=_lit) - def __sequence(self, sequence_twoples: frozenset): + def __sequence(self, sequence_twoples: frozenset[rdf.RdfTwople]) -> None: _obj_in_order = list(rdf.sequence_objects_in_order(sequence_twoples)) with self.__hb.nest('details', attrs={'open': '', 'class': 'Browse__blanknode Browse__object'}): _text = _('sequence of %(count)s') % {'count': len(_obj_in_order)} @@ -251,11 +254,11 @@ def __sequence(self, sequence_twoples: frozenset): with self.__hb.nest('li'): # , visible=True): self.__obj(_seq_obj) - def __quoted_graph(self, quoted_graph: rdf.QuotedGraph): + def __quoted_graph(self, quoted_graph: rdf.QuotedGraph) -> None: with self.__quoted_data(quoted_graph.tripledict): self.__render_subj(quoted_graph.focus_iri) # , start_collapsed=True) - def __blanknode(self, blanknode: rdf.RdfTwopleDictionary | frozenset): + def __blanknode(self, blanknode: rdf.RdfTwopleDictionary | frozenset) -> None: _twopledict = ( blanknode if isinstance(blanknode, dict) @@ -269,11 +272,11 @@ def __blanknode(self, blanknode: rdf.RdfTwopleDictionary | frozenset): self.__hb.leaf('summary', text='(blank node)') self.__twoples(_twopledict) - def __split_iri_pre(self, iri: str): + def __split_iri_pre(self, iri: str) -> None: self.__hb.leaf('pre', 
text='\n'.join(self.__iri_lines(iri))) @contextlib.contextmanager - def __visiting(self, iri: str): + def __visiting(self, iri: str) -> Iterator[None]: assert iri not in self.__visiting_iris self.__visiting_iris.add(iri) try: @@ -282,7 +285,7 @@ def __visiting(self, iri: str): self.__visiting_iris.remove(iri) @contextlib.contextmanager - def __quoted_data(self, quoted_data: dict): + def __quoted_data(self, quoted_data: dict) -> Generator[None]: _outer_data = self.__current_data _outer_visiting_iris = self.__visiting_iris self.__current_data = quoted_data @@ -293,12 +296,12 @@ def __quoted_data(self, quoted_data: dict): self.__current_data = _outer_data self.__visiting_iris = _outer_visiting_iris - def __iri_link_and_labels(self, iri: str): + def __iri_link_and_labels(self, iri: str) -> None: self.__compact_link(iri) for _text in self.__iri_thesaurus_labels(iri): self.__literal(_text) - def __nest_link(self, iri: str): + def __nest_link(self, iri: str) -> contextlib.AbstractContextManager[Element]: _href = ( iri if _is_local_url(iri) @@ -306,12 +309,12 @@ def __nest_link(self, iri: str): ) return self.__hb.nest('a', attrs={'href': _href}) - def __compact_link(self, iri: str): + def __compact_link(self, iri: str) -> Element: with self.__nest_link(iri) as _a: _a.text = self.iri_shorthand.compact_iri(iri) return _a - def __nest_card(self, tag: str): + def __nest_card(self, tag: str) -> contextlib.AbstractContextManager[Element]: return self.__hb.nest( tag, attrs={ @@ -320,7 +323,7 @@ def __nest_card(self, tag: str): }, ) - def __iri_thesaurus_labels(self, iri: str): + def __iri_thesaurus_labels(self, iri: str) -> list[str]: # TODO: consider requested language _labels: set[rdf.RdfObject] = set() _suffuniq = get_sufficiently_unique_iri(iri) @@ -334,12 +337,12 @@ def __iri_thesaurus_labels(self, iri: str): _labels.update(_twoples.get(_pred, ())) return shuffled(_labels) - def _hue_turn_css(self): + def _hue_turn_css(self) -> str: _hue_turn = (self.__last_hue_turn + _PHI) 
% 1.0 self.__last_hue_turn = _hue_turn return f'--hue-turn: {_hue_turn}turn;' - def _queryparam_href(self, param_name: str, param_value: str | None): + def _queryparam_href(self, param_name: str, param_value: str | None) -> str: _base_url = self.response_focus.single_iri() if not _is_local_url(_base_url): _base_url = trove_browse_link(_base_url) @@ -383,7 +386,7 @@ def __iri_lines(self, iri: str) -> Iterator[str]: yield f'#{_fragment}' -def _append_class(el: Element, element_class: str): +def _append_class(el: Element, element_class: str) -> None: el.set( 'class', ' '.join(filter(None, (element_class, el.get('class')))), diff --git a/trove/render/jsonapi.py b/trove/render/jsonapi.py index 6337e7edc..e60fc2338 100644 --- a/trove/render/jsonapi.py +++ b/trove/render/jsonapi.py @@ -1,3 +1,4 @@ +from __future__ import annotations import base64 from collections import defaultdict import contextlib @@ -6,8 +7,9 @@ import itertools import json import time -from typing import Iterable, Union +from typing import Iterable, Union, List, Any, Dict, Tuple, Iterator +from typing import Optional from primitive_metadata import primitive_rdf from trove import exceptions as trove_exceptions @@ -31,14 +33,14 @@ # a jsonapi resource may pull rdf data using an iri or blank node # (using conventions from py for rdf as python primitives) -_IriOrBlanknode = Union[str, frozenset] +_IriOrBlanknode = Union[str, frozenset[Any]] -def _resource_ids_defaultdict(): +def _resource_ids_defaultdict() -> defaultdict[Any, str]: _prefix = str(time.time_ns()) _ints = itertools.count() - def _iter_ids(): + def _iter_ids() -> Iterator[str]: while True: _id = next(_ints) yield f'{_prefix}-{_id}' @@ -69,17 +71,17 @@ class RdfJsonapiRenderer(BaseRenderer): MEDIATYPE = mediatypes.JSONAPI INDEXCARD_DERIVER_IRI = TROVE['derive/osfmap_json'] - _identifier_object_cache: dict = dataclasses.field(default_factory=dict) + _identifier_object_cache: dict[str | frozenset[_IriOrBlanknode], Any] = 
dataclasses.field(default_factory=dict) _id_namespace_set: Iterable[primitive_rdf.IriNamespace] = (trove_indexcard_namespace(),) __to_include: set[primitive_rdf.RdfObject] | None = None - __assigned_blanknode_resource_ids: defaultdict[frozenset, str] = dataclasses.field( + __assigned_blanknode_resource_ids: defaultdict[frozenset[_IriOrBlanknode], str] = dataclasses.field( default_factory=_resource_ids_defaultdict, repr=False, ) # override BaseRenderer @classmethod - def get_deriver_iri(cls, card_blending: bool): + def get_deriver_iri(cls, card_blending: bool) -> str | None: return (None if card_blending else super().get_deriver_iri(card_blending)) def simple_render_document(self) -> str: @@ -88,7 +90,7 @@ def simple_render_document(self) -> str: indent=2, # TODO: pretty-print query param? ) - def render_dict(self, primary_iris: Union[str, Iterable[str]]) -> dict: + def render_dict(self, primary_iris: Union[str, Iterable[str]]) -> dict[str, Any]: _primary_data: dict | list | None = None _included_data = [] with self._contained__to_include() as _to_include: @@ -111,7 +113,7 @@ def render_dict(self, primary_iris: Union[str, Iterable[str]]) -> dict: _document['included'] = _included_data return _document - def render_resource_object(self, iri_or_blanknode: _IriOrBlanknode) -> dict: + def render_resource_object(self, iri_or_blanknode: _IriOrBlanknode) -> dict[str, Any]: _resource_object = {**self.render_identifier_object(iri_or_blanknode)} _twopledict = ( (self.response_data.tripledict.get(iri_or_blanknode) or {}) @@ -125,7 +127,7 @@ def render_resource_object(self, iri_or_blanknode: _IriOrBlanknode) -> dict: _resource_object.setdefault('links', {})['self'] = iri_or_blanknode return _resource_object - def render_identifier_object(self, iri_or_blanknode: _IriOrBlanknode): + def render_identifier_object(self, iri_or_blanknode: _IriOrBlanknode) -> Any | dict[str, Any]: try: return self._identifier_object_cache[iri_or_blanknode] except KeyError: @@ -154,7 +156,7 @@ def 
render_identifier_object(self, iri_or_blanknode: _IriOrBlanknode): self._identifier_object_cache[iri_or_blanknode] = _id_obj return _id_obj - def _single_typename(self, type_iris: list[str]): + def _single_typename(self, type_iris: list[str]) -> Optional[str]: if not type_iris: return '' if len(type_iris) == 1: @@ -166,7 +168,7 @@ def _single_typename(self, type_iris: list[str]): return self._membername_for_iri(_type_iris[0]) return self._membername_for_iri(sorted(type_iris)[0]) - def _membername_for_iri(self, iri: str): + def _membername_for_iri(self, iri: str) -> Optional[str] | Any: try: _membername = next(self.thesaurus.q(iri, JSONAPI_MEMBERNAME)) except StopIteration: @@ -177,10 +179,10 @@ def _membername_for_iri(self, iri: str): raise trove_exceptions.ExpectedLiteralObject((iri, JSONAPI_MEMBERNAME, _membername)) return self.iri_shorthand.compact_iri(iri) - def _resource_id_for_blanknode(self, blanknode: frozenset, /): + def _resource_id_for_blanknode(self, blanknode: frozenset[Any]) -> str: return self.__assigned_blanknode_resource_ids[blanknode] - def _resource_id_for_iri(self, iri: str): + def _resource_id_for_iri(self, iri: str) -> Any: for _iri_namespace in self._id_namespace_set: if iri in _iri_namespace: return primitive_rdf.iri_minus_namespace(iri, namespace=_iri_namespace) @@ -191,12 +193,12 @@ def _resource_id_for_iri(self, iri: str): # as fallback, encode the iri into a valid jsonapi member name return base64.urlsafe_b64encode(iri.encode()).decode() - def _render_field(self, predicate_iri, object_set, *, into: dict): + def _render_field(self, predicate_iri: str, object_set: Iterable[Any], *, into: dict[str, Any]) -> None: _is_relationship = (predicate_iri, RDF.type, JSONAPI_RELATIONSHIP) in self.thesaurus _is_attribute = (predicate_iri, RDF.type, JSONAPI_ATTRIBUTE) in self.thesaurus _field_key = self._membername_for_iri(predicate_iri) _doc_key = 'meta' # unless configured for jsonapi, default to unstructured 'meta' - if ':' not in _field_key: + if 
':' not in _field_key: # type: ignore if _is_relationship: _doc_key = 'relationships' elif _is_attribute: @@ -204,25 +206,29 @@ def _render_field(self, predicate_iri, object_set, *, into: dict): if _is_relationship: _fieldvalue = self._render_relationship_object(predicate_iri, object_set) else: - _fieldvalue = self._one_or_many(predicate_iri, self._attribute_datalist(object_set)) + _fieldvalue = self._one_or_many(predicate_iri, self._attribute_datalist(object_set)) # type: ignore # update the given `into` resource object into.setdefault(_doc_key, {})[_field_key] = _fieldvalue - def _one_or_many(self, predicate_iri: str, datalist: list): + def _one_or_many(self, predicate_iri: str, datalist: list[Any]) -> Union[list[Any], Any, None]: _only_one = (predicate_iri, RDF.type, OWL.FunctionalProperty) in self.thesaurus if _only_one: if len(datalist) > 1: raise trove_exceptions.OwlObjection(f'multiple objects for to-one relation <{predicate_iri}>: {datalist}') - return (datalist[0] if datalist else None) + return datalist[0] if datalist else None return datalist - def _attribute_datalist(self, object_set): + def _attribute_datalist(self, object_set: Iterable[Any]) -> List[Any]: return [ self._render_attribute_datum(_obj) for _obj in object_set ] - def _render_relationship_object(self, predicate_iri, object_set): + def _render_relationship_object( + self, + predicate_iri: str, + object_set: Iterable[Union[frozenset[Any], str]] + ) -> Dict[str, Any]: _data = [] _links = {} for _obj in object_set: @@ -248,7 +254,7 @@ def _render_relationship_object(self, predicate_iri, object_set): _relationship_obj['links'] = _links return _relationship_obj - def _render_link_object(self, link_obj: frozenset): + def _render_link_object(self, link_obj: frozenset[Tuple[Any, Any]]) -> Tuple[str, Dict[str, Any]]: _membername = next( _obj.unicode_value for _pred, _obj in link_obj @@ -270,7 +276,7 @@ def _render_link_object(self, link_obj: frozenset): } return _membername, _rendered_link - def 
_make_object_gen(self, object_set): + def _make_object_gen(self, object_set: frozenset[Any]) -> Iterator[Any]: for _obj in object_set: if isinstance(_obj, frozenset) and ((RDF.type, RDF.Seq) in _obj): yield from primitive_rdf.sequence_objects_in_order(_obj) @@ -278,7 +284,7 @@ def _make_object_gen(self, object_set): yield _obj @contextlib.contextmanager - def _contained__to_include(self): + def _contained__to_include(self) -> Iterator[set[primitive_rdf.RdfObject]]: assert self.__to_include is None self.__to_include = set() try: @@ -286,11 +292,11 @@ def _contained__to_include(self): finally: self.__to_include = None - def _pls_include(self, item): + def _pls_include(self, item: Any) -> None: if self.__to_include is not None: self.__to_include.add(item) - def _render_attribute_datum(self, rdfobject: primitive_rdf.RdfObject) -> dict | list | str | float | int: + def _render_attribute_datum(self, rdfobject: primitive_rdf.RdfObject) -> dict[Any, Any] | list[Any] | str | float | int: if isinstance(rdfobject, frozenset): if (RDF.type, RDF.Seq) in rdfobject: return [ diff --git a/trove/render/jsonld.py b/trove/render/jsonld.py index 9ac61554b..a7ca263c6 100644 --- a/trove/render/jsonld.py +++ b/trove/render/jsonld.py @@ -1,6 +1,8 @@ +from __future__ import annotations import contextlib import datetime import json +from typing import Any, Iterator, TYPE_CHECKING from primitive_metadata import primitive_rdf as rdf @@ -8,6 +10,11 @@ from trove.vocab.namespaces import RDF, OWL, TROVE from trove.vocab import mediatypes from ._base import BaseRenderer +if TYPE_CHECKING: + from trove.util.json import ( + JsonObject, + JsonValue, + ) _PREDICATES_OF_FLEXIBLE_CARDINALITY = { @@ -20,7 +27,7 @@ class RdfJsonldRenderer(BaseRenderer): MEDIATYPE = mediatypes.JSONLD INDEXCARD_DERIVER_IRI = TROVE['derive/osfmap_json'] - __visiting_iris: set | None = None + __visiting_iris: set[str] | None = None def simple_render_document(self) -> str: return json.dumps( @@ -34,7 +41,7 @@ def 
render_jsonld( rdfgraph: rdf.RdfGraph, focus_iri: str, with_context: bool = False, - ) -> dict: + ) -> JsonObject: with self.iri_shorthand.track_used_shorts() as _used_shorts: _rendered = self.rdfobject_as_jsonld(focus_iri, rdfgraph.tripledict) if with_context: @@ -44,7 +51,7 @@ def render_jsonld( } return _rendered - def literal_as_jsonld(self, rdfliteral: rdf.Literal): + def literal_as_jsonld(self, rdfliteral: rdf.Literal) -> JsonObject: if not rdfliteral.datatype_iris or rdfliteral.datatype_iris == {RDF.string}: return {'@value': rdfliteral.unicode_value} if RDF.JSON in rdfliteral.datatype_iris: @@ -74,7 +81,7 @@ def rdfobject_as_jsonld( self, rdfobject: rdf.RdfObject, tripledict: rdf.RdfTripleDictionary | None = None, - ): + ) -> JsonObject: if isinstance(rdfobject, str): return self.iri_as_jsonld(rdfobject, tripledict) elif isinstance(rdfobject, frozenset): @@ -95,7 +102,7 @@ def blanknode_as_jsonld( self, blanknode: rdf.RdfBlanknode, tripledict: rdf.RdfTripleDictionary | None = None, - ) -> dict: + ) -> JsonObject: _twopledict = rdf.twopledict_from_twopleset(blanknode) _jsonld = {} for _pred, _objectset in _twopledict.items(): @@ -111,9 +118,9 @@ def iri_as_jsonld( self, iri: str, tripledict: rdf.RdfTripleDictionary | None = None, - ): + ) -> JsonObject: if (not tripledict) or (iri not in tripledict) or self.__already_visiting(iri): - return self.iri_shorthand.compact_iri(iri) + return {'@id': self.iri_shorthand.compact_iri(iri)} with self.__visiting(iri): _nested_obj = ( {} @@ -131,7 +138,7 @@ def iri_as_jsonld( ) return _nested_obj - def _list_or_single_value(self, predicate_iri: str, objectlist: list): + def _list_or_single_value(self, predicate_iri: str, objectlist: list[JsonValue]) -> JsonValue: _only_one_object = ( (predicate_iri, RDF.type, OWL.FunctionalProperty) in self.thesaurus ) @@ -152,7 +159,7 @@ def _list_or_single_value(self, predicate_iri: str, objectlist: list): return sorted(objectlist, key=_naive_sort_key) @contextlib.contextmanager - def 
__visiting(self, iri: str): + def __visiting(self, iri: str) -> Iterator[None]: if self.__visiting_iris is None: self.__visiting_iris = set() self.__visiting_iris.add(iri) @@ -163,6 +170,6 @@ def __already_visiting(self, iri: str) -> bool: return bool(self.__visiting_iris and (iri in self.__visiting_iris)) -def _naive_sort_key(jsonable_obj): +def _naive_sort_key(jsonable_obj: Any) -> tuple[int, str]: _json = json.dumps(jsonable_obj) - return (len(_json), _json) + return len(_json), _json diff --git a/trove/render/simple_csv.py b/trove/render/simple_csv.py index dfca6e30c..52c9d700b 100644 --- a/trove/render/simple_csv.py +++ b/trove/render/simple_csv.py @@ -1,13 +1,15 @@ from __future__ import annotations from collections.abc import ( - Iterable, + Generator, Iterator, + Iterable, + Sequence, ) import csv import functools import itertools import dataclasses -import typing +from typing import TYPE_CHECKING, ClassVar from trove.trovesearch.search_params import ( CardsearchParams, @@ -18,12 +20,14 @@ from trove.vocab import osfmap from trove.vocab.namespaces import TROVE from ._simple_trovesearch import SimpleTrovesearchRenderer -from ._rendering import StreamableRendering -if typing.TYPE_CHECKING: +from ._rendering import StreamableRendering, ProtoRendering +if TYPE_CHECKING: from trove.util.trove_params import BasicTroveParams + from trove.util.json import JsonValue, JsonObject -Jsonpath = Iterable[str] # path of json keys +type Jsonpath = Sequence[str] # path of json keys +type CsvValue = str | int | float | None _MULTIVALUE_DELIMITER = ' ; ' # possible improvement: smarter in-value delimiting? 
_VALUE_KEY_PREFERENCE = ('@value', '@id', 'name', 'prefLabel', 'label') @@ -33,23 +37,27 @@ class TrovesearchSimpleCsvRenderer(SimpleTrovesearchRenderer): MEDIATYPE = mediatypes.CSV INDEXCARD_DERIVER_IRI = TROVE['derive/osfmap_json'] - CSV_DIALECT = csv.excel + CSV_DIALECT: ClassVar[type[csv.Dialect]] = csv.excel - def unicard_rendering(self, card_iri: str, osfmap_json: dict): - self.multicard_rendering(card_pages=iter([{card_iri: osfmap_json}])) + def unicard_rendering(self, card_iri: str, osfmap_json: JsonObject) -> ProtoRendering: + return self.multicard_rendering(card_pages=iter([{card_iri: osfmap_json}])) - def multicard_rendering(self, card_pages: Iterator[dict[str, dict]]): + def multicard_rendering(self, card_pages: Iterator[dict[str, JsonObject]]) -> ProtoRendering: _doc = TabularDoc( card_pages, trove_params=getattr(self.response_focus, 'search_params', None), ) - return StreamableRendering( + return StreamableRendering( # type: ignore[return-value] mediatype=self.MEDIATYPE, content_stream=csv_stream(self.CSV_DIALECT, _doc.header(), _doc.rows()), ) -def csv_stream(csv_dialect, header: list, rows: Iterator[list]) -> Iterator[str]: +def csv_stream( + csv_dialect: type[csv.Dialect], + header: list[CsvValue], + rows: Iterator[list[CsvValue]], +) -> Iterator[str]: _writer = csv.writer(_Echo(), dialect=csv_dialect) yield _writer.writerow(header) for _row in rows: @@ -58,7 +66,7 @@ def csv_stream(csv_dialect, header: list, rows: Iterator[list]) -> Iterator[str] @dataclasses.dataclass class TabularDoc: - card_pages: Iterator[dict[str, dict]] + card_pages: Iterator[dict[str, JsonObject]] trove_params: BasicTroveParams | None = None _started: bool = False @@ -71,11 +79,11 @@ def column_jsonpaths(self) -> tuple[Jsonpath, ...]: return (_ID_JSONPATH, *_column_jsonpaths) @functools.cached_property - def first_page(self) -> dict[str, dict]: + def first_page(self) -> dict[str, JsonObject]: return next(self.card_pages, {}) def _column_paths(self) -> 
Iterator[Propertypath]: - _pathlists: list[Iterable[Propertypath]] = [] + _pathlists: list[Sequence[Propertypath]] = [] if self.trove_params is not None: # hacks if GLOB_PATHSTEP in self.trove_params.attrpaths_by_type: _pathlists.append(self.trove_params.attrpaths_by_type[GLOB_PATHSTEP]) @@ -97,35 +105,35 @@ def _column_paths(self) -> Iterator[Propertypath]: return self.iter_unique(itertools.chain.from_iterable(_pathlists)) @staticmethod - def iter_unique(iterable): + def iter_unique[T](iterable: Iterable[T]) -> Generator[T]: _seen = set() for _item in iterable: if _item not in _seen: _seen.add(_item) yield _item - def _iter_card_pages(self): + def _iter_card_pages(self) -> Generator[dict[str, JsonObject]]: assert not self._started self._started = True if self.first_page: yield self.first_page yield from self.card_pages - def header(self) -> list[str]: + def header(self) -> list[CsvValue]: return ['.'.join(_path) for _path in self.column_jsonpaths] - def rows(self) -> Iterator[list[str]]: + def rows(self) -> Generator[list[CsvValue]]: for _page in self._iter_card_pages(): for _card_iri, _osfmap_json in _page.items(): yield self._row_values(_osfmap_json) - def _row_values(self, osfmap_json: dict) -> list[str]: + def _row_values(self, osfmap_json: JsonObject) -> list[CsvValue]: return [ self._row_field_value(osfmap_json, _field_path) for _field_path in self.column_jsonpaths ] - def _row_field_value(self, osfmap_json: dict, field_path: Jsonpath) -> str: + def _row_field_value(self, osfmap_json: JsonObject, field_path: Jsonpath) -> CsvValue: _rendered_values = [ _render_tabularly(_obj) for _obj in _iter_values(osfmap_json, field_path) @@ -136,7 +144,7 @@ def _row_field_value(self, osfmap_json: dict, field_path: Jsonpath) -> str: return _MULTIVALUE_DELIMITER.join(map(str, _rendered_values)) -def _osfmap_jsonpath(iri_path: Iterable[str]) -> Jsonpath: +def _osfmap_jsonpath(iri_path: Propertypath) -> Jsonpath: _shorthand = osfmap.osfmap_json_shorthand() return tuple( 
_shorthand.compact_iri(_pathstep) @@ -144,7 +152,7 @@ def _osfmap_jsonpath(iri_path: Iterable[str]) -> Jsonpath: ) -def _has_value(osfmap_json: dict, path: Jsonpath) -> bool: +def _has_value(osfmap_json: JsonObject, path: Jsonpath) -> bool: try: next(_iter_values(osfmap_json, path)) except StopIteration: @@ -153,7 +161,7 @@ def _has_value(osfmap_json: dict, path: Jsonpath) -> bool: return True -def _iter_values(osfmap_json: dict, path: Jsonpath) -> Iterator: +def _iter_values(osfmap_json: JsonObject, path: Jsonpath) -> Generator[JsonValue]: assert path (_step, *_rest) = path _val = osfmap_json.get(_step) @@ -162,7 +170,8 @@ def _iter_values(osfmap_json: dict, path: Jsonpath) -> Iterator: yield from _iter_values(_val, _rest) elif isinstance(_val, list): for _val_obj in _val: - yield from _iter_values(_val_obj, _rest) + if isinstance(_val_obj, dict): + yield from _iter_values(_val_obj, _rest) else: if isinstance(_val, list): yield from _val @@ -170,7 +179,7 @@ def _iter_values(osfmap_json: dict, path: Jsonpath) -> Iterator: yield _val -def _render_tabularly(json_val): +def _render_tabularly(json_val: JsonValue) -> CsvValue: if isinstance(json_val, (str, int, float)): return json_val if isinstance(json_val, dict): @@ -183,7 +192,7 @@ def _render_tabularly(json_val): else None ) if _val is not None: - return _val + return _render_tabularly(_val) return None @@ -192,5 +201,5 @@ class _Echo: from https://docs.djangoproject.com/en/5.1/howto/outputting-csv/#streaming-large-csv-files ''' - def write(self, line: str): + def write(self, line: str) -> str: return line diff --git a/trove/render/simple_json.py b/trove/render/simple_json.py index 480ef1c7f..753d6ee6e 100644 --- a/trove/render/simple_json.py +++ b/trove/render/simple_json.py @@ -1,3 +1,4 @@ +from __future__ import annotations import json import re import typing @@ -10,8 +11,10 @@ ) from trove.vocab import mediatypes from trove.vocab.namespaces import TROVE, RDF -from ._rendering import StreamableRendering +from 
._rendering import StreamableRendering, ProtoRendering from ._simple_trovesearch import SimpleTrovesearchRenderer +if typing.TYPE_CHECKING: + from trove.util.json import JsonObject class TrovesearchSimpleJsonRenderer(SimpleTrovesearchRenderer): @@ -20,20 +23,20 @@ class TrovesearchSimpleJsonRenderer(SimpleTrovesearchRenderer): MEDIATYPE = mediatypes.JSON INDEXCARD_DERIVER_IRI = TROVE['derive/osfmap_json'] - def simple_unicard_rendering(self, card_iri, osfmap_json): + def simple_unicard_rendering(self, card_iri: str, osfmap_json: dict[str, typing.Any]) -> str: return json.dumps({ 'data': self._render_card_content(card_iri, osfmap_json), 'links': self._render_links(), 'meta': self._render_meta(), }, indent=2) - def multicard_rendering(self, card_pages: typing.Iterator[dict[str, dict]]): - return StreamableRendering( + def multicard_rendering(self, card_pages: typing.Iterator[dict[str, dict[str, typing.Any]]]) -> ProtoRendering: + return StreamableRendering( # type: ignore[return-value] mediatype=self.MEDIATYPE, content_stream=self._stream_json(card_pages), ) - def _stream_json(self, card_pages: typing.Iterator[dict[str, dict]]): + def _stream_json(self, card_pages: typing.Iterator[dict[str, typing.Any]]) -> typing.Generator[str]: _prefix = '{"data": [' yield _prefix _datum_prefix = None @@ -54,11 +57,11 @@ def _stream_json(self, card_pages: typing.Iterator[dict[str, dict]]): count=1, ) - def _render_card_content(self, card_iri: str, osfmap_json: dict): + def _render_card_content(self, card_iri: str, osfmap_json: JsonObject) -> JsonObject: self._add_twople(osfmap_json, 'foaf:isPrimaryTopicOf', card_iri) return osfmap_json - def _render_meta(self): + def _render_meta(self) -> dict[str, int | str]: _meta: dict[str, int | str] = {} try: _total = next(self.response_gathering.ask( @@ -75,7 +78,7 @@ def _render_meta(self): pass return _meta - def _render_links(self): + def _render_links(self) -> dict[str, typing.Any]: _links = {} for _pagelink in self._page_links: 
_twopledict = rdf.twopledict_from_twopleset(_pagelink) @@ -85,7 +88,7 @@ def _render_links(self): _links[_membername.unicode_value] = _link_url return _links - def _add_twople(self, json_dict, predicate_iri: str, object_iri: str): + def _add_twople(self, json_dict: dict[str, typing.Any], predicate_iri: str, object_iri: str) -> None: _obj_ref = {'@id': object_iri} _obj_list = json_dict.setdefault(predicate_iri, []) if isinstance(_obj_list, list): diff --git a/trove/render/simple_tsv.py b/trove/render/simple_tsv.py index 60eb4023b..30b01a8a6 100644 --- a/trove/render/simple_tsv.py +++ b/trove/render/simple_tsv.py @@ -7,4 +7,4 @@ class TrovesearchSimpleTsvRenderer(TrovesearchSimpleCsvRenderer): MEDIATYPE = mediatypes.TSV - CSV_DIALECT: type[csv.Dialect] = csv.excel_tab + CSV_DIALECT = csv.excel_tab diff --git a/trove/render/turtle.py b/trove/render/turtle.py index e8239b34f..869e12472 100644 --- a/trove/render/turtle.py +++ b/trove/render/turtle.py @@ -1,3 +1,5 @@ +from typing import Any + from primitive_metadata import primitive_rdf as rdf from trove.vocab.namespaces import TROVE @@ -9,7 +11,7 @@ class RdfTurtleRenderer(BaseRenderer): # include indexcard metadata as JSON literals (because QuotedGraph is non-standard) INDEXCARD_DERIVER_IRI = TROVE['derive/osfmap_json'] - def simple_render_document(self) -> str: + def simple_render_document(self) -> Any: return rdf.turtle_from_tripledict( self.response_data.tripledict, focus=self.response_focus.single_iri(), diff --git a/trove/trovebrowse_gathering.py b/trove/trovebrowse_gathering.py index 3da36167a..f8efb9a60 100644 --- a/trove/trovebrowse_gathering.py +++ b/trove/trovebrowse_gathering.py @@ -1,3 +1,6 @@ +from collections.abc import Generator +from typing import Any + from primitive_metadata import gather from primitive_metadata import primitive_rdf as rdf @@ -5,9 +8,10 @@ from trove.util.iris import get_sufficiently_unique_iri from trove.vocab import namespaces as ns from trove.vocab import static_vocab -from 
trove.vocab.trove import ( - TROVE_API_THESAURUS, -) +from trove.vocab.trove import TROVE_API_THESAURUS + + +type GathererGenerator = Generator[rdf.RdfTriple | rdf.RdfTwople] TROVEBROWSE_NORMS = gather.GatheringNorms.new( @@ -32,7 +36,7 @@ @trovebrowse.gatherer(ns.FOAF.isPrimaryTopicOf) -def gather_cards_focused_on(focus, *, blend_cards: bool): +def gather_cards_focused_on(focus: gather.Focus, *, blend_cards: bool) -> GathererGenerator: _identifier_qs = trove_db.ResourceIdentifier.objects.queryset_for_iris(focus.iris) _indexcard_qs = trove_db.Indexcard.objects.filter(focus_identifier_set__in=_identifier_qs) if blend_cards: @@ -46,7 +50,7 @@ def gather_cards_focused_on(focus, *, blend_cards: bool): @trovebrowse.gatherer(ns.TROVE.thesaurusEntry) -def gather_thesaurus_entry(focus, *, blend_cards: bool): +def gather_thesaurus_entry(focus: gather.Focus, *, blend_cards: bool) -> GathererGenerator: _thesaurus = static_vocab.combined_thesaurus__suffuniq() for _iri in focus.iris: _suffuniq_iri = get_sufficiently_unique_iri(_iri) @@ -59,5 +63,5 @@ def gather_thesaurus_entry(focus, *, blend_cards: bool): @trovebrowse.gatherer(ns.TROVE.usedAtPath) -def gather_paths_used_at(focus, **kwargs): +def gather_paths_used_at(focus: gather.Focus, **kwargs: Any) -> GathererGenerator: yield from () # TODO via elasticsearch aggregation diff --git a/trove/trovesearch/page_cursor.py b/trove/trovesearch/page_cursor.py index e5e5ee3ff..5bbdf5ac0 100644 --- a/trove/trovesearch/page_cursor.py +++ b/trove/trovesearch/page_cursor.py @@ -7,7 +7,7 @@ import typing from trove.exceptions import InvalidPageCursorValue - +from typing import Any __all__ = ('PageCursor', 'OffsetCursor', 'ReproduciblyRandomSampleCursor') @@ -110,17 +110,17 @@ def is_valid(self) -> bool: def is_first_page(self) -> bool: return self.start_offset == 0 - def next_cursor(self): + def next_cursor(self) -> OffsetCursor | None: _next = dataclasses.replace(self, start_offset=int(self.start_offset + self.bounded_page_size)) - return 
(_next if _next.is_valid() else None) + return _next if _next.is_valid() else None - def prev_cursor(self): + def prev_cursor(self) -> OffsetCursor | None: _prev = dataclasses.replace(self, start_offset=int(self.start_offset - self.bounded_page_size)) - return (_prev if _prev.is_valid() else None) + return _prev if _prev.is_valid() else None - def first_cursor(self): + def first_cursor(self) -> OffsetCursor | None: _first = dataclasses.replace(self, start_offset=0) - return (_first if _first.is_valid() else None) + return _first if _first.is_valid() else None @dataclasses.dataclass @@ -130,16 +130,16 @@ class ReproduciblyRandomSampleCursor(OffsetCursor): # start_offset: int (from OffsetCursor) first_page_ids: list[str] = dataclasses.field(default_factory=list) - def next_cursor(self): + def next_cursor(self) -> ReproduciblyRandomSampleCursor | None: return ( - super().next_cursor() + super().next_cursor() # type: ignore if self.first_page_ids else None ) - def prev_cursor(self): + def prev_cursor(self) -> ReproduciblyRandomSampleCursor | None: return ( - super().prev_cursor() + super().prev_cursor() # type: ignore if self.first_page_ids else None ) @@ -149,37 +149,37 @@ def prev_cursor(self): class SearchAfterCursor(PageCursor): # page_size: int (from PageCursor) # total_count: int (from PageCursor) - search_after: list | None = None - next_search_after: list | None = None - prev_search_after: list | None = None + search_after: list[Any] | None = None + next_search_after: list[Any] | None = None + prev_search_after: list[Any] | None = None def is_first_page(self) -> bool: return self.search_after is None - def next_cursor(self): + def next_cursor(self) -> SearchAfterCursor | None: _next = dataclasses.replace( self, search_after=self.next_search_after, next_search_after=None, ) - return (_next if _next.is_valid() else None) + return _next if _next.is_valid() else None - def prev_cursor(self): + def prev_cursor(self) -> SearchAfterCursor | None: _prev = 
dataclasses.replace( self, search_after=self.prev_search_after, next_search_after=self.search_after, ) - return (_prev if _prev.is_valid() else None) + return _prev if _prev.is_valid() else None - def first_cursor(self): + def first_cursor(self) -> SearchAfterCursor | None: _first = dataclasses.replace( self, search_after=None, next_search_after=None, prev_search_after=None, ) - return (_first if _first.is_valid() else None) + return _first if _first.is_valid() else None class _PageCursorTypes(enum.Enum): diff --git a/trove/trovesearch/search_handle.py b/trove/trovesearch/search_handle.py index 01dbffd84..b3ce4a8f7 100644 --- a/trove/trovesearch/search_handle.py +++ b/trove/trovesearch/search_handle.py @@ -39,7 +39,7 @@ class CardsearchHandle(BasicSearchHandle): search_result_page: typing.Iterable[CardsearchResult] = () related_propertypath_results: list[PropertypathUsage] = dataclasses.field(default_factory=list) - def __post_init__(self): + def __post_init__(self): # type: ignore _cursor = self.cursor _page = self.search_result_page if ( # TODO: move this logic into the... cursor? 
@@ -96,7 +96,7 @@ class CardsearchResult: card_pk: str = '' @property - def card_uuid(self): + def card_uuid(self) -> typing.Any: # card iri has the uuid at the end return primitive_rdf.iri_minus_namespace( self.card_iri, @@ -104,7 +104,7 @@ def card_uuid(self): ) @property - def card_id(self): + def card_id(self) -> str: return self.card_pk or self.card_uuid @@ -125,7 +125,7 @@ class ValuesearchResult: match_count: int = 0 total_count: int = 0 - def __post_init__(self): + def __post_init__(self) -> None: assert (self.value_iri is not None) or (self.value_value is not None), ( f'either value_iri or value_value required (on {self})' ) diff --git a/trove/trovesearch/search_params.py b/trove/trovesearch/search_params.py index b8bbf34a9..dfe047a49 100644 --- a/trove/trovesearch/search_params.py +++ b/trove/trovesearch/search_params.py @@ -1,5 +1,10 @@ from __future__ import annotations -import collections +from collections.abc import ( + Generator, + Mapping, + Collection, + Iterable, +) import dataclasses import enum import functools @@ -32,6 +37,8 @@ from trove.vocab import osfmap from trove.vocab.trove import trove_json_shorthand from trove.vocab.namespaces import RDF, TROVE, OWL, FOAF, DCTERMS +if typing.TYPE_CHECKING: + from primitive_metadata.primitive_rdf import IriShorthand logger = logging.getLogger(__name__) @@ -49,7 +56,7 @@ DEFAULT_PROPERTYPATH_SET: PropertypathSet = frozenset([ONE_GLOB_PROPERTYPATH]) -DEFAULT_INCLUDES_BY_TYPE: collections.abc.Mapping[str, frozenset[Propertypath]] = freeze({ +DEFAULT_INCLUDES_BY_TYPE: Mapping[str, frozenset[Propertypath]] = freeze({ TROVE.Indexcard: set(), TROVE.Cardsearch: { (TROVE.searchResultPage,), @@ -63,7 +70,7 @@ }, }) -DEFAULT_FIELDS_BY_TYPE: collections.abc.Mapping[str, tuple[Propertypath, ...]] = freeze({ +DEFAULT_FIELDS_BY_TYPE: Mapping[str, tuple[Propertypath, ...]] = freeze({ TROVE.Indexcard: [ (TROVE.resourceMetadata,), (TROVE.focusIdentifier,), @@ -93,12 +100,12 @@ class ValueType(enum.Enum): INTEGER = 
TROVE['value-type/integer'] @classmethod - def from_shortname(cls, shortname): + def from_shortname(cls, shortname: str) -> typing.Self: _iri = trove_json_shorthand().expand_iri(shortname) return cls(_iri) @classmethod - def shortnames(cls): + def shortnames(cls) -> Generator[str]: for _value_type in cls: yield _value_type.to_shortname() @@ -115,15 +122,15 @@ class TrovesearchParams(BasicTroveParams): static_focus_type: typing.ClassVar[str] # expected on subclasses @classmethod - def _default_shorthand(cls): # NOTE: osfmap special + def _default_shorthand(cls) -> IriShorthand: # NOTE: osfmap special return osfmap.osfmap_json_shorthand() @classmethod - def _default_include(cls): + def _default_include(cls) -> PropertypathSet: return DEFAULT_INCLUDES_BY_TYPE.get(cls.static_focus_type, frozenset()) @classmethod - def _default_attrpaths(cls) -> collections.abc.Mapping[str, tuple[Propertypath, ...]]: + def _default_attrpaths(cls) -> Mapping[str, tuple[Propertypath, ...]]: return DEFAULT_FIELDS_BY_TYPE @@ -133,17 +140,19 @@ class SearchText: propertypath_set: PropertypathSet = DEFAULT_PROPERTYPATH_SET @classmethod - def from_queryparam_family(cls, queryparams: QueryparamDict, queryparam_family: str): + def from_queryparam_family(cls, queryparams: QueryparamDict, queryparam_family: str) -> frozenset[typing.Self]: return frozenset(cls.iter_from_queryparam_family(queryparams, queryparam_family)) @classmethod - def iter_from_queryparam_family(cls, queryparams: QueryparamDict, queryparam_family: str): + def iter_from_queryparam_family(cls, queryparams: QueryparamDict, queryparam_family: str) -> Generator[typing.Self]: for (_param_name, _param_value) in queryparams.get(queryparam_family, ()): if _param_value: - yield cls.from_searchtext_param_or_none(_param_name, _param_value) + _searchtext = cls.from_searchtext_param_or_none(_param_name, _param_value) + if _searchtext is not None: + yield _searchtext @classmethod - def from_searchtext_param_or_none(cls, param_name: 
QueryparamName, param_value: str) -> SearchText | None: + def from_searchtext_param_or_none(cls, param_name: QueryparamName, param_value: str) -> typing.Self | None: _propertypath_set = ( frozenset(osfmap.parse_osfmap_propertypath_set(param_name.bracketed_names[0], allow_globs=True)) if param_name.bracketed_names @@ -161,16 +170,17 @@ def from_searchtext_param_or_none(cls, param_name: QueryparamName, param_value: return _searchtext @classmethod - def queryparams_from_searchtext(self, queryparam_family: str, cardsearch_searchtext): - _by_propertypath_set = collections.defaultdict(set) + def queryparams_from_searchtext( + self, + queryparam_family: str, + cardsearch_searchtext: Iterable[SearchText], + ) -> Generator[tuple[str, str]]: for searchtext in cardsearch_searchtext: - _by_propertypath_set[searchtext.propertypath_set].add(searchtext) - for _propertypath_set, _combinable_segments in _by_propertypath_set.items(): _qp_name = QueryparamName( queryparam_family, - (osfmap.osfmap_propertypath_set_key(_propertypath_set),), + (osfmap.osfmap_propertypath_set_key(searchtext.propertypath_set),) ) - yield str(_qp_name), _combinable_segments + yield str(_qp_name), searchtext.text @dataclasses.dataclass(frozen=True) @@ -186,20 +196,20 @@ class FilterOperator(enum.Enum): AT_DATE = TROVE['at-date'] @classmethod - def from_shortname(cls, shortname): + def from_shortname(cls, shortname: str) -> typing.Self: _iri = trove_json_shorthand().expand_iri(shortname) return cls(_iri) def to_shortname(self) -> str: return trove_json_shorthand().compact_iri(self.value) - def is_date_operator(self): + def is_date_operator(self) -> bool: return self in (self.BEFORE, self.AFTER, self.AT_DATE) - def is_iri_operator(self): + def is_iri_operator(self) -> bool: return self in (self.ANY_OF, self.NONE_OF) - def is_valueless_operator(self): + def is_valueless_operator(self) -> bool: return self in (self.IS_PRESENT, self.IS_ABSENT) operator: FilterOperator @@ -207,7 +217,7 @@ def 
is_valueless_operator(self): propertypath_set: PropertypathSet = DEFAULT_PROPERTYPATH_SET @classmethod - def from_queryparam_family(cls, queryparams: QueryparamDict, queryparam_family: str): + def from_queryparam_family(cls, queryparams: QueryparamDict, queryparam_family: str) -> frozenset[typing.Self]: return frozenset( cls.from_filter_param(param_name, param_value) for (param_name, param_value) @@ -215,7 +225,7 @@ def from_queryparam_family(cls, queryparams: QueryparamDict, queryparam_family: ) @classmethod - def from_filter_param(cls, param_name: QueryparamName, param_value: str): + def from_filter_param(cls, param_name: QueryparamName, param_value: str) -> typing.Self: _operator = None try: # "filter[][]" (_serialized_path_set, _operator_value) = param_name.bracketed_names @@ -281,7 +291,7 @@ def is_type_filter(self) -> bool: and self.operator == SearchFilter.FilterOperator.ANY_OF ) - def as_queryparam(self, queryparam_family: str): + def as_queryparam(self, queryparam_family: str) -> tuple[str, str]: _qp_name = QueryparamName(queryparam_family, ( osfmap.osfmap_propertypath_set_key(self.propertypath_set), self.operator.to_shortname(), @@ -338,7 +348,7 @@ def _from_sort_queryparam( descending=_descending, ) - def __post_init__(self): + def __post_init__(self) -> None: if ( self.value_type == ValueType.DATE and not is_date_path(self.propertypath) @@ -375,7 +385,7 @@ class CardsearchParams(TrovesearchParams): static_focus_type = TROVE.Cardsearch @classmethod - def parse_queryparams(cls, queryparams: QueryparamDict) -> dict: + def parse_queryparams(cls, queryparams: QueryparamDict) -> dict[str, typing.Any]: _filter_set = SearchFilter.from_queryparam_family(queryparams, 'cardSearchFilter') return { **super().parse_queryparams(queryparams), @@ -394,7 +404,7 @@ def related_property_paths(self) -> tuple[Propertypath, ...]: else () ) - def cardsearch_type_iris(self): + def cardsearch_type_iris(self) -> Generator[str]: for _filter in self.cardsearch_filter_set: if 
_filter.is_type_filter(): yield from _filter.value_set @@ -424,7 +434,7 @@ def to_querydict(self) -> QueryDict: if not self.page_cursor.is_basic(): _querydict['page[cursor]'] = self.page_cursor.as_queryparam_value() elif self.page_cursor.page_size != DEFAULT_PAGE_SIZE: - _querydict['page[size]'] = self.page_cursor.page_size + _querydict['page[size]'] = str(self.page_cursor.page_size) for _filter in self.cardsearch_filter_set: _qp_name, _qp_value = _filter.as_queryparam('cardSearchFilter') _querydict.appendlist(_qp_name, _qp_value) @@ -456,7 +466,7 @@ def parse_queryparams(cls, queryparams: QueryparamDict) -> dict: 'valuesearch_filter_set': SearchFilter.from_queryparam_family(queryparams, 'valueSearchFilter'), } - def __post_init__(self): + def __post_init__(self) -> None: if osfmap.is_date_property(self.valuesearch_propertypath[-1]): # date-value limitations if self.valuesearch_searchtext: @@ -468,7 +478,7 @@ def __post_init__(self): 'valueSearchFilter may not be used with valueSearchPropertyPath leading to a "date" property', ) - def to_querydict(self): + def to_querydict(self) -> QueryDict: _querydict = super().to_querydict() _querydict['valueSearchPropertyPath'] = osfmap.osfmap_propertypath_key(self.valuesearch_propertypath) for _qp_name, _qp_value in SearchText.queryparams_from_searchtext('valueSearchText', self.valuesearch_searchtext): @@ -478,12 +488,12 @@ def to_querydict(self): _querydict.appendlist(_qp_name, _qp_value) return _querydict - def valuesearch_iris(self): + def valuesearch_iris(self) -> Generator[str]: for _filter in self.valuesearch_filter_set: if _filter.is_sameas_filter(): yield from _filter.value_set - def valuesearch_type_iris(self): + def valuesearch_type_iris(self) -> Generator[str]: for _filter in self.valuesearch_filter_set: if _filter.is_type_filter(): yield from _filter.value_set @@ -506,24 +516,16 @@ def _get_text_queryparam(queryparams: QueryparamDict, queryparam_family: str) -> ) -def _get_related_property_paths(filter_set) -> 
tuple[Propertypath, ...]: +def _get_related_property_paths(filter_set: Collection[SearchFilter]) -> tuple[Propertypath, ...]: # hard-coded for osf.io search pages, static list per type # TODO: replace with some dynamism, maybe a 'significant_terms' aggregation - _type_iris = set() + _type_iris: set[str] = set() for _filter in filter_set: if _filter.is_type_filter(): _type_iris.update(_filter.value_set) return osfmap.suggested_property_paths(_type_iris) -def _get_unnamed_iri_values(filter_set) -> typing.Iterable[str]: - for _filter in filter_set: - if _filter.operator.is_iri_operator(): - for _iri in _filter.value_set: - if _iri not in osfmap.OSFMAP_THESAURUS: - yield _iri - - def _get_page_cursor(queryparams: QueryparamDict) -> PageCursor: _cursor_value = get_single_value(queryparams, QueryparamName('page', ('cursor',))) if _cursor_value: @@ -538,5 +540,5 @@ def _get_page_cursor(queryparams: QueryparamDict) -> PageCursor: return PageCursor(page_size=_size) -def _frozen_mapping(**kwargs) -> collections.abc.Mapping: +def _frozen_mapping(**kwargs: typing.Any) -> Mapping[str, typing.Any]: return types.MappingProxyType(kwargs) diff --git a/trove/trovesearch/trovesearch_gathering.py b/trove/trovesearch/trovesearch_gathering.py index 4f548774d..14138cbf0 100644 --- a/trove/trovesearch/trovesearch_gathering.py +++ b/trove/trovesearch/trovesearch_gathering.py @@ -1,30 +1,14 @@ +from __future__ import annotations import dataclasses import logging import urllib.parse -from typing import ClassVar, Any, Iterator, Iterable - -from primitive_metadata.primitive_rdf import ( - Literal, - blanknode, - iri_minus_namespace, - literal, - sequence, -) +from typing import ClassVar, Any, TYPE_CHECKING + from primitive_metadata import gather from primitive_metadata import primitive_rdf as rdf from trove import models as trove_db from trove.derive.osfmap_json import _RdfOsfmapJsonldRenderer -from trove.trovesearch.page_cursor import PageCursor -from 
trove.trovesearch.search_params import ( - CardsearchParams, - ValuesearchParams, -) -from trove.trovesearch.search_handle import ( - CardsearchHandle, - ValuesearchHandle, - ValuesearchResult, -) from trove.util.iris import get_sufficiently_unique_iri from trove.vocab.namespaces import RDF, FOAF, DCTERMS, RDFS, DCAT, TROVE from trove.vocab.jsonapi import ( @@ -36,15 +20,36 @@ TROVE_API_THESAURUS, trove_indexcard_namespace, ) +if TYPE_CHECKING: + from collections.abc import Iterator, Iterable, Generator + from trove.trovesearch.page_cursor import PageCursor + from trove.trovesearch.search_handle import ( + CardsearchHandle, + CardsearchResult, + ValuesearchHandle, + ValuesearchResult, + ) + from trove.trovesearch.search_params import ( + CardsearchParams, + SearchFilter, + ValuesearchParams, + ) + from trove.util.propertypath import ( + Propertypath, + PropertypathSet, + ) logger = logging.getLogger(__name__) +type GathererGenerator = Generator[rdf.RdfTriple | rdf.RdfTwople] + + TROVE_GATHERING_NORMS = gather.GatheringNorms.new( namestory=( - literal('cardsearch', language='en'), - literal('search for "index cards" that describe resources', language='en'), + rdf.literal('cardsearch', language='en'), + rdf.literal('search for "index cards" that describe resources', language='en'), ), focustype_iris={ TROVE.Indexcard, @@ -58,7 +63,7 @@ trovesearch_by_indexstrategy = gather.GatheringOrganizer( namestory=( - literal('trove search', language='en'), + rdf.literal('trove search', language='en'), ), norms=TROVE_GATHERING_NORMS, gatherer_params={ @@ -68,12 +73,12 @@ ) -class _TypedFocus(gather.Focus): +class _TypedFocus(gather.Focus): # type: ignore TYPE_IRI: ClassVar[str] # (expected on subclasses) ADDITIONAL_TYPE_IRIS: ClassVar[tuple[str, ...]] = () # (optional on subclasses) @classmethod - def new(cls, *args, type_iris=(), **kwargs): + def new(cls, *args: Any, type_iris: Iterable[str] = (), **kwargs: Any) -> Any: return super().new( *args, # add type_iri to new Focus 
instance @@ -106,8 +111,8 @@ class ValuesearchFocus(_TypedFocus): @dataclasses.dataclass(frozen=True) class IndexcardFocus(_TypedFocus): - TYPE_IRI = TROVE.Indexcard - ADDITIONAL_TYPE_IRIS = (DCAT.CatalogRecord,) + TYPE_IRI: ClassVar[str] = TROVE.Indexcard + ADDITIONAL_TYPE_IRIS: ClassVar[tuple[str, ...]] = (DCAT.CatalogRecord,) # additional dataclass fields indexcard: trove_db.Indexcard = dataclasses.field(compare=False) @@ -126,19 +131,19 @@ class IndexcardFocus(_TypedFocus): @trovesearch_by_indexstrategy.gatherer(TROVE.propertyPath, focustype_iris={TROVE.Valuesearch}) -def gather_valuesearch_propertypath(focus: ValuesearchFocus, **kwargs): +def gather_valuesearch_propertypath(focus: ValuesearchFocus, **kwargs: Any) -> GathererGenerator: yield from _single_propertypath_twoples(focus.search_params.valuesearch_propertypath) @trovesearch_by_indexstrategy.gatherer(TROVE.valueSearchFilter) -def gather_valuesearch_filter(focus, **kwargs): +def gather_valuesearch_filter(focus: ValuesearchFocus, **kwargs: Any) -> GathererGenerator: for _filter in focus.search_params.valuesearch_filter_set: - yield (TROVE.valueSearchFilter, _filter_as_blanknode(_filter)) + yield TROVE.valueSearchFilter, _filter_as_blanknode(_filter) @trovesearch_by_indexstrategy.gatherer(TROVE.totalResultCount) -def gather_count(focus: CardsearchFocus, **kwargs): - yield (TROVE.totalResultCount, focus.search_handle.total_result_count) +def gather_count(focus: CardsearchFocus, **kwargs: Any) -> GathererGenerator: + yield TROVE.totalResultCount, focus.search_handle.total_result_count @trovesearch_by_indexstrategy.gatherer( @@ -146,7 +151,7 @@ def gather_count(focus: CardsearchFocus, **kwargs): focustype_iris={TROVE.Cardsearch}, cache_bound=1, # only the first page gets cached ) -def gather_cardsearch_page(focus: CardsearchFocus, *, deriver_iri, blend_cards, **kwargs): +def gather_cardsearch_page(focus: CardsearchFocus, *, deriver_iri: str, blend_cards: bool, **kwargs: Any) -> GathererGenerator: # each 
searchResultPage a sequence of search results _current_handle: CardsearchHandle | None = focus.search_handle while _current_handle is not None: @@ -166,11 +171,11 @@ def gather_cardsearch_page(focus: CardsearchFocus, *, deriver_iri, blend_cards, ) _result_page.append(_result_obj) yield from _triples - yield (TROVE.searchResultPage, sequence(_result_page)) + yield (TROVE.searchResultPage, rdf.sequence(_result_page)) _current_handle = _current_handle.get_next_streaming_handle() -def _blended_card(card_focus) -> tuple[rdf.RdfObject, Iterable[rdf.RdfTriple]]: +def _blended_card(card_focus: IndexcardFocus) -> tuple[rdf.RdfObject, Iterable[rdf.RdfTriple]]: _metadata = card_focus.resourceMetadata if isinstance(_metadata, rdf.Literal): return (_metadata, ()) @@ -179,19 +184,19 @@ def _blended_card(card_focus) -> tuple[rdf.RdfObject, Iterable[rdf.RdfTriple]]: return (card_focus.single_iri(), ()) # oh well -def _unblended_card(_result, _card_focus) -> tuple[rdf.RdfObject, Iterable[rdf.RdfTriple]]: +def _unblended_card(_result: CardsearchResult, _card_focus: IndexcardFocus) -> tuple[rdf.RdfObject, Iterable[rdf.RdfTriple]]: return ( _unblended_cardsearch_result(_result), _unblended_card_triples(_result, _card_focus), ) -def _unblended_cardsearch_result(_result) -> rdf.RdfBlanknode: +def _unblended_cardsearch_result(_result: CardsearchResult) -> rdf.RdfBlanknode: _text_evidence_twoples = ( (TROVE.matchEvidence, frozenset(( (RDF.type, TROVE.TextMatchEvidence), (TROVE.matchingHighlight, _evidence.matching_highlight), - (TROVE.evidenceCardIdentifier, literal(_evidence.card_iri)), + (TROVE.evidenceCardIdentifier, rdf.literal(_evidence.card_iri)), *_single_propertypath_twoples(_evidence.property_path), ))) for _evidence in _result.text_match_evidence @@ -203,7 +208,7 @@ def _unblended_cardsearch_result(_result) -> rdf.RdfBlanknode: )) -def _unblended_card_triples(_result, _card_focus) -> Iterator[rdf.RdfTriple]: +def _unblended_card_triples(_result: CardsearchResult, _card_focus: 
IndexcardFocus) -> Iterator[rdf.RdfTriple]: # hack around (current) limitations of primitive_metadata.gather # (what with all these intermediate blank nodes and sequences): # yield trove:resourceMetadata here (instead of another gatherer) @@ -219,7 +224,7 @@ def _unblended_card_triples(_result, _card_focus) -> Iterator[rdf.RdfTriple]: @trovesearch_by_indexstrategy.gatherer(TROVE.searchResultPage) -def gather_page_links(focus, **kwargs): +def gather_page_links(focus: CardsearchFocus, **kwargs: Any) -> GathererGenerator: # links to more pages of results yield from _search_page_links(focus, focus.search_params) @@ -228,34 +233,34 @@ def gather_page_links(focus, **kwargs): TROVE.relatedPropertyList, focustype_iris={TROVE.Cardsearch}, ) -def gather_related_properties(focus, **kwargs): +def gather_related_properties(focus: CardsearchFocus, **kwargs: Any) -> GathererGenerator: # info about related properties (for refining/filtering further) - _prop_usage_counts = { + _prop_usage_counts: dict[tuple[str, ...], int] = { _prop_result.property_path: _prop_result.usage_count for _prop_result in focus.search_handle.related_propertypath_results } - _relatedproperty_list = [ + _relatedproperty_list: list[frozenset[tuple[str, Any]]] = [ _related_property_result(_propertypath, _prop_usage_counts.get(_propertypath, 0)) for _propertypath in focus.search_params.related_property_paths ] if _relatedproperty_list: - yield (TROVE.relatedPropertyList, sequence(_relatedproperty_list)) + yield TROVE.relatedPropertyList, rdf.sequence(_relatedproperty_list) @trovesearch_by_indexstrategy.gatherer(TROVE.cardSearchFilter) -def gather_cardsearch_filter(focus, **kwargs): +def gather_cardsearch_filter(focus: CardsearchFocus, **kwargs: Any) -> GathererGenerator: # filter-values from search params for _filter in focus.search_params.cardsearch_filter_set: - yield (TROVE.cardSearchFilter, _filter_as_blanknode(_filter)) + yield TROVE.cardSearchFilter, _filter_as_blanknode(_filter) 
@trovesearch_by_indexstrategy.gatherer( TROVE.searchResultPage, focustype_iris={TROVE.Valuesearch}, ) -def gather_valuesearch_page(focus: ValuesearchFocus, *, deriver_iri, blend_cards, **kwargs): - _result_page = [] - _value_iris = { +def gather_valuesearch_page(focus: ValuesearchFocus, *, deriver_iri: str, blend_cards: bool, **kwargs: Any) -> GathererGenerator: + _result_page: list[rdf.Blanknode] = [] + _value_iris: set[str] = { _result.value_iri for _result in focus.search_handle.search_result_page or () if _result.value_iri @@ -270,7 +275,7 @@ def gather_valuesearch_page(focus: ValuesearchFocus, *, deriver_iri, blend_cards for _identifier in _focus.indexcard.focus_identifier_set.all() } for _result in focus.search_handle.search_result_page or (): - _indexcard_obj = None + _indexcard_obj: rdf.Blanknode | None = None if _result.value_iri is not None: _card_focus = _card_foci_by_suffuniq_iri.get( get_sufficiently_unique_iri(_result.value_iri), @@ -288,23 +293,23 @@ def gather_valuesearch_page(focus: ValuesearchFocus, *, deriver_iri, blend_cards resource_metadata=_card_focus.resourceMetadata, ) for _pred, _obj in _card_twoples: - yield (_indexcard_obj, _pred, _obj) + yield _indexcard_obj, _pred, _obj if _indexcard_obj is None: # no actual indexcard; put what we know in a blanknode-indexcard _indexcard_obj = _valuesearch_result_as_indexcard_blanknode(_result) - _result_page.append(blanknode({ + _result_page.append(rdf.blanknode({ RDF.type: {TROVE.SearchResult}, TROVE.cardsearchResultCount: {_result.match_count}, TROVE.indexCard: {_indexcard_obj}, })) - yield (TROVE.searchResultPage, sequence(_result_page)) + yield TROVE.searchResultPage, rdf.sequence(_result_page) @trovesearch_by_indexstrategy.gatherer( TROVE.totalResultCount, focustype_iris={TROVE.Valuesearch}, ) -def gather_valuesearch_count(focus, **kwargs): +def gather_valuesearch_count(focus: ValuesearchFocus, **kwargs: Any) -> GathererGenerator: yield (TROVE.totalResultCount, 
focus.search_handle.total_result_count) @@ -325,13 +330,13 @@ def gather_valuesearch_count(focus, **kwargs): @trovesearch_by_indexstrategy.gatherer(DCTERMS.issued, focustype_iris={TROVE.Indexcard}) -def gather_card_issued(focus: IndexcardFocus, **kwargs): - yield (DCTERMS.issued, focus.indexcard.created.date()) +def gather_card_issued(focus: IndexcardFocus, **kwargs: Any) -> GathererGenerator: + yield DCTERMS.issued, focus.indexcard.created.date() @trovesearch_by_indexstrategy.gatherer(DCTERMS.modified, focustype_iris={TROVE.Indexcard}) -def gather_card_modified(focus: IndexcardFocus, **kwargs): - yield (DCTERMS.modified, focus.indexcard.modified.date()) +def gather_card_modified(focus: IndexcardFocus, **kwargs: Any) -> GathererGenerator: + yield DCTERMS.modified, focus.indexcard.modified.date() @trovesearch_by_indexstrategy.gatherer( @@ -339,18 +344,18 @@ def gather_card_modified(focus: IndexcardFocus, **kwargs): TROVE.focusIdentifier, focustype_iris={TROVE.Indexcard}, ) -def gather_primary_topic(focus: IndexcardFocus, **kwargs): +def gather_primary_topic(focus: IndexcardFocus, **kwargs: Any) -> GathererGenerator: for _identifier in focus.indexcard.focus_identifier_set.all(): _iri = _identifier.as_iri() yield (FOAF.primaryTopic, _iri) - yield (TROVE.focusIdentifier, literal(_iri)) + yield (TROVE.focusIdentifier, rdf.literal(_iri)) @trovesearch_by_indexstrategy.gatherer( TROVE.resourceMetadata, focustype_iris={TROVE.Indexcard}, ) -def gather_card_contents(focus: IndexcardFocus, *, deriver_iri, **kwargs): +def gather_card_contents(focus: IndexcardFocus, *, deriver_iri: str, **kwargs: Any) -> GathererGenerator: if focus.resourceMetadata is not None: yield (TROVE.resourceMetadata, focus.resourceMetadata) else: @@ -360,7 +365,12 @@ def gather_card_contents(focus: IndexcardFocus, *, deriver_iri, **kwargs): yield (TROVE.resourceMetadata, _loaded_metadata) -def _load_cards_and_contents(*, card_iris=None, value_iris=None, deriver_iri) -> dict[str, IndexcardFocus]: +def 
_load_cards_and_contents( + *, + card_iris: Iterable[str] | None = None, + value_iris: Iterable[str] | None = None, + deriver_iri: str | None, +) -> dict[str, IndexcardFocus]: return ( _load_cards_and_extracted_rdf_contents(card_iris, value_iris) if deriver_iri is None @@ -368,7 +378,10 @@ def _load_cards_and_contents(*, card_iris=None, value_iris=None, deriver_iri) -> ) -def _load_cards_and_extracted_rdf_contents(card_iris=None, value_iris=None) -> dict[str, IndexcardFocus]: +def _load_cards_and_extracted_rdf_contents( + card_iris: Iterable[str] | None = None, + value_iris: Iterable[str] | None = None, +) -> dict[str, IndexcardFocus]: _card_namespace = trove_indexcard_namespace() _resource_description_qs = ( trove_db.LatestResourceDescription.objects @@ -377,7 +390,7 @@ def _load_cards_and_extracted_rdf_contents(card_iris=None, value_iris=None) -> d ) if card_iris is not None: _indexcard_uuids = { - iri_minus_namespace(_card_iri, namespace=_card_namespace) + rdf.iri_minus_namespace(_card_iri, namespace=_card_namespace) for _card_iri in card_iris } _resource_description_qs = _resource_description_qs.filter(indexcard__uuid__in=_indexcard_uuids) @@ -404,7 +417,11 @@ def _load_cards_and_extracted_rdf_contents(card_iris=None, value_iris=None) -> d return _card_foci -def _load_cards_and_derived_contents(card_iris, value_iris, deriver_iri: str) -> dict[str, IndexcardFocus]: +def _load_cards_and_derived_contents( + card_iris: Iterable[str] | None, + value_iris: Iterable[str] | None, + deriver_iri: str, +) -> dict[str, IndexcardFocus]: _card_namespace = trove_indexcard_namespace() # include pre-formatted data from a DerivedIndexcard _derived_indexcard_qs = ( @@ -420,7 +437,7 @@ def _load_cards_and_derived_contents(card_iris, value_iris, deriver_iri: str) -> ) if card_iris is not None: _indexcard_uuids = { - iri_minus_namespace(_card_iri, namespace=_card_namespace) + rdf.iri_minus_namespace(_card_iri, namespace=_card_namespace) for _card_iri in card_iris } _derived_indexcard_qs = 
_derived_indexcard_qs.filter( @@ -447,7 +464,7 @@ def _load_cards_and_derived_contents(card_iris, value_iris, deriver_iri: str) -> ### # local helpers -def _filter_as_blanknode(search_filter) -> frozenset: +def _filter_as_blanknode(search_filter: SearchFilter) -> frozenset[rdf.RdfTwople]: _filter_twoples = [ (TROVE.filterType, search_filter.operator.value), *_multi_propertypath_twoples(search_filter.propertypath_set), @@ -462,7 +479,7 @@ def _filter_as_blanknode(search_filter) -> frozenset: return frozenset(_filter_twoples) -def _osfmap_or_unknown_iri_as_json(iri: str): +def _osfmap_or_unknown_iri_as_json(iri: str) -> rdf.Literal: try: _twopledict = osfmap.OSFMAP_THESAURUS[iri] except KeyError: @@ -471,12 +488,12 @@ def _osfmap_or_unknown_iri_as_json(iri: str): return _osfmap_json({iri: _twopledict}, focus_iri=iri) -def _valuesearch_result_as_json(result: ValuesearchResult) -> Literal: +def _valuesearch_result_as_json(result: ValuesearchResult) -> rdf.Literal: _value_twopledict = { RDF.type: set(result.value_type), - FOAF.name: set(map(literal, result.name_text)), - DCTERMS.title: set(map(literal, result.title_text)), - RDFS.label: set(map(literal, result.label_text)), + FOAF.name: set(map(rdf.literal, result.name_text)), + DCTERMS.title: set(map(rdf.literal, result.title_text)), + RDFS.label: set(map(rdf.literal, result.label_text)), } return ( _osfmap_json({result.value_iri: _value_twopledict}, result.value_iri) @@ -489,55 +506,55 @@ def _unblended_indexcard_twoples( focus_identifiers: Iterable[str], resource_metadata: rdf.Literal, ) -> Iterator[rdf.RdfTwople]: - yield (RDF.type, TROVE.Indexcard) + yield RDF.type, TROVE.Indexcard for _identifier in focus_identifiers: yield (TROVE.focusIdentifier, ( _identifier if isinstance(_identifier, rdf.Literal) - else literal(_identifier) + else rdf.literal(_identifier) )) - yield (TROVE.resourceMetadata, resource_metadata) + yield TROVE.resourceMetadata, resource_metadata -def 
_valuesearch_result_as_indexcard_blanknode(result: ValuesearchResult) -> frozenset: +def _valuesearch_result_as_indexcard_blanknode(result: ValuesearchResult) -> rdf.Blanknode: return frozenset(_unblended_indexcard_twoples( - focus_identifiers=[literal(result.value_iri or result.value_value)], + focus_identifiers=[rdf.literal(result.value_iri or result.value_value)], resource_metadata=_valuesearch_result_as_json(result), )) -def _osfmap_json(tripledict, focus_iri): +def _osfmap_json(tripledict: rdf.RdfTripleDictionary, focus_iri: str) -> rdf.Literal: return rdf.literal_json( _RdfOsfmapJsonldRenderer().tripledict_as_nested_jsonld(tripledict, focus_iri) ) -def _osfmap_twople_json(twopledict): +def _osfmap_twople_json(twopledict: rdf.RdfTwopleDictionary) -> rdf.Literal: return rdf.literal_json( _RdfOsfmapJsonldRenderer().twopledict_as_jsonld(twopledict) ) -def _osfmap_path(property_path): +def _osfmap_path(property_path: Propertypath) -> rdf.Literal: return rdf.literal_json([ osfmap.osfmap_json_shorthand().compact_iri(_iri) for _iri in property_path ]) -def _single_propertypath_twoples(property_path: tuple[str, ...]): - yield (TROVE.propertyPathKey, literal(osfmap.osfmap_propertypath_key(property_path))) +def _single_propertypath_twoples(property_path: Propertypath) -> Generator[rdf.RdfTwople]: + yield (TROVE.propertyPathKey, rdf.literal(osfmap.osfmap_propertypath_key(property_path))) yield (TROVE.propertyPath, _propertypath_sequence(property_path)) yield (TROVE.osfmapPropertyPath, _osfmap_path(property_path)) -def _multi_propertypath_twoples(propertypath_set): - yield (TROVE.propertyPathKey, literal(osfmap.osfmap_propertypath_set_key(propertypath_set))) +def _multi_propertypath_twoples(propertypath_set: PropertypathSet) -> Generator[rdf.RdfTwople]: + yield (TROVE.propertyPathKey, rdf.literal(osfmap.osfmap_propertypath_set_key(propertypath_set))) for _path in propertypath_set: - yield (TROVE.propertyPathSet, _propertypath_sequence(_path)) + yield 
TROVE.propertyPathSet, _propertypath_sequence(_path) -def _propertypath_sequence(property_path: tuple[str, ...]): +def _propertypath_sequence(property_path: Propertypath) -> Any: _propertypath_metadata = [] for _property_iri in property_path: try: @@ -548,24 +565,24 @@ def _propertypath_sequence(property_path: tuple[str, ...]): {_property_iri: _property_twopledict}, focus_iri=_property_iri, )) - return sequence(_propertypath_metadata) + return rdf.sequence(_propertypath_metadata) -def _related_property_result(property_path: tuple[str, ...], count: int): +def _related_property_result(property_path: Propertypath, count: int) -> rdf.Blanknode: return frozenset(( (RDF.type, TROVE.RelatedPropertypath), (TROVE.cardsearchResultCount, count), - (TROVE.suggestedFilterOperator, literal(osfmap.osfmap_json_shorthand().compact_iri( + (TROVE.suggestedFilterOperator, rdf.literal(osfmap.osfmap_json_shorthand().compact_iri( osfmap.suggested_filter_operator(property_path[-1]), ))), *_single_propertypath_twoples(property_path), )) -def _search_page_links(search_focus, search_params): +def _search_page_links(search_focus: Any, search_params: Any) -> Iterator[rdf.RdfTwople]: _search_iri_split = urllib.parse.urlsplit(next(iter(search_focus.iris))) - def _iri_with_cursor(page_cursor: PageCursor): + def _iri_with_cursor(page_cursor: PageCursor) -> Any: return urllib.parse.urlunsplit(( _search_iri_split.scheme, _search_iri_split.netloc, @@ -585,9 +602,9 @@ def _iri_with_cursor(page_cursor: PageCursor): yield (TROVE.searchResultPage, _jsonapi_link('first', _iri_with_cursor(_first))) -def _jsonapi_link(membername, iri): +def _jsonapi_link(membername: str, iri: str) -> frozenset[tuple[str, Any]]: return frozenset(( (RDF.type, JSONAPI_LINK_OBJECT), - (JSONAPI_MEMBERNAME, literal(membername)), + (JSONAPI_MEMBERNAME, rdf.literal(membername)), (RDF.value, iri), )) diff --git a/trove/util/chainmap.py b/trove/util/chainmap.py index 48a1be487..2af461a2e 100644 --- a/trove/util/chainmap.py +++ 
b/trove/util/chainmap.py @@ -1,9 +1,10 @@ from collections.abc import Sequence, Mapping, Iterator import dataclasses +from typing import Self @dataclasses.dataclass -class SimpleChainMap(Mapping): +class SimpleChainMap[K, V](Mapping[K, V]): """Combine multiple mappings for sequential lookup. (inspired by rejecting the suggested "greatly simplified read-only version of Chainmap" @@ -41,9 +42,9 @@ class SimpleChainMap(Mapping): >>> _map.with_new({'a': 17}).get('a') 17 """ - maps: Sequence[Mapping] + maps: Sequence[Mapping[K, V]] - def __getitem__(self, key): + def __getitem__(self, key: K) -> V: for _mapping in self.maps: try: return _mapping[key] @@ -51,7 +52,7 @@ def __getitem__(self, key): pass raise KeyError(key) - def __iter__(self) -> Iterator: + def __iter__(self) -> Iterator[K]: _seen: set = set() for _mapping in self.maps: for _key in _mapping.keys(): @@ -59,8 +60,8 @@ def __iter__(self) -> Iterator: yield _key _seen.add(_key) - def __len__(self): # for Mapping + def __len__(self) -> int: # for Mapping return sum(1 for _ in self) # use __iter__ - def with_new(self, new_map): + def with_new(self, new_map: Mapping[K, V]) -> Self: return dataclasses.replace(self, maps=[new_map, *self.maps]) diff --git a/trove/util/django.py b/trove/util/django.py index e927f2a68..77cf184bd 100644 --- a/trove/util/django.py +++ b/trove/util/django.py @@ -1,11 +1,15 @@ from __future__ import annotations -from collections.abc import Iterator +from collections.abc import Generator +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from django.db.models.query import QuerySet __all__ = ('pk_chunked',) -def pk_chunked(queryset, chunksize: int) -> Iterator[list]: +def pk_chunked(queryset: QuerySet, chunksize: int) -> Generator[list]: '''pk_chunked: get primary key values, in chunks, for the given queryset yields non-empty lists of primary keys up to `chunksize` long diff --git a/trove/util/frozen.py b/trove/util/frozen.py index 65709f3fb..8030cbaf3 100644 --- 
a/trove/util/frozen.py +++ b/trove/util/frozen.py @@ -1,6 +1,6 @@ -import collections.abc +from collections.abc import Mapping import types - +from typing import Any _FROZEN_TYPES = ( tuple, @@ -11,7 +11,7 @@ ) -def freeze(obj): +def freeze(obj: Any) -> Any: ''' >>> freeze([1, 1, 2]) (1, 1, 2) @@ -37,10 +37,8 @@ def freeze(obj): raise ValueError(f'how freeze {obj!r}?') -def freeze_mapping(_base_mapping=None, /, **kwargs) -> collections.abc.Mapping: - _mutable_mapping = {} - for _map in (_base_mapping, kwargs): - if _map is not None: - for _key, _val in _map.items(): - _mutable_mapping[_key] = freeze(_val) +def freeze_mapping[K, V](_map: Mapping[K, V]) -> Mapping[K, V]: + _mutable_mapping: dict[K, V] = {} + for _key, _val in _map.items(): + _mutable_mapping[_key] = freeze(_val) return types.MappingProxyType(_mutable_mapping) diff --git a/trove/util/iris.py b/trove/util/iris.py index 736758a64..8a1c7e105 100644 --- a/trove/util/iris.py +++ b/trove/util/iris.py @@ -50,7 +50,7 @@ def get_iri_scheme(iri: str) -> str: return _iri_scheme -def iris_sufficiently_equal(*iris) -> bool: +def iris_sufficiently_equal(*iris: str) -> bool: ''' >>> iris_sufficiently_equal( ... 'flipl://iri.example/blarg/blerg/?#', @@ -112,7 +112,7 @@ def get_sufficiently_unique_iri_and_scheme(iri: str) -> tuple[str, str]: return (_cleaned_remainder, _scheme) -def is_worthwhile_iri(iri: str): +def is_worthwhile_iri(iri: str) -> bool: ''' >>> is_worthwhile_iri('flipl://iri.example/blarg/?#') True @@ -127,7 +127,7 @@ def is_worthwhile_iri(iri: str): ) -def iri_path_as_keyword(iris: list[str] | tuple[str, ...], *, suffuniq=False) -> str: +def iri_path_as_keyword(iris: list[str] | tuple[str, ...], *, suffuniq: bool = False) -> str: '''return a string-serialized list of iris meant for storing in an elasticsearch "keyword" field (happens to use json) @@ -138,6 +138,10 @@ def iri_path_as_keyword(iris: list[str] | tuple[str, ...], *, suffuniq=False) -> ... 
suffuniq=True) '["://iri.example/blarg", "namly:urn.example:blerg"]' ''' + assert isinstance(iris, (list, tuple)) and all( + isinstance(_pathstep, str) + for _pathstep in iris + ), f'expected list or tuple of str, got {iris}' _list = iris if suffuniq: _list = [ diff --git a/trove/util/json.py b/trove/util/json.py new file mode 100644 index 000000000..aa647681c --- /dev/null +++ b/trove/util/json.py @@ -0,0 +1,6 @@ +from __future__ import annotations + + +type JsonObject = dict[str, JsonValue] + +type JsonValue = str | int | float | list[JsonValue] | JsonObject | None diff --git a/trove/util/queryparams.py b/trove/util/queryparams.py index bdf667f56..664e63971 100644 --- a/trove/util/queryparams.py +++ b/trove/util/queryparams.py @@ -1,7 +1,8 @@ from __future__ import annotations +from collections.abc import Iterable import dataclasses import re -import typing +from typing import Self # TODO: remove django dependency (tho it is convenient) from django.http import QueryDict @@ -35,12 +36,8 @@ class QueryparamName: family: str bracketed_names: tuple[str, ...] 
= () - def __post_init__(self): - if not isinstance(self.bracketed_names, tuple): - super().__setattr__('bracketed_names', tuple(self.bracketed_names)) - @classmethod - def from_str(cls, queryparam_name: str) -> 'QueryparamName': + def from_str(cls, queryparam_name: str) -> Self: family_match = QUERYPARAM_FAMILY_REGEX.match(queryparam_name) if not family_match: raise trove_exceptions.InvalidQueryParamName(queryparam_name) @@ -57,7 +54,7 @@ def from_str(cls, queryparam_name: str) -> 'QueryparamName': raise trove_exceptions.InvalidQueryParamName(queryparam_name) return cls(family, tuple(bracketed_names)) - def __str__(self): + def __str__(self) -> str: return ''.join(( self.family, *( @@ -87,11 +84,11 @@ def queryparams_from_querystring(querystring: str) -> QueryparamDict: return _queryparams -def split_queryparam_value(value: str): +def split_queryparam_value(value: str) -> list[str]: return value.split(QUERYPARAM_VALUES_DELIM) -def join_queryparam_value(values: typing.Iterable[str]): +def join_queryparam_value(values: Iterable[str]) -> str: return QUERYPARAM_VALUES_DELIM.join(values) diff --git a/trove/util/randomness.py b/trove/util/randomness.py index ae93d0a83..6e63965f3 100644 --- a/trove/util/randomness.py +++ b/trove/util/randomness.py @@ -1,11 +1,8 @@ +from collections.abc import Iterable import random -import typing -_T = typing.TypeVar('_T') - - -def shuffled(items: typing.Iterable[_T]) -> list[_T]: +def shuffled[T](items: Iterable[T]) -> list[T]: _itemlist = list(items) random.shuffle(_itemlist) return _itemlist diff --git a/trove/util/trove_params.py b/trove/util/trove_params.py index c693de112..8801e7d5b 100644 --- a/trove/util/trove_params.py +++ b/trove/util/trove_params.py @@ -26,7 +26,7 @@ class BasicTroveParams: iri_shorthand: rdf.IriShorthand = dataclasses.field(repr=False) accept_mediatype: str | None included_relations: PropertypathSet = dataclasses.field(repr=False, compare=False) - attrpaths_by_type: Mapping[str, PropertypathSet] = 
dataclasses.field(repr=False, compare=False) + attrpaths_by_type: Mapping[str, tuple[Propertypath, ...]] = dataclasses.field(repr=False, compare=False) blend_cards: bool ### @@ -65,7 +65,7 @@ def _default_attrpaths(cls) -> Mapping[str, tuple[Propertypath, ...]]: return {} @classmethod - def _gather_shorthand(cls, queryparams: _qp.QueryparamDict): + def _gather_shorthand(cls, queryparams: _qp.QueryparamDict) -> rdf.IriShorthand: _prefixmap = {} for _qp_name, _iri in queryparams.get('iriShorthand', []): try: @@ -95,7 +95,9 @@ def _gather_attrpaths(cls, queryparams: _qp.QueryparamDict, shorthand: rdf.IriSh str, tuple[Propertypath, ...], ]: - _attrpaths = SimpleChainMap([cls._default_attrpaths()]) + _attrpaths: SimpleChainMap[str, tuple[Propertypath, ...]] = SimpleChainMap( + [cls._default_attrpaths()], + ) _fields_params = queryparams.get('fields', []) if _fields_params: _requested: dict[str, list[Propertypath]] = defaultdict(list) diff --git a/trove/views/_base.py b/trove/views/_base.py index e2cd48f48..802aa56e2 100644 --- a/trove/views/_base.py +++ b/trove/views/_base.py @@ -1,32 +1,38 @@ -__all__ = ( - 'GatheredTroveView', - 'StaticTroveView', -) - +from __future__ import annotations import abc from collections.abc import Container import functools -from typing import ClassVar +from typing import ( + ClassVar, + TYPE_CHECKING, +) -from django import http as djhttp from django.views import View from primitive_metadata import gather from primitive_metadata import primitive_rdf as rdf from trove import exceptions as trove_exceptions -from trove.vocab.namespaces import TROVE, RDF from trove.util.trove_params import BasicTroveParams +from trove.vocab.namespaces import TROVE, RDF from trove.render import ( - BaseRenderer, DEFAULT_RENDERER_TYPE, get_renderer_type, ) -from trove.render._rendering import ProtoRendering from ._gather_ask import ask_gathering_from_params from ._responder import ( make_http_error_response, make_http_response, ) +if TYPE_CHECKING: + from 
django.http import HttpResponse, StreamingHttpResponse, HttpRequest + from trove.render import BaseRenderer + from trove.render._rendering import ProtoRendering + + +__all__ = ( + 'GatheredTroveView', + 'StaticTroveView', +) class BaseTroveView(View, abc.ABC): @@ -37,7 +43,7 @@ class BaseTroveView(View, abc.ABC): def _render_response_content(self, request, params, renderer_type: type[BaseRenderer], url_kwargs) -> ProtoRendering: raise NotImplementedError - def get(self, request, **kwargs): + def get(self, request: HttpRequest, **kwargs: str) -> HttpResponse | StreamingHttpResponse: try: _renderer_type = get_renderer_type(request) except trove_exceptions.CannotRenderMediatype as _error: @@ -57,7 +63,7 @@ def get(self, request, **kwargs): renderer_type=_renderer_type, ) - def _parse_params(self, request: djhttp.HttpRequest): + def _parse_params(self, request: HttpRequest): return self.params_type.from_querystring(request.META['QUERY_STRING']) @@ -78,10 +84,10 @@ def _gather_to_renderer(self, focus, params, renderer_type: type[BaseRenderer]) ask_gathering_from_params(_gathering, params, focus) return renderer_type(focus, _gathering) - def _get_focus_iri(self, request, params): + def _get_focus_iri(self, request, params) -> str: return request.build_absolute_uri() - def _build_focus(self, request, params, url_kwargs): + def _build_focus(self, request, params, url_kwargs) -> gather.Focus: return gather.Focus.new(self._get_focus_iri(request, params), self.focus_type_iris) def _build_gathering(self, params, renderer_type: type[BaseRenderer]) -> gather.Gathering: @@ -89,7 +95,7 @@ def _build_gathering(self, params, renderer_type: type[BaseRenderer]) -> gather. 
self._get_gatherer_kwargs(params, renderer_type), ) - def _get_gatherer_kwargs(self, params, renderer_type): + def _get_gatherer_kwargs(self, params, renderer_type) -> dict: _kwargs = {} _deriver_kw = _get_param_keyword(TROVE.deriverIRI, self.gathering_organizer) if _deriver_kw: diff --git a/trove/views/_gather_ask.py b/trove/views/_gather_ask.py index c995a9907..9cf4b5694 100644 --- a/trove/views/_gather_ask.py +++ b/trove/views/_gather_ask.py @@ -7,7 +7,7 @@ def ask_gathering_from_params( gathering: gather.Gathering, params: BasicTroveParams, start_focus: gather.Focus, -): +) -> None: # fill the gathering's cache with included related resources... gathering.ask(params.included_relations, focus=start_focus) # ...and add requested attributes on the focus and related resources diff --git a/trove/views/_responder.py b/trove/views/_responder.py index 21f6b1652..1d3365742 100644 --- a/trove/views/_responder.py +++ b/trove/views/_responder.py @@ -18,7 +18,7 @@ def make_http_response( content_rendering: ProtoRendering, http_headers: typing.Iterable[tuple[str, str]] = (), http_request: djhttp.HttpRequest | None = None, -) -> djhttp.HttpResponse: +) -> djhttp.HttpResponse | djhttp.StreamingHttpResponse: _response_type = ( djhttp.StreamingHttpResponse if isinstance(content_rendering, StreamableRendering) @@ -50,13 +50,13 @@ def make_http_error_response( ) -def _sanitize_file_name(requested_name: str): +def _sanitize_file_name(requested_name: str) -> str: _underscored = re.sub(r'["\'/:\\;\s]', '_', requested_name) _datestamp = datetime.date.today().isoformat() return f'{_datestamp}_{_underscored}' if _underscored else _datestamp -def _get_file_name(requested_name: str, mediatype: str): +def _get_file_name(requested_name: str, mediatype: str) -> str: _file_name = _sanitize_file_name(requested_name) _dot_extension = mediatypes.dot_extension(mediatype) if _file_name.endswith(_dot_extension): @@ -64,7 +64,7 @@ def _get_file_name(requested_name: str, mediatype: str): return 
f'{_file_name}{_dot_extension}' -def _disposition(filename: str): +def _disposition(filename: str) -> bytes: return b'; '.join(( b'attachment', b'filename=' + filename.encode('latin-1', errors='replace'), diff --git a/trove/views/docs.py b/trove/views/docs.py index 9ae818602..72c9a9130 100644 --- a/trove/views/docs.py +++ b/trove/views/docs.py @@ -1,4 +1,4 @@ -from django.http import HttpResponse +from django.http import HttpResponse, HttpRequest from django.template.response import SimpleTemplateResponse from django.views import View @@ -7,7 +7,7 @@ class OpenapiJsonView(View): - def get(self, request): + def get(self, request: HttpRequest) -> HttpResponse: return HttpResponse( content=get_trove_openapi_json(), content_type=mediatypes.JSON, @@ -15,6 +15,6 @@ def get(self, request): class OpenapiHtmlView(View): - def get(self, request): + def get(self, request: HttpRequest) -> SimpleTemplateResponse: # TODO: parameterize title, openapi.json url return SimpleTemplateResponse('trove/openapi-redoc.html') diff --git a/trove/views/ingest.py b/trove/views/ingest.py index 73d6cb021..a6b21590a 100644 --- a/trove/views/ingest.py +++ b/trove/views/ingest.py @@ -3,27 +3,31 @@ import logging from django import http +from django.http import HttpRequest, HttpResponse from django.views import View from share.models.feature_flag import FeatureFlag from trove import digestive_tract from trove import exceptions as trove_exceptions from trove.util.queryparams import parse_booly_str +if __debug__: + from share.models import ShareUser logger = logging.getLogger(__name__) class RdfIngestView(View): - def get(self, request): + def get(self, request: HttpRequest) -> None: # TODO: something? maybe show this user's most recently pushed rdf for this pid raise http.Http404 - def post(self, request): + def post(self, request: HttpRequest) -> HttpResponse: # TODO: better error responses (jsonapi? shacl:ValidationReport?) # TODO: permissions by focus_iri domain (compare with user's Source)? 
if not request.user.is_authenticated: return http.HttpResponse(status=HTTPStatus.UNAUTHORIZED) + assert isinstance(request.user, ShareUser) if FeatureFlag.objects.flag_is_up(FeatureFlag.FORBID_UNTRUSTED_FEED) and not request.user.is_trusted: return http.HttpResponse(status=HTTPStatus.FORBIDDEN) # TODO: declare/validate params with dataclass @@ -41,6 +45,8 @@ def post(self, request): return http.HttpResponse('expiration_date queryparam must be in ISO-8601 date format (YYYY-MM-DD)', status=HTTPStatus.BAD_REQUEST) _nonurgent = parse_booly_str(request.GET.get('nonurgent')) try: + if not request.content_type: + raise trove_exceptions.DigestiveError('missing content-type') digestive_tract.ingest( raw_record=request.body.decode(encoding='utf-8'), record_mediatype=request.content_type, @@ -59,10 +65,11 @@ def post(self, request): # TODO: include (link to?) extracted card(s) return http.HttpResponse(status=HTTPStatus.CREATED) - def delete(self, request): + def delete(self, request: HttpRequest) -> HttpResponse: # TODO: cleaner permissions if not request.user.is_authenticated: return http.HttpResponse(status=HTTPStatus.UNAUTHORIZED) + assert isinstance(request.user, ShareUser) if FeatureFlag.objects.flag_is_up(FeatureFlag.FORBID_UNTRUSTED_FEED) and not request.user.is_trusted: return http.HttpResponse(status=HTTPStatus.FORBIDDEN) # TODO: declare/validate params with dataclass diff --git a/trove/views/search.py b/trove/views/search.py index f84f50623..9874a7788 100644 --- a/trove/views/search.py +++ b/trove/views/search.py @@ -50,9 +50,9 @@ def get_search_handler( def _get_wrapped_handler(self, strategy: index_strategy.IndexStrategy): _raw_handler = self.get_search_handler(strategy) - def _wrapped_handler(search_params): + def _wrapped_handler(search_params: CardsearchParams) -> BasicSearchHandle: _handle = _raw_handler(search_params) - _handle.handler = _wrapped_handler + _handle.handler = _wrapped_handler # type: ignore[assignment] return _handle return _wrapped_handler @@ 
-61,7 +61,7 @@ class CardsearchView(_BaseTrovesearchView): focus_type = CardsearchFocus params_type = CardsearchParams - def get_search_handler(self, strategy): + def get_search_handler(self, strategy: index_strategy.IndexStrategy): return strategy.pls_handle_cardsearch @@ -69,5 +69,5 @@ class ValuesearchView(_BaseTrovesearchView): focus_type = ValuesearchFocus params_type = ValuesearchParams - def get_search_handler(self, strategy): + def get_search_handler(self, strategy: index_strategy.IndexStrategy): return strategy.pls_handle_valuesearch diff --git a/trove/views/vocab.py b/trove/views/vocab.py index 3a896fe82..16f143ad2 100644 --- a/trove/views/vocab.py +++ b/trove/views/vocab.py @@ -1,19 +1,23 @@ +from __future__ import annotations +from typing import TYPE_CHECKING from urllib.parse import urlencode -from django import http +from django.http import Http404 from django.shortcuts import redirect from django.urls import reverse from django.views import View from trove.vocab.namespaces import TROVE from trove.vocab.trove import TROVE_API_THESAURUS +if TYPE_CHECKING: + from django.http import HttpRequest, HttpResponse, StreamingHttpResponse class TroveVocabView(View): - def get(self, request, vocab_term): + def get(self, request: HttpRequest, vocab_term: str) -> HttpResponse | StreamingHttpResponse: _iri = TROVE[vocab_term] if _iri not in TROVE_API_THESAURUS: - raise http.Http404 + raise Http404 _browse_url = '?'.join(( reverse('trove:browse-iri'), urlencode({'iri': _iri}), diff --git a/trove/vocab/osfmap.py b/trove/vocab/osfmap.py index d67e545e8..492deb64b 100644 --- a/trove/vocab/osfmap.py +++ b/trove/vocab/osfmap.py @@ -999,11 +999,11 @@ def osfmap_json_shorthand() -> IriShorthand: ) -def parse_osfmap_propertypath(serialized_path: str, *, allow_globs=False) -> Propertypath: +def parse_osfmap_propertypath(serialized_path: str, *, allow_globs: bool = False) -> Propertypath: return parse_propertypath(serialized_path, osfmap_json_shorthand(), 
allow_globs=allow_globs) -def parse_osfmap_propertypath_set(serialized_path_set: str, *, allow_globs=False) -> Iterator[Propertypath]: +def parse_osfmap_propertypath_set(serialized_path_set: str, *, allow_globs: bool = False) -> Iterator[Propertypath]: for _path in split_queryparam_value(serialized_path_set): yield parse_osfmap_propertypath(_path, allow_globs=allow_globs) @@ -1044,11 +1044,11 @@ def suggested_property_paths(type_iris: set[str]) -> tuple[tuple[str, ...], ...] else: _suggested = ALL_SUGGESTED_PROPERTY_PATHS if _suggested and FeatureFlag.objects.flag_is_up(FeatureFlag.SUGGEST_CREATOR_FACET): - return ((DCTERMS.creator,), *_suggested) + return (DCTERMS.creator,), *_suggested return _suggested -def suggested_filter_operator(property_iri: str): +def suggested_filter_operator(property_iri: str) -> str: # return iri value for the suggested trove-search filter operator if is_date_property(property_iri): return TROVE['at-date'] @@ -1057,6 +1057,6 @@ def suggested_filter_operator(property_iri: str): return TROVE['any-of'] -def is_date_property(property_iri): +def is_date_property(property_iri: str) -> bool: # TODO: better inference (rdfs:range?) 
return property_iri in DATE_PROPERTIES diff --git a/trove/vocab/static_vocab/__init__.py b/trove/vocab/static_vocab/__init__.py index 97e634e21..f60b53751 100644 --- a/trove/vocab/static_vocab/__init__.py +++ b/trove/vocab/static_vocab/__init__.py @@ -1,7 +1,6 @@ import functools import pathlib import types - from primitive_metadata import primitive_rdf as rdf import rdflib @@ -40,7 +39,7 @@ @functools.cache -def combined_thesaurus(): +def combined_thesaurus(): # type: ignore _combined_rdf = rdf.RdfGraph() for _thesaurus in _STATIC_THESAURUSES: _combined_rdf.add_tripledict(_thesaurus) @@ -52,7 +51,7 @@ def combined_thesaurus(): @functools.cache -def combined_thesaurus__suffuniq(): +def combined_thesaurus__suffuniq(): # type: ignore return types.MappingProxyType({ get_sufficiently_unique_iri(_subj): _twoples for _subj, _twoples in combined_thesaurus().items() diff --git a/trove/vocab/trove.py b/trove/vocab/trove.py index 166a5a24e..7dd6d1a9e 100644 --- a/trove/vocab/trove.py +++ b/trove/vocab/trove.py @@ -1,5 +1,7 @@ import functools import urllib.parse +from typing import Union, Any +from uuid import UUID from django.conf import settings from django.urls import reverse @@ -38,11 +40,11 @@ # - TODO: each iri is an irL that resolves to rdf, html -def _literal_markdown(text: str, *, language: str): +def _literal_markdown(text: str, *, language: str) -> literal: return literal(text, language=language, mediatype='text/markdown;charset=utf-8') -def trove_browse_link(iri: str): +def trove_browse_link(iri: str) -> str: _compact = namespaces_shorthand().compact_iri(iri) return urllib.parse.urljoin( reverse('trove:browse-iri'), @@ -845,9 +847,9 @@ def trove_json_shorthand() -> IriShorthand: @functools.cache -def trove_indexcard_namespace(): +def trove_indexcard_namespace() -> IriNamespace: return IriNamespace(f'{settings.SHARE_WEB_URL}trove/index-card/') -def trove_indexcard_iri(indexcard_uuid): +def trove_indexcard_iri(indexcard_uuid: Union[UUID, str]) -> Any: return 
trove_indexcard_namespace()[str(indexcard_uuid)]