diff --git a/.codecov.yml b/.codecov.yml index f4a4f9cbcf4..a3f9e9e6dd0 100644 --- a/.codecov.yml +++ b/.codecov.yml @@ -18,7 +18,6 @@ flag_management: component_management: default_rules: - carryforward: true statuses: - type: project target: auto diff --git a/.env-devel b/.env-devel index e2f8baef3ca..d5df4deab06 100644 --- a/.env-devel +++ b/.env-devel @@ -49,6 +49,8 @@ CATALOG_SERVICES_DEFAULT_RESOURCES='{"CPU": {"limit": 0.1, "reservation": 0.1}, CATALOG_SERVICES_DEFAULT_SPECIFICATIONS='{}' CATALOG_TRACING=null +CELERY_RESULT_EXPIRES=P7D + CLUSTERS_KEEPER_COMPUTATIONAL_BACKEND_DEFAULT_CLUSTER_AUTH='{"type":"tls","tls_ca_file":"/home/scu/.dask/dask-crt.pem","tls_client_cert":"/home/scu/.dask/dask-crt.pem","tls_client_key":"/home/scu/.dask/dask-key.pem"}' CLUSTERS_KEEPER_COMPUTATIONAL_BACKEND_DOCKER_IMAGE_TAG=master-github-latest CLUSTERS_KEEPER_DASK_NTHREADS=0 @@ -83,6 +85,12 @@ DIRECTOR_REGISTRY_CACHING=True DIRECTOR_SERVICES_CUSTOM_CONSTRAINTS=null DIRECTOR_TRACING=null +DOCKER_API_PROXY_HOST=docker-api-proxy +DOCKER_API_PROXY_PASSWORD=null +DOCKER_API_PROXY_PORT=8888 +DOCKER_API_PROXY_SECURE=False +DOCKER_API_PROXY_USER=null + EFS_USER_ID=8006 EFS_USER_NAME=efs EFS_GROUP_ID=8106 @@ -133,6 +141,14 @@ DYNAMIC_SCHEDULER_UI_STORAGE_SECRET=adminadmin FUNCTION_SERVICES_AUTHORS='{"UN": {"name": "Unknown", "email": "unknown@osparc.io", "affiliation": "unknown"}}' +WEBSERVER_LICENSES={} +LICENSES_ITIS_VIP_SYNCER_ENABLED=false +LICENSES_ITIS_VIP_SYNCER_PERIODICITY=1D00:00:00 +LICENSES_ITIS_VIP_API_URL=https://replace-with-itis-api/{category} +LICENSES_ITIS_VIP_CATEGORIES='{"HumanWholeBody": "Humans", "HumanBodyRegion": "Humans (Region)", "AnimalWholeBody": "Animal"}' +LICENSES_SPEAG_PHANTOMS_API_URL=https://replace-with-speag-api/{category} +LICENSES_SPEAG_PHANTOMS_CATEGORIES='{"ComputationalPhantom": "Phantom of the Opera"}' + # Can use 'docker run -it itisfoundation/invitations:latest simcore-service-invitations generate-dotenv --auto-password' INVITATIONS_DEFAULT_PRODUCT=osparc INVITATIONS_HOST=invitations @@ -236,8 +252,6 @@ SMTP_PROTOCOL=UNENCRYPTED SMTP_USERNAME=it_doesnt_matter # STORAGE ---- -BF_API_KEY=none -BF_API_SECRET=none STORAGE_ENDPOINT=storage:8080 STORAGE_HOST=storage STORAGE_LOGLEVEL=INFO @@ -272,7 +286,6 @@ WB_GC_GROUPS=0 WB_GC_INVITATIONS=null WB_GC_LOGIN=null WB_GC_LOGLEVEL=INFO -WB_GC_META_MODELING=0 WB_GC_NOTIFICATIONS=0 WB_GC_PAYMENTS=null WB_GC_PRODUCTS=0 @@ -287,7 +300,6 @@ WB_GC_STUDIES_DISPATCHER=null WB_GC_TAGS=0 WB_GC_TRACING=null WB_GC_USERS={} -WB_GC_VERSION_CONTROL=0 WB_GC_WALLETS=0 WB_DB_EL_ACTIVITY=null @@ -304,7 +316,6 @@ WB_DB_EL_GROUPS=0 WB_DB_EL_INVITATIONS=null WB_DB_EL_LOGIN=null WB_DB_EL_LOGLEVEL=INFO -WB_DB_EL_META_MODELING=0 WB_DB_EL_NOTIFICATIONS=0 WB_DB_EL_PAYMENTS=null WB_DB_EL_PRODUCTS=0 @@ -319,7 +330,6 @@ WB_DB_EL_STUDIES_DISPATCHER=null WB_DB_EL_TAGS=0 WB_DB_EL_TRACING=null WB_DB_EL_USERS={} -WB_DB_EL_VERSION_CONTROL=0 WB_DB_EL_WALLETS=0 # WEBSERVER ---- @@ -371,10 +381,8 @@ WEBSERVER_GARBAGE_COLLECTOR=null WEBSERVER_GROUPS=1 WEBSERVER_GUNICORN_CMD_ARGS=--timeout=180 WEBSERVER_HOST=webserver -WEBSERVER_LICENSES=true WEBSERVER_LOGIN={} WEBSERVER_LOGLEVEL=INFO -WEBSERVER_META_MODELING=1 WEBSERVER_NOTIFICATIONS=1 WEBSERVER_PAYMENTS={} WEBSERVER_PORT=8080 @@ -392,4 +400,3 @@ WEBSERVER_STUDIES_DISPATCHER={} WEBSERVER_TAGS=1 WEBSERVER_TRACING=null WEBSERVER_USERS={} -WEBSERVER_VERSION_CONTROL=1 diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 36c26ee310e..8705bf17e2e 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -4,45 +4,47 
@@ # files and folders recursively -.env-* @mrnicegyu11 @YuryHrytsuk -Makefile @pcrespov @sanderegg +.env-* @mrnicegyu11 @YuryHrytsuk +Makefile @pcrespov @sanderegg # NOTE: '/' denotes the root of the repository -/.github/ @sanderegg @pcrespov -/api/ @sanderegg @pcrespov @matusdrobuliak66 -/ci/ @sanderegg @pcrespov -/docs/ @pcrespov -/packages/common-library/ @giancarloromeo -/packages/models-library/ @sanderegg @pcrespov @matusdrobuliak66 @giancarloromeo -/packages/postgres-database/ @matusdrobuliak66 -/packages/pytest-simcore/ @pcrespov @sanderegg -/packages/service-integration/ @pcrespov @sanderegg @GitHK -/packages/service-library/ @pcrespov -/packages/settings-library/ @pcrespov @sanderegg -/requirements/ @pcrespov @matusdrobuliak66 -/services/agent/ @GitHK -/services/api-server/ @pcrespov -/services/autoscaling/ @sanderegg -/services/catalog/ @pcrespov @sanderegg -/services/clusters-keeper/ @sanderegg -/services/datcore-adapter/ @sanderegg -/services/director*/ @sanderegg @pcrespov @GitHK -/services/docker-compose*.yml @sanderegg @mrnicegyu11 @YuryHrytsuk -/services/dynamic-sidecar/ @GitHK -/services/efs-guardian/ @matusdrobuliak66 -/services/invitations/ @pcrespov -/services/migration/ @pcrespov -/services/payments/ @pcrespov @matusdrobuliak66 -/services/resource-usage-tracker/ @matusdrobuliak66 -/services/static-webserver/ @GitHK -/services/static-webserver/client/ @jsaq007 @odeimaiz @ignapas -/services/storage/ @sanderegg -/services/web/server/ @pcrespov @sanderegg @GitHK @matusdrobuliak66 -/tests/e2e-frontend/ @odeimaiz -/tests/e2e-playwright/ @matusdrobuliak66 -/tests/environment-setup/ @pcrespov -/tests/performance/ @pcrespov @sanderegg -/tests/public-api/ @pcrespov -requirements/* @pcrespov -tools/* @pcrespov +/.github/ @sanderegg @pcrespov +/api/ @sanderegg @pcrespov @matusdrobuliak66 +/ci/ @sanderegg @pcrespov +/docs/ @pcrespov +/packages/common-library/ @giancarloromeo +/packages/models-library/ @sanderegg @pcrespov @matusdrobuliak66 @giancarloromeo +/packages/postgres-database/ @matusdrobuliak66 +/packages/pytest-simcore/ @pcrespov @sanderegg +/packages/service-integration/ @pcrespov @sanderegg @GitHK +/packages/service-library/ @pcrespov +/packages/settings-library/ @pcrespov @sanderegg +/requirements/ @pcrespov @matusdrobuliak66 +/services/agent/ @GitHK +/services/api-server/ @pcrespov +/services/api-server/tests/unit/pact_broker/ @matusdrobuliak66 +/services/autoscaling/ @sanderegg +/services/catalog/ @pcrespov @sanderegg +/services/clusters-keeper/ @sanderegg +/services/datcore-adapter/ @sanderegg +/services/director*/ @sanderegg @pcrespov @GitHK +/services/docker-compose.yml @sanderegg @mrnicegyu11 @YuryHrytsuk +/services/docker-compose.*.yml @sanderegg +/services/dynamic-sidecar/ @GitHK +/services/efs-guardian/ @matusdrobuliak66 +/services/invitations/ @pcrespov +/services/migration/ @pcrespov +/services/payments/ @pcrespov @matusdrobuliak66 +/services/resource-usage-tracker/ @matusdrobuliak66 +/services/static-webserver/ @GitHK +/services/static-webserver/client/ @odeimaiz +/services/storage/ @sanderegg +/services/web/server/ @pcrespov @sanderegg @GitHK @matusdrobuliak66 +/tests/e2e-frontend/ @odeimaiz +/tests/e2e-playwright/ @matusdrobuliak66 +/tests/environment-setup/ @pcrespov +/tests/performance/ @pcrespov @sanderegg +/tests/public-api/ @pcrespov +requirements/* @pcrespov +tools/* @pcrespov diff --git a/.github/mergify.yml b/.github/mergify.yml index 633b447a990..8e116deb45a 100644 --- a/.github/mergify.yml +++ b/.github/mergify.yml @@ -12,58 +12,3 @@ 
queue_rules: - "#approved-reviews-by>=2" # Requires 2 approving reviews - "#changes-requested-reviews-by=0" # No changes requested - "#review-threads-unresolved=0" # All review threads resolved - - merge_conditions: # conditions to be met before being able to merge - # list of CI checks that need to pass - - check-success=system-test-environment-setup - - check-success=changes - - check-success=build-test-images - - check-success=build-test-images-frontend - - check-success=unit-tests - - check-success=integration-tests - - check-success=system-tests - - -pull_request_rules: - - name: retry flaky tests (when PR is approved) - conditions: - - label=🤖-automerge - - label!=🤖-do-not-merge - - base=master - - - -draft # PR is not in draft state - - -conflict # No merge conflicts - - "#approved-reviews-by>=2" # Requires 2 approving reviews - - "#changes-requested-reviews-by=0" # No changes requested - - "#review-threads-unresolved=0" # All review threads resolved - - - or: - - check-failure=system-test-environment-setup - - check-failure=changes - - check-failure=build-test-images - - check-failure=build-test-images-frontend - - check-failure=unit-tests - - check-failure=integration-tests - - check-failure=system-tests - actions: - comment: - message: "Restarted CI due to failing tests" - github_actions: - workflow: - dispatch: - - workflow: ci-testing-deploy.yml - - - name: Automatically add PR to queue if it meets conditions - conditions: - - "label=🤖-automerge" - - "label!=🤖-do-not-merge" - - "base=master" - - - "-draft" # PR is not in draft state - - "-conflict" # PR has no conflicts - - "#approved-reviews-by>=2" # Requires 2 approvals - - "#changes-requested-reviews-by=0" # No requested changes - - "#review-threads-unresolved=0" # All review threads resolved - actions: - queue: - name: default diff --git a/.github/workflows/ci-arm-build.yml b/.github/workflows/ci-arm-build.yml index fcced45470a..1cc89874b06 100644 --- a/.github/workflows/ci-arm-build.yml +++ b/.github/workflows/ci-arm-build.yml @@ -19,7 +19,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - os: [ubuntu-22.04] + os: [ubuntu-24.04] python: ["3.11"] env: # secrets can be set in settings/secrets on github diff --git a/.github/workflows/ci-multi-architecture-fusing.yml b/.github/workflows/ci-multi-architecture-fusing.yml index bd825b0d247..dadeedf1038 100644 --- a/.github/workflows/ci-multi-architecture-fusing.yml +++ b/.github/workflows/ci-multi-architecture-fusing.yml @@ -20,7 +20,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - os: [ubuntu-22.04] + os: [ubuntu-24.04] python: ["3.11"] env: # secrets can be set in settings/secrets on github diff --git a/.github/workflows/ci-pact-master.yml b/.github/workflows/ci-pact-master.yml new file mode 100644 index 00000000000..a885fe51238 --- /dev/null +++ b/.github/workflows/ci-pact-master.yml @@ -0,0 +1,41 @@ +# This workflow holds jobs which are required to pass before merging into master + +name: CI PACT Master +on: + push: + branches: + - "master" + +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + +jobs: + pact-tests: + timeout-minutes: 10 + name: "Run PACT tests" + runs-on: ubuntu-latest + env: + # secrets can be set in settings/secrets on github + PACT_BROKER_URL: ${{ secrets.PACT_BROKER_URL }} + PACT_BROKER_USERNAME: ${{ secrets.PACT_BROKER_USERNAME }} + PACT_BROKER_PASSWORD: ${{ secrets.PACT_BROKER_PASSWORD }} + steps: + - name: setup python environment + uses: actions/setup-python@v5 + with: + python-version: "3.11" + - name: 
install uv + uses: astral-sh/setup-uv@v5 + with: + version: "0.5.x" + enable-cache: false + - name: checkout source branch + uses: actions/checkout@v4 + - name: Run pact tests + run: | + make devenv + source .venv/bin/activate + cd services/api-server + make install-ci + make test-pacts diff --git a/.github/workflows/ci-release.yml b/.github/workflows/ci-release.yml index 95a410245a3..6963b3a28c1 100644 --- a/.github/workflows/ci-release.yml +++ b/.github/workflows/ci-release.yml @@ -22,7 +22,7 @@ jobs: strategy: matrix: python: ["3.11"] - os: [ubuntu-22.04] + os: [ubuntu-24.04] fail-fast: false env: TO_TAG_PREFIX: release-github diff --git a/.github/workflows/ci-staging.yml b/.github/workflows/ci-staging.yml index 1fd08e48593..dbef04653aa 100644 --- a/.github/workflows/ci-staging.yml +++ b/.github/workflows/ci-staging.yml @@ -22,7 +22,7 @@ jobs: strategy: matrix: python: ["3.11"] - os: [ubuntu-22.04] + os: [ubuntu-24.04] fail-fast: false env: TO_TAG_PREFIX: staging-github diff --git a/.github/workflows/ci-testing-deploy.yml b/.github/workflows/ci-testing-deploy.yml index 54e2dee1fe5..ad6eacf353b 100644 --- a/.github/workflows/ci-testing-deploy.yml +++ b/.github/workflows/ci-testing-deploy.yml @@ -77,6 +77,7 @@ jobs: migration: ${{ steps.filter.outputs.migration }} payments: ${{ steps.filter.outputs.payments }} dynamic-scheduler: ${{ steps.filter.outputs.dynamic-scheduler }} + docker-api-proxy: ${{ steps.filter.outputs.docker-api-proxy }} resource-usage-tracker: ${{ steps.filter.outputs.resource-usage-tracker }} static-webserver: ${{ steps.filter.outputs.static-webserver }} storage: ${{ steps.filter.outputs.storage }} @@ -233,6 +234,9 @@ jobs: - 'services/docker-compose*' - 'scripts/mypy/*' - 'mypy.ini' + docker-api-proxy: + - 'packages/**' + - 'services/docker-api-proxy/**' resource-usage-tracker: - 'packages/**' - 'services/resource-usage-tracker/**' @@ -270,7 +274,7 @@ jobs: strategy: matrix: python: ["3.11"] - os: [ubuntu-22.04] + os: [ubuntu-24.04] fail-fast: false name: "[build] docker images (excluding frontend)" steps: @@ -305,7 +309,7 @@ jobs: strategy: matrix: python: ["3.11"] - os: [ubuntu-22.04] + os: [ubuntu-24.04] fail-fast: false name: "[build] docker images (frontend-only)" steps: @@ -339,7 +343,7 @@ jobs: strategy: matrix: python: ["3.11"] - os: [ubuntu-22.04] + os: [ubuntu-24.04] fail-fast: false steps: - uses: actions/checkout@v4 @@ -387,7 +391,7 @@ jobs: strategy: matrix: python: ["3.11"] - os: [ubuntu-22.04] + os: [ubuntu-24.04] fail-fast: false steps: - uses: actions/checkout@v4 @@ -429,7 +433,7 @@ jobs: strategy: matrix: python: ["3.11"] - os: [ubuntu-22.04] + os: [ubuntu-24.04] fail-fast: false steps: - uses: actions/checkout@v4 @@ -471,7 +475,7 @@ jobs: strategy: matrix: python: ["3.11"] - os: [ubuntu-22.04] + os: [ubuntu-24.04] fail-fast: false steps: - uses: actions/checkout@v4 @@ -510,13 +514,13 @@ jobs: unit-test-storage: needs: changes if: ${{ needs.changes.outputs.storage == 'true' || github.event_name == 'push' }} - timeout-minutes: 18 # if this timeout gets too small, then split the tests + timeout-minutes: 25 # if this timeout gets too small, then split the tests name: "[unit] storage" runs-on: ${{ matrix.os }} strategy: matrix: python: ["3.11"] - os: [ubuntu-22.04] + os: [ubuntu-24.04] fail-fast: false steps: - uses: actions/checkout@v4 @@ -561,7 +565,7 @@ jobs: strategy: matrix: python: ["3.11"] - os: [ubuntu-22.04] + os: [ubuntu-24.04] fail-fast: false steps: - uses: actions/checkout@v4 @@ -608,7 +612,7 @@ jobs: strategy: matrix: python: ["3.11"] - os: 
[ubuntu-22.04] + os: [ubuntu-24.04] fail-fast: false steps: - uses: actions/checkout@v4 @@ -650,7 +654,7 @@ jobs: strategy: matrix: python: ["3.11"] - os: [ubuntu-22.04] + os: [ubuntu-24.04] fail-fast: false steps: - uses: actions/checkout@v4 @@ -698,7 +702,7 @@ jobs: strategy: matrix: python: ["3.11"] - os: [ubuntu-22.04] + os: [ubuntu-24.04] fail-fast: false steps: - uses: actions/checkout@v4 @@ -743,7 +747,7 @@ jobs: strategy: matrix: python: ["3.11"] - os: [ubuntu-22.04] + os: [ubuntu-24.04] fail-fast: false steps: - uses: actions/checkout@v4 @@ -794,7 +798,7 @@ jobs: strategy: matrix: python: ["3.11"] - os: [ubuntu-22.04] + os: [ubuntu-24.04] fail-fast: false steps: - uses: actions/checkout@v4 @@ -850,7 +854,7 @@ jobs: strategy: matrix: python: ["3.11"] - os: [ubuntu-22.04] + os: [ubuntu-24.04] fail-fast: false steps: - uses: actions/checkout@v4 @@ -901,7 +905,7 @@ jobs: strategy: matrix: python: ["3.11"] - os: [ubuntu-22.04] + os: [ubuntu-24.04] fail-fast: false steps: - uses: actions/checkout@v4 @@ -952,7 +956,7 @@ jobs: strategy: matrix: python: ["3.11"] - os: [ubuntu-22.04] + os: [ubuntu-24.04] fail-fast: false steps: - uses: actions/checkout@v4 @@ -1003,7 +1007,7 @@ jobs: strategy: matrix: python: ["3.11"] - os: [ubuntu-22.04] + os: [ubuntu-24.04] fail-fast: false steps: - uses: actions/checkout@v4 @@ -1048,7 +1052,7 @@ jobs: strategy: matrix: python: ["3.11"] - os: [ubuntu-22.04] + os: [ubuntu-24.04] fail-fast: false steps: - uses: actions/checkout@v4 @@ -1093,7 +1097,7 @@ jobs: strategy: matrix: python: ["3.11"] - os: [ubuntu-22.04] + os: [ubuntu-24.04] fail-fast: false steps: - uses: actions/checkout@v4 @@ -1138,7 +1142,7 @@ jobs: strategy: matrix: python: ["3.11"] - os: [ubuntu-22.04] + os: [ubuntu-24.04] fail-fast: false steps: - uses: actions/checkout@v4 @@ -1183,7 +1187,7 @@ jobs: strategy: matrix: python: ["3.11"] - os: [ubuntu-22.04] + os: [ubuntu-24.04] fail-fast: false steps: - uses: actions/checkout@v4 @@ -1228,7 +1232,7 @@ jobs: strategy: matrix: python: ["3.11"] - os: [ubuntu-22.04] + os: [ubuntu-24.04] fail-fast: false steps: - uses: actions/checkout@v4 @@ -1283,7 +1287,7 @@ jobs: strategy: matrix: python: ["3.11"] - os: [ubuntu-22.04] + os: [ubuntu-24.04] fail-fast: false steps: - uses: actions/checkout@v4 @@ -1328,7 +1332,7 @@ jobs: strategy: matrix: python: ["3.11"] - os: [ubuntu-22.04] + os: [ubuntu-24.04] fail-fast: false steps: - uses: actions/checkout@v4 @@ -1384,7 +1388,7 @@ jobs: strategy: matrix: python: ["3.11", "3.12"] - os: [ubuntu-22.04] + os: [ubuntu-24.04] fail-fast: false steps: - uses: actions/checkout@v4 @@ -1419,7 +1423,7 @@ jobs: strategy: matrix: python: ["3.11"] - os: [ubuntu-22.04] + os: [ubuntu-24.04] fail-fast: false steps: - uses: actions/checkout@v4 @@ -1464,7 +1468,7 @@ jobs: strategy: matrix: python: ["3.11"] - os: [ubuntu-22.04] + os: [ubuntu-24.04] fail-fast: false steps: - uses: actions/checkout@v4 @@ -1509,7 +1513,7 @@ jobs: strategy: matrix: python: ["3.11"] - os: [ubuntu-22.04] + os: [ubuntu-24.04] fail-fast: false steps: - uses: actions/checkout@v4 @@ -1554,7 +1558,7 @@ jobs: strategy: matrix: python: ["3.11"] - os: [ubuntu-22.04] + os: [ubuntu-24.04] fail-fast: false steps: - uses: actions/checkout@v4 @@ -1599,7 +1603,7 @@ jobs: strategy: matrix: python: ["3.11"] - os: [ubuntu-22.04] + os: [ubuntu-24.04] fail-fast: false steps: - uses: actions/checkout@v4 @@ -1644,7 +1648,7 @@ jobs: strategy: matrix: python: ["3.11"] - os: [ubuntu-22.04] + os: [ubuntu-24.04] fail-fast: false steps: - uses: actions/checkout@v4 @@ -1688,7 +1692,7 
@@ jobs: strategy: matrix: python: ["3.11"] - os: [ubuntu-22.04] + os: [ubuntu-24.04] fail-fast: false steps: - uses: actions/checkout@v4 @@ -1729,7 +1733,7 @@ jobs: strategy: matrix: python: ["3.11"] - os: [ubuntu-22.04] + os: [ubuntu-24.04] fail-fast: false steps: - uses: actions/checkout@v4 @@ -1774,7 +1778,7 @@ jobs: strategy: matrix: python: ["3.11"] - os: [ubuntu-22.04] + os: [ubuntu-24.04] fail-fast: false steps: - uses: actions/checkout@v4 @@ -1870,7 +1874,7 @@ jobs: strategy: matrix: python: ["3.11"] - os: [ubuntu-22.04] + os: [ubuntu-24.04] fail-fast: false steps: - uses: actions/checkout@v4 @@ -1893,14 +1897,14 @@ jobs: path: /${{ runner.temp }}/build attempt_limit: 5 attempt_delay: 1000 - - name: load docker images - run: make load-images local-src=/${{ runner.temp }}/build - name: install uv uses: astral-sh/setup-uv@v5 with: version: "0.5.x" enable-cache: false cache-dependency-glob: "**/web/server/requirements/ci.txt" + - name: load docker images + run: make load-images local-src=/${{ runner.temp }}/build - name: show system version run: ./ci/helpers/show_system_versions.bash - name: install @@ -1932,7 +1936,7 @@ jobs: strategy: matrix: python: ["3.11"] - os: [ubuntu-22.04] + os: [ubuntu-24.04] fail-fast: false steps: - uses: actions/checkout@v4 @@ -1957,14 +1961,14 @@ jobs: path: /${{ runner.temp }}/build attempt_limit: 5 attempt_delay: 1000 - - name: load docker images - run: make load-images local-src=/${{ runner.temp }}/build - name: install uv uses: astral-sh/setup-uv@v5 with: version: "0.5.x" enable-cache: false cache-dependency-glob: "**/web/server/requirements/ci.txt" + - name: load docker images + run: make load-images local-src=/${{ runner.temp }}/build - name: show system version run: ./ci/helpers/show_system_versions.bash - name: install @@ -1996,7 +2000,7 @@ jobs: strategy: matrix: python: ["3.11"] - os: [ubuntu-22.04] + os: [ubuntu-24.04] fail-fast: false steps: - uses: actions/checkout@v4 @@ -2021,14 +2025,14 @@ jobs: path: /${{ runner.temp }}/build attempt_limit: 5 attempt_delay: 1000 - - name: load docker images - run: make load-images local-src=/${{ runner.temp }}/build - name: install uv uses: astral-sh/setup-uv@v5 with: version: "0.5.x" enable-cache: false cache-dependency-glob: "**/director-v2/requirements/ci.txt" + - name: load docker images + run: make load-images local-src=/${{ runner.temp }}/build - name: show system version run: ./ci/helpers/show_system_versions.bash - name: install @@ -2060,7 +2064,7 @@ jobs: strategy: matrix: python: ["3.11"] - os: [ubuntu-22.04] + os: [ubuntu-24.04] fail-fast: false env: # NOTE: DIRECTOR_DEFAULT_MAX_* used for integration-tests that include `director` service @@ -2092,8 +2096,6 @@ jobs: path: /${{ runner.temp }}/build attempt_limit: 5 attempt_delay: 1000 - - name: load docker images - run: make load-images local-src=/${{ runner.temp }}/build - name: install rclone run: sudo ./ci/github/helpers/install_rclone.bash - name: install uv @@ -2102,6 +2104,8 @@ jobs: version: "0.5.x" enable-cache: false cache-dependency-glob: "**/director-v2/requirements/ci.txt" + - name: load docker images + run: make load-images local-src=/${{ runner.temp }}/build - name: show system version run: ./ci/helpers/show_system_versions.bash - name: install @@ -2133,7 +2137,7 @@ jobs: strategy: matrix: python: ["3.11"] - os: [ubuntu-22.04] + os: [ubuntu-24.04] fail-fast: false steps: - uses: actions/checkout@v4 @@ -2158,8 +2162,6 @@ jobs: path: /${{ runner.temp }}/build attempt_limit: 5 attempt_delay: 1000 - - name: load docker images - run: 
make load-images local-src=/${{ runner.temp }}/build - name: install rclone run: sudo ./ci/github/helpers/install_rclone.bash - name: install uv @@ -2168,6 +2170,8 @@ jobs: version: "0.5.x" enable-cache: false cache-dependency-glob: "**/dynamic-sidecar/requirements/ci.txt" + - name: load docker images + run: make load-images local-src=/${{ runner.temp }}/build - name: show system version run: ./ci/helpers/show_system_versions.bash - name: install @@ -2190,6 +2194,71 @@ jobs: with: flags: integrationtests #optional + + integration-test-docker-api-proxy: + needs: [changes, build-test-images] + if: ${{ needs.changes.outputs.anything-py == 'true' || needs.changes.outputs.docker-api-proxy == 'true' || github.event_name == 'push'}} + timeout-minutes: 30 # if this timeout gets too small, then split the tests + name: "[int] docker-api-proxy" + runs-on: ${{ matrix.os }} + strategy: + matrix: + python: ["3.11"] + os: [ubuntu-24.04] + fail-fast: false + steps: + - uses: actions/checkout@v4 + - name: setup docker buildx + id: buildx + uses: docker/setup-buildx-action@v3 + with: + driver: docker-container + - name: setup python environment + uses: actions/setup-python@v5 + with: + python-version: ${{ matrix.python }} + - name: expose github runtime for buildx + uses: crazy-max/ghaction-github-runtime@v3 + # FIXME: Workaround for https://github.com/actions/download-artifact/issues/249 + - name: download docker images with retry + uses: Wandalen/wretry.action@master + with: + action: actions/download-artifact@v4 + with: | + name: docker-buildx-images-${{ runner.os }}-${{ github.sha }}-backend + path: /${{ runner.temp }}/build + attempt_limit: 5 + attempt_delay: 1000 + - name: install uv + uses: astral-sh/setup-uv@v5 + with: + version: "0.5.x" + enable-cache: false + cache-dependency-glob: "**/docker-api-proxy/requirements/ci.txt" + - name: load docker images + run: make load-images local-src=/${{ runner.temp }}/build + - name: show system version + run: ./ci/helpers/show_system_versions.bash + - name: install + run: ./ci/github/integration-testing/docker-api-proxy.bash install + - name: test + run: ./ci/github/integration-testing/docker-api-proxy.bash test + - name: upload failed tests logs + if: ${{ failure() }} + uses: actions/upload-artifact@v4 + with: + name: ${{ github.job }}_docker_logs + path: ./services/docker-api-proxy/test_failures + - name: cleanup + if: ${{ !cancelled() }} + run: ./ci/github/integration-testing/docker-api-proxy.bash clean_up + - uses: codecov/codecov-action@v5 + if: ${{ !cancelled() }} + env: + CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} + with: + flags: integrationtests #optional + integration-test-simcore-sdk: needs: [changes, build-test-images] if: ${{ needs.changes.outputs.anything-py == 'true' || needs.changes.outputs.simcore-sdk == 'true' || github.event_name == 'push' }} @@ -2199,7 +2268,7 @@ jobs: strategy: matrix: python: ["3.11"] - os: [ubuntu-22.04] + os: [ubuntu-24.04] fail-fast: false steps: - uses: actions/checkout@v4 @@ -2224,14 +2293,14 @@ jobs: path: /${{ runner.temp }}/build attempt_limit: 5 attempt_delay: 1000 - - name: load docker images - run: make load-images local-src=/${{ runner.temp }}/build - name: install uv uses: astral-sh/setup-uv@v5 with: version: "0.5.x" enable-cache: false cache-dependency-glob: "**/simcore-sdk/requirements/ci.txt" + - name: load docker images + run: make load-images local-src=/${{ runner.temp }}/build - name: show system version run: ./ci/helpers/show_system_versions.bash - name: install @@ -2262,6 +2331,7 @@ jobs: 
integration-test-director-v2-01, integration-test-director-v2-02, integration-test-dynamic-sidecar, + integration-test-docker-api-proxy, integration-test-simcore-sdk, integration-test-webserver-01, integration-test-webserver-02, @@ -2286,7 +2356,7 @@ jobs: strategy: matrix: python: ["3.11"] - os: [ubuntu-22.04] + os: [ubuntu-24.04] fail-fast: false steps: - uses: actions/checkout@v4 @@ -2311,14 +2381,14 @@ jobs: path: /${{ runner.temp }}/build attempt_limit: 5 attempt_delay: 1000 - - name: load docker images - run: make load-images local-src=/${{ runner.temp }}/build - name: install uv uses: astral-sh/setup-uv@v5 with: version: "0.5.x" enable-cache: false cache-dependency-glob: "**/public-api/requirements/ci.txt" + - name: load docker images + run: make load-images local-src=/${{ runner.temp }}/build - name: show system version run: ./ci/helpers/show_system_versions.bash - name: install @@ -2346,7 +2416,7 @@ jobs: strategy: matrix: python: ["3.11"] - os: [ubuntu-22.04] + os: [ubuntu-24.04] fail-fast: false steps: - uses: actions/checkout@v4 @@ -2371,14 +2441,14 @@ jobs: path: /${{ runner.temp }}/build attempt_limit: 5 attempt_delay: 1000 - - name: load docker images - run: make load-images local-src=/${{ runner.temp }}/build - name: install uv uses: astral-sh/setup-uv@v5 with: version: "0.5.x" enable-cache: false cache-dependency-glob: "**/swarm-deploy/requirements/ci.txt" + - name: load docker images + run: make load-images local-src=/${{ runner.temp }}/build - name: show system version run: ./ci/helpers/show_system_versions.bash - name: install @@ -2412,7 +2482,7 @@ jobs: matrix: python: ["3.11"] node: [14] - os: [ubuntu-22.04] + os: [ubuntu-24.04] fail-fast: false steps: - uses: actions/checkout@v4 @@ -2442,14 +2512,14 @@ jobs: path: /${{ runner.temp }}/build attempt_limit: 5 attempt_delay: 1000 - - name: load docker images - run: make load-images local-src=/${{ runner.temp }}/build - name: install uv uses: astral-sh/setup-uv@v5 with: version: "0.5.x" enable-cache: false cache-dependency-glob: "**/e2e/requirements/requirements.txt" + - name: load docker images + run: make load-images local-src=/${{ runner.temp }}/build - name: show system version run: ./ci/helpers/show_system_versions.bash - name: setup @@ -2494,7 +2564,7 @@ jobs: strategy: matrix: python: ["3.11"] - os: [ubuntu-22.04] + os: [ubuntu-24.04] fail-fast: false steps: - uses: actions/checkout@v4 @@ -2556,7 +2626,7 @@ jobs: strategy: matrix: python: ["3.11"] - os: [ubuntu-22.04] + os: [ubuntu-24.04] fail-fast: false steps: - uses: actions/checkout@v4 @@ -2615,7 +2685,7 @@ jobs: strategy: matrix: python: ["3.11"] - os: [ubuntu-22.04] + os: [ubuntu-24.04] fail-fast: false env: # secrets can be set in settings/secrets on github @@ -2629,6 +2699,12 @@ jobs: uses: docker/setup-buildx-action@v3 with: driver: docker-container + - name: install uv + uses: astral-sh/setup-uv@v5 + with: + version: "0.5.x" + enable-cache: false + cache-dependency-glob: "**/e2e-playwright/requirements/ci.txt" # FIXME: Workaround for https://github.com/actions/download-artifact/issues/249 - name: download docker images with retry uses: Wandalen/wretry.action@master diff --git a/.github/workflows/ci-testing-pull-request.yml b/.github/workflows/ci-testing-pull-request.yml index 935c0abb356..5090a6e3f47 100644 --- a/.github/workflows/ci-testing-pull-request.yml +++ b/.github/workflows/ci-testing-pull-request.yml @@ -10,6 +10,20 @@ on: branches: - "master" + workflow_dispatch: + inputs: + target_repo: + description: full repository name (e.g. 
'ITISFoundation/osparc-simcore') + required: true + default: "ITISFoundation/osparc-simcore" + type: environment + target_branch: + description: Check backwards compatibility against target_branch in target_repo + required: true + default: "master" + type: environment + + concurrency: group: ${{ github.workflow }}-${{ github.ref }} cancel-in-progress: true @@ -31,13 +45,15 @@ jobs: enable-cache: false - name: checkout source branch uses: actions/checkout@v4 - - name: Regenerate specs and check + - name: Generate openapi specs run: | make devenv source .venv/bin/activate make openapi-specs + - name: Check openapi specs are up to date + run: | if ! ./ci/github/helpers/openapi-specs-diff.bash diff \ - https://raw.githubusercontent.com/${{ github.event.pull_request.head.repo.full_name }}/refs/heads/${{ github.event.pull_request.head.ref }} \ + "https://raw.githubusercontent.com/$GITHUB_REPOSITORY/$GITHUB_SHA" \ .; then \ echo "::error:: OAS are not up to date. Run 'make openapi-specs' to update them"; exit 1; \ fi @@ -54,10 +70,19 @@ jobs: python-version: "3.11" - name: checkout uses: actions/checkout@v4 + - name: Set environment variables based on event type + run: | + if [[ "${{ github.event_name }}" == "workflow_dispatch" ]]; then + echo "REPO=${{ inputs.target_repo }}" >> $GITHUB_ENV + echo "BRANCH=${{ inputs.target_branch }}" >> $GITHUB_ENV + else + echo "REPO=${{ github.event.pull_request.base.repo.full_name }}" >> $GITHUB_ENV + echo "BRANCH=${{ github.base_ref }}" >> $GITHUB_ENV + fi - name: check api-server backwards compatibility run: | ./scripts/openapi-diff.bash breaking --fail-on ERR\ - https://raw.githubusercontent.com/${{ github.event.pull_request.base.repo.full_name }}/refs/heads/${{ github.event.pull_request.base.ref }}/services/api-server/openapi.json \ + "https://raw.githubusercontent.com/$REPO/refs/heads/$BRANCH/services/api-server/openapi.json" \ /specs/services/api-server/openapi.json all-oas-breaking: @@ -73,8 +98,17 @@ jobs: python-version: "3.11" - name: checkout uses: actions/checkout@v4 + - name: Set environment variables based on event type + run: | + if [[ "${{ github.event_name }}" == "workflow_dispatch" ]]; then + echo "REPO=${{ inputs.target_repo }}" >> $GITHUB_ENV + echo "BRANCH=${{ inputs.target_branch }}" >> $GITHUB_ENV + else + echo "REPO=${{ github.event.pull_request.base.repo.full_name }}" >> $GITHUB_ENV + echo "BRANCH=${{ github.base_ref }}" >> $GITHUB_ENV + fi - name: Check openapi-specs backwards compatibility run: | ./ci/github/helpers/openapi-specs-diff.bash breaking \ - https://raw.githubusercontent.com/${{ github.event.pull_request.base.repo.full_name }}/refs/heads/${{ github.event.pull_request.base.ref }} \ + "https://raw.githubusercontent.com/$REPO/refs/heads/$BRANCH" \ . 
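A note on the `ci-testing-pull-request.yml` hunks just above: the new `workflow_dispatch` trigger lets the backwards-compatibility jobs run manually against an arbitrary repo/branch, while `pull_request` runs keep comparing against the PR's base. A minimal Python sketch of what the "Set environment variables based on event type" shell step does (function and parameter names here are illustrative, not part of the workflow):

```python
def resolve_compat_target(
    event_name: str,
    dispatch_inputs: dict[str, str],
    pr_base_repo: str,
    pr_base_branch: str,
) -> tuple[str, str]:
    # manual runs take repo/branch from the workflow_dispatch inputs;
    # pull_request runs compare against the PR's base repo/branch
    if event_name == "workflow_dispatch":
        return dispatch_inputs["target_repo"], dispatch_inputs["target_branch"]
    return pr_base_repo, pr_base_branch


repo, branch = resolve_compat_target(
    "pull_request", {}, "ITISFoundation/osparc-simcore", "master"
)
# the reference-spec URL handed to openapi-diff / openapi-specs-diff.bash
print(f"https://raw.githubusercontent.com/{repo}/refs/heads/{branch}/services/api-server/openapi.json")
```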
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index b6260cc42a4..a3178f8c69a 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -5,7 +5,7 @@ default_language_version: python: python3.11 repos: - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.2.0 + rev: v5.0.0 hooks: - id: check-added-large-files args: ["--maxkb=1024"] @@ -22,7 +22,7 @@ repos: - id: no-commit-to-branch # NOTE: Keep order as pyupgrade (will update code) then pycln (remove unused imports), then isort (sort them) and black (final formatting) - repo: https://github.com/asottile/pyupgrade - rev: v2.34.0 + rev: v3.19.1 hooks: - id: pyupgrade args: @@ -36,13 +36,13 @@ repos: args: [--all, --expand-stars] name: prune imports - repo: https://github.com/PyCQA/isort - rev: 5.12.0 + rev: 6.0.0 hooks: - id: isort args: ["--profile", "black"] name: sort imports - repo: https://github.com/psf/black - rev: 22.3.0 + rev: 25.1.0 hooks: - id: black name: black format code diff --git a/.vscode/settings.template.json b/.vscode/settings.template.json index e2fcfd8b37b..4ebda848845 100644 --- a/.vscode/settings.template.json +++ b/.vscode/settings.template.json @@ -59,9 +59,7 @@ }, "hadolint.hadolintPath": "${workspaceFolder}/scripts/hadolint.bash", "hadolint.cliOptions": [], - "ruff.lint.args": [ - "--config=${workspaceFolder}/.ruff.toml" - ], + "ruff.configuration": "${workspaceFolder}/.ruff.toml", "ruff.path": [ "${workspaceFolder}/.venv/bin/ruff" ], diff --git a/Makefile b/Makefile index d8f0ccf252d..e1e51612e5d 100644 --- a/Makefile +++ b/Makefile @@ -47,6 +47,7 @@ SERVICES_NAMES_TO_BUILD := \ payments \ resource-usage-tracker \ dynamic-scheduler \ + docker-api-proxy \ service-integration \ static-webserver \ storage \ @@ -356,6 +357,8 @@ endef show-endpoints: @$(_show_endpoints) +export HOST_UV_CACHE_DIR := $(shell uv cache dir) + up-devel: .stack-simcore-development.yml .init-swarm $(CLIENT_WEB_OUTPUT) ## Deploys local development stack, qx-compile+watch and ops stack (pass 'make ops_disabled=1 up-...' to disable) # Start compile+watch front-end container [front-end] @$(MAKE_C) services/static-webserver/client down compile-dev flags=--watch @@ -662,6 +665,8 @@ local-registry: .env ## creates a local docker registry and configure simcore to echo configuring host file to redirect $(LOCAL_REGISTRY_HOSTNAME) to 127.0.0.1; \ sudo echo 127.0.0.1 $(LOCAL_REGISTRY_HOSTNAME) | sudo tee -a /etc/hosts;\ echo done) + @$(if $(shell test -f /etc/docker/daemon.json),, \ + sudo touch /etc/docker/daemon.json) @$(if $(shell jq -e '.["insecure-registries"]? | index("http://$(LOCAL_REGISTRY_HOSTNAME):5000")? // empty' /etc/docker/daemon.json),,\ echo configuring docker engine to use insecure local registry...; \ jq 'if .["insecure-registries"] | index("http://$(LOCAL_REGISTRY_HOSTNAME):5000") then . 
else .["insecure-registries"] += ["http://$(LOCAL_REGISTRY_HOSTNAME):5000"] end' /etc/docker/daemon.json > /tmp/daemon.json &&\ diff --git a/api/specs/storage/Makefile b/api/specs/storage/Makefile deleted file mode 100644 index 879009a1baa..00000000000 --- a/api/specs/storage/Makefile +++ /dev/null @@ -1,18 +0,0 @@ -.DEFAULT_GOAL := all - -.PHONY: _check_venv_active -_check_venv_active: - # Checking whether virtual environment was activated - @python3 -c "import sys; assert sys.base_prefix!=sys.prefix" - - -.PHONY: install -install-dev install: _check_venv_active - # installing storage and requirements.txt - @cd ./../../../services/storage && make install-dev && cd - - @uv pip install -r requirements.txt - - -.PHONY: all -all: _check_venv_active install - python openapi.py diff --git a/api/specs/storage/openapi.py b/api/specs/storage/openapi.py deleted file mode 100644 index 0f9ba5afb96..00000000000 --- a/api/specs/storage/openapi.py +++ /dev/null @@ -1,409 +0,0 @@ -# pylint: disable=redefined-outer-name -# pylint: disable=unused-argument -# pylint: disable=unused-variable -# pylint: disable=too-many-arguments - - -from enum import Enum -from typing import Annotated, Any - -from fastapi import Depends, FastAPI, Query, status -from models_library.api_schemas_storage import ( - FileMetaDataGet, - FileUploadCompleteFutureResponse, - FileUploadCompleteResponse, - FileUploadCompletionBody, - FileUploadSchema, - FoldersBody, - HealthCheck, - LinkType, - PresignedLink, - SoftCopyBody, - TableSynchronisation, -) -from models_library.app_diagnostics import AppStatusCheck -from models_library.generics import Envelope -from models_library.projects_nodes import NodeID -from models_library.projects_nodes_io import LocationID, StorageFileID -from models_library.users import UserID -from pydantic import AnyUrl, ByteSize -from servicelib.fastapi.openapi import create_openapi_specs -from servicelib.long_running_tasks._models import TaskGet, TaskId, TaskStatus -from settings_library.s3 import S3Settings -from simcore_service_storage._meta import API_VTAG -from simcore_service_storage.models import ( - DatasetMetaData, - FileMetaData, - SearchFilesQueryParams, -) -from simcore_service_storage.resources import storage_resources - -TAGS_DATASETS: list[str | Enum] = ["datasets"] -TAGS_FILES: list[str | Enum] = ["files"] -TAGS_HEALTH: list[str | Enum] = ["health"] -TAGS_LOCATIONS: list[str | Enum] = ["locations"] -TAGS_TASKS: list[str | Enum] = ["tasks"] -TAGS_SIMCORE_S3: list[str | Enum] = ["simcore-s3"] - - -app = FastAPI( - redoc_url=None, - description="API definition for simcore-service-storage service", - version="0.3.0", - title="simcore-service-storage API", - contact={"name": "IT'IS Foundation", "email": "support@simcore.io"}, - license_info={ - "name": "MIT", - "__PLACEHOLDER___KEY_url": "https://github.com/ITISFoundation/osparc-simcore/blob/master/LICENSE", - }, - servers=[ - { - "url": "/", - "description": "Default server: requests directed to serving url", - }, - { - "url": "http://{host}:{port}/", - "description": "Development server: can configure any base url", - "variables": { - "host": {"default": "127.0.0.1"}, - "port": {"default": "8000"}, - }, - }, - ], - openapi_tags=[ - {"name": x} - for x in ( - TAGS_DATASETS - + TAGS_FILES - + TAGS_HEALTH - + TAGS_LOCATIONS - + TAGS_TASKS - + TAGS_SIMCORE_S3 - ) - ], -) - - -# handlers_datasets.py - - -@app.get( - f"/{API_VTAG}/locations/{{location_id}}/datasets", - response_model=Envelope[list[DatasetMetaData]], - tags=TAGS_DATASETS, - 
operation_id="get_datasets_metadata", - summary="Get datasets metadata", -) -async def get_datasets_metadata(location_id: LocationID, user_id: UserID): - """returns all the top level datasets a user has access to""" - - -# handlers_files.py - - -@app.get( - f"/{API_VTAG}/locations/{{location_id}}/datasets/{{dataset_id}}/metadata", - response_model=Envelope[list[FileMetaDataGet]], - tags=TAGS_DATASETS, - operation_id="get_files_metadata_dataset", - summary="Get Files Metadata", -) -async def get_files_metadata_dataset( - location_id: LocationID, - dataset_id: str, - user_id: UserID, - expand_dirs: bool = Query( - True, - description=( - "Automatic directory expansion. This will be replaced by pagination the future" - ), - ), -): - """returns all the file meta data inside dataset with dataset_id""" - - -@app.get( - f"/{API_VTAG}/locations", - response_model=list[DatasetMetaData], - tags=TAGS_LOCATIONS, - operation_id="get_storage_locations", - summary="Get available storage locations", -) -async def get_storage_locations(user_id: UserID): - """Returns the list of available storage locations""" - - -@app.post( - f"/{API_VTAG}/locations/{{location_id}}:sync", - response_model=Envelope[TableSynchronisation], - tags=TAGS_LOCATIONS, - operation_id="synchronise_meta_data_table", - summary="Manually triggers the synchronisation of the file meta data table in the database", -) -async def synchronise_meta_data_table( - location_id: LocationID, dry_run: bool = False, fire_and_forget: bool = False -): - """Returns an object containing added, changed and removed paths""" - - -@app.get( - f"/{API_VTAG}/locations/{{location_id}}/files/metadata", - response_model=Envelope[list[DatasetMetaData]], - tags=TAGS_FILES, - operation_id="get_files_metadata", - summary="Get datasets metadata", -) -async def get_files_metadata( - location_id: LocationID, - uuid_filter: str = "", - expand_dirs: bool = Query( - True, - description=( - "Automatic directory expansion. 
This will be replaced by pagination the future" - ), - ), -): - """returns all the file meta data a user has access to (uuid_filter may be used)""" - - -@app.get( - f"/{API_VTAG}/locations/{{location_id}}/files/{{file_id}}/metadata", - response_model=FileMetaData | Envelope[FileMetaDataGet], - tags=TAGS_FILES, - summary="Get File Metadata", - operation_id="get_file_metadata", -) -async def get_file_metadata( - location_id: LocationID, file_id: StorageFileID, user_id: UserID -): - """returns the file meta data of file_id if user_id has the rights to""" - - -@app.get( - f"/{API_VTAG}/locations/{{location_id}}/files/{{file_id}}", - response_model=Envelope[PresignedLink], - tags=TAGS_FILES, - operation_id="download_file", - summary="Returns download link for requested file", -) -async def download_file( - location_id: LocationID, - file_id: StorageFileID, - user_id: UserID, - link_type: LinkType = LinkType.PRESIGNED, -): - """creates a download file link if user has the rights to""" - - -@app.put( - f"/{API_VTAG}/locations/{{location_id}}/files/{{file_id}}", - response_model=Envelope[FileUploadSchema] | Envelope[AnyUrl], - tags=TAGS_FILES, - operation_id="upload_file", - summary="Returns upload link", -) -async def upload_file( - location_id: LocationID, - file_id: StorageFileID, - file_size: ByteSize | None, - link_type: LinkType = LinkType.PRESIGNED, - is_directory: bool = False, -): - """creates one or more upload file links if user has the rights to, expects the client to complete/abort upload""" - - -@app.post( - f"/{API_VTAG}/locations/{{location_id}}/files/{{file_id}}:abort", - status_code=status.HTTP_204_NO_CONTENT, - tags=TAGS_FILES, - operation_id="abort_upload_file", -) -async def abort_upload_file( - location_id: LocationID, file_id: StorageFileID, user_id: UserID -): - """aborts an upload if user has the rights to, and reverts - to the latest version if available, else will delete the file""" - - -@app.post( - f"/{API_VTAG}/locations/{{location_id}}/files/{{file_id}}:complete", - status_code=status.HTTP_202_ACCEPTED, - response_model=Envelope[FileUploadCompleteResponse], - tags=TAGS_FILES, - operation_id="complete_upload_file", -) -async def complete_upload_file( - body_item: Envelope[FileUploadCompletionBody], - location_id: LocationID, - file_id: StorageFileID, - user_id: UserID, -): - """completes an upload if the user has the rights to""" - - -@app.post( - f"/{API_VTAG}/locations/{{location_id}}/files/{{file_id}}:complete/futures/{{future_id}}", - response_model=Envelope[FileUploadCompleteFutureResponse], - tags=TAGS_FILES, - summary="Check for upload completion", - operation_id="is_completed_upload_file", -) -async def is_completed_upload_file( - location_id: LocationID, file_id: StorageFileID, future_id: str, user_id: UserID -): - """Returns state of upload completion""" - - -# handlers_health.py - - -@app.get( - f"/{API_VTAG}/", - response_model=Envelope[HealthCheck], - tags=TAGS_HEALTH, - summary="health check endpoint", - operation_id="health_check", -) -async def get_health(): - """Current service health""" - - -@app.get( - f"/{API_VTAG}/status", - response_model=Envelope[AppStatusCheck], - tags=TAGS_HEALTH, - summary="returns the status of the services inside", - operation_id="get_status", -) -async def get_status(): - """returns the status of all the external dependencies""" - - -# handlers_locations.py - - -@app.delete( - f"/{API_VTAG}/locations/{{location_id}}/files/{{file_id}}", - status_code=status.HTTP_204_NO_CONTENT, - tags=TAGS_FILES, - 
operation_id="delete_file", - summary="Deletes File", -) -async def delete_file(location_id: LocationID, file_id: StorageFileID, user_id: UserID): - """deletes file if user has the rights to""" - - -@app.post( - f"/{API_VTAG}/files/{{file_id}}:soft-copy", - response_model=FileMetaDataGet, - tags=TAGS_FILES, - summary="copy file as soft link", - operation_id="copy_as_soft_link", -) -async def copy_as_soft_link( - body_item: SoftCopyBody, file_id: StorageFileID, user_id: UserID -): - """creates and returns a soft link""" - - -# handlers_simcore_s3.py - - -@app.post( - f"/{API_VTAG}/simcore-s3:access", - response_model=Envelope[S3Settings], - tags=TAGS_SIMCORE_S3, - summary="gets or creates the a temporary access", - operation_id="get_or_create_temporary_s3_access", -) -async def get_or_create_temporary_s3_access(user_id: UserID): - """returns a set of S3 credentials""" - - -@app.post( - f"/{API_VTAG}/simcore-s3/folders", - response_model=Envelope[TaskGet], - tags=TAGS_SIMCORE_S3, - summary="copies folders from project", - operation_id="copy_folders_from_project", -) -async def copy_folders_from_project(body_item: FoldersBody, user_id: UserID): - """copies folders from project""" - - -@app.delete( - f"/{API_VTAG}/simcore-s3/folders/{{folder_id}}", - status_code=status.HTTP_204_NO_CONTENT, - tags=TAGS_SIMCORE_S3, - summary="delete folders from project", - operation_id="delete_folders_of_project", -) -async def delete_folders_of_project( - folder_id: str, user_id: UserID, node_id: NodeID | None = None -): - """removes folders from a project""" - - -@app.post( - f"/{API_VTAG}/simcore-s3/files/metadata:search", - response_model=Envelope[FileMetaDataGet], - tags=TAGS_SIMCORE_S3, - summary="search for owned files", - operation_id="search_files", -) -async def search_files(_query_params: Annotated[SearchFilesQueryParams, Depends()]): - """search for files starting with `startswith` and/or matching a sha256_checksum in the file_meta_data table""" - - -# long_running_tasks.py - - -@app.get( - f"/{API_VTAG}/futures", - response_model=Envelope[TaskGet], - tags=TAGS_TASKS, - summary="list current long running tasks", - operation_id="list_tasks", -) -async def list_tasks(): - """list current long running tasks""" - - -@app.get( - f"/{API_VTAG}/futures/{{task_id}}", - response_model=Envelope[TaskStatus], - tags=TAGS_TASKS, - summary="gets the status of the task", - operation_id="get_task_status", -) -async def get_task_status(task_id: TaskId): - """gets the status of the task""" - - -@app.get( - f"/{API_VTAG}/futures/{{task_id}}/result", - response_model=Any, - tags=TAGS_TASKS, - summary="get result of the task", - operation_id="get_task_result", -) -async def get_task_result(task_id: TaskId): - """get result of the task""" - - -@app.delete( - f"/{API_VTAG}/futures/{{task_id}}", - status_code=status.HTTP_204_NO_CONTENT, - tags=TAGS_TASKS, - summary="cancels and removes the task", - operation_id="cancel_and_delete_task", -) -async def cancel_and_delete_task(task_id: TaskId): - """cancels and removes the task""" - - -if __name__ == "__main__": - openapi = create_openapi_specs(app, drop_fastapi_default_422=True) - - oas_path = storage_resources.get_path("api/v0/openapi.yaml").resolve() - print(f"Writing {oas_path}...", end=None) diff --git a/api/specs/storage/requirements.txt b/api/specs/storage/requirements.txt deleted file mode 100644 index 52c5ebf91cb..00000000000 --- a/api/specs/storage/requirements.txt +++ /dev/null @@ -1,4 +0,0 @@ -# Extra reqs, besides webserver's ---constraint 
../../../requirements/constraints.txt - -fastapi diff --git a/api/specs/web-server/Makefile b/api/specs/web-server/Makefile index 4442c262d89..168d7c9ec78 100644 --- a/api/specs/web-server/Makefile +++ b/api/specs/web-server/Makefile @@ -16,3 +16,6 @@ install-dev install: _check_venv_active .PHONY: all all: _check_venv_active install python openapi.py + +.PHONY: openapi-specs +openapi-specs: all diff --git a/api/specs/web-server/_auth.py b/api/specs/web-server/_auth.py index 085e8d169c4..978dcef3d63 100644 --- a/api/specs/web-server/_auth.py +++ b/api/specs/web-server/_auth.py @@ -47,8 +47,7 @@ operation_id="request_product_account", status_code=status.HTTP_204_NO_CONTENT, ) -async def request_product_account(_body: AccountRequestInfo): - ... +async def request_product_account(_body: AccountRequestInfo): ... @router.post( @@ -75,8 +74,7 @@ async def register(_body: RegisterBody): status_code=status.HTTP_200_OK, responses={status.HTTP_409_CONFLICT: {"model": EnvelopedError}}, ) -async def unregister_account(_body: UnregisterCheck): - ... +async def unregister_account(_body: UnregisterCheck): ... @router.post( @@ -171,26 +169,24 @@ async def check_auth(): @router.post( "/auth/reset-password", response_model=Envelope[Log], - operation_id="auth_reset_password", + operation_id="initiate_reset_password", responses={status.HTTP_503_SERVICE_UNAVAILABLE: {"model": EnvelopedError}}, ) -async def reset_password(_body: ResetPasswordBody): - """a non logged-in user requests a password reset""" +async def initiate_reset_password(_body: ResetPasswordBody): ... @router.post( "/auth/reset-password/{code}", response_model=Envelope[Log], - operation_id="auth_reset_password_allowed", + operation_id="complete_reset_password", responses={ status.HTTP_401_UNAUTHORIZED: { "model": EnvelopedError, - "description": "unauthorized reset due to invalid token code", + "description": "Invalid token code", } }, ) -async def reset_password_allowed(code: str, _body: ResetPasswordConfirmation): - """changes password using a token code without being logged in""" +async def complete_reset_password(code: str, _body: ResetPasswordConfirmation): ... @router.post( @@ -268,5 +264,4 @@ async def email_confirmation(code: str): status_code=status.HTTP_200_OK, responses={status.HTTP_200_OK: {"content": {"image/png": {}}}}, ) -async def request_captcha(): - ... +async def request_captcha(): ... 
diff --git a/api/specs/web-server/_auth_api_keys.py b/api/specs/web-server/_auth_api_keys.py index 0c6512eddda..664a5165977 100644 --- a/api/specs/web-server/_auth_api_keys.py +++ b/api/specs/web-server/_auth_api_keys.py @@ -9,8 +9,10 @@ from models_library.generics import Envelope from models_library.rest_error import EnvelopedError from simcore_service_webserver._meta import API_VTAG -from simcore_service_webserver.api_keys._exceptions_handlers import _TO_HTTP_ERROR_MAP -from simcore_service_webserver.api_keys._rest import ApiKeysPathParams +from simcore_service_webserver.api_keys._controller_rest import ApiKeysPathParams +from simcore_service_webserver.api_keys._controller_rest_exceptions import ( + _TO_HTTP_ERROR_MAP, +) router = APIRouter( prefix=f"/{API_VTAG}", diff --git a/api/specs/web-server/_catalog.py b/api/specs/web-server/_catalog.py index 729ede624de..4902e331f88 100644 --- a/api/specs/web-server/_catalog.py +++ b/api/specs/web-server/_catalog.py @@ -3,6 +3,7 @@ from fastapi import APIRouter, Depends from models_library.api_schemas_api_server.pricing_plans import ServicePricingPlanGet from models_library.api_schemas_webserver.catalog import ( + CatalogLatestServiceGet, CatalogServiceGet, CatalogServiceUpdate, ServiceInputGet, @@ -14,13 +15,13 @@ from models_library.generics import Envelope from models_library.rest_pagination import Page from simcore_service_webserver._meta import API_VTAG -from simcore_service_webserver.catalog._handlers import ( +from simcore_service_webserver.catalog._controller_rest_schemas import ( + FromServiceOutputQueryParams, ListServiceParams, + ServiceInputsPathParams, + ServiceOutputsPathParams, ServicePathParams, - _FromServiceOutputParams, - _ServiceInputsPathParams, - _ServiceOutputsPathParams, - _ToServiceInputsParams, + ToServiceInputsQueryParams, ) router = APIRouter( @@ -31,25 +32,18 @@ ) -# -# /catalog/services/* COLLECTION -# - - @router.get( "/catalog/services/-/latest", - response_model=Page[CatalogServiceGet], + response_model=Page[CatalogLatestServiceGet], ) -def list_services_latest(_query_params: Annotated[ListServiceParams, Depends()]): - pass +def list_services_latest(_query: Annotated[ListServiceParams, Depends()]): ... @router.get( "/catalog/services/{service_key}/{service_version}", response_model=Envelope[CatalogServiceGet], ) -def get_service(_path_params: Annotated[ServicePathParams, Depends()]): - ... +def get_service(_path: Annotated[ServicePathParams, Depends()]): ... @router.patch( @@ -57,10 +51,9 @@ def get_service(_path_params: Annotated[ServicePathParams, Depends()]): response_model=Envelope[CatalogServiceGet], ) def update_service( - _path_params: Annotated[ServicePathParams, Depends()], - _update: CatalogServiceUpdate, -): - ... + _path: Annotated[ServicePathParams, Depends()], + _body: CatalogServiceUpdate, +): ... @router.get( @@ -68,9 +61,8 @@ def update_service( response_model=Envelope[list[ServiceInputGet]], ) def list_service_inputs( - _path_params: Annotated[ServicePathParams, Depends()], -): - ... + _path: Annotated[ServicePathParams, Depends()], +): ... @router.get( @@ -78,9 +70,8 @@ def list_service_inputs( response_model=Envelope[ServiceInputGet], ) def get_service_input( - _path_params: Annotated[_ServiceInputsPathParams, Depends()], -): - ... + _path: Annotated[ServiceInputsPathParams, Depends()], +): ... 
@router.get( @@ -88,10 +79,9 @@ def get_service_input( response_model=Envelope[list[ServiceInputKey]], ) def get_compatible_inputs_given_source_output( - _path_params: Annotated[ServicePathParams, Depends()], - _query_params: Annotated[_FromServiceOutputParams, Depends()], -): - ... + _path: Annotated[ServicePathParams, Depends()], + _query: Annotated[FromServiceOutputQueryParams, Depends()], +): ... @router.get( @@ -99,9 +89,8 @@ def get_compatible_inputs_given_source_output( response_model=Envelope[list[ServiceOutputKey]], ) def list_service_outputs( - _path_params: Annotated[ServicePathParams, Depends()], -): - ... + _path: Annotated[ServicePathParams, Depends()], +): ... @router.get( @@ -109,9 +98,8 @@ def list_service_outputs( response_model=Envelope[list[ServiceOutputGet]], ) def get_service_output( - _path_params: Annotated[_ServiceOutputsPathParams, Depends()], -): - ... + _path: Annotated[ServiceOutputsPathParams, Depends()], +): ... @router.get( @@ -119,10 +107,9 @@ def get_service_output( response_model=Envelope[list[ServiceOutputKey]], ) def get_compatible_outputs_given_target_input( - _path_params: Annotated[ServicePathParams, Depends()], - _query_params: Annotated[_ToServiceInputsParams, Depends()], -): - ... + _path: Annotated[ServicePathParams, Depends()], + _query: Annotated[ToServiceInputsQueryParams, Depends()], +): ... @router.get( @@ -130,18 +117,16 @@ def get_compatible_outputs_given_target_input( response_model=Envelope[ServiceResourcesGet], ) def get_service_resources( - _params: Annotated[ServicePathParams, Depends()], -): - ... + _path: Annotated[ServicePathParams, Depends()], +): ... @router.get( "/catalog/services/{service_key:path}/{service_version}/pricing-plan", response_model=Envelope[ServicePricingPlanGet], - summary="Retrieve default pricing plan for provided service", + description="Retrieve default pricing plan for provided service", tags=["pricing-plans"], ) async def get_service_pricing_plan( - _params: Annotated[ServicePathParams, Depends()], -): - ... + _path: Annotated[ServicePathParams, Depends()], +): ... diff --git a/api/specs/web-server/_catalog_tags.py b/api/specs/web-server/_catalog_tags.py index 26e90d952a4..9b66b92dfb2 100644 --- a/api/specs/web-server/_catalog_tags.py +++ b/api/specs/web-server/_catalog_tags.py @@ -10,7 +10,7 @@ from models_library.api_schemas_webserver.catalog import CatalogServiceGet from models_library.generics import Envelope from simcore_service_webserver._meta import API_VTAG -from simcore_service_webserver.catalog._tags_handlers import ( +from simcore_service_webserver.catalog._controller_rest_schemas import ( ServicePathParams, ServiceTagPathParams, ) @@ -31,8 +31,7 @@ ) def list_service_tags( _path_params: Annotated[ServicePathParams, Depends()], -): - ... +): ... @router.post( @@ -41,8 +40,7 @@ def list_service_tags( ) def add_service_tag( _path_params: Annotated[ServiceTagPathParams, Depends()], -): - ... +): ... @router.post( @@ -51,5 +49,4 @@ def add_service_tag( ) def remove_service_tag( _path_params: Annotated[ServiceTagPathParams, Depends()], -): - ... +): ... 
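The `api/specs/web-server/*` modules touched above follow the same pattern as the deleted storage spec: FastAPI stubs whose bodies are just `...`, kept solely so the OpenAPI document can be generated (the Makefile's `python openapi.py`), never served. A minimal self-contained sketch of that pattern, using plain FastAPI rather than the repo's `create_openapi_specs` helper:

```python
from typing import Annotated

from fastapi import APIRouter, Depends, FastAPI
from pydantic import BaseModel


class ServicePathParams(BaseModel):
    service_key: str
    service_version: str


router = APIRouter(prefix="/v0", tags=["catalog"])


# stub endpoint: exists only to contribute an operation to the OAS
@router.get("/catalog/services/{service_key}/{service_version}")
def get_service(_path: Annotated[ServicePathParams, Depends()]): ...


app = FastAPI(title="spec-only example")
app.include_router(router)

# the spec scripts dump the generated document instead of serving requests
print(sorted(app.openapi()["paths"]))
```

The `Annotated[Model, Depends()]` idiom is what lets the renamed `_path`/`_query` parameter models above surface as path and query parameters in the generated spec.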
diff --git a/api/specs/web-server/_diagnostics.py b/api/specs/web-server/_diagnostics.py index c74f314b3ec..85422308418 100644 --- a/api/specs/web-server/_diagnostics.py +++ b/api/specs/web-server/_diagnostics.py @@ -36,7 +36,7 @@ async def healthcheck_liveness_probe(): @router.get( "/config", - summary="Front end runtime configuration", + description="Front end runtime configuration", response_model=Envelope[dict[str, Any]], ) async def get_config(): @@ -53,7 +53,7 @@ async def get_scheduled_maintenance(): @router.get( "/status", - summary="checks status of self and connected services", + description="checks status of self and connected services", response_model=Envelope[AppStatusCheck], response_description="Returns app status check", ) diff --git a/api/specs/web-server/_folders.py b/api/specs/web-server/_folders.py index aa0f88c8d93..4c97c697743 100644 --- a/api/specs/web-server/_folders.py +++ b/api/specs/web-server/_folders.py @@ -105,7 +105,7 @@ async def delete_folder( @router.post( "/folders/{folder_id}/workspaces/{workspace_id}:move", status_code=status.HTTP_204_NO_CONTENT, - summary="Move folder to the workspace", + description="Move folder to the workspace", tags=["workspaces"], ) async def move_folder_to_workspace( diff --git a/api/specs/web-server/_licensed_items.py b/api/specs/web-server/_licensed_items.py index bb06c70d09d..3385028c1ce 100644 --- a/api/specs/web-server/_licensed_items.py +++ b/api/specs/web-server/_licensed_items.py @@ -9,9 +9,11 @@ from typing import Annotated from _common import as_query -from fastapi import APIRouter, Depends, status +from fastapi import APIRouter, Depends from models_library.api_schemas_webserver.licensed_items import LicensedItemRestGet -from models_library.generics import Envelope +from models_library.api_schemas_webserver.licensed_items_purchases import ( + LicensedItemPurchaseGet, +) from models_library.rest_error import EnvelopedError from models_library.rest_pagination import Page from simcore_service_webserver._meta import API_VTAG @@ -46,19 +48,9 @@ async def list_licensed_items( ... -@router.get( - "/catalog/licensed-items/{licensed_item_id}", - response_model=Envelope[LicensedItemRestGet], -) -async def get_licensed_item( - _path: Annotated[LicensedItemsPathParams, Depends()], -): - ... 
- - @router.post( "/catalog/licensed-items/{licensed_item_id}:purchase", - status_code=status.HTTP_204_NO_CONTENT, + response_model=LicensedItemPurchaseGet, ) async def purchase_licensed_item( _path: Annotated[LicensedItemsPathParams, Depends()], diff --git a/api/specs/web-server/_long_running_tasks.py b/api/specs/web-server/_long_running_tasks.py index 859bd470d29..0dacf42a03b 100644 --- a/api/specs/web-server/_long_running_tasks.py +++ b/api/specs/web-server/_long_running_tasks.py @@ -4,13 +4,17 @@ # pylint: disable=too-many-arguments -from typing import Annotated +from typing import Annotated, Any from fastapi import APIRouter, Depends, status from models_library.generics import Envelope +from models_library.rest_error import EnvelopedError from servicelib.aiohttp.long_running_tasks._routes import _PathParam from servicelib.long_running_tasks._models import TaskGet, TaskStatus from simcore_service_webserver._meta import API_VTAG +from simcore_service_webserver.tasks._exception_handlers import ( + _TO_HTTP_ERROR_MAP as data_export_http_error_map, +) router = APIRouter( prefix=f"/{API_VTAG}", @@ -19,37 +23,52 @@ ], ) +_data_export_responses: dict[int | str, dict[str, Any]] = { + i.status_code: {"model": EnvelopedError} + for i in data_export_http_error_map.values() +} + @router.get( "/tasks", response_model=Envelope[list[TaskGet]], + name="list_tasks", + description="Lists all long running tasks", + responses=_data_export_responses, ) -def list_tasks(): - ... +def get_async_jobs(): ... @router.get( "/tasks/{task_id}", response_model=Envelope[TaskStatus], + name="get_task_status", + description="Retrieves the status of a task", + responses=_data_export_responses, ) -def get_task_status( +def get_async_job_status( _path_params: Annotated[_PathParam, Depends()], -): - ... +): ... @router.delete( "/tasks/{task_id}", + name="cancel_and_delete_task", + description="Cancels and deletes a task", + responses=_data_export_responses, status_code=status.HTTP_204_NO_CONTENT, ) -def cancel_and_delete_task( +def abort_async_job( _path_params: Annotated[_PathParam, Depends()], -): - ... +): ... -@router.get("/tasks/{task_id}/result") -def get_task_result( +@router.get( + "/tasks/{task_id}/result", + name="get_task_result", + description="Retrieves the result of a task", + responses=_data_export_responses, +) +def get_async_job_result( _path_params: Annotated[_PathParam, Depends()], -): - ... +): ... diff --git a/api/specs/web-server/_metamodeling.py b/api/specs/web-server/_metamodeling.py deleted file mode 100644 index 5dfeaf4dfdc..00000000000 --- a/api/specs/web-server/_metamodeling.py +++ /dev/null @@ -1,40 +0,0 @@ -# pylint: disable=redefined-outer-name -# pylint: disable=unused-argument -# pylint: disable=unused-variable -# pylint: disable=too-many-arguments - - -from typing import Annotated - -from fastapi import APIRouter, Depends -from models_library.rest_pagination import Page -from simcore_service_webserver._meta import API_VTAG -from simcore_service_webserver.meta_modeling._handlers import ( - ParametersModel, - ProjectIterationItem, - ProjectIterationResultItem, -) - -router = APIRouter( - prefix=f"/{API_VTAG}", - tags=[ - "projects", - "metamodeling", - ], -) - - -@router.get( - "/projects/{project_uuid}/checkpoint/{ref_id}/iterations", - response_model=Page[ProjectIterationItem], -) -def list_project_iterations(_params: Annotated[ParametersModel, Depends()]): - ... 
- - -@router.get( - "/projects/{project_uuid}/checkpoint/{ref_id}/iterations/-/results", - response_model=Page[ProjectIterationResultItem], -) -def list_project_iterations_results(_params: Annotated[ParametersModel, Depends()]): - ... diff --git a/api/specs/web-server/_products.py b/api/specs/web-server/_products.py index 58fdcc15477..a77f50f3193 100644 --- a/api/specs/web-server/_products.py +++ b/api/specs/web-server/_products.py @@ -7,19 +7,18 @@ from typing import Annotated -from fastapi import APIRouter, Depends, status -from models_library.api_schemas_webserver.product import ( - GenerateInvitation, - GetCreditPrice, - GetProduct, +from fastapi import APIRouter, Depends +from models_library.api_schemas_webserver.products import ( + CreditPriceGet, + InvitationGenerate, InvitationGenerated, - UpdateProductTemplate, + ProductGet, + ProductUIGet, ) from models_library.generics import Envelope from simcore_service_webserver._meta import API_VTAG -from simcore_service_webserver.products._handlers import ( - _ProductsRequestParams, - _ProductTemplateParams, +from simcore_service_webserver.products._controller.rest_schemas import ( + ProductsRequestParams, ) router = APIRouter( @@ -32,34 +31,27 @@ @router.get( "/credits-price", - response_model=Envelope[GetCreditPrice], + response_model=Envelope[CreditPriceGet], ) -async def get_current_product_price(): - ... +async def get_current_product_price(): ... @router.get( "/products/{product_name}", - response_model=Envelope[GetProduct], + response_model=Envelope[ProductGet], + description="NOTE: `/products/current` is used to define current project w/o naming it", tags=[ "po", ], ) -async def get_product(_params: Annotated[_ProductsRequestParams, Depends()]): - ... +async def get_product(_params: Annotated[ProductsRequestParams, Depends()]): ... -@router.put( - "/products/{product_name}/templates/{template_id}", - status_code=status.HTTP_204_NO_CONTENT, - tags=[ - "po", - ], +@router.get( + "/products/current/ui", + response_model=Envelope[ProductUIGet], ) -async def update_product_template( - _params: Annotated[_ProductTemplateParams, Depends()], _body: UpdateProductTemplate -): - ... +async def get_current_product_ui(): ... @router.post( @@ -69,5 +61,4 @@ async def update_product_template( "po", ], ) -async def generate_invitation(_body: GenerateInvitation): - ... +async def generate_invitation(_body: InvitationGenerate): ... 
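A recurring addition in this PR is documenting error responses from a single exception-to-status map: see `_data_export_responses` in `_long_running_tasks.py` above, and the `_TO_HTTP_ERROR_MAP` comprehensions in `_projects.py` and `_trash.py` below. A hedged sketch of that idiom — the shape of the map's values (`status_code` plus a message template) is assumed here, not copied from `simcore_service_webserver`:

```python
from typing import Any

from pydantic import BaseModel


class EnvelopedError(BaseModel):
    # simplified stand-in for models_library.rest_error.EnvelopedError
    error: dict[str, Any]


class HttpErrorInfo(BaseModel):
    # assumed shape of the values in an error map such as _TO_HTTP_ERROR_MAP
    status_code: int
    msg_template: str


_TO_HTTP_ERROR_MAP: dict[type[Exception], HttpErrorInfo] = {
    PermissionError: HttpErrorInfo(status_code=403, msg_template="Access denied"),
    KeyError: HttpErrorInfo(status_code=404, msg_template="Not found"),
}

# the comprehension used in the specs: one documented response per mapped status
# code, passed as `responses=` to APIRouter(...) or to an individual route decorator
_responses: dict[int | str, dict[str, Any]] = {
    info.status_code: {"model": EnvelopedError} for info in _TO_HTTP_ERROR_MAP.values()
}
```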
diff --git a/api/specs/web-server/_projects_crud.py b/api/specs/web-server/_projects.py similarity index 78% rename from api/specs/web-server/_projects_crud.py rename to api/specs/web-server/_projects.py index 62fe7684c68..87a5cea975e 100644 --- a/api/specs/web-server/_projects_crud.py +++ b/api/specs/web-server/_projects.py @@ -1,4 +1,4 @@ -""" Helper script to automatically generate OAS +"""Helper script to automatically generate OAS This OAS are the source of truth """ @@ -27,13 +27,19 @@ from models_library.generics import Envelope from models_library.projects import ProjectID from models_library.projects_nodes_io import NodeID +from models_library.rest_error import EnvelopedError from models_library.rest_pagination import Page from pydantic import BaseModel from simcore_service_webserver._meta import API_VTAG -from simcore_service_webserver.projects._common.models import ProjectPathParams -from simcore_service_webserver.projects._crud_handlers import ProjectCreateParams -from simcore_service_webserver.projects._crud_handlers_models import ( +from simcore_service_webserver.projects._controller._rest_exceptions import ( + _TO_HTTP_ERROR_MAP, +) +from simcore_service_webserver.projects._controller._rest_schemas import ( + ProjectPathParams, +) +from simcore_service_webserver.projects._controller.projects_rest_schemas import ( ProjectActiveQueryParams, + ProjectCreateQueryParams, ProjectsListQueryParams, ProjectsSearchQueryParams, ) @@ -43,6 +49,9 @@ tags=[ "projects", ], + responses={ + i.status_code: {"model": EnvelopedError} for i in _TO_HTTP_ERROR_MAP.values() + }, ) @@ -67,15 +76,16 @@ class _ProjectCreateHeaderParams(BaseModel): @router.post( "/projects", response_model=Envelope[TaskGet], - summary="Creates a new project or copies an existing one", - status_code=status.HTTP_201_CREATED, + description="Creates a new project or copies an existing one. " + "NOTE: implemented as a long running task, " + "i.e. requires polling `status_href` (HTTP_200_OK) to get status and `result_href` (HTTP_201_CREATED) to get created project", + status_code=status.HTTP_202_ACCEPTED, ) async def create_project( _h: Annotated[_ProjectCreateHeaderParams, Depends()], - _path: Annotated[ProjectCreateParams, Depends()], + _query: Annotated[ProjectCreateQueryParams, Depends()], _body: ProjectCreateNew | ProjectCopyOverride, -): - ... +): ... @router.get( @@ -84,8 +94,7 @@ async def create_project( ) async def list_projects( _query: Annotated[as_query(ProjectsListQueryParams), Depends()], -): - ... +): ... @router.get( @@ -94,8 +103,7 @@ async def list_projects( ) async def get_active_project( _query: Annotated[ProjectActiveQueryParams, Depends()], -): - ... +): ... @router.get( @@ -104,8 +112,7 @@ async def get_active_project( ) async def get_project( _path: Annotated[ProjectPathParams, Depends()], -): - ... +): ... @router.patch( @@ -116,8 +123,7 @@ async def get_project( async def patch_project( _path: Annotated[ProjectPathParams, Depends()], _body: ProjectPatch, -): - ... +): ... @router.delete( @@ -126,8 +132,7 @@ async def patch_project( ) async def delete_project( _path: Annotated[ProjectPathParams, Depends()], -): - ... +): ... @router.post( @@ -137,8 +142,7 @@ async def delete_project( ) async def clone_project( _path: Annotated[ProjectPathParams, Depends()], -): - ... +): ... @router.get( @@ -147,8 +151,7 @@ async def clone_project( ) async def list_projects_full_search( _query: Annotated[as_query(ProjectsSearchQueryParams), Depends()], -): - ... +): ... 
@router.get( @@ -158,5 +161,4 @@ async def list_projects_full_search( ) async def get_project_inactivity( _path: Annotated[ProjectPathParams, Depends()], -): - ... +): ... diff --git a/api/specs/web-server/_projects_comments.py b/api/specs/web-server/_projects_comments.py index 9f1ac0f1bd2..21b045af945 100644 --- a/api/specs/web-server/_projects_comments.py +++ b/api/specs/web-server/_projects_comments.py @@ -1,4 +1,4 @@ -""" Helper script to automatically generate OAS +"""Helper script to automatically generate OAS This OAS are the source of truth """ @@ -17,7 +17,7 @@ from models_library.projects_comments import CommentID, ProjectsCommentsAPI from pydantic import NonNegativeInt from simcore_service_webserver._meta import API_VTAG -from simcore_service_webserver.projects._comments_handlers import ( +from simcore_service_webserver.projects._controller.comments_rest import ( _ProjectCommentsBodyParams, _ProjectCommentsPathParams, _ProjectCommentsWithCommentPathParams, @@ -40,13 +40,12 @@ @router.post( "/projects/{project_uuid}/comments", response_model=Envelope[dict[Literal["comment_id"], CommentID]], - summary="Create a new comment for a specific project. The request body should contain the comment contents and user information.", + description="Create a new comment for a specific project. The request body should contain the comment contents and user information.", status_code=201, ) async def create_project_comment( project_uuid: ProjectID, body: _ProjectCommentsBodyParams -): - ... +): ... assert_handler_signature_against_model( @@ -57,12 +56,11 @@ async def create_project_comment( @router.get( "/projects/{project_uuid}/comments", response_model=Envelope[list[ProjectsCommentsAPI]], - summary="Retrieve all comments for a specific project.", + description="Retrieve all comments for a specific project.", ) async def list_project_comments( project_uuid: ProjectID, limit: int = 20, offset: NonNegativeInt = 0 -): - ... +): ... assert_handler_signature_against_model( @@ -73,14 +71,13 @@ async def list_project_comments( @router.put( "/projects/{project_uuid}/comments/{comment_id}", response_model=Envelope[ProjectsCommentsAPI], - summary="Update the contents of a specific comment for a project. The request body should contain the updated comment contents.", + description="Update the contents of a specific comment for a project. The request body should contain the updated comment contents.", ) async def update_project_comment( project_uuid: ProjectID, comment_id: CommentID, body: _ProjectCommentsBodyParams, -): - ... +): ... assert_handler_signature_against_model( @@ -90,11 +87,10 @@ async def update_project_comment( @router.delete( "/projects/{project_uuid}/comments/{comment_id}", - summary="Delete a specific comment associated with a project.", + description="Delete a specific comment associated with a project.", status_code=204, ) -async def delete_project_comment(project_uuid: ProjectID, comment_id: CommentID): - ... +async def delete_project_comment(project_uuid: ProjectID, comment_id: CommentID): ... assert_handler_signature_against_model( @@ -105,10 +101,9 @@ async def delete_project_comment(project_uuid: ProjectID, comment_id: CommentID) @router.get( "/projects/{project_uuid}/comments/{comment_id}", response_model=Envelope[ProjectsCommentsAPI], - summary="Retrieve a specific comment by its ID within a project.", + description="Retrieve a specific comment by its ID within a project.", ) -async def get_project_comment(project_uuid: ProjectID, comment_id: CommentID): - ... 
+async def get_project_comment(project_uuid: ProjectID, comment_id: CommentID): ... assert_handler_signature_against_model( diff --git a/api/specs/web-server/_projects_folders.py b/api/specs/web-server/_projects_folders.py index ef6f7f2cf31..f3c5b337b49 100644 --- a/api/specs/web-server/_projects_folders.py +++ b/api/specs/web-server/_projects_folders.py @@ -1,4 +1,4 @@ -""" Helper script to automatically generate OAS +"""Helper script to automatically generate OAS This OAS are the source of truth """ @@ -13,7 +13,7 @@ from fastapi import APIRouter, Depends, status from simcore_service_webserver._meta import API_VTAG -from simcore_service_webserver.projects._folders_handlers import ( +from simcore_service_webserver.projects._controller.folders_rest import ( _ProjectsFoldersPathParams, ) @@ -26,9 +26,8 @@ @router.put( "/projects/{project_id}/folders/{folder_id}", status_code=status.HTTP_204_NO_CONTENT, - summary="Move project to the folder", + description="Move project to the folder", ) async def replace_project_folder( _path: Annotated[_ProjectsFoldersPathParams, Depends()], -): - ... +): ... diff --git a/api/specs/web-server/_projects_groups.py b/api/specs/web-server/_projects_groups.py index cfc0870d6a8..091e374e4ca 100644 --- a/api/specs/web-server/_projects_groups.py +++ b/api/specs/web-server/_projects_groups.py @@ -9,12 +9,14 @@ from fastapi import APIRouter, Depends, status from models_library.generics import Envelope from simcore_service_webserver._meta import API_VTAG -from simcore_service_webserver.projects._common.models import ProjectPathParams -from simcore_service_webserver.projects._groups_api import ProjectGroupGet -from simcore_service_webserver.projects._groups_handlers import ( +from simcore_service_webserver.projects._controller._rest_schemas import ( + ProjectPathParams, +) +from simcore_service_webserver.projects._controller.groups_rest import ( _ProjectsGroupsBodyParams, _ProjectsGroupsPathParams, ) +from simcore_service_webserver.projects._groups_service import ProjectGroupGet router = APIRouter( prefix=f"/{API_VTAG}", @@ -30,16 +32,14 @@ async def create_project_group( _path: Annotated[_ProjectsGroupsPathParams, Depends()], _body: _ProjectsGroupsBodyParams, -): - ... +): ... @router.get( "/projects/{project_id}/groups", response_model=Envelope[list[ProjectGroupGet]], ) -async def list_project_groups(_path: Annotated[ProjectPathParams, Depends()]): - ... +async def list_project_groups(_path: Annotated[ProjectPathParams, Depends()]): ... @router.put( @@ -49,13 +49,13 @@ async def list_project_groups(_path: Annotated[ProjectPathParams, Depends()]): async def replace_project_group( _path: Annotated[_ProjectsGroupsPathParams, Depends()], _body: _ProjectsGroupsBodyParams, -): - ... +): ... @router.delete( "/projects/{project_id}/groups/{group_id}", status_code=status.HTTP_204_NO_CONTENT, ) -async def delete_project_group(_path: Annotated[_ProjectsGroupsPathParams, Depends()]): - ... +async def delete_project_group( + _path: Annotated[_ProjectsGroupsPathParams, Depends()], +): ... 
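Note the semantics change in `create_project` above: it now answers `202 Accepted` with an enveloped `TaskGet` instead of `201 Created`, so clients must poll as the new description states. A client-side sketch of that flow, assuming the `status_href` (HTTP 200) and `result_href` (HTTP 201) fields named in the description, a boolean `done` on the task status, and the usual `{"data": ...}` envelope unwrapping; the client is assumed to be configured with the API's base URL:

```python
import time

import httpx


def create_project_and_wait(client: httpx.Client, new_project: dict) -> dict:
    # POST /v0/projects returns HTTP 202 with an enveloped long-running TaskGet
    task = client.post("/v0/projects", json=new_project).json()["data"]

    # poll status_href until the task reports completion
    while not client.get(task["status_href"]).json()["data"]["done"]:
        time.sleep(1)

    # result_href finally yields the newly created project
    return client.get(task["result_href"]).json()["data"]
```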
diff --git a/api/specs/web-server/_projects_metadata.py b/api/specs/web-server/_projects_metadata.py index ef6e429d7dd..0e97d475aa3 100644 --- a/api/specs/web-server/_projects_metadata.py +++ b/api/specs/web-server/_projects_metadata.py @@ -1,4 +1,4 @@ -""" Helper script to automatically generate OAS +"""Helper script to automatically generate OAS This OAS are the source of truth """ @@ -18,7 +18,9 @@ ) from models_library.generics import Envelope from simcore_service_webserver._meta import API_VTAG -from simcore_service_webserver.projects._metadata_handlers import ProjectPathParams +from simcore_service_webserver.projects._controller.metadata_rest import ( + ProjectPathParams, +) router = APIRouter(prefix=f"/{API_VTAG}", tags=["projects", "metadata"]) @@ -33,8 +35,7 @@ response_model=Envelope[ProjectMetadataGet], status_code=status.HTTP_200_OK, ) -async def get_project_metadata(_params: Annotated[ProjectPathParams, Depends()]): - ... +async def get_project_metadata(_params: Annotated[ProjectPathParams, Depends()]): ... @router.patch( @@ -44,5 +45,4 @@ async def get_project_metadata(_params: Annotated[ProjectPathParams, Depends()]) ) async def update_project_metadata( _params: Annotated[ProjectPathParams, Depends()], _body: ProjectMetadataUpdate -): - ... +): ... diff --git a/api/specs/web-server/_projects_nodes.py b/api/specs/web-server/_projects_nodes.py index 50c2ba73a1a..454d7c4f733 100644 --- a/api/specs/web-server/_projects_nodes.py +++ b/api/specs/web-server/_projects_nodes.py @@ -18,6 +18,7 @@ NodePatch, NodeRetrieve, NodeRetrieved, + ProjectNodeServicesGet, ServiceResourcesDict, ) from models_library.generics import Envelope @@ -25,12 +26,14 @@ from models_library.projects import ProjectID from models_library.projects_nodes_io import NodeID from simcore_service_webserver._meta import API_VTAG -from simcore_service_webserver.projects._crud_handlers import ProjectPathParams -from simcore_service_webserver.projects._nodes_handlers import ( +from simcore_service_webserver.projects._controller.nodes_rest import ( NodePathParams, _ProjectGroupAccess, _ProjectNodePreview, ) +from simcore_service_webserver.projects._controller.projects_rest import ( + ProjectPathParams, +) router = APIRouter( prefix=f"/{API_VTAG}", @@ -76,8 +79,7 @@ def delete_node(project_id: str, node_id: str): # noqa: ARG001 ) def retrieve_node( project_id: str, node_id: str, _retrieve: NodeRetrieve # noqa: ARG001 -): - ... +): ... @router.post( @@ -147,8 +149,7 @@ def get_node_resources(project_id: str, node_id: str): # noqa: ARG001 ) def replace_node_resources( project_id: str, node_id: str, _new: ServiceResourcesDict # noqa: ARG001 -): - ... +): ... # @@ -156,15 +157,21 @@ def replace_node_resources( # +@router.get( + "/projects/{project_id}/nodes/-/services", + response_model=Envelope[ProjectNodeServicesGet], +) +async def get_project_services(project_id: ProjectID): ... + + @router.get( "/projects/{project_id}/nodes/-/services:access", response_model=Envelope[_ProjectGroupAccess], - summary="Check whether provided group has access to the project services", + description="Check whether provided group has access to the project services", ) async def get_project_services_access_for_gid( project_id: ProjectID, for_gid: GroupID # noqa: ARG001 -): - ... +): ... 
assert_handler_signature_against_model( @@ -180,7 +187,7 @@ async def get_project_services_access_for_gid( @router.get( "/projects/{project_id}/nodes/-/preview", response_model=Envelope[list[_ProjectNodePreview]], - summary="Lists all previews in the node's project", + description="Lists all previews in the node's project", ) async def list_project_nodes_previews(project_id: ProjectID): # noqa: ARG001 ... @@ -192,13 +199,12 @@ async def list_project_nodes_previews(project_id: ProjectID): # noqa: ARG001 @router.get( "/projects/{project_id}/nodes/{node_id}/preview", response_model=Envelope[_ProjectNodePreview], - summary="Gets a give node's preview", + description="Gets a given node's preview", responses={status.HTTP_404_NOT_FOUND: {"description": "Node has no preview"}}, ) async def get_project_node_preview( project_id: ProjectID, node_id: NodeID # noqa: ARG001 -): - ... +): ... assert_handler_signature_against_model(get_project_node_preview, NodePathParams) diff --git a/api/specs/web-server/_projects_nodes_pricing_unit.py b/api/specs/web-server/_projects_nodes_pricing_unit.py index 4ee1ceb0714..91c7f0cdb19 100644 --- a/api/specs/web-server/_projects_nodes_pricing_unit.py +++ b/api/specs/web-server/_projects_nodes_pricing_unit.py @@ -1,4 +1,4 @@ -""" Helper script to automatically generate OAS +"""Helper script to automatically generate OAS This OAS are the source of truth """ @@ -17,10 +17,10 @@ from models_library.projects_nodes_io import NodeID from models_library.resource_tracker import PricingPlanId, PricingUnitId from simcore_service_webserver._meta import API_VTAG -from simcore_service_webserver.projects._nodes_handlers import NodePathParams -from simcore_service_webserver.projects._projects_nodes_pricing_unit_handlers import ( +from simcore_service_webserver.projects._controller.nodes_pricing_unit_rest import ( _ProjectNodePricingUnitPathParams, ) +from simcore_service_webserver.projects._controller.nodes_rest import NodePathParams router = APIRouter( prefix=f"/{API_VTAG}", @@ -33,10 +33,9 @@ @router.get( "/projects/{project_id}/nodes/{node_id}/pricing-unit", response_model=Envelope[PricingUnitGet | None], - summary="Get currently connected pricing unit to the project node.", + description="Get currently connected pricing unit to the project node.", ) -async def get_project_node_pricing_unit(project_id: ProjectID, node_id: NodeID): - ... +async def get_project_node_pricing_unit(project_id: ProjectID, node_id: NodeID): ... assert_handler_signature_against_model(get_project_node_pricing_unit, NodePathParams) @@ -44,7 +43,7 @@ async def get_project_node_pricing_unit(project_id: ProjectID, node_id: NodeID): @router.put( "/projects/{project_id}/nodes/{node_id}/pricing-plan/{pricing_plan_id}/pricing-unit/{pricing_unit_id}", - summary="Connect pricing unit to the project node (Project node can have only one pricing unit)", + description="Connect pricing unit to the project node (Project node can have only one pricing unit)", status_code=status.HTTP_204_NO_CONTENT, ) async def connect_pricing_unit_to_project_node( project_id: ProjectID, node_id: NodeID, pricing_plan_id: PricingPlanId, pricing_unit_id: PricingUnitId, -): - ... +): ...
assert_handler_signature_against_model( diff --git a/api/specs/web-server/_projects_ports.py b/api/specs/web-server/_projects_ports.py index f7a98e53e59..a5874b8d071 100644 --- a/api/specs/web-server/_projects_ports.py +++ b/api/specs/web-server/_projects_ports.py @@ -9,7 +9,7 @@ from models_library.projects import ProjectID from models_library.projects_nodes import NodeID from simcore_service_webserver._meta import API_VTAG -from simcore_service_webserver.projects._ports_handlers import ( +from simcore_service_webserver.projects._controller.ports_rest import ( ProjectInputGet, ProjectInputUpdate, ProjectMetadataPortGet, diff --git a/api/specs/web-server/_projects_states.py b/api/specs/web-server/_projects_states.py index 1547e8f3b35..5c1b2a5299b 100644 --- a/api/specs/web-server/_projects_states.py +++ b/api/specs/web-server/_projects_states.py @@ -14,7 +14,7 @@ from servicelib.aiohttp import status from simcore_service_webserver._meta import API_VTAG from simcore_service_webserver.director_v2.exceptions import DirectorServiceError -from simcore_service_webserver.projects._states_handlers import ( +from simcore_service_webserver.projects._controller.projects_states_rest import ( ProjectPathParams, _OpenProjectQuery, ) @@ -70,20 +70,17 @@ def open_project( client_session_id: Annotated[str, Body(...)], _path_params: Annotated[ProjectPathParams, Depends()], _query_params: Annotated[_OpenProjectQuery, Depends()], -): - ... +): ... @router.post("/projects/{project_id}:close", status_code=status.HTTP_204_NO_CONTENT) def close_project( _path_params: Annotated[ProjectPathParams, Depends()], client_session_id: Annotated[str, Body(...)], -): - ... +): ... @router.get("/projects/{project_id}/state", response_model=Envelope[ProjectState]) def get_project_state( _path_params: Annotated[ProjectPathParams, Depends()], -): - ... +): ... diff --git a/api/specs/web-server/_projects_wallet.py b/api/specs/web-server/_projects_wallet.py index 0f22d25f097..78878bc163e 100644 --- a/api/specs/web-server/_projects_wallet.py +++ b/api/specs/web-server/_projects_wallet.py @@ -1,4 +1,4 @@ -""" Helper script to automatically generate OAS +"""Helper script to automatically generate OAS This OAS are the source of truth """ @@ -17,8 +17,10 @@ from models_library.projects import ProjectID from models_library.wallets import WalletID from simcore_service_webserver._meta import API_VTAG -from simcore_service_webserver.projects._common.models import ProjectPathParams -from simcore_service_webserver.projects._wallets_handlers import ( +from simcore_service_webserver.projects._controller._rest_schemas import ( + ProjectPathParams, +) +from simcore_service_webserver.projects._controller.wallets_rest import ( _PayProjectDebtBody, _ProjectWalletPathParams, ) @@ -34,10 +36,9 @@ @router.get( "/projects/{project_id}/wallet", response_model=Envelope[WalletGet | None], - summary="Get current connected wallet to the project.", + description="Get current connected wallet to the project.", ) -async def get_project_wallet(project_id: ProjectID): - ... +async def get_project_wallet(project_id: ProjectID): ... 
assert_handler_signature_against_model(get_project_wallet, ProjectPathParams) @@ -46,13 +47,12 @@ async def get_project_wallet(project_id: ProjectID): @router.put( "/projects/{project_id}/wallet/{wallet_id}", response_model=Envelope[WalletGet], - summary="Connect wallet to the project (Project can have only one wallet)", + description="Connect wallet to the project (Project can have only one wallet)", ) async def connect_wallet_to_project( project_id: ProjectID, wallet_id: WalletID, -): - ... +): ... assert_handler_signature_against_model(connect_wallet_to_project, ProjectPathParams) @@ -65,8 +65,7 @@ async def connect_wallet_to_project( async def pay_project_debt( _path: Annotated[_ProjectWalletPathParams, Depends()], _body: Annotated[_PayProjectDebtBody, Depends()], -): - ... +): ... assert_handler_signature_against_model(connect_wallet_to_project, ProjectPathParams) diff --git a/api/specs/web-server/_projects_workspaces.py b/api/specs/web-server/_projects_workspaces.py index caaccfca05c..17ec63fc367 100644 --- a/api/specs/web-server/_projects_workspaces.py +++ b/api/specs/web-server/_projects_workspaces.py @@ -1,4 +1,4 @@ -""" Helper script to automatically generate OAS +"""Helper script to automatically generate OAS This OAS are the source of truth """ @@ -13,7 +13,7 @@ from fastapi import APIRouter, Depends, status from simcore_service_webserver._meta import API_VTAG -from simcore_service_webserver.projects._workspaces_handlers import ( +from simcore_service_webserver.projects._controller.workspaces_rest import ( _ProjectWorkspacesPathParams, ) @@ -26,9 +26,8 @@ @router.post( "/projects/{project_id}/workspaces/{workspace_id}:move", status_code=status.HTTP_204_NO_CONTENT, - summary="Move project to the workspace", + description="Move project to the workspace", ) async def move_project_to_workspace( _path: Annotated[_ProjectWorkspacesPathParams, Depends()], -): - ... +): ... diff --git a/api/specs/web-server/_resource_usage.py b/api/specs/web-server/_resource_usage.py index e48e8030d5e..7e202d2b5c2 100644 --- a/api/specs/web-server/_resource_usage.py +++ b/api/specs/web-server/_resource_usage.py @@ -51,7 +51,7 @@ @router.get( "/services/-/resource-usages", response_model=Page[ServiceRunGet], - summary="Retrieve finished and currently running user services" + description="Retrieve finished and currently running user services" " (user and product are taken from context, optionally wallet_id parameter might be provided).", tags=["usage"], ) @@ -64,7 +64,7 @@ async def list_resource_usage_services( @router.get( "/services/-/aggregated-usages", response_model=Page[OsparcCreditsAggregatedByServiceGet], - summary="Used credits based on aggregate by type, currently supported `services`" + description="Used credits based on aggregate by type, currently supported `services`" ". (user and product are taken from context, optionally wallet_id parameter might be provided).", tags=["usage"], ) @@ -83,7 +83,7 @@ async def list_osparc_credits_aggregated_usages( } }, tags=["usage"], - summary="Redirects to download CSV link. CSV obtains finished and currently running " + description="Redirects to download CSV link. 
CSV obtains finished and currently running " "user services (user and product are taken from context, optionally wallet_id parameter might be provided).", ) async def export_resource_usage_services( diff --git a/api/specs/web-server/_statics.py b/api/specs/web-server/_statics.py index cf3b846f7d7..da1a1667e02 100644 --- a/api/specs/web-server/_statics.py +++ b/api/specs/web-server/_statics.py @@ -8,7 +8,7 @@ from fastapi import APIRouter from fastapi.responses import HTMLResponse -from simcore_service_webserver._constants import INDEX_RESOURCE_NAME +from simcore_service_webserver.constants import INDEX_RESOURCE_NAME from simcore_service_webserver.statics.settings import FrontEndInfoDict router = APIRouter( diff --git a/api/specs/web-server/_storage.py b/api/specs/web-server/_storage.py index 61326395e23..56a175d7552 100644 --- a/api/specs/web-server/_storage.py +++ b/api/specs/web-server/_storage.py @@ -4,24 +4,39 @@ # pylint: disable=too-many-arguments -from typing import TypeAlias +from typing import Annotated, Any, TypeAlias -from fastapi import APIRouter, Query, status -from models_library.api_schemas_storage import ( +from fastapi import APIRouter, Depends, Query, status +from models_library.api_schemas_long_running_tasks.tasks import ( + TaskGet, +) +from models_library.api_schemas_storage.storage_schemas import ( + FileLocation, FileMetaDataGet, FileUploadCompleteFutureResponse, FileUploadCompleteResponse, FileUploadCompletionBody, FileUploadSchema, LinkType, + PathMetaDataGet, PresignedLink, - TableSynchronisation, +) +from models_library.api_schemas_webserver.storage import ( + DataExportPost, + ListPathsQueryParams, + StorageLocationPathParams, + StoragePathComputeSizeParams, ) from models_library.generics import Envelope from models_library.projects_nodes_io import LocationID +from models_library.rest_error import EnvelopedError from pydantic import AnyUrl, ByteSize +from servicelib.fastapi.rest_pagination import CustomizedPathsCursorPage from simcore_service_webserver._meta import API_VTAG from simcore_service_webserver.storage.schemas import DatasetMetaData, FileMetaData +from simcore_service_webserver.tasks._exception_handlers import ( + _TO_HTTP_ERROR_MAP as data_export_http_error_map, +) router = APIRouter( prefix=f"/{API_VTAG}", @@ -38,40 +53,51 @@ @router.get( "/storage/locations", - response_model=list[DatasetMetaData], - summary="Get available storage locations", + response_model=list[FileLocation], + description="Get available storage locations", ) -async def get_storage_locations(): +async def list_storage_locations(): """Returns the list of available storage locations""" -@router.post( - "/storage/locations/{location_id}:sync", - response_model=Envelope[TableSynchronisation], - summary="Manually triggers the synchronisation of the file meta data table in the database", +@router.get( + "/storage/locations/{location_id}/paths", + response_model=CustomizedPathsCursorPage[PathMetaDataGet], ) -async def synchronise_meta_data_table( - location_id: LocationID, dry_run: bool = False, fire_and_forget: bool = False +async def list_storage_paths( + _path: Annotated[StorageLocationPathParams, Depends()], + _query: Annotated[ListPathsQueryParams, Depends()], ): - """Returns an object containing added, changed and removed paths""" + """Lists the files/directories in WorkingDirectory""" + + +@router.post( + "/storage/locations/{location_id}/paths/{path}:size", + response_model=Envelope[TaskGet], + status_code=status.HTTP_202_ACCEPTED, +) +async def compute_path_size(_path: 
Annotated[StoragePathComputeSizeParams, Depends()]): + """Compute the size of a path""" @router.get( "/storage/locations/{location_id}/datasets", response_model=Envelope[list[DatasetMetaData]], - summary="Get datasets metadata", + description="Get datasets metadata", ) -async def get_datasets_metadata(location_id: LocationID): +async def list_datasets_metadata( + _path: Annotated[StorageLocationPathParams, Depends()], +): """returns all the top level datasets a user has access to""" @router.get( "/storage/locations/{location_id}/files/metadata", response_model=Envelope[list[DatasetMetaData]], - summary="Get datasets metadata", + description="Get datasets metadata", ) async def get_files_metadata( - location_id: LocationID, + _path: Annotated[StorageLocationPathParams, Depends()], uuid_filter: str = "", expand_dirs: bool = Query( True, @@ -86,9 +112,9 @@ async def get_files_metadata( @router.get( "/storage/locations/{location_id}/datasets/{dataset_id}/metadata", response_model=Envelope[list[FileMetaDataGet]], - summary="Get Files Metadata", + description="Get Files Metadata", ) -async def get_files_metadata_dataset( +async def list_dataset_files_metadata( location_id: LocationID, dataset_id: str, expand_dirs: bool = Query( @@ -104,7 +130,7 @@ async def get_files_metadata_dataset( @router.get( "/storage/locations/{location_id}/files/{file_id}/metadata", response_model=FileMetaData | Envelope[FileMetaDataGet], - summary="Get File Metadata", + description="Get File Metadata", ) async def get_file_metadata(location_id: LocationID, file_id: StorageFileIDStr): """returns the file meta data of file_id if user_id has the rights to""" @@ -113,7 +139,7 @@ async def get_file_metadata(location_id: LocationID, file_id: StorageFileIDStr): @router.get( "/storage/locations/{location_id}/files/{file_id}", response_model=Envelope[PresignedLink], - summary="Returns download link for requested file", + description="Returns download link for requested file", ) async def download_file( location_id: LocationID, @@ -126,7 +152,7 @@ async def download_file( @router.put( "/storage/locations/{location_id}/files/{file_id}", response_model=Envelope[FileUploadSchema] | Envelope[AnyUrl], - summary="Returns upload link", + description="Returns upload link", ) async def upload_file( location_id: LocationID, @@ -141,7 +167,7 @@ async def upload_file( @router.delete( "/storage/locations/{location_id}/files/{file_id}", status_code=status.HTTP_204_NO_CONTENT, - summary="Deletes File", + description="Deletes File", ) async def delete_file(location_id: LocationID, file_id: StorageFileIDStr): """deletes file if user has the rights to""" @@ -172,9 +198,27 @@ async def complete_upload_file( @router.post( "/storage/locations/{location_id}/files/{file_id}:complete/futures/{future_id}", response_model=Envelope[FileUploadCompleteFutureResponse], - summary="Check for upload completion", + description="Check for upload completion", ) async def is_completed_upload_file( location_id: LocationID, file_id: StorageFileIDStr, future_id: str ): """Returns state of upload completion""" + + +# data export +_data_export_responses: dict[int | str, dict[str, Any]] = { + i.status_code: {"model": EnvelopedError} + for i in data_export_http_error_map.values() +} + + +@router.post( + "/storage/locations/{location_id}/export-data", + response_model=Envelope[TaskGet], + name="export_data", + description="Export data", + responses=_data_export_responses, +) +async def export_data(data_export: DataExportPost, location_id: LocationID): + """Trigger data 
export. Returns async job id for getting status and results""" diff --git a/api/specs/web-server/_tasks.py b/api/specs/web-server/_tasks.py deleted file mode 100644 index a2c6fbe1402..00000000000 --- a/api/specs/web-server/_tasks.py +++ /dev/null @@ -1,43 +0,0 @@ -from fastapi import APIRouter -from simcore_service_webserver._meta import API_VTAG - -router = APIRouter( - prefix=f"/{API_VTAG}", - tags=[ - "tasks", - ], -) - - -@router.get("/tasks", response_model=List[TasksGetResponse]) -def list_tasks() -> List[TasksGetResponse]: - pass - - -@router.get( - "/tasks/{task_id}", - response_model=TasksTaskIdGetResponse, - responses={"default": {"model": TasksTaskIdGetResponse1}}, -) -def get_task_status( - task_id: str, -) -> Union[TasksTaskIdGetResponse, TasksTaskIdGetResponse1]: - pass - - -@router.delete( - "/tasks/{task_id}", - response_model=None, - responses={"default": {"model": TasksTaskIdDeleteResponse}}, -) -def cancel_and_delete_task(task_id: str) -> Union[None, TasksTaskIdDeleteResponse]: - pass - - -@router.get( - "/tasks/{task_id}/result", - response_model=None, - responses={"default": {"model": TasksTaskIdResultGetResponse}}, -) -def get_task_result(task_id: str) -> Union[None, TasksTaskIdResultGetResponse]: - pass diff --git a/api/specs/web-server/_trash.py b/api/specs/web-server/_trash.py index 8f0f5ea6086..7ec30e777bc 100644 --- a/api/specs/web-server/_trash.py +++ b/api/specs/web-server/_trash.py @@ -8,13 +8,17 @@ from typing import Annotated from fastapi import APIRouter, Depends, status +from models_library.rest_error import EnvelopedError from models_library.trash import RemoveQueryParams from simcore_service_webserver._meta import API_VTAG from simcore_service_webserver.folders._common.models import ( FoldersPathParams, FolderTrashQueryParams, ) -from simcore_service_webserver.projects._trash_rest import ProjectPathParams +from simcore_service_webserver.projects._controller._rest_exceptions import ( + _TO_HTTP_ERROR_MAP, +) +from simcore_service_webserver.projects._controller.trash_rest import ProjectPathParams from simcore_service_webserver.workspaces._common.models import ( WorkspacesPathParams, WorkspaceTrashQueryParams, ) @@ -23,15 +27,17 @@ router = APIRouter( prefix=f"/{API_VTAG}", tags=["trash"], + responses={ + i.status_code: {"model": EnvelopedError} for i in _TO_HTTP_ERROR_MAP.values() + }, ) -@router.delete( - "/trash", +@router.post( + "/trash:empty", status_code=status.HTTP_204_NO_CONTENT, ) -def empty_trash(): - ... +def empty_trash(): ... _extra_tags: list[str | Enum] = ["projects"] @@ -42,18 +48,24 @@ def empty_trash(): tags=_extra_tags, status_code=status.HTTP_204_NO_CONTENT, responses={ - status.HTTP_404_NOT_FOUND: {"description": "Not such a project"}, + status.HTTP_404_NOT_FOUND: { + "description": "No such project", + "model": EnvelopedError, + }, status.HTTP_409_CONFLICT: { - "description": "Project is in use and cannot be trashed" + "description": "Project is in use and cannot be trashed", + "model": EnvelopedError, + }, + status.HTTP_503_SERVICE_UNAVAILABLE: { + "description": "Trash service error", + "model": EnvelopedError, + }, - status.HTTP_503_SERVICE_UNAVAILABLE: {"description": "Trash service error"}, }, ) def trash_project( _path: Annotated[ProjectPathParams, Depends()], _query: Annotated[RemoveQueryParams, Depends()], -): - ... +): ... @router.post( @@ -63,8 +75,7 @@ def untrash_project( _path: Annotated[ProjectPathParams, Depends()], -): - ... +): ...
_extra_tags = ["folders"] @@ -85,8 +96,7 @@ def untrash_project( def trash_folder( _path: Annotated[FoldersPathParams, Depends()], _query: Annotated[FolderTrashQueryParams, Depends()], -): - ... +): ... @router.post( @@ -96,8 +106,7 @@ def trash_folder( ) def untrash_folder( _path: Annotated[FoldersPathParams, Depends()], -): - ... +): ... _extra_tags = ["workspaces"] @@ -118,8 +127,7 @@ def untrash_folder( def trash_workspace( _path: Annotated[WorkspacesPathParams, Depends()], _query: Annotated[WorkspaceTrashQueryParams, Depends()], -): - ... +): ... @router.post( @@ -129,5 +137,4 @@ def trash_workspace( ) def untrash_workspace( _path: Annotated[WorkspacesPathParams, Depends()], -): - ... +): ... diff --git a/api/specs/web-server/_users.py b/api/specs/web-server/_users.py index 89d5eaaba2f..d0d733a01e3 100644 --- a/api/specs/web-server/_users.py +++ b/api/specs/web-server/_users.py @@ -39,26 +39,14 @@ "/me", response_model=Envelope[MyProfileGet], ) -async def get_my_profile(): - ... +async def get_my_profile(): ... @router.patch( "/me", status_code=status.HTTP_204_NO_CONTENT, ) -async def update_my_profile(_body: MyProfilePatch): - ... - - -@router.put( - "/me", - status_code=status.HTTP_204_NO_CONTENT, - deprecated=True, - description="Use PATCH instead", -) -async def replace_my_profile(_body: MyProfilePatch): - ... +async def update_my_profile(_body: MyProfilePatch): ... @router.patch( @@ -68,16 +56,14 @@ async def replace_my_profile(_body: MyProfilePatch): async def set_frontend_preference( preference_id: PreferenceIdentifier, _body: PatchRequestBody, -): - ... +): ... @router.get( "/me/tokens", response_model=Envelope[list[MyTokenGet]], ) -async def list_tokens(): - ... +async def list_tokens(): ... @router.post( @@ -85,8 +71,7 @@ async def list_tokens(): response_model=Envelope[MyTokenGet], status_code=status.HTTP_201_CREATED, ) -async def create_token(_body: MyTokenCreate): - ... +async def create_token(_body: MyTokenCreate): ... @router.get( @@ -95,24 +80,21 @@ async def create_token(_body: MyTokenCreate): ) async def get_token( _path: Annotated[_TokenPathParams, Depends()], -): - ... +): ... @router.delete( "/me/tokens/{service}", status_code=status.HTTP_204_NO_CONTENT, ) -async def delete_token(_path: Annotated[_TokenPathParams, Depends()]): - ... +async def delete_token(_path: Annotated[_TokenPathParams, Depends()]): ... @router.get( "/me/notifications", response_model=Envelope[list[UserNotification]], ) -async def list_user_notifications(): - ... +async def list_user_notifications(): ... @router.post( @@ -121,8 +103,7 @@ async def list_user_notifications(): ) async def create_user_notification( _body: UserNotificationCreate, -): - ... +): ... @router.patch( @@ -132,16 +113,14 @@ async def create_user_notification( async def mark_notification_as_read( _path: Annotated[_NotificationPathParams, Depends()], _body: UserNotificationPatch, -): - ... +): ... @router.get( "/me/permissions", response_model=Envelope[list[MyPermissionGet]], ) -async def list_user_permissions(): - ... +async def list_user_permissions(): ... # @@ -154,8 +133,7 @@ async def list_user_permissions(): response_model=Envelope[list[UserGet]], description="Search among users who are publicly visible to the caller (i.e., me) based on their privacy settings.", ) -async def search_users(_body: UsersSearch): - ... +async def search_users(_body: UsersSearch): ... 
# @@ -171,7 +149,7 @@ async def search_users(_body: UsersSearch): tags=_extra_tags, ) async def search_users_for_admin( - _query: Annotated[UsersForAdminSearchQueryParams, Depends()] + _query: Annotated[UsersForAdminSearchQueryParams, Depends()], ): # NOTE: see `Search` in `Common Custom Methods` in https://cloud.google.com/apis/design/custom_methods ... @@ -182,5 +160,4 @@ async def search_users_for_admin( response_model=Envelope[UserForAdminGet], tags=_extra_tags, ) -async def pre_register_user_for_admin(_body: PreRegisteredUserGet): - ... +async def pre_register_user_for_admin(_body: PreRegisteredUserGet): ... diff --git a/api/specs/web-server/_version_control.py b/api/specs/web-server/_version_control.py deleted file mode 100644 index 946c496b90e..00000000000 --- a/api/specs/web-server/_version_control.py +++ /dev/null @@ -1,89 +0,0 @@ -# pylint: disable=redefined-outer-name -# pylint: disable=unused-argument -# pylint: disable=unused-variable -# pylint: disable=too-many-arguments - - -from typing import Annotated, Literal - -from fastapi import APIRouter, Depends -from models_library.generics import Envelope -from models_library.projects import ProjectID -from models_library.rest_pagination import Page, PageQueryParameters -from simcore_service_webserver._meta import API_VTAG -from simcore_service_webserver.version_control.models import ( - CheckpointAnnotations, - CheckpointApiModel, - CheckpointNew, - RefID, - RepoApiModel, - WorkbenchViewApiModel, -) - -router = APIRouter( - prefix=f"/{API_VTAG}", - tags=[ - "repository", - ], -) - - -@router.get("/repos/projects", response_model=Page[RepoApiModel]) -def list_repos(_query_params: Annotated[PageQueryParameters, Depends()]): - ... - - -@router.get( - "/repos/projects/{project_uuid}/checkpoints", - response_model=Page[CheckpointApiModel], -) -def list_checkpoints( - project_uuid: ProjectID, _query_params: Annotated[PageQueryParameters, Depends()] -): - ... - - -@router.post( - "/repos/projects/{project_uuid}/checkpoints", - response_model=Envelope[CheckpointApiModel], -) -def create_checkpoint(project_uuid: ProjectID, _new: CheckpointNew): - ... - - -@router.get( - "/repos/projects/{project_uuid}/checkpoints/{ref_id}", - response_model=Envelope[CheckpointApiModel], -) -def get_checkpoint(ref_id: RefID | Literal["HEAD"], project_uuid: ProjectID): - ... - - -@router.patch( - "/repos/projects/{project_uuid}/checkpoints/{ref_id}", - response_model=Envelope[CheckpointApiModel], -) -def update_checkpoint( - ref_id: RefID, - project_uuid: ProjectID, - _update: CheckpointAnnotations, -): - """ - Update Checkpoint Annotations - """ - - -@router.get( - "/repos/projects/{project_uuid}/checkpoints/{ref_id}/workbench/view", - response_model=Envelope[WorkbenchViewApiModel], -) -def view_project_workbench(ref_id: RefID, project_uuid: ProjectID): - ... - - -@router.post( - "/repos/projects/{project_uuid}/checkpoints/{ref_id}:checkout", - response_model=Envelope[CheckpointApiModel], -) -def checkout(ref_id: RefID, project_uuid: ProjectID): - ... 
diff --git a/api/specs/web-server/openapi.py b/api/specs/web-server/openapi.py index cfcaf183591..ebf96081107 100644 --- a/api/specs/web-server/openapi.py +++ b/api/specs/web-server/openapi.py @@ -39,10 +39,9 @@ "_licensed_items", "_licensed_items_purchases", "_licensed_items_checkouts", - "_metamodeling", "_nih_sparc", "_nih_sparc_redirections", - "_projects_crud", + "_projects", "_projects_comments", "_projects_folders", "_projects_groups", @@ -59,7 +58,6 @@ "_statics", "_storage", "_trash", - "_version_control", "_workspaces", # maintenance ---- "_admin", diff --git a/api/specs/web-server/requirements.txt b/api/specs/web-server/requirements.txt index 62bea8dd0a9..8ffca6a489d 100644 --- a/api/specs/web-server/requirements.txt +++ b/api/specs/web-server/requirements.txt @@ -3,6 +3,7 @@ --constraint ../../../requirements/constraints.txt fastapi +fastapi-pagination jsonref pydantic pydantic-extra-types diff --git a/api/tests/requirements.txt b/api/tests/requirements.txt index 28b389e0d4d..1bbadfb1796 100644 --- a/api/tests/requirements.txt +++ b/api/tests/requirements.txt @@ -1,6 +1,6 @@ -aiohappyeyeballs==2.4.4 +aiohappyeyeballs==2.4.6 # via aiohttp -aiohttp==3.11.11 +aiohttp==3.11.13 # via # -c ../../requirements/constraints.txt # -r requirements.in @@ -11,13 +11,13 @@ attrs==25.1.0 # aiohttp # jsonschema # referencing -certifi==2024.12.14 +certifi==2025.1.31 # via # -c ../../requirements/constraints.txt # requests charset-normalizer==3.4.1 # via requests -coverage==7.6.10 +coverage==7.6.12 # via # -r requirements.in # pytest-cov @@ -74,11 +74,11 @@ pathable==0.4.4 # via jsonschema-path pluggy==1.5.0 # via pytest -propcache==0.2.1 +propcache==0.3.0 # via # aiohttp # yarl -pytest==8.3.4 +pytest==8.3.5 # via # -r requirements.in # pytest-asyncio @@ -109,7 +109,7 @@ requests==2.32.3 # via jsonschema-path rfc3339-validator==0.1.4 # via openapi-schema-validator -rpds-py==0.22.3 +rpds-py==0.23.1 # via # jsonschema # referencing diff --git a/ci/github/integration-testing/docker-api-proxy.bash b/ci/github/integration-testing/docker-api-proxy.bash new file mode 100755 index 00000000000..c7ad9775c07 --- /dev/null +++ b/ci/github/integration-testing/docker-api-proxy.bash @@ -0,0 +1,40 @@ +#!/bin/bash +# http://redsymbol.net/articles/unofficial-bash-strict-mode/ +set -o errexit # abort on nonzero exitstatus +set -o nounset # abort on unbound variable +set -o pipefail # don't hide errors within pipes +IFS=$'\n\t' + +install() { + make devenv + # shellcheck source=/dev/null + source .venv/bin/activate + pushd services/docker-api-proxy + make install-ci + popd + uv pip list + make info-images +} + +test() { + # shellcheck source=/dev/null + source .venv/bin/activate + pushd services/docker-api-proxy + make test-ci-integration + popd +} + +clean_up() { + docker images + make down +} + +# Check if the function exists (bash specific) +if declare -f "$1" >/dev/null; then + # call arguments verbatim + "$@" +else + # Show a helpful error + echo "'$1' is not a known function name" >&2 + exit 1 +fi diff --git a/ci/github/unit-testing/storage.bash b/ci/github/unit-testing/storage.bash index 34b2a358ebe..3295e4d6895 100755 --- a/ci/github/unit-testing/storage.bash +++ b/ci/github/unit-testing/storage.bash @@ -19,7 +19,7 @@ test() { # shellcheck source=/dev/null source .venv/bin/activate pushd services/storage - make test-ci-unit + make test-ci-unit pytest-parameters="--disk-usage" popd } diff --git a/docs/controller-service-repository.drawio.svg b/docs/controller-service-repository.drawio.svg new file mode 100644 
index 00000000000..6a0e6ae91d2 --- /dev/null +++ b/docs/controller-service-repository.drawio.svg @@ -0,0 +1,479 @@
+[479-line drawio SVG; the markup did not survive extraction, only its text labels. The diagram shows a layered web APP — CONTROLLER, SERVICE, REPOSITORY (Persistence) — annotated "Dependencies go inwards". Domain A exposes a rest api (schema-model, http-errors) plus a service api and repository api (io: domain-model, domain-errors); Domain B exposes rest and rpc apis (io: schema-model, http-errors / rpc-exceptions) on top of its own service and repository apis; OTHER web APPs attach through their own CONTROLLER layer.]
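The diagram mirrors the module renames elsewhere in this PR (`projects/_crud_handlers.py` → `projects/_controller/*_rest.py`, `_groups_api` → `_groups_service`): controllers translate schema models and HTTP errors at the edge, services and repositories exchange domain models and domain errors, and imports point only inwards. A toy illustration of that rule — class and function names are invented, not the webserver's actual code:

```python
class ProjectNotFoundError(Exception):
    """domain error: raised by the inner layers, unknown to HTTP"""


class ProjectRepository:
    """REPOSITORY: persistence only; io is domain models / domain errors"""

    def __init__(self) -> None:
        self._rows = {"p1": "My Project"}  # stands in for a database table

    def get_name(self, project_id: str) -> str:
        try:
            return self._rows[project_id]
        except KeyError as exc:
            raise ProjectNotFoundError(project_id) from exc


class ProjectService:
    """SERVICE: domain logic; depends inwards on the repository"""

    def __init__(self, repo: ProjectRepository) -> None:
        self._repo = repo

    def get_project_name(self, project_id: str) -> str:
        return self._repo.get_name(project_id)


def get_project_name_controller(project_id: str) -> dict:
    """CONTROLLER (rest api): schema models in, http errors out"""
    service = ProjectService(ProjectRepository())
    try:
        return {"data": {"name": service.get_project_name(project_id)}}
    except ProjectNotFoundError:
        return {"error": {"status": 404, "message": "Project not found"}}
```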
diff --git a/docs/messages-guidelines.md b/docs/messages-guidelines.md new file mode 100644 index 00000000000..cc07d2c2d1f --- /dev/null +++ b/docs/messages-guidelines.md @@ -0,0 +1,134 @@ +# Error and Warning Message Guidelines + +These guidelines ensure that messages are user-friendly, clear, and helpful while maintaining a professional tone. 🚀 + +Some details: + +- Originated from [guidelines](https://wiki.speag.com/projects/SuperMash/wiki/Concepts/GUI) by @eofli and refined by iterating with AI +- Here’s the fully expanded and rewritten list of **error and warning message guidelines**, each with: - A **guideline** - A **rationale** - A ❌ **bad example** - A ✅ **good example** - A **reference** +- This list is intended to be short enough for humans to read and understand, yet complete enough to be used as context for automatic correction of error/warning messages + +--- + +## 1. Be Clear and Concise + +- **Guideline:** Use straightforward language to describe the issue without unnecessary words. +- **Rationale:** Users can quickly understand the problem and take corrective action when messages are simple and to the point. +- ❌ **Bad Example:** + `"An error has occurred due to an unexpected input that couldn't be parsed correctly."` +- ✅ **Good Example:** + `"We couldn't process your request. Please check your input and try again."` +- **[Reference](https://uxwritinghub.com/error-message-examples/)** + +--- + +## 2. Provide Specific and Actionable Information + +- **Guideline:** Clearly state what went wrong and how the user can fix it. +- **Rationale:** Specific guidance helps users resolve issues efficiently, reducing frustration. +- ❌ **Bad Example:** + `"Something went wrong."` +- ✅ **Good Example:** + `"Your session has expired. Please log in again to continue."` +- **[Reference](https://www.nngroup.com/articles/error-message-guidelines/)** + +--- + +## 3. Avoid Technical Jargon + +- **Guideline:** Use plain language instead of technical terms or codes. +- **Rationale:** Non-technical users may not understand complex terminology, hindering their ability to resolve the issue. +- ❌ **Bad Example:** + `"Error 429: Too many requests per second."` +- ✅ **Good Example:** + `"You’ve made too many requests. Please wait a moment and try again."` +- **[Reference](https://cxl.com/blog/error-messages/)** + +--- + +## 4. Use a Polite and Non-Blaming Tone + +- **Guideline:** Frame messages in a way that doesn't place blame on the user. +- **Rationale:** A respectful tone maintains a positive user experience and encourages users to continue using the application. +- ❌ **Bad Example:** + `"You entered the wrong password."` +- ✅ **Good Example:** + `"The password doesn't match. Please try again."` +- **[Reference](https://atlassian.design/content/writing-guidelines/writing-error-messages/)** + +--- + +## 5. Avoid Negative Words and Phrases + +- **Guideline:** Steer clear of words like "error," "failed," "invalid," or "illegal." +- **Rationale:** Positive language reduces user anxiety and creates a more supportive experience. +- ❌ **Bad Example:** + `"Invalid email address."` +- ✅ **Good Example:** + `"The email address format doesn't look correct. Please check and try again."` +- **[Reference](https://atlassian.design/content/writing-guidelines/writing-error-messages/)** + +--- + +## 6. Place Messages Appropriately + +- **Guideline:** Display error messages near the relevant input field or in a clear, noticeable location.
+- **Rationale:** Proper placement ensures users notice the message and understand where the issue occurred. +- ❌ **Bad Example:** + Showing a generic "Form submission failed" message at the top of the page. +- ✅ **Good Example:** + Placing "Please enter a valid phone number" directly below the phone input field. +- **[Reference](https://www.smashingmagazine.com/2022/08/error-messages-ux-design/)** + +--- + +## 7. Use Inline Validation When Possible + +- **Guideline:** Provide real-time feedback as users interact with input fields. +- **Rationale:** Inline validation allows users to correct errors immediately, enhancing the flow and efficiency of the interaction. +- ❌ **Bad Example:** + Waiting until form submission to show all validation errors. +- ✅ **Good Example:** + Displaying "Password must be at least 8 characters" while the user types. +- **[Reference](https://cxl.com/blog/error-messages/)** + +--- + +## 8. Avoid Using All-Caps and Excessive Punctuation + +- **Guideline:** Refrain from writing messages in all capital letters or using multiple exclamation marks. +- **Rationale:** All-caps and excessive punctuation can be perceived as shouting, which may frustrate users. +- ❌ **Bad Example:** + `"INVALID INPUT!!!"` +- ✅ **Good Example:** + `"This input doesn't look correct. Please check and try again."` +- **[Reference](https://uxwritinghub.com/error-message-examples/)** + +--- + +## 9. Use Humor Sparingly + +- **Guideline:** Incorporate light-hearted language only when appropriate and aligned with the application's tone. +- **Rationale:** While humor can ease tension, it may not be suitable for all users or situations and can sometimes be misinterpreted. +- ❌ **Bad Example:** + `"Oopsie daisy! You broke something!"` +- ✅ **Good Example:** + `"Something went wrong. Try again, or contact support if the issue continues."` +- **[Reference](https://cxl.com/blog/error-messages/)** + +--- + +## 10. Offer Alternative Solutions or Support + +- **Guideline:** If the user cannot resolve the issue independently, provide a way to contact support or access help resources. +- **Rationale:** Offering support options ensures users don't feel stranded and can seek help to resolve their issues. +- ❌ **Bad Example:** + `"Access denied."` +- ✅ **Good Example:** + `"You don't have permission to view this page. 
Contact support if you think this is a mistake."` +- **[Reference](https://learn.microsoft.com/en-us/dynamics365/business-central/dev-itpro/developer/devenv-error-handling-guidelines/)** diff --git a/package-lock.json b/package-lock.json index a98cd889971..2bd1885319f 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,691 +1,961 @@ { + "name": "osparc-simcore", + "lockfileVersion": 3, "requires": true, - "lockfileVersion": 1, - "dependencies": { - "@babel/code-frame": { - "version": "7.8.3", - "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.8.3.tgz", - "integrity": "sha512-a9gxpmdXtZEInkCSHUJDLHZVBgb1QS0jhss4cPP93EW7s+uC5bikET2twEF3KV+7rDblJcmNvTR7VJejqd2C2g==", + "packages": { + "": { + "devDependencies": { + "@pact-foundation/pact-cli": "^16.0.4", + "babel-eslint": "^10.1.0", + "eslint": "^6.8.0", + "eslint-config-qx": "^0.0.1", + "eslint-plugin-qx-rules": "^0.1.0", + "puppeteer": "^1.19.0", + "puppeteer-to-istanbul": "^1.2.2", + "yargs": "^13.3.0", + "yargs-parser": ">=13.1.2" + } + }, + "node_modules/@babel/code-frame": { + "version": "7.26.2", + "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.26.2.tgz", + "integrity": "sha512-RJlIHRueQgwWitWgF8OdFYGZX328Ax5BCemNGlqHfplnRT9ESi8JkFlvaVYbS+UubVY6dpv87Fs2u5M29iNFVQ==", "dev": true, - "requires": { - "@babel/highlight": "^7.8.3" + "dependencies": { + "@babel/helper-validator-identifier": "^7.25.9", + "js-tokens": "^4.0.0", + "picocolors": "^1.0.0" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/code-frame/node_modules/js-tokens": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", + "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==", + "dev": true + }, + "node_modules/@babel/generator": { + "version": "7.26.5", + "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.26.5.tgz", + "integrity": "sha512-2caSP6fN9I7HOe6nqhtft7V4g7/V/gfDsC3Ag4W7kEzzvRGKqiv0pu0HogPiZ3KaVSoNDhUws6IJjDjpfmYIXw==", + "dev": true, + "dependencies": { + "@babel/parser": "^7.26.5", + "@babel/types": "^7.26.5", + "@jridgewell/gen-mapping": "^0.3.5", + "@jridgewell/trace-mapping": "^0.3.25", + "jsesc": "^3.0.2" + }, + "engines": { + "node": ">=6.9.0" } }, - "@babel/generator": { - "version": "7.8.8", - "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.8.8.tgz", - "integrity": "sha512-HKyUVu69cZoclptr8t8U5b6sx6zoWjh8jiUhnuj3MpZuKT2dJ8zPTuiy31luq32swhI0SpwItCIlU8XW7BZeJg==", + "node_modules/@babel/helper-string-parser": { + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.25.9.tgz", + "integrity": "sha512-4A/SCr/2KLd5jrtOMFzaKjVtAei3+2r/NChoBNoZ3EyP/+GlhoaEGoWOZUmFmoITP7zOJyHIMm+DYRd8o3PvHA==", "dev": true, - "requires": { - "@babel/types": "^7.8.7", - "jsesc": "^2.5.1", - "lodash": "^4.17.13", - "source-map": "^0.5.0" + "engines": { + "node": ">=6.9.0" } }, - "@babel/helper-function-name": { - "version": "7.8.3", - "resolved": "https://registry.npmjs.org/@babel/helper-function-name/-/helper-function-name-7.8.3.tgz", - "integrity": "sha512-BCxgX1BC2hD/oBlIFUgOCQDOPV8nSINxCwM3o93xP4P9Fq6aV5sgv2cOOITDMtCfQ+3PvHp3l689XZvAM9QyOA==", + "node_modules/@babel/helper-validator-identifier": { + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.25.9.tgz", + "integrity": 
"sha512-Ed61U6XJc3CVRfkERJWDz4dJwKe7iLmmJsbOGu9wSloNSFttHV0I8g6UAgb7qnK5ly5bGLPd4oXZlxCdANBOWQ==", "dev": true, - "requires": { - "@babel/helper-get-function-arity": "^7.8.3", - "@babel/template": "^7.8.3", - "@babel/types": "^7.8.3" + "engines": { + "node": ">=6.9.0" } }, - "@babel/helper-get-function-arity": { - "version": "7.8.3", - "resolved": "https://registry.npmjs.org/@babel/helper-get-function-arity/-/helper-get-function-arity-7.8.3.tgz", - "integrity": "sha512-FVDR+Gd9iLjUMY1fzE2SR0IuaJToR4RkCDARVfsBBPSP53GEqSFjD8gNyxg246VUyc/ALRxFaAK8rVG7UT7xRA==", + "node_modules/@babel/parser": { + "version": "7.26.7", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.26.7.tgz", + "integrity": "sha512-kEvgGGgEjRUutvdVvZhbn/BxVt+5VSpwXz1j3WYXQbXDo8KzFOPNG2GQbdAiNq8g6wn1yKk7C/qrke03a84V+w==", "dev": true, - "requires": { - "@babel/types": "^7.8.3" + "dependencies": { + "@babel/types": "^7.26.7" + }, + "bin": { + "parser": "bin/babel-parser.js" + }, + "engines": { + "node": ">=6.0.0" } }, - "@babel/helper-split-export-declaration": { - "version": "7.8.3", - "resolved": "https://registry.npmjs.org/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.8.3.tgz", - "integrity": "sha512-3x3yOeyBhW851hroze7ElzdkeRXQYQbFIb7gLK1WQYsw2GWDay5gAJNw1sWJ0VFP6z5J1whqeXH/WCdCjZv6dA==", + "node_modules/@babel/template": { + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.25.9.tgz", + "integrity": "sha512-9DGttpmPvIxBb/2uwpVo3dqJ+O6RooAFOS+lB+xDqoE2PVCE8nfoHMdZLpfCQRLwvohzXISPZcgxt80xLfsuwg==", "dev": true, - "requires": { - "@babel/types": "^7.8.3" + "dependencies": { + "@babel/code-frame": "^7.25.9", + "@babel/parser": "^7.25.9", + "@babel/types": "^7.25.9" + }, + "engines": { + "node": ">=6.9.0" } }, - "@babel/highlight": { - "version": "7.8.3", - "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.8.3.tgz", - "integrity": "sha512-PX4y5xQUvy0fnEVHrYOarRPXVWafSjTW9T0Hab8gVIawpl2Sj0ORyrygANq+KjcNlSSTw0YCLSNA8OyZ1I4yEg==", + "node_modules/@babel/traverse": { + "version": "7.26.7", + "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.26.7.tgz", + "integrity": "sha512-1x1sgeyRLC3r5fQOM0/xtQKsYjyxmFjaOrLJNtZ81inNjyJHGIolTULPiSc/2qe1/qfpFLisLQYFnnZl7QoedA==", "dev": true, - "requires": { - "chalk": "^2.0.0", - "esutils": "^2.0.2", - "js-tokens": "^4.0.0" + "dependencies": { + "@babel/code-frame": "^7.26.2", + "@babel/generator": "^7.26.5", + "@babel/parser": "^7.26.7", + "@babel/template": "^7.25.9", + "@babel/types": "^7.26.7", + "debug": "^4.3.1", + "globals": "^11.1.0" }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/traverse/node_modules/debug": { + "version": "4.4.0", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.0.tgz", + "integrity": "sha512-6WTZ/IxCY/T6BALoZHaE4ctp9xm+Z5kY/pzYaCHRFeyVhojxlrm+46y68HA6hr0TcwEssoxNiDEUJQjfPZ/RYA==", + "dev": true, "dependencies": { - "js-tokens": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", - "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==", - "dev": true + "ms": "^2.1.3" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true } } }, - "@babel/parser": { - "version": "7.8.8", - "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.8.8.tgz", - "integrity": 
"sha512-mO5GWzBPsPf6865iIbzNE0AvkKF3NE+2S3eRUpE+FE07BOAkXh6G+GW/Pj01hhXjve1WScbaIO4UlY1JKeqCcA==", + "node_modules/@babel/types": { + "version": "7.26.7", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.26.7.tgz", + "integrity": "sha512-t8kDRGrKXyp6+tjUh7hw2RLyclsW4TRoRvRHtSyAX9Bb5ldlFh+90YAYY6awRXrlB4G5G2izNeGySpATlFzmOg==", + "dev": true, + "dependencies": { + "@babel/helper-string-parser": "^7.25.9", + "@babel/helper-validator-identifier": "^7.25.9" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@jridgewell/gen-mapping": { + "version": "0.3.8", + "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.8.tgz", + "integrity": "sha512-imAbBGkb+ebQyxKgzv5Hu2nmROxoDOXHh80evxdoXNOrvAnVx7zimzc1Oo5h9RlfV4vPXaE2iM5pOFbvOCClWA==", + "dev": true, + "dependencies": { + "@jridgewell/set-array": "^1.2.1", + "@jridgewell/sourcemap-codec": "^1.4.10", + "@jridgewell/trace-mapping": "^0.3.24" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@jridgewell/resolve-uri": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz", + "integrity": "sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==", + "dev": true, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@jridgewell/set-array": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/@jridgewell/set-array/-/set-array-1.2.1.tgz", + "integrity": "sha512-R8gLRTZeyp03ymzP/6Lil/28tGeGEzhx1q2k703KGWRAI1VdvPIXdG70VJc2pAMw3NA6JKL5hhFu1sJX0Mnn/A==", + "dev": true, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@jridgewell/sourcemap-codec": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.0.tgz", + "integrity": "sha512-gv3ZRaISU3fjPAgNsriBRqGWQL6quFx04YMPW/zD8XMLsU32mhCCbfbO6KZFLjvYpCZ8zyDEgqsgf+PwPaM7GQ==", "dev": true }, - "@babel/template": { - "version": "7.8.6", - "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.8.6.tgz", - "integrity": "sha512-zbMsPMy/v0PWFZEhQJ66bqjhH+z0JgMoBWuikXybgG3Gkd/3t5oQ1Rw2WQhnSrsOmsKXnZOx15tkC4qON/+JPg==", + "node_modules/@jridgewell/trace-mapping": { + "version": "0.3.25", + "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.25.tgz", + "integrity": "sha512-vNk6aEwybGtawWmy/PzwnGDOjCkLWSD2wqvjGGAgOAwCGWySYXfYoxt00IJkTF+8Lb57DwOb3Aa0o9CApepiYQ==", + "dev": true, + "dependencies": { + "@jridgewell/resolve-uri": "^3.1.0", + "@jridgewell/sourcemap-codec": "^1.4.14" + } + }, + "node_modules/@pact-foundation/pact-cli": { + "version": "16.0.4", + "resolved": "https://registry.npmjs.org/@pact-foundation/pact-cli/-/pact-cli-16.0.4.tgz", + "integrity": "sha512-qXzJUnXb6XMZyiXwfKgRwUQfpS61uSiLguR2hQWC3m+RrdnzrYug+YBHoACdIlvH6Lj/SQ86/26UJ4Z9V+OYMw==", + "cpu": [ + "x64", + "arm64" + ], + "dev": true, + "os": [ + "darwin", + "linux", + "win32" + ], + "dependencies": { + "chalk": "4.1.2", + "check-types": "11.2.3", + "cross-spawn": "7.0.5", + "mkdirp": "3.0.1", + "needle": "^3.3.1", + "pino": "^9.5.0", + "pino-pretty": "^13.0.0", + "promise-timeout": "1.3.0", + "rimraf": "4.4.1", + "underscore": "1.13.7" + }, + "bin": { + "pact": "bin/pact.js", + "pact-broker": "bin/pact-broker.js", + "pact-message": "bin/pact-message.js", + "pact-mock-service": "bin/pact-mock-service.js", + "pact-provider-verifier": "bin/pact-provider-verifier.js", + "pact-stub-service": "bin/pact-stub-service.js", + "pactflow": "bin/pactflow.js" + 
}, + "engines": { + "node": ">=16" + }, + "optionalDependencies": { + "@pact-foundation/pact-cli-darwin-arm64": "16.0.4", + "@pact-foundation/pact-cli-darwin-x64": "16.0.4", + "@pact-foundation/pact-cli-linux-arm64": "16.0.4", + "@pact-foundation/pact-cli-linux-x64": "16.0.4", + "@pact-foundation/pact-cli-windows-x64": "16.0.4" + } + }, + "node_modules/@pact-foundation/pact-cli-darwin-arm64": { + "version": "16.0.4", + "resolved": "https://registry.npmjs.org/@pact-foundation/pact-cli-darwin-arm64/-/pact-cli-darwin-arm64-16.0.4.tgz", + "integrity": "sha512-WWAZn+3HrnItVXqh04e99DgCdiW2T6I4ZRg3MPC5HeOQ3aowspPa1+VSoPMhM7txG0ZkmiQUbBiXPJjebhYLwg==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "darwin" + ] + }, + "node_modules/@pact-foundation/pact-cli-darwin-x64": { + "version": "16.0.4", + "resolved": "https://registry.npmjs.org/@pact-foundation/pact-cli-darwin-x64/-/pact-cli-darwin-x64-16.0.4.tgz", + "integrity": "sha512-THSBPlwA3boHUlxMAyv11H6RPXYEiNas2D/PmFlwgWqRjNsLxC52wUCimBPMFiRgAZEuMVbyb4spQI4+UqZe9Q==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "darwin" + ] + }, + "node_modules/@pact-foundation/pact-cli-linux-arm64": { + "version": "16.0.4", + "resolved": "https://registry.npmjs.org/@pact-foundation/pact-cli-linux-arm64/-/pact-cli-linux-arm64-16.0.4.tgz", + "integrity": "sha512-e4tLUlUJgK2vJG1OlaVx2oJRnFERdMPryVuvkVnJ9Lbd8RLT07s5i10A92rOUkSVYOM0BV6Ulp7GY0brFg4ZMg==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@pact-foundation/pact-cli-linux-x64": { + "version": "16.0.4", + "resolved": "https://registry.npmjs.org/@pact-foundation/pact-cli-linux-x64/-/pact-cli-linux-x64-16.0.4.tgz", + "integrity": "sha512-VjEOjStCDR+kCy9WHg8k8nW4zZMqbPaCTKn5xBhgTdG/b1xTc29HZAb2Q/+XHwK8AB3Yi6+BDMeIEp/JBeRy9Q==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@pact-foundation/pact-cli-windows-x64": { + "version": "16.0.4", + "resolved": "https://registry.npmjs.org/@pact-foundation/pact-cli-windows-x64/-/pact-cli-windows-x64-16.0.4.tgz", + "integrity": "sha512-xvVx/xXYPIjuR1PhK+VxiksnQfmq0h6z4WGYZcS3c6ygSMvvcBZL7ZT5zLr+LEF6EnSnD7eQr2QK7cY6YX5ugg==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@pact-foundation/pact-cli/node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/@pact-foundation/pact-cli/node_modules/brace-expansion": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", + "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", "dev": true, - "requires": { - "@babel/code-frame": "^7.8.3", - "@babel/parser": "^7.8.6", - "@babel/types": "^7.8.6" + "dependencies": { + "balanced-match": "^1.0.0" } }, - "@babel/traverse": { - "version": "7.8.6", - "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.8.6.tgz", - "integrity": "sha512-2B8l0db/DPi8iinITKuo7cbPznLCEk0kCxDoB9/N6gGNg/gxOXiR/IcymAFPiBwk5w6TtQ27w4wpElgp9btR9A==", + 
"node_modules/@pact-foundation/pact-cli/node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", "dev": true, - "requires": { - "@babel/code-frame": "^7.8.3", - "@babel/generator": "^7.8.6", - "@babel/helper-function-name": "^7.8.3", - "@babel/helper-split-export-declaration": "^7.8.3", - "@babel/parser": "^7.8.6", - "@babel/types": "^7.8.6", - "debug": "^4.1.0", - "globals": "^11.1.0", - "lodash": "^4.17.13" - }, - "dependencies": { - "debug": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.1.1.tgz", - "integrity": "sha512-pYAIzeRo8J6KPEaJ0VWOh5Pzkbw/RetuzehGM7QRRX5he4fPHx2rdKMB256ehJCkX+XRQm16eZLqLNS8RSZXZw==", - "dev": true, - "requires": { - "ms": "^2.1.1" - } - } + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" } }, - "@babel/types": { - "version": "7.8.7", - "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.8.7.tgz", - "integrity": "sha512-k2TreEHxFA4CjGkL+GYjRyx35W0Mr7DP5+9q6WMkyKXB+904bYmG40syjMFV0oLlhhFCwWl0vA0DyzTDkwAiJw==", + "node_modules/@pact-foundation/pact-cli/node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", "dev": true, - "requires": { - "esutils": "^2.0.2", - "lodash": "^4.17.13", - "to-fast-properties": "^2.0.0" + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/@pact-foundation/pact-cli/node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "node_modules/@pact-foundation/pact-cli/node_modules/cross-spawn": { + "version": "7.0.5", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.5.tgz", + "integrity": "sha512-ZVJrKKYunU38/76t0RMOulHOnUcbU9GbpWKAOZ0mhjr7CX6FVrH+4FrAapSOekrgFQ3f/8gwMEuIft0aKq6Hug==", + "dev": true, + "dependencies": { + "path-key": "^3.1.0", + "shebang-command": "^2.0.0", + "which": "^2.0.1" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/@pact-foundation/pact-cli/node_modules/glob": { + "version": "9.3.5", + "resolved": "https://registry.npmjs.org/glob/-/glob-9.3.5.tgz", + "integrity": "sha512-e1LleDykUz2Iu+MTYdkSsuWX8lvAjAcs0Xef0lNIu0S2wOAzuTxCJtcd9S3cijlwYF18EsU3rzb8jPVobxDh9Q==", + "dev": true, + "dependencies": { + "fs.realpath": "^1.0.0", + "minimatch": "^8.0.2", + "minipass": "^4.2.4", + "path-scurry": "^1.6.1" + }, + "engines": { + "node": ">=16 || 14 >=14.17" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/@pact-foundation/pact-cli/node_modules/has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/@pact-foundation/pact-cli/node_modules/minimatch": { + "version": "8.0.4", + "resolved": 
"https://registry.npmjs.org/minimatch/-/minimatch-8.0.4.tgz", + "integrity": "sha512-W0Wvr9HyFXZRGIDgCicunpQ299OKXs9RgZfaukz4qAW/pJhcpUfupc9c+OObPOFueNy8VSrZgEmDtk6Kh4WzDA==", + "dev": true, + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=16 || 14 >=14.17" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/@pact-foundation/pact-cli/node_modules/mkdirp": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-3.0.1.tgz", + "integrity": "sha512-+NsyUUAZDmo6YVHzL/stxSu3t9YS1iljliy3BSDrXJ/dkn1KYdmtZODGGjLcc9XLgVVpH4KshHB8XmZgMhaBXg==", + "dev": true, + "bin": { + "mkdirp": "dist/cjs/src/bin.js" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/@pact-foundation/pact-cli/node_modules/path-key": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", + "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/@pact-foundation/pact-cli/node_modules/rimraf": { + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-4.4.1.tgz", + "integrity": "sha512-Gk8NlF062+T9CqNGn6h4tls3k6T1+/nXdOcSZVikNVtlRdYpA7wRJJMoXmuvOnLW844rPjdQ7JgXCYM6PPC/og==", + "dev": true, + "dependencies": { + "glob": "^9.2.0" + }, + "bin": { + "rimraf": "dist/cjs/src/bin.js" + }, + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/@pact-foundation/pact-cli/node_modules/shebang-command": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", + "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", + "dev": true, + "dependencies": { + "shebang-regex": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/@pact-foundation/pact-cli/node_modules/shebang-regex": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", + "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/@pact-foundation/pact-cli/node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/@pact-foundation/pact-cli/node_modules/which": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", + "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", + "dev": true, + "dependencies": { + "isexe": "^2.0.0" + }, + "bin": { + "node-which": "bin/node-which" + }, + "engines": { + "node": ">= 8" } }, - "@types/color-name": { + "node_modules/@types/color-name": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/@types/color-name/-/color-name-1.1.1.tgz", "integrity": "sha512-rr+OQyAjxze7GgWrSaJwydHStIhHq2lvY3BOC2Mj7KnzI7XK0Uw1TOOdI9lDoajEbSWLiYgoo4f1R51erQfhPQ==", "dev": true }, - "acorn": { + "node_modules/acorn": { "version": "7.1.1", 
"resolved": "https://registry.npmjs.org/acorn/-/acorn-7.1.1.tgz", "integrity": "sha512-add7dgA5ppRPxCFJoAGfMDi7PIBXq1RtGo7BhbLaxwrXPOmw8gq48Y9ozT01hUKy9byMjlR20EJhu5zlkErEkg==", - "dev": true + "dev": true, + "bin": { + "acorn": "bin/acorn" + }, + "engines": { + "node": ">=0.4.0" + } }, - "acorn-jsx": { + "node_modules/acorn-jsx": { "version": "5.2.0", "resolved": "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-5.2.0.tgz", "integrity": "sha512-HiUX/+K2YpkpJ+SzBffkM/AQ2YE03S0U1kjTLVpoJdhZMOWy8qvXVN9JdLqv2QsaQ6MPYQIuNmwD8zOiYUofLQ==", - "dev": true + "dev": true, + "peerDependencies": { + "acorn": "^6.0.0 || ^7.0.0" + } }, - "agent-base": { + "node_modules/agent-base": { "version": "4.3.0", "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-4.3.0.tgz", "integrity": "sha512-salcGninV0nPrwpGNn4VTXBb1SOuXQBiqbrNXoeizJsHrsL6ERFM2Ne3JUSBWRE6aeNJI2ROP/WEEIDUiDe3cg==", "dev": true, - "requires": { + "dependencies": { "es6-promisify": "^5.0.0" + }, + "engines": { + "node": ">= 4.0.0" } }, - "ajv": { - "version": "6.12.0", - "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.0.tgz", - "integrity": "sha512-D6gFiFA0RRLyUbvijN74DWAjXSFxWKaWP7mldxkVhyhAV3+SWA9HEJPHQ2c9soIeTFJqcSdFDGFgdqs1iUU2Hw==", + "node_modules/ajv": { + "version": "6.12.6", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", + "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", "dev": true, - "requires": { + "dependencies": { "fast-deep-equal": "^3.1.1", "fast-json-stable-stringify": "^2.0.0", "json-schema-traverse": "^0.4.1", "uri-js": "^4.2.2" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/epoberezkin" } }, - "ajv-keywords": { - "version": "1.5.1", - "resolved": "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-1.5.1.tgz", - "integrity": "sha1-MU3QpLM2j609/NxU7eYXG4htrzw=", - "dev": true - }, - "ansi-escapes": { + "node_modules/ansi-escapes": { "version": "4.3.1", "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-4.3.1.tgz", "integrity": "sha512-JWF7ocqNrp8u9oqpgV+wH5ftbt+cfvv+PTjOvKLT3AdYly/LmORARfEVT1iyjwN+4MqE5UmVKoAdIBqeoCHgLA==", "dev": true, - "requires": { + "dependencies": { "type-fest": "^0.11.0" }, - "dependencies": { - "type-fest": { - "version": "0.11.0", - "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.11.0.tgz", - "integrity": "sha512-OdjXJxnCN1AvyLSzeKIgXTXxV+99ZuXl3Hpo9XpJAv9MBcHrrJOQ5kV7ypXOuQie+AmWG25hLbiKdwYTifzcfQ==", - "dev": true - } + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" } }, - "ansi-regex": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.0.tgz", - "integrity": "sha512-bY6fj56OUQ0hU1KjFNDQuJFezqKdrAyFdIevADiqrWHwSlbmBNMHp5ak2f40Pm8JTFyM2mqxkG6ngkHO11f/lg==", - "dev": true + "node_modules/ansi-escapes/node_modules/type-fest": { + "version": "0.11.0", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.11.0.tgz", + "integrity": "sha512-OdjXJxnCN1AvyLSzeKIgXTXxV+99ZuXl3Hpo9XpJAv9MBcHrrJOQ5kV7ypXOuQie+AmWG25hLbiKdwYTifzcfQ==", + "dev": true, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/ansi-regex": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "dev": true, + "engines": { + "node": 
">=8" + } }, - "ansi-styles": { + "node_modules/ansi-styles": { "version": "3.2.1", "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==", "dev": true, - "requires": { + "dependencies": { "color-convert": "^1.9.0" + }, + "engines": { + "node": ">=4" } }, - "argparse": { + "node_modules/argparse": { "version": "1.0.10", "resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz", "integrity": "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==", "dev": true, - "requires": { + "dependencies": { "sprintf-js": "~1.0.2" } }, - "astral-regex": { + "node_modules/astral-regex": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/astral-regex/-/astral-regex-1.0.0.tgz", "integrity": "sha512-+Ryf6g3BKoRc7jfp7ad8tM4TtMiaWvbF/1/sQcZPkkS7ag3D5nMBCe2UfOTONtAkaG0tO0ij3C5Lwmf1EiyjHg==", - "dev": true + "dev": true, + "engines": { + "node": ">=4" + } }, - "async-limiter": { + "node_modules/async-limiter": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/async-limiter/-/async-limiter-1.0.1.tgz", "integrity": "sha512-csOlWGAcRFJaI6m+F2WKdnMKr4HhdhFVBk0H/QbJFMCr+uO2kwohwXQPxw/9OCxp05r5ghVBFSyioixx3gfkNQ==", "dev": true }, - "babel-code-frame": { - "version": "6.26.0", - "resolved": "https://registry.npmjs.org/babel-code-frame/-/babel-code-frame-6.26.0.tgz", - "integrity": "sha1-Y/1D99weO7fONZR9uP42mj9Yx0s=", + "node_modules/atomic-sleep": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/atomic-sleep/-/atomic-sleep-1.0.0.tgz", + "integrity": "sha512-kNOjDqAh7px0XWNI+4QbzoiR/nTkHAWNud2uvnJquD1/x5a7EQZMJT0AczqK0Qn67oY/TTQ1LbUKajZpp3I9tQ==", "dev": true, - "requires": { - "chalk": "^1.1.3", - "esutils": "^2.0.2", - "js-tokens": "^3.0.2" - }, - "dependencies": { - "ansi-regex": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.1.1.tgz", - "integrity": "sha1-w7M6te42DYbg5ijwRorn7yfWVN8=", - "dev": true - }, - "ansi-styles": { - "version": "2.2.1", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-2.2.1.tgz", - "integrity": "sha1-tDLdM1i2NM914eRmQ2gkBTPB3b4=", - "dev": true - }, - "chalk": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-1.1.3.tgz", - "integrity": "sha1-qBFcVeSnAv5NFQq9OHKCKn4J/Jg=", - "dev": true, - "requires": { - "ansi-styles": "^2.2.1", - "escape-string-regexp": "^1.0.2", - "has-ansi": "^2.0.0", - "strip-ansi": "^3.0.0", - "supports-color": "^2.0.0" - } - }, - "strip-ansi": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-3.0.1.tgz", - "integrity": "sha1-ajhfuIU9lS1f8F0Oiq+UJ43GPc8=", - "dev": true, - "requires": { - "ansi-regex": "^2.0.0" - } - }, - "supports-color": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-2.0.0.tgz", - "integrity": "sha1-U10EXOa2Nj+kARcIRimZXp3zJMc=", - "dev": true - } + "engines": { + "node": ">=8.0.0" } }, - "babel-eslint": { + "node_modules/babel-eslint": { "version": "10.1.0", "resolved": "https://registry.npmjs.org/babel-eslint/-/babel-eslint-10.1.0.tgz", "integrity": "sha512-ifWaTHQ0ce+448CYop8AdrQiBsGrnC+bMgfyKFdi6EsPLTAWG+QfyDeM6OH+FmWnKvEq5NnBMLvlBUPKQZoDSg==", + "deprecated": "babel-eslint is now @babel/eslint-parser. 
This package will no longer receive updates.", "dev": true, - "requires": { + "dependencies": { "@babel/code-frame": "^7.0.0", "@babel/parser": "^7.7.0", "@babel/traverse": "^7.7.0", "@babel/types": "^7.7.0", "eslint-visitor-keys": "^1.0.0", "resolve": "^1.12.0" + }, + "engines": { + "node": ">=6" + }, + "peerDependencies": { + "eslint": ">= 4.12.1" } }, - "balanced-match": { + "node_modules/balanced-match": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.0.tgz", "integrity": "sha1-ibTRmasr7kneFk6gK4nORi1xt2c=", "dev": true }, - "brace-expansion": { + "node_modules/brace-expansion": { "version": "1.1.11", "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", "dev": true, - "requires": { + "dependencies": { "balanced-match": "^1.0.0", "concat-map": "0.0.1" } }, - "buffer-crc32": { + "node_modules/buffer-crc32": { "version": "0.2.13", "resolved": "https://registry.npmjs.org/buffer-crc32/-/buffer-crc32-0.2.13.tgz", "integrity": "sha512-VO9Ht/+p3SN7SKWqcrgEzjGbRSJYTx+Q1pTQC0wrWqHx0vpJraQ6GtHx8tvcg1rlK1byhU5gccxgOgj7B0TDkQ==", - "dev": true + "dev": true, + "engines": { + "node": "*" + } }, - "buffer-from": { + "node_modules/buffer-from": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/buffer-from/-/buffer-from-1.1.1.tgz", "integrity": "sha512-MQcXEUbCKtEo7bhqEs6560Hyd4XaovZlO/k9V3hjVUF/zwW7KBVdSK4gIt/bzwS9MbR5qob+F5jusZsb0YQK2A==", "dev": true }, - "caller-path": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/caller-path/-/caller-path-0.1.0.tgz", - "integrity": "sha1-lAhe9jWB7NPaqSREqP6U6CV3dR8=", - "dev": true, - "requires": { - "callsites": "^0.2.0" - }, - "dependencies": { - "callsites": { - "version": "0.2.0", - "resolved": "https://registry.npmjs.org/callsites/-/callsites-0.2.0.tgz", - "integrity": "sha1-r6uWJikQp/M8GaV3WCXGnzTjUMo=", - "dev": true - } - } - }, - "callsites": { + "node_modules/callsites": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz", "integrity": "sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==", - "dev": true + "dev": true, + "engines": { + "node": ">=6" + } }, - "camelcase": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-4.1.0.tgz", - "integrity": "sha1-1UVjW+HjPFQmScaRc+Xeas+uNN0=", - "dev": true + "node_modules/camelcase": { + "version": "5.3.1", + "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-5.3.1.tgz", + "integrity": "sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg==", + "dev": true, + "engines": { + "node": ">=6" + } }, - "chalk": { + "node_modules/chalk": { "version": "2.4.2", "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==", "dev": true, - "requires": { + "dependencies": { "ansi-styles": "^3.2.1", "escape-string-regexp": "^1.0.5", "supports-color": "^5.3.0" + }, + "engines": { + "node": ">=4" } }, - "chardet": { + "node_modules/chardet": { "version": "0.7.0", "resolved": "https://registry.npmjs.org/chardet/-/chardet-0.7.0.tgz", "integrity": "sha512-mT8iDcrh03qDGRRmoA2hmBJnxpllMR+0/0qlzjqZES6NdiWDcZkCNAk4rPFZ9Q85r27unkiNNg8ZOiwZXBHwcA==", "dev": true }, - "circular-json": { - "version": "0.3.3", - "resolved": 
"https://registry.npmjs.org/circular-json/-/circular-json-0.3.3.tgz", - "integrity": "sha512-UZK3NBx2Mca+b5LsG7bY183pHWt5Y1xts4P3Pz7ENTwGVnJOUWbRb3ocjvX7hx9tq/yTAdclXm9sZ38gNuem4A==", + "node_modules/check-types": { + "version": "11.2.3", + "resolved": "https://registry.npmjs.org/check-types/-/check-types-11.2.3.tgz", + "integrity": "sha512-+67P1GkJRaxQD6PKK0Et9DhwQB+vGg3PM5+aavopCpZT1lj9jeqfvpgTLAWErNj8qApkkmXlu/Ug74kmhagkXg==", "dev": true }, - "cli-cursor": { + "node_modules/cli-cursor": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/cli-cursor/-/cli-cursor-3.1.0.tgz", "integrity": "sha512-I/zHAwsKf9FqGoXM4WWRACob9+SNukZTd94DWF57E4toouRulbCxcUh6RKUEOQlYTHJnzkPMySvPNaaSLNfLZw==", "dev": true, - "requires": { + "dependencies": { "restore-cursor": "^3.1.0" + }, + "engines": { + "node": ">=8" } }, - "cli-width": { + "node_modules/cli-width": { "version": "2.2.0", "resolved": "https://registry.npmjs.org/cli-width/-/cli-width-2.2.0.tgz", "integrity": "sha1-/xnt6Kml5XkyQUewwR8PvLq+1jk=", "dev": true }, - "cliui": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/cliui/-/cliui-4.1.0.tgz", - "integrity": "sha512-4FG+RSG9DL7uEwRUZXZn3SS34DiDPfzP0VOiEwtUWlE+AR2EIg+hSyvrIgUUfhdgR/UkAeW2QHgeP+hWrXs7jQ==", + "node_modules/cliui": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/cliui/-/cliui-6.0.0.tgz", + "integrity": "sha512-t6wbgtoCXvAzst7QgXxJYqPt0usEfbgQdftEPbLL/cvv6HPE5VgvqCuAIDR0NgU52ds6rFwqrgakNLrHEjCbrQ==", "dev": true, - "requires": { - "string-width": "^2.1.1", - "strip-ansi": "^4.0.0", - "wrap-ansi": "^2.0.0" - }, "dependencies": { - "ansi-regex": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-3.0.0.tgz", - "integrity": "sha1-7QMXwyIGT3lGbAKWa922Bas32Zg=", - "dev": true - }, - "is-fullwidth-code-point": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz", - "integrity": "sha1-o7MKXE8ZkYMWeqq5O+764937ZU8=", - "dev": true - }, - "string-width": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-2.1.1.tgz", - "integrity": "sha512-nOqH59deCq9SRHlxq1Aw85Jnt4w6KvLKqWVik6oA9ZklXLNIOlqg4F2yrT1MVaTjAqvVwdfeZ7w7aCvJD7ugkw==", - "dev": true, - "requires": { - "is-fullwidth-code-point": "^2.0.0", - "strip-ansi": "^4.0.0" - } - }, - "strip-ansi": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-4.0.0.tgz", - "integrity": "sha1-qEeQIusaw2iocTibY1JixQXuNo8=", - "dev": true, - "requires": { - "ansi-regex": "^3.0.0" - } - } + "string-width": "^4.2.0", + "strip-ansi": "^6.0.0", + "wrap-ansi": "^6.2.0" + } + }, + "node_modules/cliui/node_modules/strip-ansi": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dev": true, + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" } }, - "clone": { + "node_modules/clone": { "version": "2.1.2", "resolved": "https://registry.npmjs.org/clone/-/clone-2.1.2.tgz", "integrity": "sha1-G39Ln1kfHo+DZwQBYANFoCiHQ18=", - "dev": true - }, - "co": { - "version": "4.6.0", - "resolved": "https://registry.npmjs.org/co/-/co-4.6.0.tgz", - "integrity": "sha1-bqa989hTrlTMuOR7+gvz+QMfsYQ=", - "dev": true - }, - "code-point-at": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/code-point-at/-/code-point-at-1.1.0.tgz", - "integrity": 
"sha1-DQcLTQQ6W+ozovGkDi7bPZpMz3c=", - "dev": true + "dev": true, + "engines": { + "node": ">=0.8" + } }, - "color-convert": { + "node_modules/color-convert": { "version": "1.9.3", "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", "dev": true, - "requires": { + "dependencies": { "color-name": "1.1.3" } }, - "color-name": { + "node_modules/color-name": { "version": "1.1.3", "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", "integrity": "sha1-p9BVi9icQveV3UIyj3QIMcpTvCU=", "dev": true }, - "concat-map": { + "node_modules/colorette": { + "version": "2.0.20", + "resolved": "https://registry.npmjs.org/colorette/-/colorette-2.0.20.tgz", + "integrity": "sha512-IfEDxwoWIjkeXL1eXcDiow4UbKjhLdq6/EuSVR9GMN7KVH3r9gQ83e73hsz1Nd1T3ijd5xv1wcWRYO+D6kCI2w==", + "dev": true + }, + "node_modules/concat-map": { "version": "0.0.1", "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", "integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=", "dev": true }, - "concat-stream": { + "node_modules/concat-stream": { "version": "1.6.2", "resolved": "https://registry.npmjs.org/concat-stream/-/concat-stream-1.6.2.tgz", "integrity": "sha512-27HBghJxjiZtIk3Ycvn/4kbJk/1uZuJFfuPEns6LaEvpvG1f0hTea8lilrouyo9mVc2GWdcEZ8OLoGmSADlrCw==", "dev": true, - "requires": { + "engines": [ + "node >= 0.8" + ], + "dependencies": { "buffer-from": "^1.0.0", "inherits": "^2.0.3", "readable-stream": "^2.2.2", "typedarray": "^0.0.6" } }, - "core-util-is": { + "node_modules/core-util-is": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz", "integrity": "sha1-tf1UIgqivFq1eqtxQMlAdUUDwac=", "dev": true }, - "cross-spawn": { - "version": "6.0.5", - "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-6.0.5.tgz", - "integrity": "sha512-eTVLrBSt7fjbDygz805pMnstIs2VTBNkRm0qxZd+M7A5XDdxVRWO5MxGBXZhjY4cqLYLdtrGqRf8mBPmzwSpWQ==", + "node_modules/cross-spawn": { + "version": "6.0.6", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-6.0.6.tgz", + "integrity": "sha512-VqCUuhcd1iB+dsv8gxPttb5iZh/D0iubSP21g36KXdEuf6I5JiioesUVjpCdHV9MZRUfVFlvwtIUyPfxo5trtw==", "dev": true, - "requires": { + "dependencies": { "nice-try": "^1.0.4", "path-key": "^2.0.1", "semver": "^5.5.0", "shebang-command": "^1.2.0", "which": "^1.2.9" }, - "dependencies": { - "semver": { - "version": "5.7.1", - "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", - "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==", - "dev": true - } + "engines": { + "node": ">=4.8" } }, - "d": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/d/-/d-1.0.1.tgz", - "integrity": "sha512-m62ShEObQ39CfralilEQRjH6oAMtNCV1xJyEx5LpRYUVN+EviphDgUc/F3hnYbADmkiNs67Y+3ylmlG7Lnu+FA==", + "node_modules/cross-spawn/node_modules/semver": { + "version": "5.7.2", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.2.tgz", + "integrity": "sha512-cBznnQ9KjJqU67B52RMC65CMarK2600WFnbkcaiwWq3xy/5haFJlshgnpjovMVJ+Hff49d8GEn0b87C5pDQ10g==", "dev": true, - "requires": { - "es5-ext": "^0.10.50", - "type": "^1.0.1" + "bin": { + "semver": "bin/semver" } }, - "debug": { - "version": "3.2.6", - "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.6.tgz", - "integrity": "sha512-mel+jf7nrtEl5Pn1Qx46zARXKDpBbvzezse7p7LqINmdoIk8PYP5SySaxEmYv6TZ0JyEKA1hsCId6DIhgITtWQ==", + 
"node_modules/dateformat": { + "version": "4.6.3", + "resolved": "https://registry.npmjs.org/dateformat/-/dateformat-4.6.3.tgz", + "integrity": "sha512-2P0p0pFGzHS5EMnhdxQi7aJN+iMheud0UhG4dlE1DLAlvL8JHjJJTX/CSm4JXwV0Ka5nGk3zC5mcb5bUQUxxMA==", "dev": true, - "requires": { + "engines": { + "node": "*" + } + }, + "node_modules/debug": { + "version": "3.2.7", + "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz", + "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==", + "dev": true, + "dependencies": { "ms": "^2.1.1" } }, - "decamelize": { + "node_modules/decamelize": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/decamelize/-/decamelize-1.2.0.tgz", "integrity": "sha1-9lNNFRSCabIDUue+4m9QH5oZEpA=", - "dev": true + "dev": true, + "engines": { + "node": ">=0.10.0" + } }, - "deep-is": { + "node_modules/deep-is": { "version": "0.1.3", "resolved": "https://registry.npmjs.org/deep-is/-/deep-is-0.1.3.tgz", "integrity": "sha1-s2nW+128E+7PUk+RsHD+7cNXzzQ=", "dev": true }, - "doctrine": { + "node_modules/doctrine": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-3.0.0.tgz", "integrity": "sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w==", "dev": true, - "requires": { + "dependencies": { "esutils": "^2.0.2" + }, + "engines": { + "node": ">=6.0.0" } }, - "emoji-regex": { + "node_modules/emoji-regex": { "version": "8.0.0", "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", "dev": true }, - "end-of-stream": { + "node_modules/end-of-stream": { "version": "1.4.4", "resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.4.tgz", "integrity": "sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q==", "dev": true, - "requires": { + "dependencies": { "once": "^1.4.0" } }, - "es5-ext": { - "version": "0.10.53", - "resolved": "https://registry.npmjs.org/es5-ext/-/es5-ext-0.10.53.tgz", - "integrity": "sha512-Xs2Stw6NiNHWypzRTY1MtaG/uJlwCk8kH81920ma8mvN8Xq1gsfhZvpkImLQArw8AHnv8MT2I45J3c0R8slE+Q==", - "dev": true, - "requires": { - "es6-iterator": "~2.0.3", - "es6-symbol": "~3.1.3", - "next-tick": "~1.0.0" - } - }, - "es6-iterator": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/es6-iterator/-/es6-iterator-2.0.3.tgz", - "integrity": "sha1-p96IkUGgWpSwhUQDstCg+/qY87c=", - "dev": true, - "requires": { - "d": "1", - "es5-ext": "^0.10.35", - "es6-symbol": "^3.1.1" - } - }, - "es6-map": { - "version": "0.1.5", - "resolved": "https://registry.npmjs.org/es6-map/-/es6-map-0.1.5.tgz", - "integrity": "sha1-kTbgUD3MBqMBaQ8LsU/042TpSfA=", - "dev": true, - "requires": { - "d": "1", - "es5-ext": "~0.10.14", - "es6-iterator": "~2.0.1", - "es6-set": "~0.1.5", - "es6-symbol": "~3.1.1", - "event-emitter": "~0.3.5" - } - }, - "es6-promise": { + "node_modules/es6-promise": { "version": "4.2.8", "resolved": "https://registry.npmjs.org/es6-promise/-/es6-promise-4.2.8.tgz", "integrity": "sha512-HJDGx5daxeIvxdBxvG2cb9g4tEvwIk3i8+nhX0yGrYmZUzbkdg8QbDevheDB8gd0//uPj4c1EQua8Q+MViT0/w==", "dev": true }, - "es6-promisify": { + "node_modules/es6-promisify": { "version": "5.0.0", "resolved": "https://registry.npmjs.org/es6-promisify/-/es6-promisify-5.0.0.tgz", "integrity": "sha1-UQnWLz5W6pZ8S2NQWu8IKRyKUgM=", "dev": true, - "requires": { + "dependencies": { "es6-promise": 
"^4.0.3" } }, - "es6-set": { - "version": "0.1.5", - "resolved": "https://registry.npmjs.org/es6-set/-/es6-set-0.1.5.tgz", - "integrity": "sha1-0rPsXU2ADO2BjbU40ol02wpzzLE=", - "dev": true, - "requires": { - "d": "1", - "es5-ext": "~0.10.14", - "es6-iterator": "~2.0.1", - "es6-symbol": "3.1.1", - "event-emitter": "~0.3.5" - }, - "dependencies": { - "es6-symbol": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/es6-symbol/-/es6-symbol-3.1.1.tgz", - "integrity": "sha1-vwDvT9q2uhtG7Le2KbTH7VcVzHc=", - "dev": true, - "requires": { - "d": "1", - "es5-ext": "~0.10.14" - } - } - } - }, - "es6-symbol": { - "version": "3.1.3", - "resolved": "https://registry.npmjs.org/es6-symbol/-/es6-symbol-3.1.3.tgz", - "integrity": "sha512-NJ6Yn3FuDinBaBRWl/q5X/s4koRHBrgKAu+yGI6JCBeiu3qrcbJhwT2GeR/EXVfylRk8dpQVJoLEFhK+Mu31NA==", + "node_modules/escape-string-regexp": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "integrity": "sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ=", "dev": true, - "requires": { - "d": "^1.0.1", - "ext": "^1.1.2" + "engines": { + "node": ">=0.8.0" } }, - "es6-weak-map": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/es6-weak-map/-/es6-weak-map-2.0.3.tgz", - "integrity": "sha512-p5um32HOTO1kP+w7PRnB+5lQ43Z6muuMuIMffvDN8ZB4GcnjLBV6zGStpbASIMk4DCAvEaamhe2zhyCb/QXXsA==", + "node_modules/eslint": { + "version": "6.8.0", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-6.8.0.tgz", + "integrity": "sha512-K+Iayyo2LtyYhDSYwz5D5QdWw0hCacNzyq1Y821Xna2xSJj7cijoLLYmLxTQgcgZ9mC61nryMy9S7GRbYpI5Ig==", + "deprecated": "This version is no longer supported. Please see https://eslint.org/version-support for other options.", "dev": true, - "requires": { - "d": "1", - "es5-ext": "^0.10.46", - "es6-iterator": "^2.0.3", - "es6-symbol": "^3.1.1" - } - }, - "escape-string-regexp": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", - "integrity": "sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ=", - "dev": true - }, - "escope": { - "version": "3.6.0", - "resolved": "https://registry.npmjs.org/escope/-/escope-3.6.0.tgz", - "integrity": "sha1-4Bl16BJ4GhY6ba392AOY3GTIicM=", - "dev": true, - "requires": { - "es6-map": "^0.1.3", - "es6-weak-map": "^2.0.1", - "esrecurse": "^4.1.0", - "estraverse": "^4.1.1" - } - }, - "eslint": { - "version": "6.8.0", - "resolved": "https://registry.npmjs.org/eslint/-/eslint-6.8.0.tgz", - "integrity": "sha512-K+Iayyo2LtyYhDSYwz5D5QdWw0hCacNzyq1Y821Xna2xSJj7cijoLLYmLxTQgcgZ9mC61nryMy9S7GRbYpI5Ig==", - "dev": true, - "requires": { + "dependencies": { "@babel/code-frame": "^7.0.0", "ajv": "^6.10.0", "chalk": "^2.1.0", @@ -724,827 +994,515 @@ "text-table": "^0.2.0", "v8-compile-cache": "^2.0.3" }, - "dependencies": { - "@babel/code-frame": { - "version": "7.8.3", - "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.8.3.tgz", - "integrity": "sha512-a9gxpmdXtZEInkCSHUJDLHZVBgb1QS0jhss4cPP93EW7s+uC5bikET2twEF3KV+7rDblJcmNvTR7VJejqd2C2g==", - "dev": true, - "requires": { - "@babel/highlight": "^7.8.3" - } - }, - "@babel/highlight": { - "version": "7.8.3", - "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.8.3.tgz", - "integrity": "sha512-PX4y5xQUvy0fnEVHrYOarRPXVWafSjTW9T0Hab8gVIawpl2Sj0ORyrygANq+KjcNlSSTw0YCLSNA8OyZ1I4yEg==", - "dev": true, - "requires": { - "chalk": "^2.0.0", - "esutils": "^2.0.2", - "js-tokens": "^4.0.0" - } - }, - "debug": { - "version": "4.1.1", - "resolved": 
"https://registry.npmjs.org/debug/-/debug-4.1.1.tgz", - "integrity": "sha512-pYAIzeRo8J6KPEaJ0VWOh5Pzkbw/RetuzehGM7QRRX5he4fPHx2rdKMB256ehJCkX+XRQm16eZLqLNS8RSZXZw==", - "dev": true, - "requires": { - "ms": "^2.1.1" - } - }, - "eslint-scope": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-5.0.0.tgz", - "integrity": "sha512-oYrhJW7S0bxAFDvWqzvMPRm6pcgcnWc4QnofCAqRTRfQC0JcwenzGglTtsLyIuuWFfkqDG9vz67cnttSd53djw==", - "dev": true, - "requires": { - "esrecurse": "^4.1.0", - "estraverse": "^4.1.1" - } - }, - "globals": { - "version": "12.4.0", - "resolved": "https://registry.npmjs.org/globals/-/globals-12.4.0.tgz", - "integrity": "sha512-BWICuzzDvDoH54NHKCseDanAhE3CeDorgDL5MT6LMXXj2WCnd9UC2szdk4AWLfjdgNBCXLUanXYcpBBKOSWGwg==", - "dev": true, - "requires": { - "type-fest": "^0.8.1" - } - }, - "js-tokens": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", - "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==", - "dev": true - } + "bin": { + "eslint": "bin/eslint.js" + }, + "engines": { + "node": "^8.10.0 || ^10.13.0 || >=11.10.1" + }, + "funding": { + "url": "https://opencollective.com/eslint" } }, - "eslint-config-qx": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/eslint-config-qx/-/eslint-config-qx-0.1.0.tgz", - "integrity": "sha512-fkQSBji7cwh63SuFoM1byJgO6ZWE2m1C0e8dwxEJy2S5RQrqFAIu7faU0Szi7Cub+bPPKlpVfIv3dfieaaAbYg==", + "node_modules/eslint-config-qx": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/eslint-config-qx/-/eslint-config-qx-0.0.1.tgz", + "integrity": "sha512-OeAw+muVzN21SiOTLZ86ZJn/Tr99caGtLaNuTnhUQz2SegPH9G3Y5zCKdNrs1ngvGOD/bqfNh+x6M2fA07v0hw==", + "deprecated": "moved to @qooxdoo/eslint-qx-rules", "dev": true, - "requires": { - "eslint": "^3.19.0", - "eslint-plugin-qx-rules": "^0.1.0" - }, "dependencies": { - "acorn": { - "version": "5.7.4", - "resolved": "https://registry.npmjs.org/acorn/-/acorn-5.7.4.tgz", - "integrity": "sha512-1D++VG7BhrtvQpNbBzovKNc1FLGGEE/oGe7b9xJm/RFHMBeUaUGpluV9RLjZa47YFdPcDAenEYuq9pQPcMdLJg==", - "dev": true - }, - "acorn-jsx": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-3.0.1.tgz", - "integrity": "sha1-r9+UiPsezvyDSPb7IvRk4ypYs2s=", - "dev": true, - "requires": { - "acorn": "^3.0.4" - }, - "dependencies": { - "acorn": { - "version": "3.3.0", - "resolved": "https://registry.npmjs.org/acorn/-/acorn-3.3.0.tgz", - "integrity": "sha1-ReN/s56No/JbruP/U2niu18iAXo=", - "dev": true - } - } - }, - "ajv": { - "version": "4.11.8", - "resolved": "https://registry.npmjs.org/ajv/-/ajv-4.11.8.tgz", - "integrity": "sha1-gv+wKynmYq5TvcIK8VlHcGc5xTY=", - "dev": true, - "requires": { - "co": "^4.6.0", - "json-stable-stringify": "^1.0.1" - } - }, - "ansi-escapes": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-1.4.0.tgz", - "integrity": "sha1-06ioOzGapneTZisT52HHkRQiMG4=", - "dev": true - }, - "ansi-regex": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.1.1.tgz", - "integrity": "sha1-w7M6te42DYbg5ijwRorn7yfWVN8=", - "dev": true - }, - "ansi-styles": { - "version": "2.2.1", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-2.2.1.tgz", - "integrity": "sha1-tDLdM1i2NM914eRmQ2gkBTPB3b4=", - "dev": true - }, - "chalk": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-1.1.3.tgz", - "integrity": 
"sha1-qBFcVeSnAv5NFQq9OHKCKn4J/Jg=", - "dev": true, - "requires": { - "ansi-styles": "^2.2.1", - "escape-string-regexp": "^1.0.2", - "has-ansi": "^2.0.0", - "strip-ansi": "^3.0.0", - "supports-color": "^2.0.0" - } - }, - "cli-cursor": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/cli-cursor/-/cli-cursor-1.0.2.tgz", - "integrity": "sha1-ZNo/fValRBLll5S9Ytw1KV6PKYc=", - "dev": true, - "requires": { - "restore-cursor": "^1.0.1" - } - }, - "debug": { - "version": "2.6.9", - "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", - "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", - "dev": true, - "requires": { - "ms": "2.0.0" - } - }, - "doctrine": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-2.1.0.tgz", - "integrity": "sha512-35mSku4ZXK0vfCuHEDAwt55dg2jNajHZ1odvF+8SSr82EsZY4QmXfuWso8oEd8zRhVObSN18aM0CjSdoBX7zIw==", - "dev": true, - "requires": { - "esutils": "^2.0.2" - } - }, - "eslint": { - "version": "3.19.0", - "resolved": "https://registry.npmjs.org/eslint/-/eslint-3.19.0.tgz", - "integrity": "sha1-yPxiAcf0DdCJQbh8CFdnOGpnmsw=", - "dev": true, - "requires": { - "babel-code-frame": "^6.16.0", - "chalk": "^1.1.3", - "concat-stream": "^1.5.2", - "debug": "^2.1.1", - "doctrine": "^2.0.0", - "escope": "^3.6.0", - "espree": "^3.4.0", - "esquery": "^1.0.0", - "estraverse": "^4.2.0", - "esutils": "^2.0.2", - "file-entry-cache": "^2.0.0", - "glob": "^7.0.3", - "globals": "^9.14.0", - "ignore": "^3.2.0", - "imurmurhash": "^0.1.4", - "inquirer": "^0.12.0", - "is-my-json-valid": "^2.10.0", - "is-resolvable": "^1.0.0", - "js-yaml": "^3.5.1", - "json-stable-stringify": "^1.0.0", - "levn": "^0.3.0", - "lodash": "^4.0.0", - "mkdirp": "^0.5.0", - "natural-compare": "^1.4.0", - "optionator": "^0.8.2", - "path-is-inside": "^1.0.1", - "pluralize": "^1.2.1", - "progress": "^1.1.8", - "require-uncached": "^1.0.2", - "shelljs": "^0.7.5", - "strip-bom": "^3.0.0", - "strip-json-comments": "~2.0.1", - "table": "^3.7.8", - "text-table": "~0.2.0", - "user-home": "^2.0.0" - } - }, - "espree": { - "version": "3.5.4", - "resolved": "https://registry.npmjs.org/espree/-/espree-3.5.4.tgz", - "integrity": "sha512-yAcIQxtmMiB/jL32dzEp2enBeidsB7xWPLNiw3IIkpVds1P+h7qF9YwJq1yUNzp2OKXgAprs4F61ih66UsoD1A==", - "dev": true, - "requires": { - "acorn": "^5.5.0", - "acorn-jsx": "^3.0.0" - } - }, - "figures": { - "version": "1.7.0", - "resolved": "https://registry.npmjs.org/figures/-/figures-1.7.0.tgz", - "integrity": "sha1-y+Hjr/zxzUS4DK3+0o3Hk6lwHS4=", - "dev": true, - "requires": { - "escape-string-regexp": "^1.0.5", - "object-assign": "^4.1.0" - } - }, - "file-entry-cache": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-2.0.0.tgz", - "integrity": "sha1-w5KZDD5oR4PYOLjISkXYoEhFg2E=", - "dev": true, - "requires": { - "flat-cache": "^1.2.1", - "object-assign": "^4.0.1" - } - }, - "flat-cache": { - "version": "1.3.4", - "resolved": "https://registry.npmjs.org/flat-cache/-/flat-cache-1.3.4.tgz", - "integrity": "sha512-VwyB3Lkgacfik2vhqR4uv2rvebqmDvFu4jlN/C1RzWoJEo8I7z4Q404oiqYCkq41mni8EzQnm95emU9seckwtg==", - "dev": true, - "requires": { - "circular-json": "^0.3.1", - "graceful-fs": "^4.1.2", - "rimraf": "~2.6.2", - "write": "^0.2.1" - } - }, - "globals": { - "version": "9.18.0", - "resolved": "https://registry.npmjs.org/globals/-/globals-9.18.0.tgz", - "integrity": 
"sha512-S0nG3CLEQiY/ILxqtztTWH/3iRRdyBLw6KMDxnKMchrtbj2OFmehVh0WUCfW3DUrIgx/qFrJPICrq4Z4sTR9UQ==", - "dev": true - }, - "ignore": { - "version": "3.3.10", - "resolved": "https://registry.npmjs.org/ignore/-/ignore-3.3.10.tgz", - "integrity": "sha512-Pgs951kaMm5GXP7MOvxERINe3gsaVjUWFm+UZPSq9xYriQAksyhg0csnS0KXSNRD5NmNdapXEpjxG49+AKh/ug==", - "dev": true - }, - "inquirer": { - "version": "0.12.0", - "resolved": "https://registry.npmjs.org/inquirer/-/inquirer-0.12.0.tgz", - "integrity": "sha1-HvK/1jUE3wvHV4X/+MLEHfEvB34=", - "dev": true, - "requires": { - "ansi-escapes": "^1.1.0", - "ansi-regex": "^2.0.0", - "chalk": "^1.0.0", - "cli-cursor": "^1.0.1", - "cli-width": "^2.0.0", - "figures": "^1.3.5", - "lodash": "^4.3.0", - "readline2": "^1.0.1", - "run-async": "^0.1.0", - "rx-lite": "^3.1.2", - "string-width": "^1.0.1", - "strip-ansi": "^3.0.0", - "through": "^2.3.6" - } - }, - "is-fullwidth-code-point": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-1.0.0.tgz", - "integrity": "sha1-754xOG8DGn8NZDr4L95QxFfvAMs=", - "dev": true, - "requires": { - "number-is-nan": "^1.0.0" - } - }, - "ms": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", - "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=", - "dev": true - }, - "onetime": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/onetime/-/onetime-1.1.0.tgz", - "integrity": "sha1-ofeDj4MUxRbwXs78vEzP4EtO14k=", - "dev": true - }, - "progress": { - "version": "1.1.8", - "resolved": "https://registry.npmjs.org/progress/-/progress-1.1.8.tgz", - "integrity": "sha1-4mDHj2Fhzdmw5WzD4Khd4Xx6V74=", - "dev": true - }, - "restore-cursor": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/restore-cursor/-/restore-cursor-1.0.1.tgz", - "integrity": "sha1-NGYfRohjJ/7SmRR5FSJS35LapUE=", - "dev": true, - "requires": { - "exit-hook": "^1.0.0", - "onetime": "^1.0.0" - } - }, - "run-async": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/run-async/-/run-async-0.1.0.tgz", - "integrity": "sha1-yK1KXhEGYeQCp9IbUw4AnyX444k=", - "dev": true, - "requires": { - "once": "^1.3.0" - } - }, - "slice-ansi": { - "version": "0.0.4", - "resolved": "https://registry.npmjs.org/slice-ansi/-/slice-ansi-0.0.4.tgz", - "integrity": "sha1-7b+JA/ZvfOL46v1s7tZeJkyDGzU=", - "dev": true - }, - "string-width": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-1.0.2.tgz", - "integrity": "sha1-EYvfW4zcUaKn5w0hHgfisLmxB9M=", - "dev": true, - "requires": { - "code-point-at": "^1.0.0", - "is-fullwidth-code-point": "^1.0.0", - "strip-ansi": "^3.0.0" - } - }, - "strip-ansi": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-3.0.1.tgz", - "integrity": "sha1-ajhfuIU9lS1f8F0Oiq+UJ43GPc8=", - "dev": true, - "requires": { - "ansi-regex": "^2.0.0" - } - }, - "strip-json-comments": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-2.0.1.tgz", - "integrity": "sha1-PFMZQukIwml8DsNEhYwobHygpgo=", - "dev": true - }, - "supports-color": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-2.0.0.tgz", - "integrity": "sha1-U10EXOa2Nj+kARcIRimZXp3zJMc=", - "dev": true - }, - "table": { - "version": "3.8.3", - "resolved": "https://registry.npmjs.org/table/-/table-3.8.3.tgz", - "integrity": "sha1-K7xULw/amGGnVdOUf+/Ys/UThV8=", - "dev": true, - "requires": { - "ajv": "^4.7.0", - 
"ajv-keywords": "^1.0.0", - "chalk": "^1.1.1", - "lodash": "^4.0.0", - "slice-ansi": "0.0.4", - "string-width": "^2.0.0" - }, - "dependencies": { - "ansi-regex": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-3.0.0.tgz", - "integrity": "sha1-7QMXwyIGT3lGbAKWa922Bas32Zg=", - "dev": true - }, - "is-fullwidth-code-point": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz", - "integrity": "sha1-o7MKXE8ZkYMWeqq5O+764937ZU8=", - "dev": true - }, - "string-width": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-2.1.1.tgz", - "integrity": "sha512-nOqH59deCq9SRHlxq1Aw85Jnt4w6KvLKqWVik6oA9ZklXLNIOlqg4F2yrT1MVaTjAqvVwdfeZ7w7aCvJD7ugkw==", - "dev": true, - "requires": { - "is-fullwidth-code-point": "^2.0.0", - "strip-ansi": "^4.0.0" - } - }, - "strip-ansi": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-4.0.0.tgz", - "integrity": "sha1-qEeQIusaw2iocTibY1JixQXuNo8=", - "dev": true, - "requires": { - "ansi-regex": "^3.0.0" - } - } - } - }, - "write": { - "version": "0.2.1", - "resolved": "https://registry.npmjs.org/write/-/write-0.2.1.tgz", - "integrity": "sha1-X8A4KOJkzqP+kUVUdvejxWbLB1c=", - "dev": true, - "requires": { - "mkdirp": "^0.5.1" - } - } + "eslint-plugin-qx-rules": "^0.0.1" + }, + "engines": { + "node": ">=4.5" + }, + "peerDependencies": { + "eslint": ">=3.4.0" + } + }, + "node_modules/eslint-config-qx/node_modules/eslint-plugin-qx-rules": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/eslint-plugin-qx-rules/-/eslint-plugin-qx-rules-0.0.1.tgz", + "integrity": "sha512-AAesCN007yFoX5aXN6qE/dW7u/poZk8dhhCMxxW5XbIFCgtWMikfr3aJuErqCpsztcLcYQXMx5U9J8yjM6guFA==", + "deprecated": "moved to @qooxdoo/eslint-qx-plugins", + "dev": true, + "engines": { + "node": ">= 4.5.0" } }, - "eslint-plugin-qx-rules": { + "node_modules/eslint-plugin-qx-rules": { "version": "0.1.0", "resolved": "https://registry.npmjs.org/eslint-plugin-qx-rules/-/eslint-plugin-qx-rules-0.1.0.tgz", "integrity": "sha512-TmldxfvDvatPOtJxr1lFJsC0dTg3idZ3svQwRoR01zoZW7mJvBWxBGHeITUuH7qw9BcNPdyF4cV1fzUXejBvPg==", - "dev": true + "deprecated": "moved to @qooxdoo/eslint-qx-plugins", + "dev": true, + "engines": { + "node": ">= 4.5.0" + } }, - "eslint-utils": { + "node_modules/eslint-utils": { "version": "1.4.3", "resolved": "https://registry.npmjs.org/eslint-utils/-/eslint-utils-1.4.3.tgz", "integrity": "sha512-fbBN5W2xdY45KulGXmLHZ3c3FHfVYmKg0IrAKGOkT/464PQsx2UeIzfz1RmEci+KLm1bBaAzZAh8+/E+XAeZ8Q==", "dev": true, - "requires": { + "dependencies": { "eslint-visitor-keys": "^1.1.0" + }, + "engines": { + "node": ">=6" } }, - "eslint-visitor-keys": { + "node_modules/eslint-visitor-keys": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-1.1.0.tgz", "integrity": "sha512-8y9YjtM1JBJU/A9Kc+SbaOV4y29sSWckBwMHa+FGtVj5gN/sbnKDf6xJUl+8g7FAij9LVaP8C24DUiH/f/2Z9A==", - "dev": true + "dev": true, + "engines": { + "node": ">=4" + } }, - "espree": { + "node_modules/eslint/node_modules/debug": { + "version": "4.4.0", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.0.tgz", + "integrity": "sha512-6WTZ/IxCY/T6BALoZHaE4ctp9xm+Z5kY/pzYaCHRFeyVhojxlrm+46y68HA6hr0TcwEssoxNiDEUJQjfPZ/RYA==", + "dev": true, + "dependencies": { + "ms": "^2.1.3" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + 
"node_modules/eslint/node_modules/eslint-scope": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-5.0.0.tgz", + "integrity": "sha512-oYrhJW7S0bxAFDvWqzvMPRm6pcgcnWc4QnofCAqRTRfQC0JcwenzGglTtsLyIuuWFfkqDG9vz67cnttSd53djw==", + "dev": true, + "dependencies": { + "esrecurse": "^4.1.0", + "estraverse": "^4.1.1" + }, + "engines": { + "node": ">=8.0.0" + } + }, + "node_modules/eslint/node_modules/globals": { + "version": "12.4.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-12.4.0.tgz", + "integrity": "sha512-BWICuzzDvDoH54NHKCseDanAhE3CeDorgDL5MT6LMXXj2WCnd9UC2szdk4AWLfjdgNBCXLUanXYcpBBKOSWGwg==", + "dev": true, + "dependencies": { + "type-fest": "^0.8.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/espree": { "version": "6.2.1", "resolved": "https://registry.npmjs.org/espree/-/espree-6.2.1.tgz", "integrity": "sha512-ysCxRQY3WaXJz9tdbWOwuWr5Y/XrPTGX9Kiz3yoUXwW0VZ4w30HTkQLaGx/+ttFjF8i+ACbArnB4ce68a9m5hw==", "dev": true, - "requires": { + "dependencies": { "acorn": "^7.1.1", "acorn-jsx": "^5.2.0", "eslint-visitor-keys": "^1.1.0" + }, + "engines": { + "node": ">=6.0.0" } }, - "esprima": { + "node_modules/esprima": { "version": "4.0.1", "resolved": "https://registry.npmjs.org/esprima/-/esprima-4.0.1.tgz", "integrity": "sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==", - "dev": true + "dev": true, + "bin": { + "esparse": "bin/esparse.js", + "esvalidate": "bin/esvalidate.js" + }, + "engines": { + "node": ">=4" + } }, - "esquery": { + "node_modules/esquery": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/esquery/-/esquery-1.1.0.tgz", "integrity": "sha512-MxYW9xKmROWF672KqjO75sszsA8Mxhw06YFeS5VHlB98KDHbOSurm3ArsjO60Eaf3QmGMCP1yn+0JQkNLo/97Q==", "dev": true, - "requires": { + "dependencies": { "estraverse": "^4.0.0" + }, + "engines": { + "node": ">=0.6" } }, - "esrecurse": { + "node_modules/esrecurse": { "version": "4.2.1", "resolved": "https://registry.npmjs.org/esrecurse/-/esrecurse-4.2.1.tgz", "integrity": "sha512-64RBB++fIOAXPw3P9cy89qfMlvZEXZkqqJkjqqXIvzP5ezRZjW+lPWjw35UX/3EhUPFYbg5ER4JYgDw4007/DQ==", "dev": true, - "requires": { + "dependencies": { "estraverse": "^4.1.0" + }, + "engines": { + "node": ">=4.0" } }, - "estraverse": { + "node_modules/estraverse": { "version": "4.3.0", "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-4.3.0.tgz", "integrity": "sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw==", - "dev": true + "dev": true, + "engines": { + "node": ">=4.0" + } }, - "esutils": { + "node_modules/esutils": { "version": "2.0.3", "resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz", "integrity": "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==", - "dev": true - }, - "event-emitter": { - "version": "0.3.5", - "resolved": "https://registry.npmjs.org/event-emitter/-/event-emitter-0.3.5.tgz", - "integrity": "sha1-34xp7vFkeSPHFXuc6DhAYQsCzDk=", - "dev": true, - "requires": { - "d": "1", - "es5-ext": "~0.10.14" - } - }, - "execa": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/execa/-/execa-1.0.0.tgz", - "integrity": "sha512-adbxcyWV46qiHyvSp50TKt05tB4tK3HcmF7/nxfAdhnox83seTDbwnaqKO4sXRy7roHAIFqJP/Rw/AuEbX61LA==", "dev": true, - "requires": { - "cross-spawn": "^6.0.0", - "get-stream": "^4.0.0", - "is-stream": "^1.1.0", - "npm-run-path": "^2.0.0", - 
"p-finally": "^1.0.0", - "signal-exit": "^3.0.0", - "strip-eof": "^1.0.0" + "engines": { + "node": ">=0.10.0" } }, - "exit-hook": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/exit-hook/-/exit-hook-1.1.1.tgz", - "integrity": "sha1-8FyiM7SMBdVP/wd2XfhQfpXAL/g=", - "dev": true - }, - "ext": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/ext/-/ext-1.4.0.tgz", - "integrity": "sha512-Key5NIsUxdqKg3vIsdw9dSuXpPCQ297y6wBjL30edxwPgt2E44WcWBZey/ZvUc6sERLTxKdyCu4gZFmUbk1Q7A==", - "dev": true, - "requires": { - "type": "^2.0.0" - }, - "dependencies": { - "type": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/type/-/type-2.0.0.tgz", - "integrity": "sha512-KBt58xCHry4Cejnc2ISQAF7QY+ORngsWfxezO68+12hKV6lQY8P/psIkcbjeHWn7MqcgciWJyCCevFMJdIXpow==", - "dev": true - } - } - }, - "external-editor": { + "node_modules/external-editor": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/external-editor/-/external-editor-3.1.0.tgz", "integrity": "sha512-hMQ4CX1p1izmuLYyZqLMO/qGNw10wSv9QDCPfzXfyFrOaCSSoRfqE1Kf1s5an66J5JZC62NewG+mK49jOCtQew==", "dev": true, - "requires": { + "dependencies": { "chardet": "^0.7.0", "iconv-lite": "^0.4.24", "tmp": "^0.0.33" + }, + "engines": { + "node": ">=4" } }, - "extract-zip": { + "node_modules/extract-zip": { "version": "1.7.0", "resolved": "https://registry.npmjs.org/extract-zip/-/extract-zip-1.7.0.tgz", "integrity": "sha512-xoh5G1W/PB0/27lXgMQyIhP5DSY/LhoCsOyZgb+6iMmRtCwVBo55uKaMoEYrDCKQhWvqEip5ZPKAc6eFNyf/MA==", "dev": true, - "requires": { + "dependencies": { "concat-stream": "^1.6.2", "debug": "^2.6.9", "mkdirp": "^0.5.4", "yauzl": "^2.10.0" }, + "bin": { + "extract-zip": "cli.js" + } + }, + "node_modules/extract-zip/node_modules/debug": { + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "dev": true, "dependencies": { - "debug": { - "version": "2.6.9", - "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", - "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", - "dev": true, - "requires": { - "ms": "2.0.0" - } - }, - "fd-slicer": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/fd-slicer/-/fd-slicer-1.1.0.tgz", - "integrity": "sha512-cE1qsB/VwyQozZ+q1dGxR8LBYNZeofhEdUNGSMbQD3Gw2lAzX9Zb3uIU6Ebc/Fmyjo9AWWfnn0AUCHqtevs/8g==", - "dev": true, - "requires": { - "pend": "~1.2.0" - } - }, - "ms": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", - "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==", - "dev": true - }, - "yauzl": { - "version": "2.10.0", - "resolved": "https://registry.npmjs.org/yauzl/-/yauzl-2.10.0.tgz", - "integrity": "sha512-p4a9I6X6nu6IhoGmBqAcbJy1mlC4j27vEPZX9F4L4/vZT3Lyq1VkFHw/V/PUcB9Buo+DG3iHkT0x3Qya58zc3g==", - "dev": true, - "requires": { - "buffer-crc32": "~0.2.3", - "fd-slicer": "~1.1.0" - } - } + "ms": "2.0.0" } }, - "fast-deep-equal": { + "node_modules/extract-zip/node_modules/fd-slicer": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/fd-slicer/-/fd-slicer-1.1.0.tgz", + "integrity": "sha512-cE1qsB/VwyQozZ+q1dGxR8LBYNZeofhEdUNGSMbQD3Gw2lAzX9Zb3uIU6Ebc/Fmyjo9AWWfnn0AUCHqtevs/8g==", + "dev": true, + "dependencies": { + "pend": "~1.2.0" + } + }, + "node_modules/extract-zip/node_modules/ms": { + "version": "2.0.0", + "resolved": 
"https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==", + "dev": true + }, + "node_modules/extract-zip/node_modules/yauzl": { + "version": "2.10.0", + "resolved": "https://registry.npmjs.org/yauzl/-/yauzl-2.10.0.tgz", + "integrity": "sha512-p4a9I6X6nu6IhoGmBqAcbJy1mlC4j27vEPZX9F4L4/vZT3Lyq1VkFHw/V/PUcB9Buo+DG3iHkT0x3Qya58zc3g==", + "dev": true, + "dependencies": { + "buffer-crc32": "~0.2.3", + "fd-slicer": "~1.1.0" + } + }, + "node_modules/fast-copy": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/fast-copy/-/fast-copy-3.0.2.tgz", + "integrity": "sha512-dl0O9Vhju8IrcLndv2eU4ldt1ftXMqqfgN4H1cpmGV7P6jeB9FwpN9a2c8DPGE1Ys88rNUJVYDHq73CGAGOPfQ==", + "dev": true + }, + "node_modules/fast-deep-equal": { "version": "3.1.1", "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.1.tgz", "integrity": "sha512-8UEa58QDLauDNfpbrX55Q9jrGHThw2ZMdOky5Gl1CDtVeJDPVrG4Jxx1N8jw2gkWaff5UUuX1KJd+9zGe2B+ZA==", "dev": true }, - "fast-json-stable-stringify": { + "node_modules/fast-json-stable-stringify": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz", "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==", "dev": true }, - "fast-levenshtein": { + "node_modules/fast-levenshtein": { "version": "2.0.6", "resolved": "https://registry.npmjs.org/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz", "integrity": "sha1-PYpcZog6FqMMqGQ+hR8Zuqd5eRc=", "dev": true }, - "figures": { + "node_modules/fast-redact": { + "version": "3.5.0", + "resolved": "https://registry.npmjs.org/fast-redact/-/fast-redact-3.5.0.tgz", + "integrity": "sha512-dwsoQlS7h9hMeYUq1W++23NDcBLV4KqONnITDV9DjfS3q1SgDGVrBdvvTLUotWtPSD7asWDV9/CmsZPy8Hf70A==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/fast-safe-stringify": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/fast-safe-stringify/-/fast-safe-stringify-2.1.1.tgz", + "integrity": "sha512-W+KJc2dmILlPplD/H4K9l9LcAHAfPtP6BY84uVLXQ6Evcz9Lcg33Y2z1IVblT6xdY54PXYVHEv+0Wpq8Io6zkA==", + "dev": true + }, + "node_modules/figures": { "version": "3.2.0", "resolved": "https://registry.npmjs.org/figures/-/figures-3.2.0.tgz", "integrity": "sha512-yaduQFRKLXYOGgEn6AZau90j3ggSOyiqXU0F9JZfeXYhNa+Jk4X+s45A2zg5jns87GAFa34BBm2kXw4XpNcbdg==", "dev": true, - "requires": { + "dependencies": { "escape-string-regexp": "^1.0.5" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" } }, - "file-entry-cache": { + "node_modules/file-entry-cache": { "version": "5.0.1", "resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-5.0.1.tgz", "integrity": "sha512-bCg29ictuBaKUwwArK4ouCaqDgLZcysCFLmM/Yn/FDoqndh/9vNuQfXRDvTuXKLxfD/JtZQGKFT8MGcJBK644g==", "dev": true, - "requires": { + "dependencies": { "flat-cache": "^2.0.1" + }, + "engines": { + "node": ">=4" } }, - "find-up": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/find-up/-/find-up-2.1.0.tgz", - "integrity": "sha1-RdG35QbHF93UgndaK3eSCjwMV6c=", + "node_modules/find-up": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz", + "integrity": "sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==", + "dev": true, + "dependencies": { + "locate-path": "^5.0.0", + "path-exists": "^4.0.0" + }, + 
"engines": { + "node": ">=8" + } + }, + "node_modules/find-up/node_modules/path-exists": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", + "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==", "dev": true, - "requires": { - "locate-path": "^2.0.0" + "engines": { + "node": ">=8" } }, - "flat-cache": { + "node_modules/flat-cache": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/flat-cache/-/flat-cache-2.0.1.tgz", "integrity": "sha512-LoQe6yDuUMDzQAEH8sgmh4Md6oZnc/7PjtwjNFSzveXqSHt6ka9fPBuso7IGf9Rz4uqnSnWiFH2B/zj24a5ReA==", "dev": true, - "requires": { + "dependencies": { "flatted": "^2.0.0", "rimraf": "2.6.3", "write": "1.0.3" + }, + "engines": { + "node": ">=4" } }, - "flatted": { + "node_modules/flatted": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/flatted/-/flatted-2.0.1.tgz", "integrity": "sha512-a1hQMktqW9Nmqr5aktAux3JMNqaucxGcjtjWnZLHX7yyPCmlSV3M54nGYbqT8K+0GhF3NBgmJCc3ma+WOgX8Jg==", "dev": true }, - "fs.realpath": { + "node_modules/fs.realpath": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", "integrity": "sha1-FQStJSMVjKpA20onh8sBQRmU6k8=", "dev": true }, - "functional-red-black-tree": { + "node_modules/functional-red-black-tree": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/functional-red-black-tree/-/functional-red-black-tree-1.0.1.tgz", "integrity": "sha1-GwqzvVU7Kg1jmdKcDj6gslIHgyc=", "dev": true }, - "generate-function": { - "version": "2.3.1", - "resolved": "https://registry.npmjs.org/generate-function/-/generate-function-2.3.1.tgz", - "integrity": "sha512-eeB5GfMNeevm/GRYq20ShmsaGcmI81kIX2K9XQx5miC8KdHaC6Jm0qQ8ZNeGOi7wYB8OsdxKs+Y2oVuTFuVwKQ==", - "dev": true, - "requires": { - "is-property": "^1.0.2" - } - }, - "generate-object-property": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/generate-object-property/-/generate-object-property-1.2.0.tgz", - "integrity": "sha1-nA4cQDCM6AT0eDYYuTf6iPmdUNA=", + "node_modules/get-caller-file": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz", + "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==", "dev": true, - "requires": { - "is-property": "^1.0.0" + "engines": { + "node": "6.* || 8.* || >= 10.*" } }, - "get-caller-file": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-1.0.3.tgz", - "integrity": "sha512-3t6rVToeoZfYSGd8YoLFR2DJkiQrIiUrGcjvFX2mDw3bn6k2OtwHN0TNCLbBO+w8qTvimhDkv+LSscbJY1vE6w==", - "dev": true - }, - "get-stream": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-4.1.0.tgz", - "integrity": "sha512-GMat4EJ5161kIy2HevLlr4luNjBgvmj413KaQA7jt4V8B4RDsfpHk7WQ9GVqfYyyx8OS/L66Kox+rJRNklLK7w==", - "dev": true, - "requires": { - "pump": "^3.0.0" - } - }, - "glob": { + "node_modules/glob": { "version": "7.1.6", "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.6.tgz", "integrity": "sha512-LwaxwyZ72Lk7vZINtNNrywX0ZuLyStrdDtabefZKAY5ZGJhVtgdznluResxNmPitE0SAO+O26sWTHeKSI2wMBA==", + "deprecated": "Glob versions prior to v9 are no longer supported", "dev": true, - "requires": { + "dependencies": { "fs.realpath": "^1.0.0", "inflight": "^1.0.4", "inherits": "2", "minimatch": "^3.0.4", "once": "^1.3.0", "path-is-absolute": "^1.0.0" + }, + "engines": { + "node": "*" + }, + "funding": { + "url": 
"https://github.com/sponsors/isaacs" } }, - "glob-parent": { + "node_modules/glob-parent": { "version": "5.1.2", "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", "dev": true, - "requires": { + "dependencies": { "is-glob": "^4.0.1" + }, + "engines": { + "node": ">= 6" } }, - "globals": { + "node_modules/globals": { "version": "11.12.0", "resolved": "https://registry.npmjs.org/globals/-/globals-11.12.0.tgz", "integrity": "sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA==", - "dev": true - }, - "graceful-fs": { - "version": "4.2.3", - "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.3.tgz", - "integrity": "sha512-a30VEBm4PEdx1dRB7MFK7BejejvCvBronbLjht+sHuGYj8PHs7M/5Z+rt5lw551vZ7yfTCj4Vuyy3mSJytDWRQ==", - "dev": true - }, - "has-ansi": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/has-ansi/-/has-ansi-2.0.0.tgz", - "integrity": "sha1-NPUEnOHs3ysGSa8+8k5F7TVBbZE=", "dev": true, - "requires": { - "ansi-regex": "^2.0.0" - }, - "dependencies": { - "ansi-regex": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.1.1.tgz", - "integrity": "sha1-w7M6te42DYbg5ijwRorn7yfWVN8=", - "dev": true - } + "engines": { + "node": ">=4" } }, - "has-flag": { + "node_modules/has-flag": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", "integrity": "sha1-tdRU3CGZriJWmfNGfloH87lVuv0=", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/help-me": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/help-me/-/help-me-5.0.0.tgz", + "integrity": "sha512-7xgomUX6ADmcYzFik0HzAxh/73YlKR9bmFzf51CZwR+b6YtzU2m0u49hQCqV6SvlqIqsaxovfwdvbnsw3b/zpg==", "dev": true }, - "https-proxy-agent": { + "node_modules/https-proxy-agent": { "version": "2.2.4", "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-2.2.4.tgz", "integrity": "sha512-OmvfoQ53WLjtA9HeYP9RNrWMJzzAz1JGaSFr1nijg0PVR1JaD/xbJq1mdEIIlxGpXp9eSe/O2LgU9DJmTPd0Eg==", "dev": true, - "requires": { + "dependencies": { "agent-base": "^4.3.0", "debug": "^3.1.0" + }, + "engines": { + "node": ">= 4.5.0" } }, - "iconv-lite": { + "node_modules/iconv-lite": { "version": "0.4.24", "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz", "integrity": "sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==", "dev": true, - "requires": { + "dependencies": { "safer-buffer": ">= 2.1.2 < 3" + }, + "engines": { + "node": ">=0.10.0" } }, - "ignore": { + "node_modules/ignore": { "version": "4.0.6", "resolved": "https://registry.npmjs.org/ignore/-/ignore-4.0.6.tgz", "integrity": "sha512-cyFDKrqc/YdcWFniJhzI42+AzS+gNwmUzOSFcRCQYwySuBBBy/KjuxWLZ/FHEH6Moq1NizMOBWyTcv8O4OZIMg==", - "dev": true + "dev": true, + "engines": { + "node": ">= 4" + } }, - "import-fresh": { + "node_modules/import-fresh": { "version": "3.2.1", "resolved": "https://registry.npmjs.org/import-fresh/-/import-fresh-3.2.1.tgz", "integrity": "sha512-6e1q1cnWP2RXD9/keSkxHScg508CdXqXWgWBaETNhyuBFz+kUZlKboh+ISK+bU++DmbHimVBrOz/zzPe0sZ3sQ==", "dev": true, - "requires": { + "dependencies": { "parent-module": "^1.0.0", "resolve-from": "^4.0.0" + }, + "engines": { + "node": ">=6" } }, - "imurmurhash": { + "node_modules/imurmurhash": { "version": "0.1.4", "resolved": 
"https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz", "integrity": "sha1-khi5srkoojixPcT7a21XbyMUU+o=", - "dev": true + "dev": true, + "engines": { + "node": ">=0.8.19" + } }, - "inflight": { + "node_modules/inflight": { "version": "1.0.6", "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", "integrity": "sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk=", + "deprecated": "This module is not supported, and leaks memory. Do not use it. Check out lru-cache if you want a good and tested way to coalesce async requests by a key value, which is much more comprehensive and powerful.", "dev": true, - "requires": { + "dependencies": { "once": "^1.3.0", "wrappy": "1" } }, - "inherits": { + "node_modules/inherits": { "version": "2.0.4", "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", "dev": true }, - "inquirer": { + "node_modules/inquirer": { "version": "7.1.0", "resolved": "https://registry.npmjs.org/inquirer/-/inquirer-7.1.0.tgz", "integrity": "sha512-5fJMWEmikSYu0nv/flMc475MhGbB7TSPd/2IpFV4I4rMklboCH2rQjYY5kKiYGHqUF9gvaambupcJFFG9dvReg==", "dev": true, - "requires": { + "dependencies": { "ansi-escapes": "^4.2.1", "chalk": "^3.0.0", "cli-cursor": "^3.1.0", @@ -1559,549 +1517,650 @@ "strip-ansi": "^6.0.0", "through": "^2.3.6" }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/inquirer/node_modules/ansi-styles": { + "version": "4.2.1", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.2.1.tgz", + "integrity": "sha512-9VGjrMsG1vePxcSweQsN20KY/c4zN0h9fLjqAbwbPfahM3t+NL+M9HC8xeXG2I8pX5NoamTGNuomEUFI7fcUjA==", + "dev": true, "dependencies": { - "ansi-styles": { - "version": "4.2.1", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.2.1.tgz", - "integrity": "sha512-9VGjrMsG1vePxcSweQsN20KY/c4zN0h9fLjqAbwbPfahM3t+NL+M9HC8xeXG2I8pX5NoamTGNuomEUFI7fcUjA==", - "dev": true, - "requires": { - "@types/color-name": "^1.1.1", - "color-convert": "^2.0.1" - } - }, - "chalk": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-3.0.0.tgz", - "integrity": "sha512-4D3B6Wf41KOYRFdszmDqMCGq5VV/uMAB273JILmO+3jAlh8X4qDtdtgCR3fxtbLEMzSx22QdhnDcJvu2u1fVwg==", - "dev": true, - "requires": { - "ansi-styles": "^4.1.0", - "supports-color": "^7.1.0" - } - }, - "color-convert": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", - "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", - "dev": true, - "requires": { - "color-name": "~1.1.4" - } - }, - "color-name": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", - "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", - "dev": true - }, - "has-flag": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", - "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", - "dev": true - }, - "strip-ansi": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.0.tgz", - "integrity": "sha512-AuvKTrTfQNYNIctbR1K/YGTR1756GycPsg7b9bdV9Duqur4gv6aKqHXah67Z8ImS7WEz5QVcOtlfW2rZEugt6w==", - "dev": true, - "requires": { - "ansi-regex": "^5.0.0" - } - }, - "supports-color": { - "version": "7.1.0", - "resolved": 
"https://registry.npmjs.org/supports-color/-/supports-color-7.1.0.tgz", - "integrity": "sha512-oRSIpR8pxT1Wr2FquTNnGet79b3BWljqOuoW/h4oBhxJ/HUbX5nX6JSruTkvXDCFMwDPvsaTTbvMLKZWSy0R5g==", - "dev": true, - "requires": { - "has-flag": "^4.0.0" - } - } + "@types/color-name": "^1.1.1", + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" } }, - "interpret": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/interpret/-/interpret-1.2.0.tgz", - "integrity": "sha512-mT34yGKMNceBQUoVn7iCDKDntA7SC6gycMAWzGx1z/CMCTV7b2AAtXlo3nRyHZ1FelRkQbQjprHSYGwzLtkVbw==", - "dev": true + "node_modules/inquirer/node_modules/chalk": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-3.0.0.tgz", + "integrity": "sha512-4D3B6Wf41KOYRFdszmDqMCGq5VV/uMAB273JILmO+3jAlh8X4qDtdtgCR3fxtbLEMzSx22QdhnDcJvu2u1fVwg==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=8" + } }, - "invert-kv": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/invert-kv/-/invert-kv-2.0.0.tgz", - "integrity": "sha512-wPVv/y/QQ/Uiirj/vh3oP+1Ww+AWehmi1g5fFWGPF6IpCBCDVrhgHRMvrLfdYcwDh3QJbGXDW4JAuzxElLSqKA==", + "node_modules/inquirer/node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/inquirer/node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", "dev": true }, - "is-extglob": { + "node_modules/inquirer/node_modules/has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/inquirer/node_modules/strip-ansi": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.0.tgz", + "integrity": "sha512-AuvKTrTfQNYNIctbR1K/YGTR1756GycPsg7b9bdV9Duqur4gv6aKqHXah67Z8ImS7WEz5QVcOtlfW2rZEugt6w==", + "dev": true, + "dependencies": { + "ansi-regex": "^5.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/inquirer/node_modules/supports-color": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.1.0.tgz", + "integrity": "sha512-oRSIpR8pxT1Wr2FquTNnGet79b3BWljqOuoW/h4oBhxJ/HUbX5nX6JSruTkvXDCFMwDPvsaTTbvMLKZWSy0R5g==", + "dev": true, + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/is-extglob": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", "integrity": "sha1-qIwCU1eR8C7TfHahueqXc8gz+MI=", - "dev": true + "dev": true, + "engines": { + "node": ">=0.10.0" + } }, - "is-fullwidth-code-point": { + "node_modules/is-fullwidth-code-point": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", "integrity": 
"sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", - "dev": true + "dev": true, + "engines": { + "node": ">=8" + } }, - "is-glob": { + "node_modules/is-glob": { "version": "4.0.1", "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.1.tgz", "integrity": "sha512-5G0tKtBTFImOqDnLB2hG6Bp2qcKEFduo4tZu9MT/H6NQv/ghhy30o55ufafxJ/LdH79LLs2Kfrn85TLKyA7BUg==", "dev": true, - "requires": { + "dependencies": { "is-extglob": "^2.1.1" + }, + "engines": { + "node": ">=0.10.0" } }, - "is-my-ip-valid": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-my-ip-valid/-/is-my-ip-valid-1.0.0.tgz", - "integrity": "sha512-gmh/eWXROncUzRnIa1Ubrt5b8ep/MGSnfAUI3aRp+sqTCs1tv1Isl8d8F6JmkN3dXKc3ehZMrtiPN9eL03NuaQ==", - "dev": true - }, - "is-my-json-valid": { - "version": "2.20.0", - "resolved": "https://registry.npmjs.org/is-my-json-valid/-/is-my-json-valid-2.20.0.tgz", - "integrity": "sha512-XTHBZSIIxNsIsZXg7XB5l8z/OBFosl1Wao4tXLpeC7eKU4Vm/kdop2azkPqULwnfGQjmeDIyey9g7afMMtdWAA==", - "dev": true, - "requires": { - "generate-function": "^2.0.0", - "generate-object-property": "^1.1.0", - "is-my-ip-valid": "^1.0.0", - "jsonpointer": "^4.0.0", - "xtend": "^4.0.0" - } - }, - "is-promise": { + "node_modules/is-promise": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/is-promise/-/is-promise-2.1.0.tgz", "integrity": "sha1-eaKp7OfwlugPNtKy87wWwf9L8/o=", "dev": true }, - "is-property": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/is-property/-/is-property-1.0.2.tgz", - "integrity": "sha1-V/4cTkhHTt1lsJkR8msc1Ald2oQ=", - "dev": true - }, - "is-resolvable": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/is-resolvable/-/is-resolvable-1.1.0.tgz", - "integrity": "sha512-qgDYXFSR5WvEfuS5dMj6oTMEbrrSaM0CrFk2Yiq/gXnBvD9pMa2jGXxyhGLfvhZpuMZe18CJpFxAt3CRs42NMg==", - "dev": true - }, - "is-stream": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-1.1.0.tgz", - "integrity": "sha1-EtSj3U5o4Lec6428hBc66A2RykQ=", - "dev": true - }, - "isarray": { + "node_modules/isarray": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", "integrity": "sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE=", "dev": true }, - "isexe": { + "node_modules/isexe": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", "integrity": "sha1-6PvzdNxVb/iUehDcsFctYz8s+hA=", "dev": true }, - "js-tokens": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-3.0.2.tgz", - "integrity": "sha1-mGbfOVECEw449/mWvOtlRDIJwls=", - "dev": true + "node_modules/joycon": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/joycon/-/joycon-3.1.1.tgz", + "integrity": "sha512-34wB/Y7MW7bzjKRjUKTa46I2Z7eV62Rkhva+KkopW7Qvv/OSWBqvkSY7vusOPrNuZcUG3tApvdVgNB8POj3SPw==", + "dev": true, + "engines": { + "node": ">=10" + } }, - "js-yaml": { + "node_modules/js-yaml": { "version": "3.13.1", "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.13.1.tgz", "integrity": "sha512-YfbcO7jXDdyj0DGxYVSlSeQNHbD7XPWvrVWeVUujrQEoZzWJIRrCPoyk6kL6IAjAG2IolMK4T0hNUe0HOUs5Jw==", "dev": true, - "requires": { + "dependencies": { "argparse": "^1.0.7", "esprima": "^4.0.0" + }, + "bin": { + "js-yaml": "bin/js-yaml.js" } }, - "jsesc": { - "version": "2.5.2", - "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-2.5.2.tgz", - "integrity": "sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA==", - "dev": 
true + "node_modules/jsesc": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-3.1.0.tgz", + "integrity": "sha512-/sM3dO2FOzXjKQhJuo0Q173wf2KOo8t4I8vHy6lF9poUp7bKT0/NHE8fPX23PwfhnykfqnC2xRxOnVw5XuGIaA==", + "dev": true, + "bin": { + "jsesc": "bin/jsesc" + }, + "engines": { + "node": ">=6" + } }, - "json-schema-traverse": { + "node_modules/json-schema-traverse": { "version": "0.4.1", "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", "dev": true }, - "json-stable-stringify": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/json-stable-stringify/-/json-stable-stringify-1.0.1.tgz", - "integrity": "sha1-mnWdOcXy/1A/1TAGRu1EX4jE+a8=", - "dev": true, - "requires": { - "jsonify": "~0.0.0" - } - }, - "json-stable-stringify-without-jsonify": { + "node_modules/json-stable-stringify-without-jsonify": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz", "integrity": "sha1-nbe1lJatPzz+8wp1FC0tkwrXJlE=", "dev": true }, - "jsonify": { - "version": "0.0.0", - "resolved": "https://registry.npmjs.org/jsonify/-/jsonify-0.0.0.tgz", - "integrity": "sha1-LHS27kHZPKUbe1qu6PUDYx0lKnM=", - "dev": true - }, - "jsonpointer": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/jsonpointer/-/jsonpointer-4.0.1.tgz", - "integrity": "sha1-T9kss04OnbPInIYi7PUfm5eMbLk=", - "dev": true - }, - "lcid": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/lcid/-/lcid-2.0.0.tgz", - "integrity": "sha512-avPEb8P8EGnwXKClwsNUgryVjllcRqtMYa49NTsbQagYuT1DcXnl1915oxWjoyGrXR6zH/Y0Zc96xWsPcoDKeA==", - "dev": true, - "requires": { - "invert-kv": "^2.0.0" - } - }, - "levn": { + "node_modules/levn": { "version": "0.3.0", "resolved": "https://registry.npmjs.org/levn/-/levn-0.3.0.tgz", "integrity": "sha1-OwmSTt+fCDwEkP3UwLxEIeBHZO4=", "dev": true, - "requires": { + "dependencies": { "prelude-ls": "~1.1.2", "type-check": "~0.3.2" + }, + "engines": { + "node": ">= 0.8.0" } }, - "locate-path": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-2.0.0.tgz", - "integrity": "sha1-K1aLJl7slExtnA3pw9u7ygNUzY4=", + "node_modules/locate-path": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-5.0.0.tgz", + "integrity": "sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==", "dev": true, - "requires": { - "p-locate": "^2.0.0", - "path-exists": "^3.0.0" + "dependencies": { + "p-locate": "^4.1.0" + }, + "engines": { + "node": ">=8" } }, - "lodash": { + "node_modules/lodash": { "version": "4.17.21", "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==", "dev": true }, - "map-age-cleaner": { - "version": "0.1.3", - "resolved": "https://registry.npmjs.org/map-age-cleaner/-/map-age-cleaner-0.1.3.tgz", - "integrity": "sha512-bJzx6nMoP6PDLPBFmg7+xRKeFZvFboMrGlxmNj9ClvX53KrmvM5bXFXEWjbz4cz1AFn+jWJ9z/DJSz7hrs0w3w==", - "dev": true, - "requires": { - "p-defer": "^1.0.0" - } - }, - "mem": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/mem/-/mem-4.3.0.tgz", - "integrity": "sha512-qX2bG48pTqYRVmDB37rn/6PT7LcR8T7oAX3bf99u1Tt1nzxYfxkgqDwUwolPlXweM0XzBOBFzSx4kfp7KP1s/w==", - 
"dev": true, - "requires": { - "map-age-cleaner": "^0.1.1", - "mimic-fn": "^2.0.0", - "p-is-promise": "^2.0.0" - } + "node_modules/lru-cache": { + "version": "10.4.3", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.4.3.tgz", + "integrity": "sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ==", + "dev": true }, - "mime": { + "node_modules/mime": { "version": "2.4.4", "resolved": "https://registry.npmjs.org/mime/-/mime-2.4.4.tgz", "integrity": "sha512-LRxmNwziLPT828z+4YkNzloCFC2YM4wrB99k+AV5ZbEyfGNWfG8SO1FUXLmLDBSo89NrJZ4DIWeLjy1CHGhMGA==", - "dev": true + "dev": true, + "bin": { + "mime": "cli.js" + }, + "engines": { + "node": ">=4.0.0" + } }, - "mimic-fn": { + "node_modules/mimic-fn": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-2.1.0.tgz", "integrity": "sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==", - "dev": true + "dev": true, + "engines": { + "node": ">=6" + } }, - "minimatch": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz", - "integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==", + "node_modules/minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", "dev": true, - "requires": { + "dependencies": { "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "node_modules/minimist": { + "version": "1.2.8", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.8.tgz", + "integrity": "sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==", + "dev": true, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/minipass": { + "version": "4.2.8", + "resolved": "https://registry.npmjs.org/minipass/-/minipass-4.2.8.tgz", + "integrity": "sha512-fNzuVyifolSLFL4NzpF+wEF4qrgqaaKX0haXPQEdQ7NKAN+WecoKMHV09YcuL/DHxrUsYQOK3MiuDf7Ip2OXfQ==", + "dev": true, + "engines": { + "node": ">=8" } }, - "mkdirp": { + "node_modules/mkdirp": { "version": "0.5.6", "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.6.tgz", "integrity": "sha512-FP+p8RB8OWpF3YZBCrP5gtADmtXApB5AMLn+vdyA+PyxCjrCs00mjyUozssO33cwDeT3wNGdLxJ5M//YqtHAJw==", "dev": true, - "requires": { + "dependencies": { "minimist": "^1.2.6" }, - "dependencies": { - "minimist": { - "version": "1.2.8", - "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.8.tgz", - "integrity": "sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==", - "dev": true - } + "bin": { + "mkdirp": "bin/cmd.js" } }, - "ms": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", - "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", + "node_modules/ms": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", "dev": true }, - "mute-stream": { + "node_modules/mute-stream": { "version": "0.0.8", "resolved": "https://registry.npmjs.org/mute-stream/-/mute-stream-0.0.8.tgz", "integrity": 
"sha512-nnbWWOkoWyUsTjKrhgD0dcz22mdkSnpYqbEjIm2nhwhuxlSkpywJmBo8h0ZqJdkp73mb90SssHkN4rsRaBAfAA==", "dev": true }, - "natural-compare": { + "node_modules/natural-compare": { "version": "1.4.0", "resolved": "https://registry.npmjs.org/natural-compare/-/natural-compare-1.4.0.tgz", "integrity": "sha1-Sr6/7tdUHywnrPspvbvRXI1bpPc=", "dev": true }, - "next-tick": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/next-tick/-/next-tick-1.0.0.tgz", - "integrity": "sha1-yobR/ogoFpsBICCOPchCS524NCw=", - "dev": true + "node_modules/needle": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/needle/-/needle-3.3.1.tgz", + "integrity": "sha512-6k0YULvhpw+RoLNiQCRKOl09Rv1dPLr8hHnVjHqdolKwDrdNyk+Hmrthi4lIGPPz3r39dLx0hsF5s40sZ3Us4Q==", + "dev": true, + "dependencies": { + "iconv-lite": "^0.6.3", + "sax": "^1.2.4" + }, + "bin": { + "needle": "bin/needle" + }, + "engines": { + "node": ">= 4.4.x" + } + }, + "node_modules/needle/node_modules/iconv-lite": { + "version": "0.6.3", + "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.6.3.tgz", + "integrity": "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==", + "dev": true, + "dependencies": { + "safer-buffer": ">= 2.1.2 < 3.0.0" + }, + "engines": { + "node": ">=0.10.0" + } }, - "nice-try": { + "node_modules/nice-try": { "version": "1.0.5", "resolved": "https://registry.npmjs.org/nice-try/-/nice-try-1.0.5.tgz", "integrity": "sha512-1nh45deeb5olNY7eX82BkPO7SSxR5SSYJiPTrTdFUVYwAl8CKMA5N9PjTYkHiRjisVcxcQ1HXdLhx2qxxJzLNQ==", "dev": true }, - "npm-run-path": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-2.0.2.tgz", - "integrity": "sha1-NakjLfo11wZ7TLLd8jV7GHFTbF8=", + "node_modules/on-exit-leak-free": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/on-exit-leak-free/-/on-exit-leak-free-2.1.2.tgz", + "integrity": "sha512-0eJJY6hXLGf1udHwfNftBqH+g73EU4B504nZeKpz1sYRKafAghwxEJunB2O7rDZkL4PGfsMVnTXZ2EjibbqcsA==", "dev": true, - "requires": { - "path-key": "^2.0.0" + "engines": { + "node": ">=14.0.0" } }, - "number-is-nan": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/number-is-nan/-/number-is-nan-1.0.1.tgz", - "integrity": "sha1-CXtgK1NCKlIsGvuHkDGDNpQaAR0=", - "dev": true - }, - "object-assign": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz", - "integrity": "sha1-IQmtx5ZYh8/AXLvUQsrIv7s2CGM=", - "dev": true - }, - "once": { + "node_modules/once": { "version": "1.4.0", "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", "integrity": "sha1-WDsap3WWHUsROsF9nFC6753Xa9E=", "dev": true, - "requires": { + "dependencies": { "wrappy": "1" } }, - "onetime": { + "node_modules/onetime": { "version": "5.1.0", "resolved": "https://registry.npmjs.org/onetime/-/onetime-5.1.0.tgz", "integrity": "sha512-5NcSkPHhwTVFIQN+TUqXoS5+dlElHXdpAWu9I0HP20YOtIi+aZ0Ct82jdlILDxjLEAWwvm+qj1m6aEtsDVmm6Q==", "dev": true, - "requires": { + "dependencies": { "mimic-fn": "^2.1.0" + }, + "engines": { + "node": ">=6" } }, - "optionator": { + "node_modules/optionator": { "version": "0.8.3", "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.8.3.tgz", "integrity": "sha512-+IW9pACdk3XWmmTXG8m3upGUJst5XRGzxMRjXzAuJ1XnIFNvfhjjIuYkDvysnPQ7qzqVzLt78BCruntqRhWQbA==", "dev": true, - "requires": { + "dependencies": { "deep-is": "~0.1.3", "fast-levenshtein": "~2.0.6", "levn": "~0.3.0", "prelude-ls": "~1.1.2", "type-check": "~0.3.2", "word-wrap": "~1.2.3" + }, + 
"engines": { + "node": ">= 0.8.0" } }, - "os-homedir": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/os-homedir/-/os-homedir-1.0.2.tgz", - "integrity": "sha1-/7xJiDNuDoM94MFox+8VISGqf7M=", - "dev": true - }, - "os-locale": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/os-locale/-/os-locale-3.1.0.tgz", - "integrity": "sha512-Z8l3R4wYWM40/52Z+S265okfFj8Kt2cC2MKY+xNi3kFs+XGI7WXu/I309QQQYbRW4ijiZ+yxs9pqEhJh0DqW3Q==", - "dev": true, - "requires": { - "execa": "^1.0.0", - "lcid": "^2.0.0", - "mem": "^4.0.0" - } - }, - "os-tmpdir": { + "node_modules/os-tmpdir": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/os-tmpdir/-/os-tmpdir-1.0.2.tgz", "integrity": "sha1-u+Z0BseaqFxc/sdm/lc0VV36EnQ=", - "dev": true - }, - "p-defer": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/p-defer/-/p-defer-1.0.0.tgz", - "integrity": "sha1-n26xgvbJqozXQwBKfU+WsZaw+ww=", - "dev": true - }, - "p-finally": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/p-finally/-/p-finally-1.0.0.tgz", - "integrity": "sha1-P7z7FbiZpEEjs0ttzBi3JDNqLK4=", - "dev": true - }, - "p-is-promise": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/p-is-promise/-/p-is-promise-2.1.0.tgz", - "integrity": "sha512-Y3W0wlRPK8ZMRbNq97l4M5otioeA5lm1z7bkNkxCka8HSPjR0xRWmpCmc9utiaLP9Jb1eD8BgeIxTW4AIF45Pg==", - "dev": true + "dev": true, + "engines": { + "node": ">=0.10.0" + } }, - "p-limit": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-1.3.0.tgz", - "integrity": "sha512-vvcXsLAJ9Dr5rQOPk7toZQZJApBl2K4J6dANSsEuh6QI41JYcsS/qhTGa9ErIUUgK3WNQoJYvylxvjqmiqEA9Q==", + "node_modules/p-limit": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz", + "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==", "dev": true, - "requires": { - "p-try": "^1.0.0" + "dependencies": { + "p-try": "^2.0.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" } }, - "p-locate": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-2.0.0.tgz", - "integrity": "sha1-IKAQOyIqcMj9OcwuWAaA893l7EM=", + "node_modules/p-locate": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-4.1.0.tgz", + "integrity": "sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==", "dev": true, - "requires": { - "p-limit": "^1.1.0" + "dependencies": { + "p-limit": "^2.2.0" + }, + "engines": { + "node": ">=8" } }, - "p-try": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/p-try/-/p-try-1.0.0.tgz", - "integrity": "sha1-y8ec26+P1CKOE/Yh8rGiN8GyB7M=", - "dev": true + "node_modules/p-try": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz", + "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==", + "dev": true, + "engines": { + "node": ">=6" + } }, - "parent-module": { + "node_modules/parent-module": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz", "integrity": "sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==", "dev": true, - "requires": { + "dependencies": { "callsites": "^3.0.0" + }, + "engines": { + "node": ">=6" } }, - "path-exists": { + "node_modules/path-exists": { "version": "3.0.0", 
"resolved": "https://registry.npmjs.org/path-exists/-/path-exists-3.0.0.tgz", "integrity": "sha1-zg6+ql94yxiSXqfYENe1mwEP1RU=", - "dev": true + "dev": true, + "engines": { + "node": ">=4" + } }, - "path-is-absolute": { + "node_modules/path-is-absolute": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", "integrity": "sha1-F0uSaHNVNP+8es5r9TpanhtcX18=", - "dev": true - }, - "path-is-inside": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/path-is-inside/-/path-is-inside-1.0.2.tgz", - "integrity": "sha1-NlQX3t5EQw0cEa9hAn+s8HS9/FM=", - "dev": true + "dev": true, + "engines": { + "node": ">=0.10.0" + } }, - "path-key": { + "node_modules/path-key": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/path-key/-/path-key-2.0.1.tgz", "integrity": "sha1-QRyttXTFoUDTpLGRDUDYDMn0C0A=", - "dev": true + "dev": true, + "engines": { + "node": ">=4" + } }, - "path-parse": { + "node_modules/path-parse": { "version": "1.0.7", "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz", "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==", "dev": true }, - "pend": { + "node_modules/path-scurry": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/path-scurry/-/path-scurry-1.11.1.tgz", + "integrity": "sha512-Xa4Nw17FS9ApQFJ9umLiJS4orGjm7ZzwUrwamcGQuHSzDyth9boKDaycYdDcZDuqYATXw4HFXgaqWTctW/v1HA==", + "dev": true, + "dependencies": { + "lru-cache": "^10.2.0", + "minipass": "^5.0.0 || ^6.0.2 || ^7.0.0" + }, + "engines": { + "node": ">=16 || 14 >=14.18" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/path-scurry/node_modules/minipass": { + "version": "7.1.2", + "resolved": "https://registry.npmjs.org/minipass/-/minipass-7.1.2.tgz", + "integrity": "sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw==", + "dev": true, + "engines": { + "node": ">=16 || 14 >=14.17" + } + }, + "node_modules/pend": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/pend/-/pend-1.2.0.tgz", "integrity": "sha1-elfrVQpng/kRUzH89GY9XI4AelA=", "dev": true }, - "pluralize": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/pluralize/-/pluralize-1.2.1.tgz", - "integrity": "sha1-0aIUg/0iu0HlihL6NCGCMUCJfEU=", + "node_modules/picocolors": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz", + "integrity": "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==", "dev": true }, - "prelude-ls": { + "node_modules/pino": { + "version": "9.6.0", + "resolved": "https://registry.npmjs.org/pino/-/pino-9.6.0.tgz", + "integrity": "sha512-i85pKRCt4qMjZ1+L7sy2Ag4t1atFcdbEt76+7iRJn1g2BvsnRMGu9p8pivl9fs63M2kF/A0OacFZhTub+m/qMg==", + "dev": true, + "dependencies": { + "atomic-sleep": "^1.0.0", + "fast-redact": "^3.1.1", + "on-exit-leak-free": "^2.1.0", + "pino-abstract-transport": "^2.0.0", + "pino-std-serializers": "^7.0.0", + "process-warning": "^4.0.0", + "quick-format-unescaped": "^4.0.3", + "real-require": "^0.2.0", + "safe-stable-stringify": "^2.3.1", + "sonic-boom": "^4.0.1", + "thread-stream": "^3.0.0" + }, + "bin": { + "pino": "bin.js" + } + }, + "node_modules/pino-abstract-transport": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/pino-abstract-transport/-/pino-abstract-transport-2.0.0.tgz", + "integrity": 
"sha512-F63x5tizV6WCh4R6RHyi2Ml+M70DNRXt/+HANowMflpgGFMAym/VKm6G7ZOQRjqN7XbGxK1Lg9t6ZrtzOaivMw==", + "dev": true, + "dependencies": { + "split2": "^4.0.0" + } + }, + "node_modules/pino-pretty": { + "version": "13.0.0", + "resolved": "https://registry.npmjs.org/pino-pretty/-/pino-pretty-13.0.0.tgz", + "integrity": "sha512-cQBBIVG3YajgoUjo1FdKVRX6t9XPxwB9lcNJVD5GCnNM4Y6T12YYx8c6zEejxQsU0wrg9TwmDulcE9LR7qcJqA==", + "dev": true, + "dependencies": { + "colorette": "^2.0.7", + "dateformat": "^4.6.3", + "fast-copy": "^3.0.2", + "fast-safe-stringify": "^2.1.1", + "help-me": "^5.0.0", + "joycon": "^3.1.1", + "minimist": "^1.2.6", + "on-exit-leak-free": "^2.1.0", + "pino-abstract-transport": "^2.0.0", + "pump": "^3.0.0", + "secure-json-parse": "^2.4.0", + "sonic-boom": "^4.0.1", + "strip-json-comments": "^3.1.1" + }, + "bin": { + "pino-pretty": "bin.js" + } + }, + "node_modules/pino-std-serializers": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/pino-std-serializers/-/pino-std-serializers-7.0.0.tgz", + "integrity": "sha512-e906FRY0+tV27iq4juKzSYPbUj2do2X2JX4EzSca1631EB2QJQUqGbDuERal7LCtOpxl6x3+nvo9NPZcmjkiFA==", + "dev": true + }, + "node_modules/prelude-ls": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.1.2.tgz", "integrity": "sha1-IZMqVJ9eUv/ZqCf1cOBL5iqX2lQ=", - "dev": true + "dev": true, + "engines": { + "node": ">= 0.8.0" + } }, - "process-nextick-args": { + "node_modules/process-nextick-args": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz", "integrity": "sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==", "dev": true }, - "progress": { + "node_modules/process-warning": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/process-warning/-/process-warning-4.0.1.tgz", + "integrity": "sha512-3c2LzQ3rY9d0hc1emcsHhfT9Jwz0cChib/QN89oME2R451w5fy3f0afAhERFZAwrbDU43wk12d0ORBpDVME50Q==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/fastify" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/fastify" + } + ] + }, + "node_modules/progress": { "version": "2.0.3", "resolved": "https://registry.npmjs.org/progress/-/progress-2.0.3.tgz", "integrity": "sha512-7PiHtLll5LdnKIMw100I+8xJXR5gW2QwWYkT6iJva0bXitZKa/XMrSbdmg3r2Xnaidz9Qumd0VPaMrZlF9V9sA==", + "dev": true, + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/promise-timeout": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/promise-timeout/-/promise-timeout-1.3.0.tgz", + "integrity": "sha512-5yANTE0tmi5++POym6OgtFmwfDvOXABD9oj/jLQr5GPEyuNEb7jH4wbbANJceJid49jwhi1RddxnhnEAb/doqg==", "dev": true }, - "proxy-from-env": { + "node_modules/proxy-from-env": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/proxy-from-env/-/proxy-from-env-1.1.0.tgz", "integrity": "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg==", "dev": true }, - "pump": { + "node_modules/pump": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/pump/-/pump-3.0.0.tgz", "integrity": "sha512-LwZy+p3SFs1Pytd/jYct4wpv49HiYCqd9Rlc5ZVdk0V+8Yzv6jR5Blk3TRmPL1ft69TxP0IMZGJ+WPFU2BFhww==", "dev": true, - "requires": { + "dependencies": { "end-of-stream": "^1.1.0", "once": "^1.3.1" } }, - "punycode": { + "node_modules/punycode": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.1.1.tgz", "integrity": 
"sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A==", - "dev": true + "dev": true, + "engines": { + "node": ">=6" + } }, - "puppeteer": { + "node_modules/puppeteer": { "version": "1.20.0", "resolved": "https://registry.npmjs.org/puppeteer/-/puppeteer-1.20.0.tgz", "integrity": "sha512-bt48RDBy2eIwZPrkgbcwHtb51mj2nKvHOPMaSH2IsWiv7lOG9k9zhaRzpDZafrk05ajMc3cu+lSQYYOfH2DkVQ==", + "deprecated": "< 22.8.2 is no longer supported", "dev": true, - "requires": { + "hasInstallScript": true, + "dependencies": { "debug": "^4.1.0", "extract-zip": "^1.6.6", "https-proxy-agent": "^2.2.1", @@ -2111,100 +2170,98 @@ "rimraf": "^2.6.1", "ws": "^6.1.0" }, + "engines": { + "node": ">=6.4.0" + } + }, + "node_modules/puppeteer-to-istanbul": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/puppeteer-to-istanbul/-/puppeteer-to-istanbul-1.4.0.tgz", + "integrity": "sha512-dzW8u/PMqMZppvoXCFod8IkCTI2JL0yP2YUBbaALnX+iJJ6gqjk77fIoK9MqnMqRZAcoa81GLFfZExakWg/Q4Q==", + "dev": true, "dependencies": { - "debug": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.1.1.tgz", - "integrity": "sha512-pYAIzeRo8J6KPEaJ0VWOh5Pzkbw/RetuzehGM7QRRX5he4fPHx2rdKMB256ehJCkX+XRQm16eZLqLNS8RSZXZw==", - "dev": true, - "requires": { - "ms": "^2.1.1" - } - } + "clone": "^2.1.2", + "mkdirp": "^1.0.4", + "v8-to-istanbul": "^1.2.1", + "yargs": "^15.3.1" } }, - "puppeteer-to-istanbul": { - "version": "1.2.2", - "resolved": "https://registry.npmjs.org/puppeteer-to-istanbul/-/puppeteer-to-istanbul-1.2.2.tgz", - "integrity": "sha512-uXj2WKvcrszD0BHBp6Ht3FDed4Kfzvzn1fP4IdrYLjZ9Gbxc/YRhT1JBdTz1TMHZVs+HHT/Bbwz3KwSLLK4UBg==", + "node_modules/puppeteer-to-istanbul/node_modules/mkdirp": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-1.0.4.tgz", + "integrity": "sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw==", "dev": true, - "requires": { - "clone": "^2.1.1", - "mkdirp": "^0.5.1", - "v8-to-istanbul": "^1.2.0", - "yargs": "^11.0.0" + "bin": { + "mkdirp": "bin/cmd.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/puppeteer-to-istanbul/node_modules/yargs": { + "version": "15.4.1", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-15.4.1.tgz", + "integrity": "sha512-aePbxDmcYW++PaqBsJ+HYUFwCdv4LVvdnhBy78E57PIor8/OVvhMrADFFEDh8DHDFRv/O9i3lPhsENjO7QX0+A==", + "dev": true, + "dependencies": { + "cliui": "^6.0.0", + "decamelize": "^1.2.0", + "find-up": "^4.1.0", + "get-caller-file": "^2.0.1", + "require-directory": "^2.1.1", + "require-main-filename": "^2.0.0", + "set-blocking": "^2.0.0", + "string-width": "^4.2.0", + "which-module": "^2.0.0", + "y18n": "^4.0.0", + "yargs-parser": "^18.1.2" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/puppeteer-to-istanbul/node_modules/yargs-parser": { + "version": "18.1.3", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-18.1.3.tgz", + "integrity": "sha512-o50j0JeToy/4K6OZcaQmW6lyXXKhq7csREXcDwk2omFPJEwUNOVtJKvmDr9EI1fAJZUyZcRF7kxGBWmRXudrCQ==", + "dev": true, + "dependencies": { + "camelcase": "^5.0.0", + "decamelize": "^1.2.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/puppeteer/node_modules/debug": { + "version": "4.4.0", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.0.tgz", + "integrity": "sha512-6WTZ/IxCY/T6BALoZHaE4ctp9xm+Z5kY/pzYaCHRFeyVhojxlrm+46y68HA6hr0TcwEssoxNiDEUJQjfPZ/RYA==", + "dev": true, + "dependencies": { + "ms": "^2.1.3" + }, + "engines": { + "node": 
">=6.0" }, - "dependencies": { - "ansi-regex": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-3.0.0.tgz", - "integrity": "sha1-7QMXwyIGT3lGbAKWa922Bas32Zg=", - "dev": true - }, - "is-fullwidth-code-point": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz", - "integrity": "sha1-o7MKXE8ZkYMWeqq5O+764937ZU8=", - "dev": true - }, - "string-width": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-2.1.1.tgz", - "integrity": "sha512-nOqH59deCq9SRHlxq1Aw85Jnt4w6KvLKqWVik6oA9ZklXLNIOlqg4F2yrT1MVaTjAqvVwdfeZ7w7aCvJD7ugkw==", - "dev": true, - "requires": { - "is-fullwidth-code-point": "^2.0.0", - "strip-ansi": "^4.0.0" - } - }, - "strip-ansi": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-4.0.0.tgz", - "integrity": "sha1-qEeQIusaw2iocTibY1JixQXuNo8=", - "dev": true, - "requires": { - "ansi-regex": "^3.0.0" - } - }, - "yargs": { - "version": "11.1.1", - "resolved": "https://registry.npmjs.org/yargs/-/yargs-11.1.1.tgz", - "integrity": "sha512-PRU7gJrJaXv3q3yQZ/+/X6KBswZiaQ+zOmdprZcouPYtQgvNU35i+68M4b1ZHLZtYFT5QObFLV+ZkmJYcwKdiw==", - "dev": true, - "requires": { - "cliui": "^4.0.0", - "decamelize": "^1.1.1", - "find-up": "^2.1.0", - "get-caller-file": "^1.0.1", - "os-locale": "^3.1.0", - "require-directory": "^2.1.1", - "require-main-filename": "^1.0.1", - "set-blocking": "^2.0.0", - "string-width": "^2.0.0", - "which-module": "^2.0.0", - "y18n": "^3.2.1", - "yargs-parser": "^9.0.2" - }, - "dependencies": { - "yargs-parser": { - "version": "9.0.2", - "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-9.0.2.tgz", - "integrity": "sha1-nM9qQ0YP5O1Aqbto9I1DuKaMwHc=", - "dev": true, - "requires": { - "camelcase": "^4.1.0" - } - } - } + "peerDependenciesMeta": { + "supports-color": { + "optional": true } } }, - "readable-stream": { + "node_modules/quick-format-unescaped": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/quick-format-unescaped/-/quick-format-unescaped-4.0.4.tgz", + "integrity": "sha512-tYC1Q1hgyRuHgloV/YXs2w15unPVh8qfu/qCTfhTYamaw7fyhumKa2yGpdSo87vY32rIclj+4fWYQXUMs9EHvg==", + "dev": true + }, + "node_modules/readable-stream": { "version": "2.3.7", "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.7.tgz", "integrity": "sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==", "dev": true, - "requires": { + "dependencies": { "core-util-is": "~1.0.0", "inherits": "~2.0.3", "isarray": "~1.0.0", @@ -2214,542 +2271,583 @@ "util-deprecate": "~1.0.1" } }, - "readline2": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/readline2/-/readline2-1.0.1.tgz", - "integrity": "sha1-QQWWCP/BVHV7cV2ZidGZ/783LjU=", - "dev": true, - "requires": { - "code-point-at": "^1.0.0", - "is-fullwidth-code-point": "^1.0.0", - "mute-stream": "0.0.5" - }, - "dependencies": { - "is-fullwidth-code-point": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-1.0.0.tgz", - "integrity": "sha1-754xOG8DGn8NZDr4L95QxFfvAMs=", - "dev": true, - "requires": { - "number-is-nan": "^1.0.0" - } - }, - "mute-stream": { - "version": "0.0.5", - "resolved": "https://registry.npmjs.org/mute-stream/-/mute-stream-0.0.5.tgz", - "integrity": "sha1-j7+rsKmKJT0xhDMfno3rc3L6xsA=", - "dev": true - } - } - }, - "rechoir": { - "version": "0.6.2", - "resolved": 
"https://registry.npmjs.org/rechoir/-/rechoir-0.6.2.tgz", - "integrity": "sha1-hSBLVNuoLVdC4oyWdW70OvUOM4Q=", + "node_modules/real-require": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/real-require/-/real-require-0.2.0.tgz", + "integrity": "sha512-57frrGM/OCTLqLOAh0mhVA9VBMHd+9U7Zb2THMGdBUoZVOtGbJzjxsYGDJ3A9AYYCP4hn6y1TVbaOfzWtm5GFg==", "dev": true, - "requires": { - "resolve": "^1.1.6" + "engines": { + "node": ">= 12.13.0" } }, - "regexpp": { + "node_modules/regexpp": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/regexpp/-/regexpp-2.0.1.tgz", "integrity": "sha512-lv0M6+TkDVniA3aD1Eg0DVpfU/booSu7Eev3TDO/mZKHBfVjgCGTV4t4buppESEYDtkArYFOxTJWv6S5C+iaNw==", - "dev": true + "dev": true, + "engines": { + "node": ">=6.5.0" + } }, - "require-directory": { + "node_modules/require-directory": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz", "integrity": "sha1-jGStX9MNqxyXbiNE/+f3kqam30I=", - "dev": true - }, - "require-main-filename": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/require-main-filename/-/require-main-filename-1.0.1.tgz", - "integrity": "sha1-l/cXtp1IeE9fUmpsWqj/3aBVpNE=", - "dev": true - }, - "require-uncached": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/require-uncached/-/require-uncached-1.0.3.tgz", - "integrity": "sha1-Tg1W1slmL9MeQwEcS5WqSZVUIdM=", "dev": true, - "requires": { - "caller-path": "^0.1.0", - "resolve-from": "^1.0.0" - }, - "dependencies": { - "resolve-from": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-1.0.1.tgz", - "integrity": "sha1-Jsv+k10a7uq7Kbw/5a6wHpPUQiY=", - "dev": true - } + "engines": { + "node": ">=0.10.0" } }, - "resolve": { + "node_modules/require-main-filename": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/require-main-filename/-/require-main-filename-2.0.0.tgz", + "integrity": "sha512-NKN5kMDylKuldxYLSUfrbo5Tuzh4hd+2E8NPPX02mZtn1VuREQToYe/ZdlJy+J3uCpfaiGF05e7B8W0iXbQHmg==", + "dev": true + }, + "node_modules/resolve": { "version": "1.15.1", "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.15.1.tgz", "integrity": "sha512-84oo6ZTtoTUpjgNEr5SJyzQhzL72gaRodsSfyxC/AXRvwu0Yse9H8eF9IpGo7b8YetZhlI6v7ZQ6bKBFV/6S7w==", "dev": true, - "requires": { + "dependencies": { "path-parse": "^1.0.6" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, - "resolve-from": { + "node_modules/resolve-from": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-4.0.0.tgz", "integrity": "sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==", - "dev": true + "dev": true, + "engines": { + "node": ">=4" + } }, - "restore-cursor": { + "node_modules/restore-cursor": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/restore-cursor/-/restore-cursor-3.1.0.tgz", "integrity": "sha512-l+sSefzHpj5qimhFSE5a8nufZYAM3sBSVMAPtYkmC+4EH2anSGaEMXSD0izRQbu9nfyQ9y5JrVmp7E8oZrUjvA==", "dev": true, - "requires": { + "dependencies": { "onetime": "^5.1.0", "signal-exit": "^3.0.2" + }, + "engines": { + "node": ">=8" } }, - "rimraf": { + "node_modules/rimraf": { "version": "2.6.3", "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.6.3.tgz", "integrity": "sha512-mwqeW5XsA2qAejG46gYdENaxXjx9onRNCfn7L0duuP4hCuTIi/QO7PDK07KJfp1d+izWPrzEJDcSqBa0OZQriA==", + "deprecated": "Rimraf versions prior to v4 are no longer supported", "dev": true, - "requires": { + "dependencies": { 
"glob": "^7.1.3" + }, + "bin": { + "rimraf": "bin.js" } }, - "run-async": { + "node_modules/run-async": { "version": "2.4.0", "resolved": "https://registry.npmjs.org/run-async/-/run-async-2.4.0.tgz", "integrity": "sha512-xJTbh/d7Lm7SBhc1tNvTpeCHaEzoyxPrqNlvSdMfBTYwaY++UJFyXUOxAtsRUXjlqOfj8luNaR9vjCh4KeV+pg==", "dev": true, - "requires": { + "dependencies": { "is-promise": "^2.1.0" + }, + "engines": { + "node": ">=0.12.0" } }, - "rx-lite": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/rx-lite/-/rx-lite-3.1.2.tgz", - "integrity": "sha1-Gc5QLKVyZl87ZHsQk5+X/RYV8QI=", - "dev": true - }, - "rxjs": { + "node_modules/rxjs": { "version": "6.5.4", "resolved": "https://registry.npmjs.org/rxjs/-/rxjs-6.5.4.tgz", "integrity": "sha512-naMQXcgEo3csAEGvw/NydRA0fuS2nDZJiw1YUWFKU7aPPAPGZEsD4Iimit96qwCieH6y614MCLYwdkrWx7z/7Q==", "dev": true, - "requires": { + "dependencies": { "tslib": "^1.9.0" + }, + "engines": { + "npm": ">=2.0.0" } }, - "safe-buffer": { + "node_modules/safe-buffer": { "version": "5.1.2", "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==", "dev": true }, - "safer-buffer": { + "node_modules/safe-stable-stringify": { + "version": "2.5.0", + "resolved": "https://registry.npmjs.org/safe-stable-stringify/-/safe-stable-stringify-2.5.0.tgz", + "integrity": "sha512-b3rppTKm9T+PsVCBEOUR46GWI7fdOs00VKZ1+9c1EWDaDMvjQc6tUwuFyIprgGgTcWoVHSKrU8H31ZHA2e0RHA==", + "dev": true, + "engines": { + "node": ">=10" + } + }, + "node_modules/safer-buffer": { "version": "2.1.2", "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==", "dev": true }, - "semver": { - "version": "6.3.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", - "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", + "node_modules/sax": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/sax/-/sax-1.4.1.tgz", + "integrity": "sha512-+aWOz7yVScEGoKNd4PA10LZ8sk0A/z5+nXQG5giUO5rprX9jgYsTdov9qCchZiPIZezbZH+jRut8nPodFAX4Jg==", + "dev": true + }, + "node_modules/secure-json-parse": { + "version": "2.7.0", + "resolved": "https://registry.npmjs.org/secure-json-parse/-/secure-json-parse-2.7.0.tgz", + "integrity": "sha512-6aU+Rwsezw7VR8/nyvKTx8QpWH9FrcYiXXlqC4z5d5XQBDRqtbfsRjnwGyqbi3gddNtWHuEk9OANUotL26qKUw==", "dev": true }, - "set-blocking": { + "node_modules/semver": { + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", + "dev": true, + "bin": { + "semver": "bin/semver.js" + } + }, + "node_modules/set-blocking": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/set-blocking/-/set-blocking-2.0.0.tgz", "integrity": "sha1-BF+XgtARrppoA93TgrJDkrPYkPc=", "dev": true }, - "shebang-command": { + "node_modules/shebang-command": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-1.2.0.tgz", "integrity": "sha1-RKrGW2lbAzmJaMOfNj/uXer98eo=", "dev": true, - "requires": { + "dependencies": { "shebang-regex": "^1.0.0" + }, + "engines": { + "node": ">=0.10.0" } }, - "shebang-regex": { + "node_modules/shebang-regex": { "version": "1.0.0", "resolved": 
"https://registry.npmjs.org/shebang-regex/-/shebang-regex-1.0.0.tgz", "integrity": "sha1-2kL0l0DAtC2yypcoVxyxkMmO/qM=", - "dev": true - }, - "shelljs": { - "version": "0.7.8", - "resolved": "https://registry.npmjs.org/shelljs/-/shelljs-0.7.8.tgz", - "integrity": "sha1-3svPh0sNHl+3LhSxZKloMEjprLM=", "dev": true, - "requires": { - "glob": "^7.0.0", - "interpret": "^1.0.0", - "rechoir": "^0.6.2" + "engines": { + "node": ">=0.10.0" } }, - "signal-exit": { + "node_modules/signal-exit": { "version": "3.0.2", "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.2.tgz", "integrity": "sha1-tf3AjxKH6hF4Yo5BXiUTK3NkbG0=", "dev": true }, - "slice-ansi": { + "node_modules/slice-ansi": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/slice-ansi/-/slice-ansi-2.1.0.tgz", "integrity": "sha512-Qu+VC3EwYLldKa1fCxuuvULvSJOKEgk9pi8dZeCVK7TqBfUNTH4sFkk4joj8afVSfAYgJoSOetjx9QWOJ5mYoQ==", "dev": true, - "requires": { + "dependencies": { "ansi-styles": "^3.2.0", "astral-regex": "^1.0.0", "is-fullwidth-code-point": "^2.0.0" }, + "engines": { + "node": ">=6" + } + }, + "node_modules/slice-ansi/node_modules/is-fullwidth-code-point": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz", + "integrity": "sha1-o7MKXE8ZkYMWeqq5O+764937ZU8=", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/sonic-boom": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/sonic-boom/-/sonic-boom-4.2.0.tgz", + "integrity": "sha512-INb7TM37/mAcsGmc9hyyI6+QR3rR1zVRu36B0NeGXKnOOLiZOfER5SA+N7X7k3yUYRzLWafduTDvJAfDswwEww==", + "dev": true, "dependencies": { - "is-fullwidth-code-point": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz", - "integrity": "sha1-o7MKXE8ZkYMWeqq5O+764937ZU8=", - "dev": true - } + "atomic-sleep": "^1.0.0" } }, - "source-map": { - "version": "0.5.7", - "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.5.7.tgz", - "integrity": "sha1-igOdLRAh0i0eoUyA2OpGi6LvP8w=", - "dev": true + "node_modules/split2": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/split2/-/split2-4.2.0.tgz", + "integrity": "sha512-UcjcJOWknrNkF6PLX83qcHM6KHgVKNkV62Y8a5uYDVv9ydGQVwAHMKqHdJje1VTWpljG0WYpCDhrCdAOYH4TWg==", + "dev": true, + "engines": { + "node": ">= 10.x" + } }, - "sprintf-js": { + "node_modules/sprintf-js": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz", "integrity": "sha1-BOaSb2YolTVPPdAVIDYzuFcpfiw=", "dev": true }, - "string-width": { + "node_modules/string_decoder": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", + "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", + "dev": true, + "dependencies": { + "safe-buffer": "~5.1.0" + } + }, + "node_modules/string-width": { "version": "4.2.0", "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.0.tgz", "integrity": "sha512-zUz5JD+tgqtuDjMhwIg5uFVV3dtqZ9yQJlZVfq4I01/K5Paj5UHj7VyrQOJvzawSVlKpObApbfD0Ed6yJc+1eg==", "dev": true, - "requires": { + "dependencies": { "emoji-regex": "^8.0.0", "is-fullwidth-code-point": "^3.0.0", "strip-ansi": "^6.0.0" }, - "dependencies": { - "strip-ansi": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.0.tgz", - "integrity": 
"sha512-AuvKTrTfQNYNIctbR1K/YGTR1756GycPsg7b9bdV9Duqur4gv6aKqHXah67Z8ImS7WEz5QVcOtlfW2rZEugt6w==", - "dev": true, - "requires": { - "ansi-regex": "^5.0.0" - } - } + "engines": { + "node": ">=8" } }, - "string_decoder": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", - "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", + "node_modules/string-width/node_modules/strip-ansi": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.0.tgz", + "integrity": "sha512-AuvKTrTfQNYNIctbR1K/YGTR1756GycPsg7b9bdV9Duqur4gv6aKqHXah67Z8ImS7WEz5QVcOtlfW2rZEugt6w==", "dev": true, - "requires": { - "safe-buffer": "~5.1.0" + "dependencies": { + "ansi-regex": "^5.0.0" + }, + "engines": { + "node": ">=8" } }, - "strip-ansi": { + "node_modules/strip-ansi": { "version": "5.2.0", "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-5.2.0.tgz", "integrity": "sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA==", "dev": true, - "requires": { + "dependencies": { "ansi-regex": "^4.1.0" }, - "dependencies": { - "ansi-regex": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-4.1.0.tgz", - "integrity": "sha512-1apePfXM1UOSqw0o9IiFAovVz9M5S1Dg+4TrDwfMewQ6p/rmMueb7tWZjQ1rx4Loy1ArBggoqGpfqqdI4rondg==", - "dev": true - } + "engines": { + "node": ">=6" } }, - "strip-bom": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/strip-bom/-/strip-bom-3.0.0.tgz", - "integrity": "sha1-IzTBjpx1n3vdVv3vfprj1YjmjtM=", - "dev": true - }, - "strip-eof": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/strip-eof/-/strip-eof-1.0.0.tgz", - "integrity": "sha1-u0P/VZim6wXYm1n80SnJgzE2Br8=", - "dev": true + "node_modules/strip-ansi/node_modules/ansi-regex": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-4.1.1.tgz", + "integrity": "sha512-ILlv4k/3f6vfQ4OoP2AGvirOktlQ98ZEL1k9FaQjxa3L1abBgbuTDAdPOpvbGncC0BTVQrl+OM8xZGK6tWXt7g==", + "dev": true, + "engines": { + "node": ">=6" + } }, - "strip-json-comments": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.0.1.tgz", - "integrity": "sha512-VTyMAUfdm047mwKl+u79WIdrZxtFtn+nBxHeb844XBQ9uMNTuTHdx2hc5RiAJYqwTj3wc/xe5HLSdJSkJ+WfZw==", - "dev": true + "node_modules/strip-json-comments": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz", + "integrity": "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==", + "dev": true, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } }, - "supports-color": { + "node_modules/supports-color": { "version": "5.5.0", "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==", "dev": true, - "requires": { + "dependencies": { "has-flag": "^3.0.0" + }, + "engines": { + "node": ">=4" } }, - "table": { + "node_modules/table": { "version": "5.4.6", "resolved": "https://registry.npmjs.org/table/-/table-5.4.6.tgz", "integrity": "sha512-wmEc8m4fjnob4gt5riFRtTu/6+4rSe12TpAELNSqHMfF3IqnA+CH37USM6/YR3qRZv7e56kAEAtd6nKZaxe0Ug==", "dev": true, - "requires": { + "dependencies": { "ajv": "^6.10.2", "lodash": 
"^4.17.14", "slice-ansi": "^2.1.0", "string-width": "^3.0.0" }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/table/node_modules/emoji-regex": { + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-7.0.3.tgz", + "integrity": "sha512-CwBLREIQ7LvYFB0WyRvwhq5N5qPhc6PMjD6bYggFlI5YyDgl+0vxq5VHbMOFqLg7hfWzmu8T5Z1QofhmTIhItA==", + "dev": true + }, + "node_modules/table/node_modules/is-fullwidth-code-point": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz", + "integrity": "sha1-o7MKXE8ZkYMWeqq5O+764937ZU8=", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/table/node_modules/string-width": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-3.1.0.tgz", + "integrity": "sha512-vafcv6KjVZKSgz06oM/H6GDBrAtz8vdhQakGjFIvNrHA6y3HCF1CInLy+QLq8dTJPQ1b+KDUqDFctkdRW44e1w==", + "dev": true, "dependencies": { - "emoji-regex": { - "version": "7.0.3", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-7.0.3.tgz", - "integrity": "sha512-CwBLREIQ7LvYFB0WyRvwhq5N5qPhc6PMjD6bYggFlI5YyDgl+0vxq5VHbMOFqLg7hfWzmu8T5Z1QofhmTIhItA==", - "dev": true - }, - "is-fullwidth-code-point": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz", - "integrity": "sha1-o7MKXE8ZkYMWeqq5O+764937ZU8=", - "dev": true - }, - "string-width": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-3.1.0.tgz", - "integrity": "sha512-vafcv6KjVZKSgz06oM/H6GDBrAtz8vdhQakGjFIvNrHA6y3HCF1CInLy+QLq8dTJPQ1b+KDUqDFctkdRW44e1w==", - "dev": true, - "requires": { - "emoji-regex": "^7.0.1", - "is-fullwidth-code-point": "^2.0.0", - "strip-ansi": "^5.1.0" - } - } + "emoji-regex": "^7.0.1", + "is-fullwidth-code-point": "^2.0.0", + "strip-ansi": "^5.1.0" + }, + "engines": { + "node": ">=6" } }, - "text-table": { + "node_modules/text-table": { "version": "0.2.0", "resolved": "https://registry.npmjs.org/text-table/-/text-table-0.2.0.tgz", "integrity": "sha1-f17oI66AUgfACvLfSoTsP8+lcLQ=", "dev": true }, - "through": { + "node_modules/thread-stream": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/thread-stream/-/thread-stream-3.1.0.tgz", + "integrity": "sha512-OqyPZ9u96VohAyMfJykzmivOrY2wfMSf3C5TtFJVgN+Hm6aj+voFhlK+kZEIv2FBh1X6Xp3DlnCOfEQ3B2J86A==", + "dev": true, + "dependencies": { + "real-require": "^0.2.0" + } + }, + "node_modules/through": { "version": "2.3.8", "resolved": "https://registry.npmjs.org/through/-/through-2.3.8.tgz", "integrity": "sha1-DdTJ/6q8NXlgsbckEV1+Doai4fU=", "dev": true }, - "tmp": { + "node_modules/tmp": { "version": "0.0.33", "resolved": "https://registry.npmjs.org/tmp/-/tmp-0.0.33.tgz", "integrity": "sha512-jRCJlojKnZ3addtTOjdIqoRuPEKBvNXcGYqzO6zWZX8KfKEpnGY5jfggJQ3EjKuu8D4bJRr0y+cYJFmYbImXGw==", "dev": true, - "requires": { + "dependencies": { "os-tmpdir": "~1.0.2" + }, + "engines": { + "node": ">=0.6.0" } }, - "to-fast-properties": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/to-fast-properties/-/to-fast-properties-2.0.0.tgz", - "integrity": "sha1-3F5pjL0HkmW8c+A3doGk5Og/YW4=", - "dev": true - }, - "tslib": { + "node_modules/tslib": { "version": "1.11.1", "resolved": "https://registry.npmjs.org/tslib/-/tslib-1.11.1.tgz", "integrity": "sha512-aZW88SY8kQbU7gpV19lN24LtXh/yD4ZZg6qieAJDDg+YBsJcSmLGK9QpnUjAKVG/xefmvJGd1WUmfpT/g6AJGA==", "dev": true }, - "type": { - "version": 
"1.2.0", - "resolved": "https://registry.npmjs.org/type/-/type-1.2.0.tgz", - "integrity": "sha512-+5nt5AAniqsCnu2cEQQdpzCAh33kVx8n0VoFidKpB1dVVLAN/F+bgVOqOJqOnEnrhp222clB5p3vUlD+1QAnfg==", - "dev": true - }, - "type-check": { + "node_modules/type-check": { "version": "0.3.2", "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.3.2.tgz", "integrity": "sha1-WITKtRLPHTVeP7eE8wgEsrUg23I=", "dev": true, - "requires": { + "dependencies": { "prelude-ls": "~1.1.2" + }, + "engines": { + "node": ">= 0.8.0" } }, - "type-fest": { + "node_modules/type-fest": { "version": "0.8.1", "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.8.1.tgz", "integrity": "sha512-4dbzIzqvjtgiM5rw1k5rEHtBANKmdudhGyBEajN01fEyhaAIhsoKNy6y7+IN93IfpFtwY9iqi7kD+xwKhQsNJA==", - "dev": true + "dev": true, + "engines": { + "node": ">=8" + } }, - "typedarray": { + "node_modules/typedarray": { "version": "0.0.6", "resolved": "https://registry.npmjs.org/typedarray/-/typedarray-0.0.6.tgz", "integrity": "sha1-hnrHTjhkGHsdPUfZlqeOxciDB3c=", "dev": true }, - "uri-js": { + "node_modules/underscore": { + "version": "1.13.7", + "resolved": "https://registry.npmjs.org/underscore/-/underscore-1.13.7.tgz", + "integrity": "sha512-GMXzWtsc57XAtguZgaQViUOzs0KTkk8ojr3/xAxXLITqf/3EMwxC0inyETfDFjH/Krbhuep0HNbbjI9i/q3F3g==", + "dev": true + }, + "node_modules/uri-js": { "version": "4.2.2", "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.2.2.tgz", "integrity": "sha512-KY9Frmirql91X2Qgjry0Wd4Y+YTdrdZheS8TFwvkbLWf/G5KNJDCh6pKL5OZctEW4+0Baa5idK2ZQuELRwPznQ==", "dev": true, - "requires": { + "dependencies": { "punycode": "^2.1.0" } }, - "user-home": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/user-home/-/user-home-2.0.0.tgz", - "integrity": "sha1-nHC/2Babwdy/SGBODwS4tJzenp8=", - "dev": true, - "requires": { - "os-homedir": "^1.0.0" - } - }, - "util-deprecate": { + "node_modules/util-deprecate": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", "integrity": "sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8=", "dev": true }, - "v8-compile-cache": { + "node_modules/v8-compile-cache": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/v8-compile-cache/-/v8-compile-cache-2.1.0.tgz", "integrity": "sha512-usZBT3PW+LOjM25wbqIlZwPeJV+3OSz3M1k1Ws8snlW39dZyYL9lOGC5FgPVHfk0jKmjiDV8Z0mIbVQPiwFs7g==", "dev": true }, - "v8-to-istanbul": { + "node_modules/v8-to-istanbul": { "version": "1.2.1", "resolved": "https://registry.npmjs.org/v8-to-istanbul/-/v8-to-istanbul-1.2.1.tgz", "integrity": "sha512-NglPycIwSQeSJj7VJ6L8vTsPKC9MG5Lcx4n3SvYqNHzklbMI4dGcLJnkLPEPJ3uB8UyTdWviMhM0Ptq+xD5UFQ==", - "dev": true + "dev": true, + "engines": { + "node": ">=10.10.0" + } }, - "which": { + "node_modules/which": { "version": "1.3.1", "resolved": "https://registry.npmjs.org/which/-/which-1.3.1.tgz", "integrity": "sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ==", "dev": true, - "requires": { + "dependencies": { "isexe": "^2.0.0" + }, + "bin": { + "which": "bin/which" } }, - "which-module": { + "node_modules/which-module": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/which-module/-/which-module-2.0.0.tgz", "integrity": "sha1-2e8H3Od7mQK4o6j6SzHD4/fm6Ho=", "dev": true }, - "word-wrap": { - "version": "1.2.3", - "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.3.tgz", - "integrity": "sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ==", - "dev": true + 
"node_modules/word-wrap": { + "version": "1.2.5", + "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.5.tgz", + "integrity": "sha512-BN22B5eaMMI9UMtjrGd5g5eCYPpCPDUy0FJXbYsaT5zYxjFOckS53SQDE3pWkVoWpHXVb3BrYcEN4Twa55B5cA==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } }, - "wrap-ansi": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-2.1.0.tgz", - "integrity": "sha1-2Pw9KE3QV5T+hJc8rs3Rz4JP3YU=", + "node_modules/wrap-ansi": { + "version": "6.2.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-6.2.0.tgz", + "integrity": "sha512-r6lPcBGxZXlIcymEu7InxDMhdW0KDxpLgoFLcguasxCaJ/SOIZwINatK9KY/tf+ZrlywOKU0UDj3ATXUBfxJXA==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/wrap-ansi/node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", "dev": true, - "requires": { - "string-width": "^1.0.1", - "strip-ansi": "^3.0.1" + "dependencies": { + "color-convert": "^2.0.1" }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/wrap-ansi/node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, "dependencies": { - "ansi-regex": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.1.1.tgz", - "integrity": "sha1-w7M6te42DYbg5ijwRorn7yfWVN8=", - "dev": true - }, - "is-fullwidth-code-point": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-1.0.0.tgz", - "integrity": "sha1-754xOG8DGn8NZDr4L95QxFfvAMs=", - "dev": true, - "requires": { - "number-is-nan": "^1.0.0" - } - }, - "string-width": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-1.0.2.tgz", - "integrity": "sha1-EYvfW4zcUaKn5w0hHgfisLmxB9M=", - "dev": true, - "requires": { - "code-point-at": "^1.0.0", - "is-fullwidth-code-point": "^1.0.0", - "strip-ansi": "^3.0.0" - } - }, - "strip-ansi": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-3.0.1.tgz", - "integrity": "sha1-ajhfuIU9lS1f8F0Oiq+UJ43GPc8=", - "dev": true, - "requires": { - "ansi-regex": "^2.0.0" - } - } + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" } }, - "wrappy": { + "node_modules/wrap-ansi/node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "node_modules/wrap-ansi/node_modules/strip-ansi": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dev": true, + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/wrappy": { "version": "1.0.2", "resolved": 
"https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", "integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=", "dev": true }, - "write": { + "node_modules/write": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/write/-/write-1.0.3.tgz", "integrity": "sha512-/lg70HAjtkUgWPVZhZcm+T4hkL8Zbtp1nFNOn3lRrxnlv50SRBv7cR7RqR+GMsd3hUXy9hWBo4CHTbFTcOYwig==", "dev": true, - "requires": { + "dependencies": { "mkdirp": "^0.5.1" + }, + "engines": { + "node": ">=4" } }, - "ws": { + "node_modules/ws": { "version": "6.2.3", "resolved": "https://registry.npmjs.org/ws/-/ws-6.2.3.tgz", "integrity": "sha512-jmTjYU0j60B+vHey6TfR3Z7RD61z/hmxBS3VMSGIrroOWXQEneK1zNuotOUrGyBHQj0yrpsLHPWtigEFd13ndA==", "dev": true, - "requires": { + "dependencies": { "async-limiter": "~1.0.0" } }, - "xtend": { - "version": "4.0.2", - "resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.2.tgz", - "integrity": "sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==", - "dev": true - }, - "y18n": { - "version": "3.2.2", - "resolved": "https://registry.npmjs.org/y18n/-/y18n-3.2.2.tgz", - "integrity": "sha512-uGZHXkHnhF0XeeAPgnKfPv1bgKAYyVvmNL1xlKsPYZPaIHxGti2hHqvOCQv71XMsLxu1QjergkqogUnms5D3YQ==", + "node_modules/y18n": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/y18n/-/y18n-4.0.3.tgz", + "integrity": "sha512-JKhqTOwSrqNA1NY5lSztJ1GrBiUodLMmIZuLiDaMRJ+itFd+ABVE8XBjOvIWL+rSqNDC74LCSFmlb/U4UZ4hJQ==", "dev": true }, - "yargs": { + "node_modules/yargs": { "version": "13.3.2", "resolved": "https://registry.npmjs.org/yargs/-/yargs-13.3.2.tgz", "integrity": "sha512-AX3Zw5iPruN5ie6xGRIDgqkT+ZhnRlZMLMHAs8tg7nRruy2Nb+i5o9bwghAogtM08q1dpr2LVoS8KSTMYpWXUw==", "dev": true, - "requires": { + "dependencies": { "cliui": "^5.0.0", "find-up": "^3.0.0", "get-caller-file": "^2.0.1", @@ -2760,137 +2858,117 @@ "which-module": "^2.0.0", "y18n": "^4.0.0", "yargs-parser": "^13.1.2" - }, - "dependencies": { - "camelcase": { - "version": "5.3.1", - "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-5.3.1.tgz", - "integrity": "sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg==", - "dev": true - }, - "cliui": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/cliui/-/cliui-5.0.0.tgz", - "integrity": "sha512-PYeGSEmmHM6zvoef2w8TPzlrnNpXIjTipYK780YswmIP9vjxmd6Y2a3CB2Ks6/AU8NHjZugXvo8w3oWM2qnwXA==", - "dev": true, - "requires": { - "string-width": "^3.1.0", - "strip-ansi": "^5.2.0", - "wrap-ansi": "^5.1.0" - } - }, - "emoji-regex": { - "version": "7.0.3", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-7.0.3.tgz", - "integrity": "sha512-CwBLREIQ7LvYFB0WyRvwhq5N5qPhc6PMjD6bYggFlI5YyDgl+0vxq5VHbMOFqLg7hfWzmu8T5Z1QofhmTIhItA==", - "dev": true - }, - "find-up": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/find-up/-/find-up-3.0.0.tgz", - "integrity": "sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg==", - "dev": true, - "requires": { - "locate-path": "^3.0.0" - } - }, - "get-caller-file": { - "version": "2.0.5", - "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz", - "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==", - "dev": true - }, - "is-fullwidth-code-point": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz", - "integrity": "sha1-o7MKXE8ZkYMWeqq5O+764937ZU8=", - "dev": 
true - }, - "locate-path": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-3.0.0.tgz", - "integrity": "sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A==", - "dev": true, - "requires": { - "p-locate": "^3.0.0", - "path-exists": "^3.0.0" - } - }, - "p-limit": { - "version": "2.2.2", - "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.2.2.tgz", - "integrity": "sha512-WGR+xHecKTr7EbUEhyLSh5Dube9JtdiG78ufaeLxTgpudf/20KqyMioIUZJAezlTIi6evxuoUs9YXc11cU+yzQ==", - "dev": true, - "requires": { - "p-try": "^2.0.0" - } - }, - "p-locate": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-3.0.0.tgz", - "integrity": "sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ==", - "dev": true, - "requires": { - "p-limit": "^2.0.0" - } - }, - "p-try": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz", - "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==", - "dev": true - }, - "require-main-filename": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/require-main-filename/-/require-main-filename-2.0.0.tgz", - "integrity": "sha512-NKN5kMDylKuldxYLSUfrbo5Tuzh4hd+2E8NPPX02mZtn1VuREQToYe/ZdlJy+J3uCpfaiGF05e7B8W0iXbQHmg==", - "dev": true - }, - "string-width": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-3.1.0.tgz", - "integrity": "sha512-vafcv6KjVZKSgz06oM/H6GDBrAtz8vdhQakGjFIvNrHA6y3HCF1CInLy+QLq8dTJPQ1b+KDUqDFctkdRW44e1w==", - "dev": true, - "requires": { - "emoji-regex": "^7.0.1", - "is-fullwidth-code-point": "^2.0.0", - "strip-ansi": "^5.1.0" - } - }, - "wrap-ansi": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-5.1.0.tgz", - "integrity": "sha512-QC1/iN/2/RPVJ5jYK8BGttj5z83LmSKmvbvrXPNCLZSEb32KKVDJDl/MOt2N01qU2H/FkzEa9PKto1BqDjtd7Q==", - "dev": true, - "requires": { - "ansi-styles": "^3.2.0", - "string-width": "^3.0.0", - "strip-ansi": "^5.0.0" - } - }, - "y18n": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/y18n/-/y18n-4.0.1.tgz", - "integrity": "sha512-wNcy4NvjMYL8gogWWYAO7ZFWFfHcbdbE57tZO8e4cbpj8tfUcwrwqSl3ad8HxpYWCdXcJUCeKKZS62Av1affwQ==", - "dev": true - }, - "yargs-parser": { - "version": "13.1.2", - "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-13.1.2.tgz", - "integrity": "sha512-3lbsNRf/j+A4QuSZfDRA7HRSfWrzO0YjqTJd5kjAq37Zep1CEgaYmrH9Q3GwPiB9cHyd1Y1UwggGhJGoxipbzg==", - "dev": true, - "requires": { - "camelcase": "^5.0.0", - "decamelize": "^1.2.0" - } - } } }, - "yargs-parser": { + "node_modules/yargs-parser": { "version": "19.0.4", "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-19.0.4.tgz", "integrity": "sha512-eXeQm7yXRjPFFyf1voPkZgXQZJjYfjgQUmGPbD2TLtZeIYzvacgWX7sQ5a1HsRgVP+pfKAkRZDNtTGev4h9vhw==", + "dev": true, + "engines": { + "node": ">=10" + } + }, + "node_modules/yargs/node_modules/cliui": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/cliui/-/cliui-5.0.0.tgz", + "integrity": "sha512-PYeGSEmmHM6zvoef2w8TPzlrnNpXIjTipYK780YswmIP9vjxmd6Y2a3CB2Ks6/AU8NHjZugXvo8w3oWM2qnwXA==", + "dev": true, + "dependencies": { + "string-width": "^3.1.0", + "strip-ansi": "^5.2.0", + "wrap-ansi": "^5.1.0" + } + }, + "node_modules/yargs/node_modules/emoji-regex": { + "version": "7.0.3", + "resolved": 
"https://registry.npmjs.org/emoji-regex/-/emoji-regex-7.0.3.tgz", + "integrity": "sha512-CwBLREIQ7LvYFB0WyRvwhq5N5qPhc6PMjD6bYggFlI5YyDgl+0vxq5VHbMOFqLg7hfWzmu8T5Z1QofhmTIhItA==", "dev": true + }, + "node_modules/yargs/node_modules/find-up": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-3.0.0.tgz", + "integrity": "sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg==", + "dev": true, + "dependencies": { + "locate-path": "^3.0.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/yargs/node_modules/is-fullwidth-code-point": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz", + "integrity": "sha1-o7MKXE8ZkYMWeqq5O+764937ZU8=", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/yargs/node_modules/locate-path": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-3.0.0.tgz", + "integrity": "sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A==", + "dev": true, + "dependencies": { + "p-locate": "^3.0.0", + "path-exists": "^3.0.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/yargs/node_modules/p-locate": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-3.0.0.tgz", + "integrity": "sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ==", + "dev": true, + "dependencies": { + "p-limit": "^2.0.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/yargs/node_modules/string-width": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-3.1.0.tgz", + "integrity": "sha512-vafcv6KjVZKSgz06oM/H6GDBrAtz8vdhQakGjFIvNrHA6y3HCF1CInLy+QLq8dTJPQ1b+KDUqDFctkdRW44e1w==", + "dev": true, + "dependencies": { + "emoji-regex": "^7.0.1", + "is-fullwidth-code-point": "^2.0.0", + "strip-ansi": "^5.1.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/yargs/node_modules/wrap-ansi": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-5.1.0.tgz", + "integrity": "sha512-QC1/iN/2/RPVJ5jYK8BGttj5z83LmSKmvbvrXPNCLZSEb32KKVDJDl/MOt2N01qU2H/FkzEa9PKto1BqDjtd7Q==", + "dev": true, + "dependencies": { + "ansi-styles": "^3.2.0", + "string-width": "^3.0.0", + "strip-ansi": "^5.0.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/yargs/node_modules/yargs-parser": { + "version": "13.1.2", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-13.1.2.tgz", + "integrity": "sha512-3lbsNRf/j+A4QuSZfDRA7HRSfWrzO0YjqTJd5kjAq37Zep1CEgaYmrH9Q3GwPiB9cHyd1Y1UwggGhJGoxipbzg==", + "dev": true, + "dependencies": { + "camelcase": "^5.0.0", + "decamelize": "^1.2.0" + } } } } diff --git a/packages/aws-library/requirements/_base.txt b/packages/aws-library/requirements/_base.txt index 325529a6811..db054fd73f5 100644 --- a/packages/aws-library/requirements/_base.txt +++ b/packages/aws-library/requirements/_base.txt @@ -1,4 +1,4 @@ -aio-pika==9.5.4 +aio-pika==9.5.5 # via -r requirements/../../../packages/service-library/requirements/_base.in aioboto3==13.3.0 # via -r requirements/_base.in @@ -16,9 +16,9 @@ aiofiles==24.1.0 # via # -r requirements/../../../packages/service-library/requirements/_base.in # aioboto3 -aiohappyeyeballs==2.4.4 +aiohappyeyeballs==2.4.6 # via aiohttp -aiohttp==3.11.11 +aiohttp==3.11.13 # via # -c 
requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -77,9 +77,9 @@ botocore==1.35.81 # aiobotocore # boto3 # s3transfer -botocore-stubs==1.36.6 +botocore-stubs==1.37.4 # via types-aiobotocore -certifi==2024.12.14 +certifi==2025.1.31 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -98,7 +98,7 @@ charset-normalizer==3.4.1 # via requests click==8.1.8 # via typer -deprecated==1.2.17 +deprecated==1.2.18 # via # opentelemetry-api # opentelemetry-exporter-otlp-proto-grpc @@ -112,13 +112,13 @@ exceptiongroup==1.2.2 # via aio-pika fast-depends==2.4.12 # via faststream -faststream==0.5.34 +faststream==0.5.35 # via -r requirements/../../../packages/service-library/requirements/_base.in frozenlist==1.5.0 # via # aiohttp # aiosignal -googleapis-common-protos==1.66.0 +googleapis-common-protos==1.68.0 # via # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http @@ -150,7 +150,7 @@ multidict==6.1.0 # via # aiohttp # yarl -opentelemetry-api==1.29.0 +opentelemetry-api==1.30.0 # via # -r requirements/../../../packages/service-library/requirements/_base.in # opentelemetry-exporter-otlp-proto-grpc @@ -163,50 +163,50 @@ opentelemetry-api==1.29.0 # opentelemetry-propagator-aws-xray # opentelemetry-sdk # opentelemetry-semantic-conventions -opentelemetry-exporter-otlp==1.29.0 +opentelemetry-exporter-otlp==1.30.0 # via -r requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-exporter-otlp-proto-common==1.29.0 +opentelemetry-exporter-otlp-proto-common==1.30.0 # via # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http -opentelemetry-exporter-otlp-proto-grpc==1.29.0 +opentelemetry-exporter-otlp-proto-grpc==1.30.0 # via opentelemetry-exporter-otlp -opentelemetry-exporter-otlp-proto-http==1.29.0 +opentelemetry-exporter-otlp-proto-http==1.30.0 # via opentelemetry-exporter-otlp -opentelemetry-instrumentation==0.50b0 +opentelemetry-instrumentation==0.51b0 # via # opentelemetry-instrumentation-botocore # opentelemetry-instrumentation-logging # opentelemetry-instrumentation-redis # opentelemetry-instrumentation-requests -opentelemetry-instrumentation-botocore==0.50b0 +opentelemetry-instrumentation-botocore==0.51b0 # via -r requirements/_base.in -opentelemetry-instrumentation-logging==0.50b0 +opentelemetry-instrumentation-logging==0.51b0 # via -r requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-instrumentation-redis==0.50b0 +opentelemetry-instrumentation-redis==0.51b0 # via -r requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-instrumentation-requests==0.50b0 +opentelemetry-instrumentation-requests==0.51b0 # via -r requirements/../../../packages/service-library/requirements/_base.in opentelemetry-propagator-aws-xray==1.0.2 # via opentelemetry-instrumentation-botocore -opentelemetry-proto==1.29.0 +opentelemetry-proto==1.30.0 # via # opentelemetry-exporter-otlp-proto-common # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http -opentelemetry-sdk==1.29.0 +opentelemetry-sdk==1.30.0 # via # -r 
requirements/../../../packages/service-library/requirements/_base.in # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http -opentelemetry-semantic-conventions==0.50b0 +opentelemetry-semantic-conventions==0.51b0 # via # opentelemetry-instrumentation # opentelemetry-instrumentation-botocore # opentelemetry-instrumentation-redis # opentelemetry-instrumentation-requests # opentelemetry-sdk -opentelemetry-util-http==0.50b0 +opentelemetry-util-http==0.51b0 # via opentelemetry-instrumentation-requests orjson==3.10.15 # via @@ -234,7 +234,7 @@ packaging==24.2 # via opentelemetry-instrumentation pamqp==3.3.0 # via aiormq -propcache==0.2.1 +propcache==0.3.0 # via # aiohttp # yarl @@ -242,8 +242,10 @@ protobuf==5.29.3 # via # googleapis-common-protos # opentelemetry-proto -psutil==6.1.1 +psutil==7.0.0 # via -r requirements/../../../packages/service-library/requirements/_base.in +pycryptodome==3.21.0 + # via stream-zip pydantic==2.10.6 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -285,8 +287,20 @@ pydantic-extra-types==2.10.2 # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in -pydantic-settings==2.7.1 +pydantic-settings==2.7.0 # via + # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt # -r requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in @@ -354,13 +368,13 
@@ rich==13.9.4 # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/settings-library/requirements/_base.in # typer -rpds-py==0.22.3 +rpds-py==0.23.1 # via # jsonschema # referencing s3transfer==0.10.4 # via boto3 -sh==2.2.1 +sh==2.2.2 # via -r requirements/_base.in shellingham==1.5.4 # via typer @@ -368,25 +382,27 @@ six==1.17.0 # via python-dateutil sniffio==1.3.1 # via anyio +stream-zip==0.0.83 + # via -r requirements/../../../packages/service-library/requirements/_base.in tenacity==9.0.0 # via -r requirements/../../../packages/service-library/requirements/_base.in toolz==1.0.0 # via -r requirements/../../../packages/service-library/requirements/_base.in tqdm==4.67.1 # via -r requirements/../../../packages/service-library/requirements/_base.in -typer==0.15.1 +typer==0.15.2 # via # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/settings-library/requirements/_base.in -types-aiobotocore==2.19.0 +types-aiobotocore==2.21.0 # via -r requirements/_base.in -types-aiobotocore-ec2==2.19.0 +types-aiobotocore-ec2==2.21.0 # via types-aiobotocore -types-aiobotocore-s3==2.19.0 +types-aiobotocore-s3==2.21.0 # via types-aiobotocore -types-aiobotocore-ssm==2.19.0 +types-aiobotocore-ssm==2.21.0 # via types-aiobotocore -types-awscrt==0.23.7 +types-awscrt==0.23.10 # via botocore-stubs types-python-dateutil==2.9.0.20241206 # via arrow diff --git a/packages/aws-library/requirements/_test.in b/packages/aws-library/requirements/_test.in index 17bd8b37467..43480f6a427 100644 --- a/packages/aws-library/requirements/_test.in +++ b/packages/aws-library/requirements/_test.in @@ -11,6 +11,8 @@ # testing coverage faker +fastapi +httpx moto[server] pint pytest diff --git a/packages/aws-library/requirements/_test.txt b/packages/aws-library/requirements/_test.txt index fced7d6e581..e5c4d651dfc 100644 --- a/packages/aws-library/requirements/_test.txt +++ b/packages/aws-library/requirements/_test.txt @@ -4,12 +4,17 @@ annotated-types==0.7.0 # pydantic antlr4-python3-runtime==4.13.2 # via moto +anyio==4.8.0 + # via + # -c requirements/_base.txt + # httpx + # starlette attrs==25.1.0 # via # -c requirements/_base.txt # jsonschema # referencing -aws-sam-translator==1.94.0 +aws-sam-translator==1.95.0 # via cfn-lint aws-xray-sdk==2.14.0 # via moto @@ -28,20 +33,22 @@ botocore==1.35.81 # boto3 # moto # s3transfer -botocore-stubs==1.36.6 +botocore-stubs==1.37.4 # via # -c requirements/_base.txt # types-aioboto3 # types-aiobotocore # types-boto3 -certifi==2024.12.14 +certifi==2025.1.31 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt + # httpcore + # httpx # requests cffi==1.17.1 # via cryptography -cfn-lint==1.22.7 +cfn-lint==1.27.0 # via moto charset-normalizer==3.4.1 # via @@ -51,24 +58,26 @@ click==8.1.8 # via # -c requirements/_base.txt # flask -coverage==7.6.10 +coverage==7.6.12 # via # -r requirements/_test.in # pytest-cov -cryptography==44.0.0 +cryptography==44.0.2 # via # -c requirements/../../../requirements/constraints.txt # joserfc # moto docker==7.1.0 # via moto -faker==35.0.0 +faker==36.1.1 + # via -r requirements/_test.in +fastapi==0.115.11 # via -r requirements/_test.in flask==3.1.0 # via # flask-cors # moto -flask-cors==5.0.0 +flask-cors==5.0.1 # via moto flexcache==0.3 # via pint @@ -76,11 +85,21 @@ flexparser==0.4 # via pint graphql-core==3.2.6 # via moto 
+h11==0.14.0 + # via httpcore +httpcore==1.0.7 + # via httpx +httpx==0.28.1 + # via + # -c requirements/../../../requirements/constraints.txt + # -r requirements/_test.in icdiff==2.0.7 # via pytest-icdiff idna==3.10 # via # -c requirements/_base.txt + # anyio + # httpx # requests iniconfig==2.0.0 # via pytest @@ -96,9 +115,7 @@ jmespath==1.0.1 # -c requirements/_base.txt # boto3 # botocore -joserfc==1.0.2 - # via moto -jsondiff==2.2.1 +joserfc==1.0.4 # via moto jsonpatch==1.33 # via cfn-lint @@ -125,10 +142,8 @@ markupsafe==3.0.2 # via # jinja2 # werkzeug -moto==5.0.20 - # via - # -c requirements/../../../requirements/constraints.txt - # -r requirements/_test.in +moto==5.1.1 + # via -r requirements/_test.in mpmath==1.3.0 # via sympy networkx==3.4.2 @@ -156,7 +171,7 @@ pprintpp==0.4.0 # via pytest-icdiff py-cpuinfo==9.0.0 # via pytest-benchmark -py-partiql-parser==0.5.6 +py-partiql-parser==0.6.1 # via moto pycparser==2.22 # via cffi @@ -165,13 +180,14 @@ pydantic==2.10.6 # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt # aws-sam-translator + # fastapi pydantic-core==2.27.2 # via # -c requirements/_base.txt # pydantic pyparsing==3.2.1 # via moto -pytest==8.3.4 +pytest==8.3.5 # via # -r requirements/_test.in # pytest-asyncio @@ -203,7 +219,6 @@ python-dateutil==2.9.0.post0 # via # -c requirements/_base.txt # botocore - # faker # moto python-dotenv==1.0.1 # via @@ -215,7 +230,6 @@ pyyaml==6.0.2 # -c requirements/_base.txt # -r requirements/_test.in # cfn-lint - # jsondiff # jsonschema-path # moto # responses @@ -239,7 +253,7 @@ responses==0.25.6 # via moto rfc3339-validator==0.1.4 # via openapi-schema-validator -rpds-py==0.22.3 +rpds-py==0.23.1 # via # -c requirements/_base.txt # jsonschema @@ -248,39 +262,48 @@ s3transfer==0.10.4 # via # -c requirements/_base.txt # boto3 -setuptools==75.8.0 +setuptools==75.8.2 # via moto six==1.17.0 # via # -c requirements/_base.txt # python-dateutil # rfc3339-validator +sniffio==1.3.1 + # via + # -c requirements/_base.txt + # anyio +starlette==0.46.0 + # via + # -c requirements/../../../requirements/constraints.txt + # fastapi sympy==1.13.3 # via cfn-lint termcolor==2.5.0 # via pytest-sugar -types-aioboto3==13.4.0 +types-aioboto3==14.0.0 # via -r requirements/_test.in -types-aiobotocore==2.19.0 +types-aiobotocore==2.21.0 # via # -c requirements/_base.txt # types-aioboto3 -types-awscrt==0.23.7 +types-awscrt==0.23.10 # via # -c requirements/_base.txt # botocore-stubs -types-boto3==1.36.6 +types-boto3==1.37.4 # via -r requirements/_test.in -types-s3transfer==0.11.2 +types-s3transfer==0.11.3 # via # types-aioboto3 # types-boto3 typing-extensions==4.12.2 # via # -c requirements/_base.txt + # anyio # aws-sam-translator # cfn-lint - # faker + # fastapi # flexcache # flexparser # pint @@ -289,6 +312,8 @@ typing-extensions==4.12.2 # types-aioboto3 # types-aiobotocore # types-boto3 +tzdata==2025.1 + # via faker urllib3==2.3.0 # via # -c requirements/../../../requirements/constraints.txt @@ -300,6 +325,7 @@ urllib3==2.3.0 werkzeug==3.1.3 # via # flask + # flask-cors # moto wrapt==1.17.2 # via diff --git a/packages/aws-library/requirements/_tools.txt b/packages/aws-library/requirements/_tools.txt index ad77b24cb3b..51d5e1879ce 100644 --- a/packages/aws-library/requirements/_tools.txt +++ b/packages/aws-library/requirements/_tools.txt @@ -1,6 +1,6 @@ astroid==3.3.8 # via pylint -black==24.10.0 +black==25.1.0 # via -r requirements/../../../requirements/devenv.txt build==1.2.2.post1 # via pip-tools @@ -20,15 +20,15 @@ distlib==0.3.9 # via 
virtualenv filelock==3.17.0 # via virtualenv -identify==2.6.6 +identify==2.6.8 # via pre-commit -isort==5.13.2 +isort==6.0.1 # via # -r requirements/../../../requirements/devenv.txt # pylint mccabe==0.7.0 # via pylint -mypy==1.14.1 +mypy==1.15.0 # via -r requirements/../../../requirements/devenv.txt mypy-extensions==1.0.0 # via @@ -44,7 +44,7 @@ packaging==24.2 # build pathspec==0.12.1 # via black -pip==25.0 +pip==25.0.1 # via pip-tools pip-tools==7.4.1 # via -r requirements/../../../requirements/devenv.txt @@ -56,7 +56,7 @@ platformdirs==4.3.6 # virtualenv pre-commit==4.1.0 # via -r requirements/../../../requirements/devenv.txt -pylint==3.3.3 +pylint==3.3.4 # via -r requirements/../../../requirements/devenv.txt pyproject-hooks==1.2.0 # via @@ -68,9 +68,9 @@ pyyaml==6.0.2 # -c requirements/_base.txt # -c requirements/_test.txt # pre-commit -ruff==0.9.3 +ruff==0.9.9 # via -r requirements/../../../requirements/devenv.txt -setuptools==75.8.0 +setuptools==75.8.2 # via # -c requirements/_test.txt # pip-tools @@ -81,7 +81,7 @@ typing-extensions==4.12.2 # -c requirements/_base.txt # -c requirements/_test.txt # mypy -virtualenv==20.29.1 +virtualenv==20.29.2 # via pre-commit wheel==0.45.1 # via pip-tools diff --git a/packages/aws-library/src/aws_library/s3/_client.py b/packages/aws-library/src/aws_library/s3/_client.py index beb752ce077..7d822a4e92a 100644 --- a/packages/aws-library/src/aws_library/s3/_client.py +++ b/packages/aws-library/src/aws_library/s3/_client.py @@ -6,34 +6,50 @@ from collections.abc import AsyncGenerator, Sequence from dataclasses import dataclass, field from pathlib import Path -from typing import Any, Final, Protocol, cast +from typing import Any, Final, Literal, Protocol, cast import aioboto3 from aiobotocore.session import ClientCreatorContext from boto3.s3.transfer import TransferConfig from botocore import exceptions as botocore_exc from botocore.client import Config -from models_library.api_schemas_storage import ETag, S3BucketName, UploadedPart +from models_library.api_schemas_storage.storage_schemas import ( + ETag, + S3BucketName, + UploadedPart, +) from models_library.basic_types import SHA256Str +from models_library.bytes_iters import BytesIter, DataSize from pydantic import AnyUrl, ByteSize, TypeAdapter +from servicelib.bytes_iters import DEFAULT_READ_CHUNK_SIZE, BytesStreamer from servicelib.logging_utils import log_catch, log_context +from servicelib.s3_utils import FileLikeReader from servicelib.utils import limited_gather from settings_library.s3 import S3Settings from types_aiobotocore_s3 import S3Client from types_aiobotocore_s3.literals import BucketLocationConstraintType -from types_aiobotocore_s3.type_defs import ObjectIdentifierTypeDef +from types_aiobotocore_s3.type_defs import ( + ListObjectsV2RequestTypeDef, + ObjectIdentifierTypeDef, +) -from ._constants import MULTIPART_COPY_THRESHOLD, MULTIPART_UPLOADS_MIN_TOTAL_SIZE +from ._constants import ( + MULTIPART_COPY_THRESHOLD, + MULTIPART_UPLOADS_MIN_TOTAL_SIZE, + S3_OBJECT_DELIMITER, +) from ._error_handler import s3_exception_handler, s3_exception_handler_async_gen from ._errors import S3DestinationNotEmptyError, S3KeyNotFoundError from ._models import ( MultiPartUploadLinks, + PathCursor, S3DirectoryMetaData, S3MetaData, S3ObjectKey, + S3ObjectPrefix, UploadID, ) -from ._utils import compute_num_file_chunks +from ._utils import compute_num_file_chunks, create_final_prefix _logger = logging.getLogger(__name__) @@ -48,13 +64,11 @@ class UploadedBytesTransferredCallback(Protocol): - def __call__(self, 
bytes_transferred: int, *, file_name: str) -> None: - ... + def __call__(self, bytes_transferred: int, *, file_name: str) -> None: ... class CopiedBytesTransferredCallback(Protocol): - def __call__(self, total_bytes_copied: int, *, file_name: str) -> None: - ... + def __call__(self, total_bytes_copied: int, *, file_name: str) -> None: ... @dataclass(frozen=True) @@ -97,7 +111,10 @@ async def http_check_bucket_connected(self, *, bucket: S3BucketName) -> bool: @s3_exception_handler(_logger) async def create_bucket( - self, *, bucket: S3BucketName, region: BucketLocationConstraintType + self, + *, + bucket: S3BucketName, + region: BucketLocationConstraintType | Literal["us-east-1"], ) -> None: with log_context( _logger, logging.INFO, msg=f"Create bucket {bucket} in {region}" @@ -157,7 +174,99 @@ async def get_directory_metadata( size = 0 async for s3_object in self._list_all_objects(bucket=bucket, prefix=prefix): size += s3_object.size - return S3DirectoryMetaData(size=size) + return S3DirectoryMetaData(prefix=S3ObjectPrefix(prefix), size=ByteSize(size)) + + @s3_exception_handler(_logger) + async def count_objects( + self, + *, + bucket: S3BucketName, + prefix: S3ObjectPrefix | None, + start_after: S3ObjectKey | None, + is_partial_prefix: bool = False, + use_delimiter: bool = True, + ) -> int: + """Returns the number of entries in the bucket, scoped + by prefix and start_after (same semantics as list_objects). + """ + paginator = self._client.get_paginator("list_objects_v2") + total_count = 0 + async for page in paginator.paginate( + Bucket=bucket, + Prefix=create_final_prefix(prefix, is_partial_prefix=is_partial_prefix), + StartAfter=start_after or "", + Delimiter=S3_OBJECT_DELIMITER if use_delimiter else "", + ): + total_count += page.get("KeyCount", 0) + return total_count
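As a review aid for the new count_objects API above, a minimal, hypothetical usage sketch; `s3_api` (a ready SimcoreS3API instance) and the bucket/prefix names are assumptions, not part of this change:

    from pathlib import Path

    async def show_counts(s3_api, bucket) -> None:
        # delimited count: direct entries under "data/" (like `ls data/`)
        n_direct = await s3_api.count_objects(
            bucket=bucket, prefix=Path("data"), start_after=None
        )
        # undelimited count: every key starting with "data" (like `ls data*`)
        n_recursive = await s3_api.count_objects(
            bucket=bucket,
            prefix=Path("data"),
            start_after=None,
            is_partial_prefix=True,
            use_delimiter=False,
        )
        print(f"{n_direct=} {n_recursive=}")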
+ + @s3_exception_handler(_logger) + async def list_objects( + self, + *, + bucket: S3BucketName, + prefix: S3ObjectPrefix | None, + start_after: S3ObjectKey | None, + limit: int = _MAX_ITEMS_PER_PAGE, + next_cursor: PathCursor | None = None, + is_partial_prefix: bool = False, + ) -> tuple[list[S3MetaData | S3DirectoryMetaData], PathCursor | None]: + """Returns up to ``limit`` entries from the bucket, sorted alphabetically by key. + If a cursor is returned, the client can call the function again with that + cursor to get the next entries. + + The first entry returned is defined by start_after; + if start_after is None, listing starts at the first key in the bucket. + If prefix is not None, only entries with the given prefix are returned; + if prefix is None, all entries in the bucket are returned. + If next_cursor is set, the call returns the entries after the cursor. + If is_partial_prefix is set, the prefix is not auto-delimited + (False is equivalent to `ls /home/user/`, + True is equivalent to `ls /home/user*`). + limit must be >= 1 and <= _AWS_MAX_ITEMS_PER_PAGE + + Raises: + ValueError: in case of an invalid limit + """ + if limit < 1: + msg = "limit must be >= 1" + raise ValueError(msg) + if limit > _AWS_MAX_ITEMS_PER_PAGE: + msg = f"limit must be <= {_AWS_MAX_ITEMS_PER_PAGE}" + raise ValueError(msg) + + list_config: ListObjectsV2RequestTypeDef = { + "Bucket": bucket, + "Prefix": create_final_prefix(prefix, is_partial_prefix=is_partial_prefix), + "MaxKeys": limit, + "Delimiter": S3_OBJECT_DELIMITER, + } + if start_after: + list_config["StartAfter"] = start_after + if next_cursor: + list_config["ContinuationToken"] = next_cursor + listed_objects = await self._client.list_objects_v2(**list_config) + found_objects: list[S3MetaData | S3DirectoryMetaData] = [] + if "CommonPrefixes" in listed_objects: + # we have folders here + list_subfolders = listed_objects["CommonPrefixes"] + found_objects.extend( + S3DirectoryMetaData.model_construct( + prefix=S3ObjectPrefix(subfolder["Prefix"]), size=None + ) + for subfolder in list_subfolders + if "Prefix" in subfolder + ) + if "Contents" in listed_objects: + found_objects.extend( + S3MetaData.from_botocore_list_objects(obj) + for obj in listed_objects["Contents"] + ) + next_cursor = None + if listed_objects["IsTruncated"]: + next_cursor = listed_objects["NextContinuationToken"] + return found_objects, next_cursor @s3_exception_handler_async_gen(_logger) async def list_objects_paginated( @@ -449,7 +558,7 @@ async def copy_objects_recursively( dst_metadata = await self.get_directory_metadata( bucket=bucket, prefix=dst_prefix ) - if dst_metadata.size > 0: + if dst_metadata.size and dst_metadata.size > 0: raise S3DestinationNotEmptyError(dst_prefix=dst_prefix) await limited_gather( *[ @@ -467,6 +576,57 @@ async def copy_objects_recursively( limit=_MAX_CONCURRENT_COPY, ) + async def get_bytes_streamer_from_object( + self, + bucket_name: S3BucketName, + object_key: S3ObjectKey, + *, + chunk_size: int = DEFAULT_READ_CHUNK_SIZE, + ) -> BytesStreamer: + """Stream-reads an object from S3 chunk by chunk.""" + + # NOTE `download_fileobj` cannot be used to implement this because + # it will buffer the entire file in memory instead of reading it + # chunk by chunk + + # HEAD is a cheap call: it returns the object size without fetching the body + head_response = await self._client.head_object( + Bucket=bucket_name, Key=object_key + ) + data_size = DataSize(head_response["ContentLength"]) + + async def _() -> BytesIter: + # Download the file in chunks + position = 0 + while position < data_size: + # Calculate the range for this chunk (byte ranges are inclusive) + end = min(position + chunk_size - 1, data_size - 1) + range_header = f"bytes={position}-{end}" + + # Download the chunk + response = await self._client.get_object( + Bucket=bucket_name, Key=object_key, Range=range_header + ) + + chunk = await response["Body"].read() + + # Yield the chunk for processing + yield chunk + + position += chunk_size + + return BytesStreamer(data_size, _)
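The method above is a plain HTTP range-read loop (HEAD for the size, then ranged GETs). For review, the same pattern as a self-contained, hypothetical sketch; `s3_client` stands for any aiobotocore S3 client and all names here are assumptions:

    from collections.abc import AsyncIterator

    async def iter_object_chunks(
        s3_client, bucket: str, key: str, chunk_size: int
    ) -> AsyncIterator[bytes]:
        # HEAD gives the total size without downloading the body
        head = await s3_client.head_object(Bucket=bucket, Key=key)
        size = head["ContentLength"]
        position = 0
        while position < size:
            end = min(position + chunk_size - 1, size - 1)  # ranges are inclusive
            response = await s3_client.get_object(
                Bucket=bucket, Key=key, Range=f"bytes={position}-{end}"
            )
            yield await response["Body"].read()
            position += chunk_size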
+ + @s3_exception_handler(_logger) + async def upload_object_from_file_like( + self, + bucket_name: S3BucketName, + object_key: S3ObjectKey, + file_like_reader: FileLikeReader, + ) -> None: + """Stream-writes an object to S3 from an AsyncIterable[bytes].""" + await self._client.upload_fileobj(file_like_reader, bucket_name, object_key) # type: ignore[arg-type] + @staticmethod def is_multipart(file_size: ByteSize) -> bool: return file_size >= MULTIPART_UPLOADS_MIN_TOTAL_SIZE diff --git a/packages/aws-library/src/aws_library/s3/_constants.py index a94cd555f43..882c02774d2 100644 --- a/packages/aws-library/src/aws_library/s3/_constants.py +++ b/packages/aws-library/src/aws_library/s3/_constants.py @@ -12,3 +12,4 @@ PRESIGNED_LINK_MAX_SIZE: Final[ByteSize] = TypeAdapter(ByteSize).validate_python("5GiB") S3_MAX_FILE_SIZE: Final[ByteSize] = TypeAdapter(ByteSize).validate_python("5TiB") +S3_OBJECT_DELIMITER: Final[str] = "/" diff --git a/packages/aws-library/src/aws_library/s3/_models.py index 63e7ad15699..4d722386526 100644 --- a/packages/aws-library/src/aws_library/s3/_models.py +++ b/packages/aws-library/src/aws_library/s3/_models.py @@ -1,23 +1,24 @@ import datetime -from dataclasses import dataclass -from typing import TypeAlias +from pathlib import Path +from typing import TypeAlias, cast -from models_library.api_schemas_storage import ETag +from models_library.api_schemas_storage.storage_schemas import ETag from models_library.basic_types import SHA256Str -from pydantic import AnyUrl, BaseModel, ByteSize +from pydantic import AnyUrl, BaseModel, ByteSize, Field from types_aiobotocore_s3.type_defs import HeadObjectOutputTypeDef, ObjectTypeDef S3ObjectKey: TypeAlias = str +S3ObjectPrefix: TypeAlias = Path UploadID: TypeAlias = str +PathCursor: TypeAlias = str -@dataclass(frozen=True, slots=True, kw_only=True) -class S3MetaData: +class S3MetaData(BaseModel, frozen=True): object_key: S3ObjectKey last_modified: datetime.datetime e_tag: ETag sha256_checksum: SHA256Str | None - size: int + size: ByteSize @staticmethod def from_botocore_head_object( @@ -27,12 +28,8 @@ def from_botocore_head_object( object_key=object_key, last_modified=obj["LastModified"], e_tag=obj["ETag"].strip('"'), - sha256_checksum=( - SHA256Str(obj.get("ChecksumSHA256")) - if obj.get("ChecksumSHA256") - else None - ), - size=obj["ContentLength"], + sha256_checksum=obj.get("ChecksumSHA256"), + size=ByteSize(obj["ContentLength"]), ) @staticmethod @@ -47,18 +44,22 @@ def from_botocore_list_objects( object_key=obj["Key"], last_modified=obj["LastModified"], e_tag=obj["ETag"].strip('"'), - sha256_checksum=( - SHA256Str(obj.get("ChecksumSHA256")) - if obj.get("ChecksumSHA256") - else None - ), - size=obj["Size"], + sha256_checksum=cast(SHA256Str | None, obj.get("ChecksumSHA256")), + size=ByteSize(obj["Size"]), ) + def as_path(self) -> Path: + return Path(self.object_key) -@dataclass(frozen=True) -class S3DirectoryMetaData: - size: int + +class S3DirectoryMetaData(BaseModel, frozen=True): + prefix: S3ObjectPrefix + size: ByteSize | None = Field( + ..., description="Size of the directory if computed, None if unknown" + ) + + def as_path(self) -> Path: + return self.prefix class MultiPartUploadLinks(BaseModel):
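Since S3MetaData and S3DirectoryMetaData now share as_path(), a page returned by the new list_objects can be handled uniformly. A hypothetical walk-the-bucket sketch; `s3_api` and `bucket` are assumed to exist, and the model import path matches the test changes further below:

    from aws_library.s3._models import S3DirectoryMetaData

    async def print_tree(s3_api, bucket) -> None:
        cursor = None
        while True:
            entries, cursor = await s3_api.list_objects(
                bucket=bucket, prefix=None, start_after=None, next_cursor=cursor
            )
            for entry in entries:
                if isinstance(entry, S3DirectoryMetaData):
                    # folder sizes are not computed by listings, hence size=None
                    print(f"{entry.as_path()}/")
                else:
                    print(f"{entry.as_path()} ({entry.size} bytes)")
            if cursor is None:  # no further pages
                break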
S3_OBJECT_DELIMITER +from ._models import S3ObjectPrefix + _MULTIPART_MAX_NUMBER_OF_PARTS: Final[int] = 10000 # this is artifically defined, if possible we keep a maximum number of requests for parallel @@ -34,3 +37,15 @@ def compute_num_file_chunks(file_size: ByteSize) -> tuple[int, ByteSize]: raise ValueError( msg, ) + + +def create_final_prefix( + prefix: S3ObjectPrefix | None, *, is_partial_prefix: bool +) -> str: + final_prefix = f"{prefix}" if prefix else "" + if prefix and not is_partial_prefix: + final_prefix = ( + f"{final_prefix.rstrip(S3_OBJECT_DELIMITER)}{S3_OBJECT_DELIMITER}" + ) + + return final_prefix diff --git a/packages/aws-library/tests/test_ec2_client.py b/packages/aws-library/tests/test_ec2_client.py index af773d5851c..a1cbdf55c57 100644 --- a/packages/aws-library/tests/test_ec2_client.py +++ b/packages/aws-library/tests/test_ec2_client.py @@ -52,8 +52,7 @@ async def simcore_ec2_api( await ec2.close() -async def test_ec2_client_lifespan(simcore_ec2_api: SimcoreEC2API): - ... +async def test_ec2_client_lifespan(simcore_ec2_api: SimcoreEC2API): ... async def test_aiobotocore_ec2_client_when_ec2_server_goes_up_and_down( @@ -107,12 +106,12 @@ async def test_get_ec2_instance_capabilities( simcore_ec2_api: SimcoreEC2API, ec2_allowed_instances: list[InstanceTypeType], ): - instance_types: list[ - EC2InstanceType - ] = await simcore_ec2_api.get_ec2_instance_capabilities( - cast( - set[InstanceTypeType], - set(ec2_allowed_instances), + instance_types: list[EC2InstanceType] = ( + await simcore_ec2_api.get_ec2_instance_capabilities( + cast( + set[InstanceTypeType], + set(ec2_allowed_instances), + ) ) ) assert instance_types @@ -125,7 +124,9 @@ async def test_get_ec2_instance_capabilities_returns_all_options( instance_types = await simcore_ec2_api.get_ec2_instance_capabilities("ALL") assert instance_types # NOTE: this might need adaptation when moto is updated - assert 700 < len(instance_types) < 852 + assert ( + 850 < len(instance_types) < 877 + ), f"received {len(instance_types)}, the test might need adaptation" async def test_get_ec2_instance_capabilities_raise_with_empty_set( @@ -151,9 +152,9 @@ async def fake_ec2_instance_type( request: pytest.FixtureRequest, ) -> EC2InstanceType: instance_type_name: InstanceTypeType = request.param - instance_types: list[ - EC2InstanceType - ] = await simcore_ec2_api.get_ec2_instance_capabilities({instance_type_name}) + instance_types: list[EC2InstanceType] = ( + await simcore_ec2_api.get_ec2_instance_capabilities({instance_type_name}) + ) assert len(instance_types) == 1 return instance_types[0] diff --git a/packages/aws-library/tests/test_s3_client.py b/packages/aws-library/tests/test_s3_client.py index bd853f51860..4380827b2fb 100644 --- a/packages/aws-library/tests/test_s3_client.py +++ b/packages/aws-library/tests/test_s3_client.py @@ -1,39 +1,57 @@ -# pylint:disable=unused-variable -# pylint:disable=unused-argument +# pylint:disable=contextmanager-generator-missing-cleanup +# pylint:disable=no-name-in-module +# pylint:disable=protected-access # pylint:disable=redefined-outer-name # pylint:disable=too-many-arguments -# pylint:disable=protected-access -# pylint:disable=no-name-in-module +# pylint:disable=unused-argument +# pylint:disable=unused-variable import asyncio import filecmp import json import logging +import random +import time from collections import defaultdict -from collections.abc import AsyncIterator, Awaitable, Callable +from collections.abc import AsyncIterator, Awaitable, Callable, Iterator +from contextlib import 
contextmanager from dataclasses import dataclass from pathlib import Path -from typing import Any +from typing import Any, Final +from unittest.mock import AsyncMock, Mock +import aiofiles import botocore.exceptions import pytest from aiohttp import ClientSession -from aws_library.s3._client import S3ObjectKey, SimcoreS3API -from aws_library.s3._constants import MULTIPART_UPLOADS_MIN_TOTAL_SIZE +from aws_library.s3._client import _AWS_MAX_ITEMS_PER_PAGE, S3ObjectKey, SimcoreS3API +from aws_library.s3._constants import ( + MULTIPART_COPY_THRESHOLD, + MULTIPART_UPLOADS_MIN_TOTAL_SIZE, +) from aws_library.s3._errors import ( S3BucketInvalidError, S3DestinationNotEmptyError, S3KeyNotFoundError, S3UploadNotFoundError, ) -from aws_library.s3._models import MultiPartUploadLinks +from aws_library.s3._models import MultiPartUploadLinks, S3DirectoryMetaData, S3MetaData from faker import Faker -from models_library.api_schemas_storage import S3BucketName, UploadedPart +from models_library.api_schemas_storage.storage_schemas import ( + S3BucketName, + UploadedPart, +) from models_library.basic_types import SHA256Str from moto.server import ThreadedMotoServer -from pydantic import AnyUrl, ByteSize, TypeAdapter +from pydantic import AnyUrl, ByteSize, NonNegativeInt, TypeAdapter from pytest_benchmark.plugin import BenchmarkFixture +from pytest_mock import MockerFixture +from pytest_simcore.helpers.comparing import ( + assert_same_contents, + assert_same_file_content, + get_files_info_from_path, +) from pytest_simcore.helpers.logging_tools import log_context from pytest_simcore.helpers.parametrizations import ( byte_size_ids, @@ -44,7 +62,13 @@ upload_file_to_presigned_link, ) from pytest_simcore.helpers.typing_env import EnvVarsDict -from servicelib.utils import limited_as_completed +from servicelib.archiving_utils import unarchive_dir +from servicelib.bytes_iters import ArchiveEntries, DiskStreamReader, get_zip_bytes_iter +from servicelib.bytes_iters._models import DataSize +from servicelib.file_utils import remove_directory +from servicelib.progress_bar import ProgressBarData +from servicelib.s3_utils import FileLikeBytesIterReader +from servicelib.utils import limited_as_completed, limited_gather from settings_library.s3 import S3Settings from types_aiobotocore_s3 import S3Client from types_aiobotocore_s3.literals import BucketLocationConstraintType @@ -344,35 +368,50 @@ def set_log_levels_for_noisy_libraries() -> None: @pytest.fixture -async def with_uploaded_folder_on_s3( +async def create_folder_on_s3( create_folder_of_size_with_multiple_files: Callable[ - [ByteSize, ByteSize, ByteSize], Path + [ByteSize, ByteSize, ByteSize, Path | None, NonNegativeInt | None], Path ], upload_file: Callable[[Path, Path], Awaitable[UploadedFile]], directory_size: ByteSize, min_file_size: ByteSize, max_file_size: ByteSize, + depth: NonNegativeInt | None, +) -> Callable[[], Awaitable[list[UploadedFile]]]: + async def _() -> list[UploadedFile]: + # create random files of random size and upload to S3 + folder = create_folder_of_size_with_multiple_files( + ByteSize(directory_size), + ByteSize(min_file_size), + ByteSize(max_file_size), + None, + depth, + ) + list_uploaded_files = [] + + with log_context(logging.INFO, msg=f"uploading {folder}") as ctx: + list_uploaded_files = [ + await uploaded_file + async for uploaded_file in limited_as_completed( + ( + upload_file(file, folder.parent) + for file in folder.rglob("*") + if file.is_file() + ), + limit=20, + ) + ] + ctx.logger.info("uploaded %s files", 
len(list_uploaded_files)) + return list_uploaded_files + + return _ + + +@pytest.fixture +async def with_uploaded_folder_on_s3( + create_folder_on_s3: Callable[[], Awaitable[list[UploadedFile]]], ) -> list[UploadedFile]: - # create random files of random size and upload to S3 - folder = create_folder_of_size_with_multiple_files( - ByteSize(directory_size), ByteSize(min_file_size), ByteSize(max_file_size) - ) - list_uploaded_files = [] - - with log_context(logging.INFO, msg=f"uploading {folder}") as ctx: - list_uploaded_files = [ - await uploaded_file - async for uploaded_file in limited_as_completed( - ( - upload_file(file, folder.parent) - for file in folder.rglob("*") - if file.is_file() - ), - limit=20, - ) - ] - ctx.logger.info("uploaded %s files", len(list_uploaded_files)) - return list_uploaded_files + return await create_folder_on_s3() @pytest.fixture @@ -414,9 +453,10 @@ async def _copier(src_prefix: str, dst_prefix: str) -> str: src_directory_metadata = await simcore_s3_api.get_directory_metadata( bucket=with_s3_bucket, prefix=src_prefix ) + assert src_directory_metadata.size is not None with log_context( logging.INFO, - msg=f"copying {src_prefix} [{ByteSize(src_directory_metadata.size).human_readable()}] to {dst_prefix}", + msg=f"copying {src_prefix} [{src_directory_metadata.size.human_readable()}] to {dst_prefix}", ) as ctx: progress_cb = _CopyProgressCallback( file_size=src_directory_metadata.size, @@ -495,6 +535,270 @@ async def test_http_check_bucket_connected( ) +_ROOT_LEVEL: Final[int] = -2 + + +def _get_paths_with_prefix( + uploaded_files: list[UploadedFile], *, prefix_level: int, path_prefix: Path | None +) -> tuple[set[Path], set[Path]]: + def _filter_by_prefix(uploaded_file: UploadedFile) -> bool: + return Path(uploaded_file.s3_key).is_relative_to(path_prefix or "") + + directories = { + Path(file.s3_key).parents[_ROOT_LEVEL - prefix_level] + for file in filter(_filter_by_prefix, uploaded_files) + if Path(file.s3_key).parent != path_prefix + } + files = { + Path(file.s3_key) + for file in filter(_filter_by_prefix, uploaded_files) + if Path(file.s3_key).parent == path_prefix + } + return directories, files + + +@pytest.mark.parametrize( + "directory_size, min_file_size, max_file_size, depth", + [ + ( + TypeAdapter(ByteSize).validate_python("1Mib"), + TypeAdapter(ByteSize).validate_python("1B"), + TypeAdapter(ByteSize).validate_python("10Kib"), + None, + ) + ], + ids=byte_size_ids, +) +async def test_count_objects( + mocked_s3_server_envs: EnvVarsDict, + with_s3_bucket: S3BucketName, + with_uploaded_folder_on_s3: list[UploadedFile], + simcore_s3_api: SimcoreS3API, +): + # assert pre-conditions + assert len(with_uploaded_folder_on_s3) >= 1, "wrong initialization of test!" 
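+ + # NOTE: sketch of the layout this walk assumes (names illustrative): + # bucket/folder0/sub/file.bin -> level 0 is "folder0", level 1 is "sub"; + # count_objects is expected to return only the direct children (files and + # folders) under the given prefix, i.e. it does not recurse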
+ + def find_deepest_file(files: list[UploadedFile]) -> Path: + return Path(max(files, key=lambda f: f.s3_key.count("/")).s3_key) + + deepest_file_path = find_deepest_file(with_uploaded_folder_on_s3) + prefixes = deepest_file_path.parents[0].parts + + # Start from the root and go down to the directory containing the deepest file + for level in range(len(prefixes)): + current_prefix = ( + Path(prefixes[0]).joinpath(*prefixes[1:level]) if level > 0 else None + ) + + directories, files = _get_paths_with_prefix( + with_uploaded_folder_on_s3, prefix_level=level, path_prefix=current_prefix + ) + all_paths = directories | files + + num_objects = await simcore_s3_api.count_objects( + bucket=with_s3_bucket, prefix=current_prefix, start_after=None + ) + assert num_objects == len(all_paths) + + # get number on root is 1 + got = await simcore_s3_api.count_objects( + bucket=with_s3_bucket, prefix=None, start_after=None + ) + assert got == len(directories) + + +@pytest.mark.parametrize( + "directory_size, min_file_size, max_file_size, depth", + [ + ( + TypeAdapter(ByteSize).validate_python("1Mib"), + TypeAdapter(ByteSize).validate_python("1B"), + TypeAdapter(ByteSize).validate_python("10Kib"), + None, + ) + ], + ids=byte_size_ids, +) +async def test_list_objects_prefix( + mocked_s3_server_envs: EnvVarsDict, + with_s3_bucket: S3BucketName, + with_uploaded_folder_on_s3: list[UploadedFile], + simcore_s3_api: SimcoreS3API, +): + # assert pre-conditions + assert len(with_uploaded_folder_on_s3) >= 1, "wrong initialization of test!" + + def find_deepest_file(files: list[UploadedFile]) -> Path: + return Path(max(files, key=lambda f: f.s3_key.count("/")).s3_key) + + deepest_file_path = find_deepest_file(with_uploaded_folder_on_s3) + prefixes = deepest_file_path.parents[0].parts + + # Start from the root and go down to the directory containing the deepest file + for level in range(len(prefixes)): + current_prefix = ( + Path(prefixes[0]).joinpath(*prefixes[1:level]) if level > 0 else None + ) + + directories, files = _get_paths_with_prefix( + with_uploaded_folder_on_s3, prefix_level=level, path_prefix=current_prefix + ) + all_paths = directories | files + + objects, next_cursor = await simcore_s3_api.list_objects( + bucket=with_s3_bucket, prefix=current_prefix, start_after=None + ) + assert next_cursor is None + assert len(objects) == len(all_paths) + assert {_.as_path() for _ in objects} == all_paths + + # Check files and directories are correctly separated + received_files = {_ for _ in objects if isinstance(_, S3MetaData)} + received_directories = { + _ for _ in objects if isinstance(_, S3DirectoryMetaData) + } + assert len(received_files) == len(files) + assert len(received_directories) == len(directories) + + +async def test_list_objects_pagination_num_objects_limits( + faker: Faker, + mocked_s3_server_envs: EnvVarsDict, + with_s3_bucket: S3BucketName, + simcore_s3_api: SimcoreS3API, +): + with pytest.raises(ValueError, match=r"num_objects must be >= 1"): + await simcore_s3_api.list_objects( + bucket=with_s3_bucket, + prefix=None, + start_after=None, + limit=faker.pyint(max_value=0), + ) + + with pytest.raises(ValueError, match=r"num_objects must be <= \d+"): + await simcore_s3_api.list_objects( + bucket=with_s3_bucket, + prefix=None, + start_after=None, + limit=_AWS_MAX_ITEMS_PER_PAGE + 1, + ) + + +@pytest.mark.parametrize( + "directory_size, min_file_size, max_file_size, depth", + [ + ( + TypeAdapter(ByteSize).validate_python("1Mib"), + TypeAdapter(ByteSize).validate_python("1B"), + 
TypeAdapter(ByteSize).validate_python("10Kib"), + 0, + ) + ], + ids=byte_size_ids, +) +@pytest.mark.parametrize("limit", [10, 50, 300], ids=lambda x: f"limit={x}") +async def test_list_objects_pagination( + mocked_s3_server_envs: EnvVarsDict, + with_s3_bucket: S3BucketName, + with_uploaded_folder_on_s3: list[UploadedFile], + simcore_s3_api: SimcoreS3API, + limit: int, +): + total_num_files = len(with_uploaded_folder_on_s3) + # pre-condition + directories, files = _get_paths_with_prefix( + with_uploaded_folder_on_s3, prefix_level=0, path_prefix=None + ) + assert len(directories) == 1, "test pre-condition not fulfilled!" + assert not files + + first_level_prefix = next(iter(directories)) + first_level_directories, first_level_files = _get_paths_with_prefix( + with_uploaded_folder_on_s3, prefix_level=1, path_prefix=first_level_prefix + ) + assert ( + not first_level_directories + ), "test pre-condition not fulfilled, there should be only files for this test" + assert len(first_level_files) == total_num_files + + # now we will fetch the file objects according to the given limit + num_fetch = int(round(total_num_files / limit + 0.5)) + assert num_fetch >= 1 + start_after_key = None + for i in range(num_fetch - 1): + objects, next_cursor = await simcore_s3_api.list_objects( + bucket=with_s3_bucket, + prefix=first_level_prefix, + start_after=start_after_key, + limit=limit, + ) + assert len(objects) == limit, f"fetch {i} returned a wrong number of objects" + assert isinstance(objects[-1], S3MetaData) + start_after_key = objects[-1].object_key + # last fetch + objects, next_cursor = await simcore_s3_api.list_objects( + bucket=with_s3_bucket, + prefix=first_level_prefix, + start_after=start_after_key, + limit=limit, + ) + assert next_cursor is None + assert len(objects) == (total_num_files - (num_fetch - 1) * limit) + + +@pytest.mark.parametrize( + "directory_size, min_file_size, max_file_size, depth", + [ + ( + TypeAdapter(ByteSize).validate_python("1Mib"), + TypeAdapter(ByteSize).validate_python("1B"), + TypeAdapter(ByteSize).validate_python("10Kib"), + 0, + ) + ], + ids=byte_size_ids, +) +async def test_list_objects_partial_prefix( + mocked_s3_server_envs: EnvVarsDict, + with_s3_bucket: S3BucketName, + with_uploaded_folder_on_s3: list[UploadedFile], + simcore_s3_api: SimcoreS3API, +): + total_num_files = len(with_uploaded_folder_on_s3) + # pre-condition + directories, files = _get_paths_with_prefix( + with_uploaded_folder_on_s3, prefix_level=0, path_prefix=None + ) + assert len(directories) == 1, "test pre-condition not fulfilled!" 
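+ # NOTE: with depth=0 every file lands directly under a single top-level + # folder, hence exactly one directory at the root and no loose files + # (verified by this assert and the one right below)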
+ assert not files + + first_level_prefix = next(iter(directories)) + first_level_directories, first_level_files = _get_paths_with_prefix( + with_uploaded_folder_on_s3, prefix_level=1, path_prefix=first_level_prefix + ) + assert ( + not first_level_directories + ), "test pre-condition not fulfilled, there should be only files for this test" + assert len(first_level_files) == total_num_files + + a_random_file = random.choice(list(first_level_files)) # noqa: S311 + a_partial_prefix = a_random_file.name[0:1] + expected_files = { + file for file in first_level_files if file.name.startswith(a_partial_prefix) + } + + # now we will fetch the file objects according to the given limit + objects, next_cursor = await simcore_s3_api.list_objects( + bucket=with_s3_bucket, + prefix=first_level_prefix / a_partial_prefix, + start_after=None, + is_partial_prefix=True, + ) + assert next_cursor is None + assert len(objects) == len(expected_files) + assert {_.as_path() for _ in objects} == expected_files + + async def test_get_file_metadata( mocked_s3_server_envs: EnvVarsDict, with_s3_bucket: S3BucketName, @@ -829,14 +1133,13 @@ async def test_create_multipart_presigned_upload_link( assert s3_metadata.last_modified assert s3_metadata.e_tag == f"{json.loads(received_e_tag)}" - # completing again raises - with pytest.raises(S3UploadNotFoundError): - await simcore_s3_api.complete_multipart_upload( - bucket=with_s3_bucket, - object_key=file_id, - upload_id=upload_links.upload_id, - uploaded_parts=uploaded_parts, - ) + # completing again does not raise anymore (was raising until moto==5.0.21) + await simcore_s3_api.complete_multipart_upload( + bucket=with_s3_bucket, + object_key=file_id, + upload_id=upload_links.upload_id, + uploaded_parts=uploaded_parts, + ) @pytest.mark.parametrize( @@ -1102,12 +1405,13 @@ async def test_copy_file_invalid_raises( @pytest.mark.parametrize( - "directory_size, min_file_size, max_file_size", + "directory_size, min_file_size, max_file_size, depth", [ ( TypeAdapter(ByteSize).validate_python("1Mib"), TypeAdapter(ByteSize).validate_python("1B"), TypeAdapter(ByteSize).validate_python("10Kib"), + None, ) ], ids=byte_size_ids, @@ -1128,12 +1432,13 @@ async def test_get_directory_metadata( @pytest.mark.parametrize( - "directory_size, min_file_size, max_file_size", + "directory_size, min_file_size, max_file_size, depth", [ ( TypeAdapter(ByteSize).validate_python("1Mib"), TypeAdapter(ByteSize).validate_python("1B"), TypeAdapter(ByteSize).validate_python("10Kib"), + None, ) ], ids=byte_size_ids, @@ -1160,12 +1465,13 @@ async def test_get_directory_metadata_raises( @pytest.mark.parametrize( - "directory_size, min_file_size, max_file_size", + "directory_size, min_file_size, max_file_size, depth", [ ( TypeAdapter(ByteSize).validate_python("1Mib"), TypeAdapter(ByteSize).validate_python("1B"), TypeAdapter(ByteSize).validate_python("10Kib"), + None, ) ], ids=byte_size_ids, @@ -1196,12 +1502,13 @@ async def test_delete_file_recursively( @pytest.mark.parametrize( - "directory_size, min_file_size, max_file_size", + "directory_size, min_file_size, max_file_size, depth", [ ( TypeAdapter(ByteSize).validate_python("1Mib"), TypeAdapter(ByteSize).validate_python("1B"), TypeAdapter(ByteSize).validate_python("10Kib"), + None, ) ], ids=byte_size_ids, @@ -1234,12 +1541,13 @@ async def test_delete_file_recursively_raises( @pytest.mark.parametrize( - "directory_size, min_file_size, max_file_size", + "directory_size, min_file_size, max_file_size, depth", [ ( TypeAdapter(ByteSize).validate_python("1Mib"), 
TypeAdapter(ByteSize).validate_python("1B"), TypeAdapter(ByteSize).validate_python("10Kib"), + None, ) ], ids=byte_size_ids, @@ -1327,7 +1635,6 @@ def test_upload_file_performance( upload_file: Callable[[Path, Path | None], Awaitable[UploadedFile]], benchmark: BenchmarkFixture, ): - # create random files of random size and upload to S3 file = create_file_of_size(file_size) @@ -1338,17 +1645,19 @@ def run_async_test(*args, **kwargs) -> None: @pytest.mark.parametrize( - "directory_size, min_file_size, max_file_size", + "directory_size, min_file_size, max_file_size, depth", [ ( TypeAdapter(ByteSize).validate_python("1Mib"), TypeAdapter(ByteSize).validate_python("1B"), TypeAdapter(ByteSize).validate_python("10Kib"), + None, ), ( TypeAdapter(ByteSize).validate_python("500Mib"), TypeAdapter(ByteSize).validate_python("10Mib"), TypeAdapter(ByteSize).validate_python("50Mib"), + None, ), ], ids=byte_size_ids, @@ -1375,3 +1684,245 @@ def run_async_test(dst_folder: str) -> None: ) benchmark.pedantic(run_async_test, setup=dst_folder_setup, rounds=4) + + +async def test_read_from_bytes_streamer( + mocked_s3_server_envs: EnvVarsDict, + with_uploaded_file_on_s3: UploadedFile, + simcore_s3_api: SimcoreS3API, + with_s3_bucket: S3BucketName, + fake_file_name: Path, +): + async with aiofiles.open(fake_file_name, "wb") as f: + bytes_streamer = await simcore_s3_api.get_bytes_streamer_from_object( + with_s3_bucket, with_uploaded_file_on_s3.s3_key, chunk_size=1024 + ) + assert isinstance(bytes_streamer.data_size, DataSize) + async for chunk in bytes_streamer.with_progress_bytes_iter(AsyncMock()): + await f.write(chunk) + + assert bytes_streamer.data_size == fake_file_name.stat().st_size + + await assert_same_file_content(with_uploaded_file_on_s3.local_path, fake_file_name) + + +@pytest.mark.parametrize("upload_from_s3", [True, False]) +async def test_upload_object_from_file_like( + mocked_s3_server_envs: EnvVarsDict, + with_uploaded_file_on_s3: UploadedFile, + simcore_s3_api: SimcoreS3API, + with_s3_bucket: S3BucketName, + upload_from_s3: bool, +): + object_key = "read_from_s3_write_to_s3" + + if upload_from_s3: + bytes_streamer = await simcore_s3_api.get_bytes_streamer_from_object( + with_s3_bucket, with_uploaded_file_on_s3.s3_key + ) + assert isinstance(bytes_streamer.data_size, DataSize) + await simcore_s3_api.upload_object_from_file_like( + with_s3_bucket, + object_key, + FileLikeBytesIterReader( + bytes_streamer.with_progress_bytes_iter(AsyncMock()) + ), + ) + else: + await simcore_s3_api.upload_object_from_file_like( + with_s3_bucket, + object_key, + FileLikeBytesIterReader( + DiskStreamReader(with_uploaded_file_on_s3.local_path) + .get_bytes_streamer() + .bytes_iter_callable() + ), + ) + + await simcore_s3_api.delete_object(bucket=with_s3_bucket, object_key=object_key) + + +@contextmanager +def _folder_with_files( + create_folder_of_size_with_multiple_files: Callable[ + [ByteSize, ByteSize, ByteSize, Path | None], Path + ], + target_folder: Path, +) -> Iterator[dict[str, Path]]: + target_folder.mkdir(parents=True, exist_ok=True) + folder_path = create_folder_of_size_with_multiple_files( + TypeAdapter(ByteSize).validate_python("10MiB"), + TypeAdapter(ByteSize).validate_python("10KiB"), + TypeAdapter(ByteSize).validate_python("100KiB"), + target_folder, + ) + + relative_names_to_paths = get_files_info_from_path(folder_path) + + yield relative_names_to_paths + + for file in relative_names_to_paths.values(): + file.unlink() + + +@pytest.fixture +def path_local_files_for_archive( + tmp_path: Path, + 
create_folder_of_size_with_multiple_files: Callable[ + [ByteSize, ByteSize, ByteSize, Path | None], Path + ], +) -> Iterator[Path]: + dir_path = tmp_path / "not_uploaded" + with _folder_with_files(create_folder_of_size_with_multiple_files, dir_path): + yield dir_path + + +@pytest.fixture +async def path_s3_files_for_archive( + tmp_path: Path, + create_folder_of_size_with_multiple_files: Callable[ + [ByteSize, ByteSize, ByteSize, Path | None], Path + ], + s3_client: S3Client, + with_s3_bucket: S3BucketName, +) -> AsyncIterator[Path]: + dir_path = tmp_path / "stored_in_s3" + with _folder_with_files( + create_folder_of_size_with_multiple_files, dir_path + ) as relative_names_to_paths: + await limited_gather( + *( + s3_client.upload_file( + Filename=f"{file}", Bucket=with_s3_bucket, Key=s3_object_key + ) + for s3_object_key, file in relative_names_to_paths.items() + ), + limit=10, + ) + yield dir_path + + await delete_all_object_versions( + s3_client, with_s3_bucket, relative_names_to_paths.keys() + ) + + +@pytest.fixture +def archive_download_path(tmp_path: Path, faker: Faker) -> Iterator[Path]: + path = tmp_path / f"downloaded_archive_{faker.uuid4()}.zip" + yield path + if path.exists(): + path.unlink() + + +@pytest.fixture +async def extracted_archive_path(tmp_path: Path, faker: Faker) -> AsyncIterator[Path]: + path = tmp_path / f"decompressed_archive_{faker.uuid4()}" + path.mkdir(parents=True, exist_ok=True) + assert path.is_dir() + yield path + await remove_directory(path) + assert not path.is_dir() + + +@pytest.fixture +async def archive_s3_object_key( + with_s3_bucket: S3BucketName, simcore_s3_api: SimcoreS3API +) -> AsyncIterator[S3ObjectKey]: + s3_object_key = "read_from_s3_write_to_s3" + yield s3_object_key + await simcore_s3_api.delete_object(bucket=with_s3_bucket, object_key=s3_object_key) + + +@pytest.fixture +def mocked_progress_bar_cb(mocker: MockerFixture) -> Mock: + def _progress_cb(*args, **kwargs) -> None: + print(f"received progress: {args}, {kwargs}") + + return mocker.Mock(side_effect=_progress_cb) + + +async def test_workflow_compress_s3_objects_and_local_files_in_a_single_archive_then_upload_to_s3( + mocked_s3_server_envs: EnvVarsDict, + path_local_files_for_archive: Path, + path_s3_files_for_archive: Path, + archive_download_path: Path, + extracted_archive_path: Path, + simcore_s3_api: SimcoreS3API, + with_s3_bucket: S3BucketName, + s3_client: S3Client, + archive_s3_object_key: S3ObjectKey, + mocked_progress_bar_cb: Mock, +): + # In this test: + # - files are read from disk and S3 + # - a zip archive is created on the go + # - the zip archive is streamed to S3 as soon as chunks inside it are created + # Uses no disk and constant memory for the entire operation. + + # 1. assemble and upload zip archive + + archive_entries: ArchiveEntries = [] + + local_files = get_files_info_from_path(path_local_files_for_archive) + for file_name, file_path in local_files.items(): + archive_entries.append( + ( + file_name, + DiskStreamReader(file_path).get_bytes_streamer(), + ) + ) + + s3_files = get_files_info_from_path(path_s3_files_for_archive) + + for s3_object_key in s3_files: + archive_entries.append( + ( + s3_object_key, + await simcore_s3_api.get_bytes_streamer_from_object( + with_s3_bucket, s3_object_key + ), + ) + ) + + # shuffle order of files in archive. 
+ # some will be read from S3 and some from the disk + random.shuffle(archive_entries) + + started = time.time() + + async with ProgressBarData( + num_steps=1, + progress_report_cb=mocked_progress_bar_cb, + description="root_bar", + ) as progress_bar: + await simcore_s3_api.upload_object_from_file_like( + with_s3_bucket, + archive_s3_object_key, + FileLikeBytesIterReader( + get_zip_bytes_iter( + archive_entries, + progress_bar=progress_bar, + chunk_size=MULTIPART_COPY_THRESHOLD, + ) + ), + ) + + duration = time.time() - started + print(f"Zip created on S3 in {duration:.2f} seconds") + + # 2. download zip archive from S3 + print(f"downloading {archive_download_path}") + await s3_client.download_file( + with_s3_bucket, archive_s3_object_key, f"{archive_download_path}" + ) + + # 3. extract archive + await unarchive_dir(archive_download_path, extracted_archive_path) + + # 4. compare + print("comparing files") + all_files_in_zip = get_files_info_from_path(path_local_files_for_archive) | s3_files + + await assert_same_contents( + all_files_in_zip, get_files_info_from_path(extracted_archive_path) + ) diff --git a/packages/common-library/requirements/_test.txt b/packages/common-library/requirements/_test.txt index 5a13cdc370c..3a57a924cb2 100644 --- a/packages/common-library/requirements/_test.txt +++ b/packages/common-library/requirements/_test.txt @@ -2,11 +2,11 @@ annotated-types==0.7.0 # via # -c requirements/_base.txt # pydantic -coverage==7.6.10 +coverage==7.6.12 # via # -r requirements/_test.in # pytest-cov -faker==35.0.0 +faker==36.1.1 # via -r requirements/_test.in icdiff==2.0.7 # via pytest-icdiff @@ -29,9 +29,11 @@ pydantic-core==2.27.2 # via # -c requirements/_base.txt # pydantic -pydantic-settings==2.7.1 - # via -r requirements/_test.in -pytest==8.3.4 +pydantic-settings==2.7.0 + # via + # -c requirements/../../../requirements/constraints.txt + # -r requirements/_test.in +pytest==8.3.5 # via # -r requirements/_test.in # pytest-asyncio @@ -56,19 +58,16 @@ pytest-runner==6.0.1 # via -r requirements/_test.in pytest-sugar==1.0.0 # via -r requirements/_test.in -python-dateutil==2.9.0.post0 - # via faker python-dotenv==1.0.1 # via # -r requirements/_test.in # pydantic-settings -six==1.17.0 - # via python-dateutil termcolor==2.5.0 # via pytest-sugar typing-extensions==4.12.2 # via # -c requirements/_base.txt - # faker # pydantic # pydantic-core +tzdata==2025.1 + # via faker diff --git a/packages/common-library/requirements/_tools.txt b/packages/common-library/requirements/_tools.txt index a28addac4b1..8e681c5a583 100644 --- a/packages/common-library/requirements/_tools.txt +++ b/packages/common-library/requirements/_tools.txt @@ -1,6 +1,6 @@ astroid==3.3.8 # via pylint -black==24.10.0 +black==25.1.0 # via -r requirements/../../../requirements/devenv.txt build==1.2.2.post1 # via pip-tools @@ -18,15 +18,15 @@ distlib==0.3.9 # via virtualenv filelock==3.17.0 # via virtualenv -identify==2.6.6 +identify==2.6.8 # via pre-commit -isort==5.13.2 +isort==6.0.1 # via # -r requirements/../../../requirements/devenv.txt # pylint mccabe==0.7.0 # via pylint -mypy==1.14.1 +mypy==1.15.0 # via -r requirements/../../../requirements/devenv.txt mypy-extensions==1.0.0 # via @@ -41,7 +41,7 @@ packaging==24.2 # build pathspec==0.12.1 # via black -pip==25.0 +pip==25.0.1 # via pip-tools pip-tools==7.4.1 # via -r requirements/../../../requirements/devenv.txt @@ -52,7 +52,7 @@ platformdirs==4.3.6 # virtualenv pre-commit==4.1.0 # via -r requirements/../../../requirements/devenv.txt -pylint==3.3.3 +pylint==3.3.4 # via -r 
requirements/../../../requirements/devenv.txt pyproject-hooks==1.2.0 # via @@ -62,9 +62,9 @@ pyyaml==6.0.2 # via # -c requirements/../../../requirements/constraints.txt # pre-commit -ruff==0.9.3 +ruff==0.9.9 # via -r requirements/../../../requirements/devenv.txt -setuptools==75.8.0 +setuptools==75.8.2 # via pip-tools tomlkit==0.13.2 # via pylint @@ -73,7 +73,7 @@ typing-extensions==4.12.2 # -c requirements/_base.txt # -c requirements/_test.txt # mypy -virtualenv==20.29.1 +virtualenv==20.29.2 # via pre-commit wheel==0.45.1 # via pip-tools diff --git a/packages/common-library/src/common_library/async_tools.py b/packages/common-library/src/common_library/async_tools.py new file mode 100644 index 00000000000..d92944299e7 --- /dev/null +++ b/packages/common-library/src/common_library/async_tools.py @@ -0,0 +1,24 @@ +import asyncio +import functools +from collections.abc import Awaitable, Callable +from concurrent.futures import Executor +from typing import ParamSpec, TypeVar + +R = TypeVar("R") +P = ParamSpec("P") + + +def make_async( + executor: Executor | None = None, +) -> Callable[[Callable[P, R]], Callable[P, Awaitable[R]]]: + def decorator(func: Callable[P, R]) -> Callable[P, Awaitable[R]]: + @functools.wraps(func) + async def wrapper(*args: P.args, **kwargs: P.kwargs) -> R: + loop = asyncio.get_running_loop() + return await loop.run_in_executor( + executor, functools.partial(func, *args, **kwargs) + ) + + return wrapper + + return decorator diff --git a/packages/common-library/src/common_library/error_codes.py b/packages/common-library/src/common_library/error_codes.py index 13b3b1566da..70829a059ca 100644 --- a/packages/common-library/src/common_library/error_codes.py +++ b/packages/common-library/src/common_library/error_codes.py @@ -1,4 +1,4 @@ -""" osparc ERROR CODES (OEC) +"""osparc ERROR CODES (OEC) Unique identifier of an exception instance Intended to report a user about unexpected errors. Unexpected exceptions can be traced by matching the @@ -7,25 +7,79 @@ SEE test_error_codes for some use cases """ +import hashlib import re -from typing import TYPE_CHECKING, Annotated +import traceback +from datetime import UTC, datetime +from typing import Annotated, Final, TypeAlias from pydantic import StringConstraints, TypeAdapter -_LABEL = "OEC:{}" -_PATTERN = r"OEC:\d+" +_LABEL = "OEC:{fingerprint}-{timestamp}" -if TYPE_CHECKING: - ErrorCodeStr = str -else: - ErrorCodeStr = Annotated[ - str, StringConstraints(strip_whitespace=True, pattern=_PATTERN) - ] +_LEN = 12 # chars (~48 bits) +_NAMED_PATTERN = re.compile( + r"OEC:(?P[a-fA-F0-9]{12})-(?P\d{13,14})" + # NOTE: timestamp limits: 13 digits (from 2001), 14 digits (good for ~500+ years) +) +_PATTERN = re.compile(r"OEC:[a-fA-F0-9]{12}-\d{13,14}") + + +ErrorCodeStr: TypeAlias = Annotated[ + str, StringConstraints(strip_whitespace=True, pattern=_NAMED_PATTERN) +] + + +def _create_fingerprint(exc: BaseException) -> str: + """ + Unique error fingerprint of the **traceback** for deduplication purposes + """ + tb = traceback.extract_tb(exc.__traceback__) + frame_sigs = [f"{frame.name}:{frame.lineno}" for frame in tb] + fingerprint = f"{type(exc).__name__}|" + "|".join(frame_sigs) + # E.g. 
ZeroDivisionError|foo:23|main:10 + return hashlib.sha256(fingerprint.encode()).hexdigest()[:_LEN] + + +_SECS_TO_MILISECS: Final[int] = 1000 # ms + + +def _create_timestamp() -> int: + """Timestamp as milliseconds since epoch + NOTE: this reduces the precision to milliseconds but it is good enough for our purpose + """ + ts = datetime.now(UTC).timestamp() * _SECS_TO_MILISECS + return int(ts) def create_error_code(exception: BaseException) -> ErrorCodeStr: - return TypeAdapter(ErrorCodeStr).validate_python(_LABEL.format(id(exception))) + """ + Generates a unique error code for the given exception. + + The error code follows the format: `OEC:{fingerprint}-{timestamp}`. + This code is intended to be shared with the front-end as a `SupportID` + for debugging and support purposes. + """ + return TypeAdapter(ErrorCodeStr).validate_python( + _LABEL.format( + fingerprint=_create_fingerprint(exception), + timestamp=_create_timestamp(), + ) + ) + + +def parse_error_codes(obj) -> list[ErrorCodeStr]: + return TypeAdapter(list[ErrorCodeStr]).validate_python(_PATTERN.findall(f"{obj}")) -def parse_error_code(obj) -> set[ErrorCodeStr]: - return set(re.findall(_PATTERN, f"{obj}")) +def parse_error_code_parts(oec: ErrorCodeStr) -> tuple[str, datetime]: + """Returns the traceback-fingerprint and timestamp from `OEC:{fingerprint}-{timestamp}`""" + match = _NAMED_PATTERN.match(oec) + if not match: + msg = f"Invalid error code format: {oec}" + raise ValueError(msg) + fingerprint = match.group("fingerprint") + timestamp = datetime.fromtimestamp( + float(match.group("timestamp")) / _SECS_TO_MILISECS, tz=UTC + ) + return fingerprint, timestamp diff --git a/packages/common-library/src/common_library/exclude.py b/packages/common-library/src/common_library/exclude.py index 7f2392dec33..e24efb998c4 100644 --- a/packages/common-library/src/common_library/exclude.py +++ b/packages/common-library/src/common_library/exclude.py @@ -12,6 +12,10 @@ def is_unset(v: Any) -> bool: return isinstance(v, UnSet) +def is_set(v: Any) -> bool: + return not isinstance(v, UnSet) + + def as_dict_exclude_unset(**params) -> dict[str, Any]: return {k: v for k, v in params.items() if not isinstance(v, UnSet)} diff --git a/packages/common-library/src/common_library/pagination_tools.py b/packages/common-library/src/common_library/pagination_tools.py new file mode 100644 index 00000000000..a30f654f6a1 --- /dev/null +++ b/packages/common-library/src/common_library/pagination_tools.py @@ -0,0 +1,72 @@ +from collections.abc import Iterable +from typing import Annotated + +from pydantic import BaseModel, ConfigDict, Field, NonNegativeInt, PositiveInt + + +class PageParams(BaseModel): + offset_initial: Annotated[NonNegativeInt, Field(frozen=True)] = 0 + offset_current: NonNegativeInt = 0 # running offset + limit: Annotated[PositiveInt, Field(frozen=True)] + total_number_of_items: int | None = None + + model_config = ConfigDict(validate_assignment=True) + + @property + def offset(self) -> NonNegativeInt: + return self.offset_current + + def has_items_left(self) -> bool: + return ( + self.total_number_of_items is None + or self.offset_current < self.total_number_of_items + ) + + def total_number_of_pages(self) -> NonNegativeInt: + assert self.total_number_of_items # nosec + num_items = self.total_number_of_items - self.offset_initial + return num_items // self.limit + (1 if num_items % self.limit else 0) + + +def iter_pagination_params( + offset: NonNegativeInt = 0, + limit: PositiveInt = 100, + total_number_of_items: NonNegativeInt | None = None, +) -> 
Iterable[PageParams]: + + kwargs = {} + if total_number_of_items: + kwargs["total_number_of_items"] = total_number_of_items + + page_params = PageParams( + offset_initial=offset, offset_current=offset, limit=limit, **kwargs + ) + + assert page_params.offset_current == page_params.offset_initial # nosec + + total_count_before = page_params.total_number_of_items + page_index = 0 + + while page_params.has_items_left(): + + yield page_params + + if page_params.total_number_of_items is None: + msg = "Must be updated at least before the first iteration, i.e. page_args.total_number_of_items = total_count" + raise RuntimeError(msg) + + if ( + total_count_before + and total_count_before != page_params.total_number_of_items + ): + msg = ( + f"total_number_of_items cannot change on every iteration: before={total_count_before}, now={page_params.total_number_of_items}." + "WARNING: the size of the paginated collection might be changing while it is being iterated?" + ) + raise RuntimeError(msg) + + if page_index == 0: + total_count_before = page_params.total_number_of_items + + page_params.offset_current += limit + assert page_params.offset == page_params.offset_current # nosec diff --git a/packages/common-library/tests/test_async_tools.py b/packages/common-library/tests/test_async_tools.py new file mode 100644 index 00000000000..961bf6f9fde --- /dev/null +++ b/packages/common-library/tests/test_async_tools.py @@ -0,0 +1,45 @@ +import asyncio +from concurrent.futures import ThreadPoolExecutor + +import pytest +from common_library.async_tools import make_async + + +@make_async() +def sync_function(x: int, y: int) -> int: + return x + y + + +@make_async() +def sync_function_with_exception() -> None: + raise ValueError("This is an error!") + + +@pytest.mark.asyncio +async def test_make_async_returns_coroutine(): + result = sync_function(2, 3) + assert asyncio.iscoroutine(result), "Function should return a coroutine" + + +@pytest.mark.asyncio +async def test_make_async_execution(): + result = await sync_function(2, 3) + assert result == 5, "Function should return 5" + + +@pytest.mark.asyncio +async def test_make_async_exception(): + with pytest.raises(ValueError, match="This is an error!"): + await sync_function_with_exception() + + +@pytest.mark.asyncio +async def test_make_async_with_executor(): + executor = ThreadPoolExecutor() + + @make_async(executor) + def heavy_computation(x: int) -> int: + return x * x + + result = await heavy_computation(4) + assert result == 16, "Function should return 16" diff --git a/packages/common-library/tests/test_error_codes.py b/packages/common-library/tests/test_error_codes.py index 5d4d78a5d2b..80f7b8b0808 100644 --- a/packages/common-library/tests/test_error_codes.py +++ b/packages/common-library/tests/test_error_codes.py @@ -4,17 +4,68 @@ # pylint: disable=unused-variable import logging +import time import pytest -from common_library.error_codes import create_error_code, parse_error_code +from common_library.error_codes import ( + create_error_code, + parse_error_code_parts, + parse_error_codes, +) logger = logging.getLogger(__name__) -def test_error_code_use_case(caplog: pytest.LogCaptureFixture): - """use case for error-codes""" +def _level_three(v): + msg = f"An error occurred in level three with {v}" + raise RuntimeError(msg) + + +def _level_two(v): + _level_three(v) + + +def _level_one(v=None): + _level_two(v) + + +def test_exception_fingerprint_consistency(): + error_codes = [] + + for v in range(2): + # emulates different runs of the same function (e.g. 
different sessions) + try: + _level_one(v) # same even if different value! + except Exception as err: + time.sleep(1) + error_code = create_error_code(err) + error_codes.append(error_code) + + fingerprints, timestamps = list( + zip( + *[parse_error_code_parts(error_code) for error_code in error_codes], + strict=True, + ) + ) + + assert fingerprints[0] == fingerprints[1] + assert timestamps[0] < timestamps[1] + + try: + # Same function but different location + _level_one(0) + except Exception as e2: + time.sleep(1) + error_code_2 = create_error_code(e2) + fingerprint_2, timestamp_2 = parse_error_code_parts(error_code_2) + + assert fingerprints[0] != fingerprint_2 + assert timestamps[1] < timestamp_2 + + +def test_create_log_and_parse_error_code(caplog: pytest.LogCaptureFixture): with pytest.raises(RuntimeError) as exc_info: - raise RuntimeError("Something unexpected went wrong") + _level_one() # 1. Unexpected ERROR err = exc_info.value @@ -33,11 +84,11 @@ def test_error_code_use_case(caplog: pytest.LogCaptureFixture): logger.exception("Fake Unexpected error", extra={"error_code": error_code}) # logs something like E.g. 2022-07-06 14:31:13,432 OEC:140350117529856 : Fake Unexpected error - assert parse_error_code( + assert parse_error_codes( f"2022-07-06 14:31:13,432 {error_code} : Fake Unexpected error" - ) == { + ) == [ error_code, - } + ] assert caplog.records[0].error_code == error_code assert caplog.records[0] @@ -49,6 +100,6 @@ def test_error_code_use_case(caplog: pytest.LogCaptureFixture): f"This is a user-friendly message to inform about an error. [{error_code}]" ) - assert parse_error_code(user_message) == { + assert parse_error_codes(user_message) == [ error_code, - } + ] diff --git a/packages/common-library/tests/test_pagination_tools.py b/packages/common-library/tests/test_pagination_tools.py new file mode 100644 index 00000000000..56127c038a3 --- /dev/null +++ b/packages/common-library/tests/test_pagination_tools.py @@ -0,0 +1,88 @@ +# pylint: disable=redefined-outer-name +# pylint: disable=unused-argument +# pylint: disable=unused-variable +# pylint: disable=too-many-arguments + +import asyncio +from collections.abc import Callable + +import pytest +from common_library.pagination_tools import iter_pagination_params +from pydantic import ValidationError + + +@pytest.fixture +def all_items() -> list[int]: + return list(range(11)) + + +@pytest.fixture +async def get_page(all_items: list[int]) -> Callable: + async def _get_page(offset, limit) -> tuple[list[int], int]: + await asyncio.sleep(0) + return all_items[offset : offset + limit], len(all_items) + + return _get_page + + +@pytest.mark.parametrize("limit", [2, 3, 5]) +@pytest.mark.parametrize("offset", [0, 1, 5]) +async def test_iter_pages_args( + limit: int, offset: int, get_page: Callable, all_items: list[int] +): + + last_page = [None] * limit + + num_items = len(all_items) - offset + expected_num_pages = num_items // limit + (1 if num_items % limit else 0) + + num_pages = 0 + page_args = None + for page_index, page_args in enumerate(iter_pagination_params(offset, limit)): + + page_items, page_args.total_number_of_items = await get_page( + page_args.offset_current, page_args.limit + ) + + assert set(last_page) != set(page_items) + last_page = list(page_items) + + # contains sub-sequence + assert str(page_items)[1:-1] in str(all_items)[1:-1] + + num_pages = page_index + 1 + + assert last_page[-1] == all_items[-1] + assert num_pages == expected_num_pages + + assert page_args is not None + assert not page_args.has_items_left() + 
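+ # NOTE: worked example with the parametrized values: 11 items, offset=1 and + # limit=5 -> ceil((11 - 1) / 5) = 2 pages, which is exactly what + # total_number_of_pages() must report below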
assert page_args.total_number_of_pages() == num_pages + + +@pytest.mark.parametrize("limit", [-1, 0]) +@pytest.mark.parametrize("offset", [-1]) +def test_iter_pages_args_invalid(limit: int, offset: int): + + with pytest.raises(ValidationError): # noqa: PT012 + for _ in iter_pagination_params(offset=offset, limit=limit): + pass + + +def test_fails_if_total_number_of_items_not_set(): + with pytest.raises( # noqa: PT012 + RuntimeError, + match="page_args.total_number_of_items = total_count", + ): + for _ in iter_pagination_params(limit=2): + pass + + +def test_fails_if_total_number_of_items_changes(): + with pytest.raises( # noqa: PT012 + RuntimeError, + match="total_number_of_items cannot change on every iteration", + ): + for page_params in iter_pagination_params(limit=2, total_number_of_items=4): + assert page_params.total_number_of_items == 4 + page_params.total_number_of_items += 1 diff --git a/packages/dask-task-models-library/requirements/_base.txt b/packages/dask-task-models-library/requirements/_base.txt index 57200739ea7..fb7c4596721 100644 --- a/packages/dask-task-models-library/requirements/_base.txt +++ b/packages/dask-task-models-library/requirements/_base.txt @@ -15,17 +15,17 @@ cloudpickle==3.1.1 # via # dask # distributed -dask==2025.1.0 +dask==2025.2.0 # via # -r requirements/_base.in # distributed -distributed==2025.1.0 +distributed==2025.2.0 # via dask dnspython==2.7.0 # via email-validator email-validator==2.2.0 # via pydantic -fsspec==2024.12.0 +fsspec==2025.2.0 # via dask idna==3.10 # via email-validator @@ -74,7 +74,7 @@ packaging==24.2 # distributed partd==1.4.2 # via dask -psutil==6.1.1 +psutil==7.0.0 # via distributed pydantic==2.10.6 # via @@ -100,8 +100,14 @@ pydantic-extra-types==2.10.2 # -r requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in -pydantic-settings==2.7.1 +pydantic-settings==2.7.0 # via + # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt # -r requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/settings-library/requirements/_base.in pygments==2.19.1 @@ -134,7 +140,7 @@ rich==13.9.4 # via # -r requirements/../../../packages/settings-library/requirements/_base.in # typer -rpds-py==0.22.3 +rpds-py==0.23.1 # via # jsonschema # referencing @@ -153,7 +159,7 @@ toolz==1.0.0 # partd tornado==6.4.2 # via distributed -typer==0.15.1 +typer==0.15.2 # via -r requirements/../../../packages/settings-library/requirements/_base.in types-python-dateutil==2.9.0.20241206 # via arrow diff --git a/packages/dask-task-models-library/requirements/_test.txt b/packages/dask-task-models-library/requirements/_test.txt index 01163ab9a8a..b7dd17de3ca 100644 --- 
a/packages/dask-task-models-library/requirements/_test.txt +++ b/packages/dask-task-models-library/requirements/_test.txt @@ -1,8 +1,8 @@ -coverage==7.6.10 +coverage==7.6.12 # via # -r requirements/_test.in # pytest-cov -faker==35.0.0 +faker==36.1.1 # via -r requirements/_test.in flexcache==0.3 # via pint @@ -25,7 +25,7 @@ pluggy==1.5.0 # via pytest pprintpp==0.4.0 # via pytest-icdiff -pytest==8.3.4 +pytest==8.3.5 # via # -r requirements/_test.in # pytest-asyncio @@ -50,25 +50,18 @@ pytest-runner==6.0.1 # via -r requirements/_test.in pytest-sugar==1.0.0 # via -r requirements/_test.in -python-dateutil==2.9.0.post0 - # via - # -c requirements/_base.txt - # faker pyyaml==6.0.2 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt # -r requirements/_test.in -six==1.17.0 - # via - # -c requirements/_base.txt - # python-dateutil termcolor==2.5.0 # via pytest-sugar typing-extensions==4.12.2 # via # -c requirements/_base.txt - # faker # flexcache # flexparser # pint +tzdata==2025.1 + # via faker diff --git a/packages/dask-task-models-library/requirements/_tools.txt b/packages/dask-task-models-library/requirements/_tools.txt index 6a78930fd4d..dd6e9ca789e 100644 --- a/packages/dask-task-models-library/requirements/_tools.txt +++ b/packages/dask-task-models-library/requirements/_tools.txt @@ -1,6 +1,6 @@ astroid==3.3.8 # via pylint -black==24.10.0 +black==25.1.0 # via -r requirements/../../../requirements/devenv.txt build==1.2.2.post1 # via pip-tools @@ -19,15 +19,15 @@ distlib==0.3.9 # via virtualenv filelock==3.17.0 # via virtualenv -identify==2.6.6 +identify==2.6.8 # via pre-commit -isort==5.13.2 +isort==6.0.1 # via # -r requirements/../../../requirements/devenv.txt # pylint mccabe==0.7.0 # via pylint -mypy==1.14.1 +mypy==1.15.0 # via -r requirements/../../../requirements/devenv.txt mypy-extensions==1.0.0 # via @@ -43,7 +43,7 @@ packaging==24.2 # build pathspec==0.12.1 # via black -pip==25.0 +pip==25.0.1 # via pip-tools pip-tools==7.4.1 # via -r requirements/../../../requirements/devenv.txt @@ -55,7 +55,7 @@ platformdirs==4.3.6 # virtualenv pre-commit==4.1.0 # via -r requirements/../../../requirements/devenv.txt -pylint==3.3.3 +pylint==3.3.4 # via -r requirements/../../../requirements/devenv.txt pyproject-hooks==1.2.0 # via @@ -67,9 +67,9 @@ pyyaml==6.0.2 # -c requirements/_base.txt # -c requirements/_test.txt # pre-commit -ruff==0.9.3 +ruff==0.9.9 # via -r requirements/../../../requirements/devenv.txt -setuptools==75.8.0 +setuptools==75.8.2 # via pip-tools tomlkit==0.13.2 # via pylint @@ -78,7 +78,7 @@ typing-extensions==4.12.2 # -c requirements/_base.txt # -c requirements/_test.txt # mypy -virtualenv==20.29.1 +virtualenv==20.29.2 # via pre-commit wheel==0.45.1 # via pip-tools diff --git a/packages/models-library/requirements/_base.txt b/packages/models-library/requirements/_base.txt index 7e40be8811c..9daa42c4b0a 100644 --- a/packages/models-library/requirements/_base.txt +++ b/packages/models-library/requirements/_base.txt @@ -36,8 +36,11 @@ pydantic-extra-types==2.10.2 # via # -r requirements/../../../packages/common-library/requirements/_base.in # -r requirements/_base.in -pydantic-settings==2.7.1 - # via -r requirements/_base.in +pydantic-settings==2.7.0 + # via + # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt + # -r requirements/_base.in python-dateutil==2.9.0.post0 # via arrow python-dotenv==1.0.1 @@ -48,7 +51,7 @@ referencing==0.35.1 # -c 
requirements/../../../requirements/constraints.txt # jsonschema # jsonschema-specifications -rpds-py==0.22.3 +rpds-py==0.23.1 # via # jsonschema # referencing diff --git a/packages/models-library/requirements/_test.txt b/packages/models-library/requirements/_test.txt index 3709bb4012a..fe635c77660 100644 --- a/packages/models-library/requirements/_test.txt +++ b/packages/models-library/requirements/_test.txt @@ -2,11 +2,11 @@ attrs==25.1.0 # via # -c requirements/_base.txt # referencing -coverage==7.6.10 +coverage==7.6.12 # via # -r requirements/_test.in # pytest-cov -faker==35.0.0 +faker==36.1.1 # via -r requirements/_test.in flexcache==0.3 # via pint @@ -34,11 +34,11 @@ pluggy==1.5.0 # via pytest pprintpp==0.4.0 # via pytest-icdiff -propcache==0.2.1 +propcache==0.3.0 # via yarl -psutil==6.1.1 +psutil==7.0.0 # via -r requirements/_test.in -pytest==8.3.4 +pytest==8.3.5 # via # -r requirements/_test.in # pytest-asyncio @@ -63,10 +63,6 @@ pytest-runner==6.0.1 # via -r requirements/_test.in pytest-sugar==1.0.0 # via -r requirements/_test.in -python-dateutil==2.9.0.post0 - # via - # -c requirements/_base.txt - # faker python-dotenv==1.0.1 # via # -c requirements/_base.txt @@ -80,14 +76,10 @@ referencing==0.35.1 # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt # types-jsonschema -rpds-py==0.22.3 +rpds-py==0.23.1 # via # -c requirements/_base.txt # referencing -six==1.17.0 - # via - # -c requirements/_base.txt - # python-dateutil termcolor==2.5.0 # via pytest-sugar types-jsonschema==4.23.0.20241208 @@ -97,9 +89,10 @@ types-pyyaml==6.0.12.20241230 typing-extensions==4.12.2 # via # -c requirements/_base.txt - # faker # flexcache # flexparser # pint +tzdata==2025.1 + # via faker yarl==1.18.3 # via -r requirements/_test.in diff --git a/packages/models-library/requirements/_tools.txt b/packages/models-library/requirements/_tools.txt index 0a14a6571dd..3ae7f8fc714 100644 --- a/packages/models-library/requirements/_tools.txt +++ b/packages/models-library/requirements/_tools.txt @@ -1,6 +1,6 @@ astroid==3.3.8 # via pylint -black==24.10.0 +black==25.1.0 # via -r requirements/../../../requirements/devenv.txt build==1.2.2.post1 # via pip-tools @@ -19,9 +19,9 @@ distlib==0.3.9 # via virtualenv filelock==3.17.0 # via virtualenv -identify==2.6.6 +identify==2.6.8 # via pre-commit -isort==5.13.2 +isort==6.0.1 # via # -r requirements/../../../requirements/devenv.txt # pylint @@ -31,7 +31,7 @@ mccabe==0.7.0 # via pylint mdurl==0.1.2 # via markdown-it-py -mypy==1.14.1 +mypy==1.15.0 # via -r requirements/../../../requirements/devenv.txt mypy-extensions==1.0.0 # via @@ -46,7 +46,7 @@ packaging==24.2 # build pathspec==0.12.1 # via black -pip==25.0 +pip==25.0.1 # via pip-tools pip-tools==7.4.1 # via -r requirements/../../../requirements/devenv.txt @@ -60,7 +60,7 @@ pre-commit==4.1.0 # via -r requirements/../../../requirements/devenv.txt pygments==2.19.1 # via rich -pylint==3.3.3 +pylint==3.3.4 # via -r requirements/../../../requirements/devenv.txt pyproject-hooks==1.2.0 # via @@ -73,15 +73,15 @@ pyyaml==6.0.2 # pre-commit rich==13.9.4 # via typer -ruff==0.9.3 +ruff==0.9.9 # via -r requirements/../../../requirements/devenv.txt -setuptools==75.8.0 +setuptools==75.8.2 # via pip-tools shellingham==1.5.4 # via typer tomlkit==0.13.2 # via pylint -typer==0.15.1 +typer==0.15.2 # via -r requirements/_tools.in typing-extensions==4.12.2 # via @@ -89,7 +89,7 @@ typing-extensions==4.12.2 # -c requirements/_test.txt # mypy # typer -virtualenv==20.29.1 +virtualenv==20.29.2 # via pre-commit 
wheel==0.45.1 # via pip-tools diff --git a/packages/models-library/src/models_library/access_rights.py b/packages/models-library/src/models_library/access_rights.py index a6cea15a946..a78ba105ac8 100644 --- a/packages/models-library/src/models_library/access_rights.py +++ b/packages/models-library/src/models_library/access_rights.py @@ -1,9 +1,18 @@ +from typing import Annotated + from pydantic import BaseModel, ConfigDict, Field class AccessRights(BaseModel): - read: bool = Field(..., description="has read access") - write: bool = Field(..., description="has write access") - delete: bool = Field(..., description="has deletion rights") + read: Annotated[bool, Field(description="has read access")] + write: Annotated[bool, Field(description="has write access")] + delete: Annotated[bool, Field(description="has deletion rights")] + + model_config = ConfigDict(extra="forbid") + + +class ExecutableAccessRights(BaseModel): + write: Annotated[bool, Field(description="can change executable settings")] + execute: Annotated[bool, Field(description="can run executable")] model_config = ConfigDict(extra="forbid") diff --git a/packages/models-library/src/models_library/api_schemas_catalog/_base.py b/packages/models-library/src/models_library/api_schemas_catalog/_base.py new file mode 100644 index 00000000000..35930723500 --- /dev/null +++ b/packages/models-library/src/models_library/api_schemas_catalog/_base.py @@ -0,0 +1,9 @@ +from pydantic import BaseModel + + +class CatalogInputSchema(BaseModel): + ... + + +class CatalogOutputSchema(BaseModel): + ... diff --git a/packages/models-library/src/models_library/api_schemas_catalog/services.py b/packages/models-library/src/models_library/api_schemas_catalog/services.py index c2551c43cb2..a80490fad36 100644 --- a/packages/models-library/src/models_library/api_schemas_catalog/services.py +++ b/packages/models-library/src/models_library/api_schemas_catalog/services.py @@ -1,8 +1,10 @@ from datetime import datetime -from typing import Any, TypeAlias +from typing import Annotated, Any, TypeAlias +from common_library.basic_types import DEFAULT_FACTORY from models_library.rpc_pagination import PageRpc -from pydantic import BaseModel, ConfigDict, Field, HttpUrl, NonNegativeInt +from pydantic import ConfigDict, Field, HttpUrl, NonNegativeInt +from pydantic.config import JsonDict from ..boot_options import BootOptions from ..emails import LowerCaseEmailStr @@ -20,60 +22,7 @@ from ..services_resources import ServiceResourcesDict from ..services_types import ServiceKey, ServiceVersion from ..utils.change_case import snake_to_camel - - -class ServiceUpdate(ServiceMetaDataEditable, ServiceAccessRights): - model_config = ConfigDict( - json_schema_extra={ - "example": { - # ServiceAccessRights - "accessRights": { - 1: { - "execute_access": False, - "write_access": False, - }, # type: ignore[dict-item] - 2: { - "execute_access": True, - "write_access": True, - }, # type: ignore[dict-item] - 44: { - "execute_access": False, - "write_access": False, - }, # type: ignore[dict-item] - }, - # ServiceMetaData = ServiceCommonData + - "name": "My Human Readable Service Name", - "thumbnail": None, - "description": "An interesting service that does something", - "classifiers": ["RRID:SCR_018997", "RRID:SCR_019001"], - "quality": { - "tsr": { - "r01": {"level": 3, "references": ""}, - "r02": {"level": 2, "references": ""}, - "r03": {"level": 0, "references": ""}, - "r04": {"level": 0, "references": ""}, - "r05": {"level": 2, "references": ""}, - "r06": {"level": 0, "references": ""}, - 
"r07": {"level": 0, "references": ""}, - "r08": {"level": 1, "references": ""}, - "r09": {"level": 0, "references": ""}, - "r10": {"level": 0, "references": ""}, - }, - "enabled": True, - "annotations": { - "vandv": "", - "purpose": "", - "standards": "", - "limitations": "", - "documentation": "", - "certificationLink": "", - "certificationStatus": "Uncertified", - }, - }, - } - } - ) - +from ._base import CatalogInputSchema, CatalogOutputSchema _EXAMPLE_FILEPICKER: dict[str, Any] = { "name": "File Picker", @@ -124,6 +73,7 @@ class ServiceUpdate(ServiceMetaDataEditable, ServiceAccessRights): "thumbnail": None, "description": "A service which awaits for time to pass, two times.", "description_ui": True, + "icon": "https://cdn-icons-png.flaticon.com/512/25/25231.png", "classifiers": [], "quality": {}, "accessRights": {"1": {"execute": True, "write": False}}, @@ -205,36 +155,44 @@ class ServiceUpdate(ServiceMetaDataEditable, ServiceAccessRights): class ServiceGet( ServiceMetaDataPublished, ServiceAccessRights, ServiceMetaDataEditable ): # pylint: disable=too-many-ancestors - owner: LowerCaseEmailStr | None = Field( - description="None when the owner email cannot be found in the database" - ) + owner: Annotated[ + LowerCaseEmailStr | None, + Field(description="None when the owner email cannot be found in the database"), + ] + + @staticmethod + def _update_json_schema_extra(schema: JsonDict) -> None: + schema.update({"examples": [_EXAMPLE_FILEPICKER, _EXAMPLE_SLEEPER]}) model_config = ConfigDict( extra="ignore", populate_by_name=True, - json_schema_extra={"examples": [_EXAMPLE_FILEPICKER, _EXAMPLE_SLEEPER]}, + json_schema_extra=_update_json_schema_extra, ) -class ServiceGetV2(BaseModel): +class _BaseServiceGetV2(CatalogOutputSchema): + # Model used in catalog's rpc and rest interfaces key: ServiceKey version: ServiceVersion name: str thumbnail: HttpUrl | None = None + icon: HttpUrl | None = None description: str description_ui: bool = False version_display: str | None = None - service_type: ServiceType = Field(default=..., alias="type") + service_type: Annotated[ServiceType, Field(alias="type")] contact: LowerCaseEmailStr | None - authors: list[Author] = Field(..., min_length=1) - owner: LowerCaseEmailStr | None = Field( - description="None when the owner email cannot be found in the database" - ) + authors: Annotated[list[Author], Field(min_length=1)] + owner: Annotated[ + LowerCaseEmailStr | None, + Field(description="None when the owner email cannot be found in the database"), + ] inputs: ServiceInputsDict outputs: ServiceOutputsDict @@ -244,86 +202,139 @@ class ServiceGetV2(BaseModel): access_rights: dict[GroupID, ServiceGroupAccessRightsV2] | None - classifiers: list[str] | None = [] - quality: dict[str, Any] = {} + classifiers: Annotated[ + list[str] | None, + Field(default_factory=list), + ] = DEFAULT_FACTORY - history: list[ServiceRelease] = Field( - default_factory=list, - description="history of releases for this service at this point in time, starting from the newest to the oldest." 
- " It includes current release.", - json_schema_extra={"default": []}, - ) + quality: Annotated[ + dict[str, Any], + Field(default_factory=dict), + ] = DEFAULT_FACTORY model_config = ConfigDict( extra="forbid", populate_by_name=True, alias_generator=snake_to_camel, - json_schema_extra={ - "examples": [ - { - **_EXAMPLE_SLEEPER, # v2.2.1 (latest) - "history": [ - { + ) + + +class LatestServiceGet(_BaseServiceGetV2): + release: Annotated[ + ServiceRelease, + Field(description="release information of current (latest) service"), + ] + + @staticmethod + def _update_json_schema_extra(schema: JsonDict) -> None: + schema.update( + { + "examples": [ + { + **_EXAMPLE_SLEEPER, # v2.2.1 (latest) + "release": { "version": _EXAMPLE_SLEEPER["version"], "version_display": "Summer Release", - "released": "2024-07-20T15:00:00", + "released": "2025-07-20T15:00:00", }, - { - "version": "2.0.0", - "compatibility": { - "canUpdateTo": {"version": _EXAMPLE_SLEEPER["version"]}, + } + ] + } + ) + + model_config = ConfigDict( + json_schema_extra=_update_json_schema_extra, + ) + + +class ServiceGetV2(_BaseServiceGetV2): + # Model used in catalog's rpc and rest interfaces + history: Annotated[ + list[ServiceRelease], + Field( + default_factory=list, + description="history of releases for this service at this point in time, starting from the newest to the oldest." + " It includes current release.", + json_schema_extra={"default": []}, + ), + ] = DEFAULT_FACTORY + + @staticmethod + def _update_json_schema_extra(schema: JsonDict) -> None: + schema.update( + { + "examples": [ + { + **_EXAMPLE_SLEEPER, # v2.2.1 (latest) + "history": [ + { + "version": _EXAMPLE_SLEEPER["version"], + "version_display": "Summer Release", + "released": "2024-07-21T15:00:00", }, - }, - {"version": "0.9.11"}, - {"version": "0.9.10"}, - { - "version": "0.9.8", - "compatibility": { - "canUpdateTo": {"version": "0.9.11"}, + { + "version": "2.0.0", + "compatibility": { + "canUpdateTo": { + "version": _EXAMPLE_SLEEPER["version"] + }, + }, }, - }, - { - "version": "0.9.1", - "versionDisplay": "Matterhorn", - "released": "2024-01-20T18:49:17", - "compatibility": { - "can_update_to": {"version": "0.9.11"}, + {"version": "0.9.11"}, + {"version": "0.9.10"}, + { + "version": "0.9.8", + "compatibility": { + "canUpdateTo": {"version": "0.9.11"}, + }, }, - }, - { - "version": "0.9.0", - "retired": "2024-07-20T15:00:00", - }, - {"version": "0.8.0"}, - {"version": "0.1.0"}, - ], - }, - { - **_EXAMPLE_FILEPICKER_V2, - "history": [ - { - "version": _EXAMPLE_FILEPICKER_V2["version"], - "version_display": "Odei Release", - "released": "2025-03-25T00:00:00", - } - ], - }, - ] - }, + { + "version": "0.9.1", + "versionDisplay": "Matterhorn", + "released": "2024-01-20T18:49:17", + "compatibility": { + "can_update_to": {"version": "0.9.11"}, + }, + }, + { + "version": "0.9.0", + "retired": "2024-07-20T16:00:00", + }, + {"version": "0.8.0"}, + {"version": "0.1.0"}, + ], + }, + { + **_EXAMPLE_FILEPICKER_V2, + "history": [ + { + "version": _EXAMPLE_FILEPICKER_V2["version"], + "version_display": "Odei Release", + "released": "2025-03-25T00:00:00", + } + ], + }, + ] + } + ) + + model_config = ConfigDict( + json_schema_extra=_update_json_schema_extra, ) PageRpcServicesGetV2: TypeAlias = PageRpc[ # WARNING: keep this definition in models_library and not in the RPC interface - ServiceGetV2 + LatestServiceGet ] ServiceResourcesGet: TypeAlias = ServiceResourcesDict -class ServiceUpdateV2(BaseModel): +class ServiceUpdateV2(CatalogInputSchema): name: str | None = None thumbnail: 
HttpUrl | None = None + icon: HttpUrl | None = None description: str | None = None description_ui: bool = False @@ -346,3 +357,11 @@ class ServiceUpdateV2(BaseModel): assert set(ServiceUpdateV2.model_fields.keys()) - set( # nosec ServiceGetV2.model_fields.keys() ) == {"deprecated"} + + +class MyServiceGet(CatalogOutputSchema): + key: ServiceKey + release: ServiceRelease + + owner: GroupID | None + my_access_rights: ServiceGroupAccessRightsV2 diff --git a/services/datcore-adapter/src/simcore_service_datcore_adapter/api/errors/__init__.py b/packages/models-library/src/models_library/api_schemas_datcore_adapter/__init__.py similarity index 100% rename from services/datcore-adapter/src/simcore_service_datcore_adapter/api/errors/__init__.py rename to packages/models-library/src/models_library/api_schemas_datcore_adapter/__init__.py diff --git a/packages/models-library/src/models_library/api_schemas_datcore_adapter/datasets.py b/packages/models-library/src/models_library/api_schemas_datcore_adapter/datasets.py new file mode 100644 index 00000000000..16d67cb8ddd --- /dev/null +++ b/packages/models-library/src/models_library/api_schemas_datcore_adapter/datasets.py @@ -0,0 +1,45 @@ +from datetime import datetime +from enum import Enum, unique +from pathlib import Path +from typing import Annotated + +from pydantic import BaseModel, ByteSize, Field + + +class DatasetMetaData(BaseModel): + id: str + display_name: str + size: Annotated[ + ByteSize | None, Field(description="Size of the dataset in bytes if available") + ] + + +@unique +class DataType(str, Enum): + FILE = "FILE" + FOLDER = "FOLDER" + + +class PackageMetaData(BaseModel): + path: Path + display_path: Path + package_id: str + name: str + filename: str + s3_bucket: str + size: ByteSize + created_at: datetime + updated_at: datetime + + +class FileMetaData(BaseModel): + dataset_id: str + package_id: str + id: str + name: str + type: str + path: Path + size: int + created_at: datetime + last_modified_at: datetime + data_type: DataType diff --git a/packages/models-library/src/models_library/api_schemas_long_running_tasks/tasks.py b/packages/models-library/src/models_library/api_schemas_long_running_tasks/tasks.py index b5a8d8443b9..acd73831b22 100644 --- a/packages/models-library/src/models_library/api_schemas_long_running_tasks/tasks.py +++ b/packages/models-library/src/models_library/api_schemas_long_running_tasks/tasks.py @@ -10,7 +10,7 @@ class TaskStatus(BaseModel): task_progress: TaskProgress done: bool - started: datetime + started: datetime | None class TaskResult(BaseModel): diff --git a/packages/models-library/src/models_library/api_schemas_resource_usage_tracker/licensed_items_checkouts.py b/packages/models-library/src/models_library/api_schemas_resource_usage_tracker/licensed_items_checkouts.py index bdf578bfe82..8257aa35186 100644 --- a/packages/models-library/src/models_library/api_schemas_resource_usage_tracker/licensed_items_checkouts.py +++ b/packages/models-library/src/models_library/api_schemas_resource_usage_tracker/licensed_items_checkouts.py @@ -1,7 +1,7 @@ from datetime import datetime from typing import NamedTuple -from models_library.licensed_items import LicensedItemID +from models_library.licenses import LicensedItemID, LicensedItemKey, LicensedItemVersion from models_library.products import ProductName from models_library.resource_tracker_licensed_items_checkouts import ( LicensedItemCheckoutID, @@ -15,6 +15,8 @@ class LicensedItemCheckoutGet(BaseModel): licensed_item_checkout_id: LicensedItemCheckoutID 
licensed_item_id: LicensedItemID + key: LicensedItemKey + version: LicensedItemVersion wallet_id: WalletID user_id: UserID user_email: str @@ -30,6 +32,8 @@ class LicensedItemCheckoutGet(BaseModel): { "licensed_item_checkout_id": "beb16d18-d57d-44aa-a638-9727fa4a72ef", "licensed_item_id": "303942ef-6d31-4ba8-afbe-dbb1fce2a953", + "key": "Duke", + "version": "1.0.0", "wallet_id": 1, "user_id": 1, "user_email": "test@test.com", diff --git a/packages/models-library/src/models_library/api_schemas_resource_usage_tracker/licensed_items_purchases.py b/packages/models-library/src/models_library/api_schemas_resource_usage_tracker/licensed_items_purchases.py index f6a288cb126..e9ee9e4ae67 100644 --- a/packages/models-library/src/models_library/api_schemas_resource_usage_tracker/licensed_items_purchases.py +++ b/packages/models-library/src/models_library/api_schemas_resource_usage_tracker/licensed_items_purchases.py @@ -2,7 +2,7 @@ from decimal import Decimal from typing import NamedTuple -from models_library.licensed_items import LicensedItemID +from models_library.licenses import LicensedItemID, LicensedItemKey, LicensedItemVersion from models_library.products import ProductName from models_library.resource_tracker import PricingUnitCostId from models_library.resource_tracker_licensed_items_purchases import ( @@ -17,6 +17,8 @@ class LicensedItemPurchaseGet(BaseModel): licensed_item_purchase_id: LicensedItemPurchaseID product_name: ProductName licensed_item_id: LicensedItemID + key: LicensedItemKey + version: LicensedItemVersion wallet_id: WalletID wallet_name: str pricing_unit_cost_id: PricingUnitCostId @@ -36,6 +38,8 @@ class LicensedItemPurchaseGet(BaseModel): "licensed_item_purchase_id": "beb16d18-d57d-44aa-a638-9727fa4a72ef", "product_name": "osparc", "licensed_item_id": "303942ef-6d31-4ba8-afbe-dbb1fce2a953", + "key": "Duke", + "version": "1.0.0", "wallet_id": 1, "wallet_name": "My Wallet", "pricing_unit_cost_id": 1, diff --git a/packages/models-library/src/models_library/api_schemas_resource_usage_tracker/pricing_plans.py b/packages/models-library/src/models_library/api_schemas_resource_usage_tracker/pricing_plans.py index 0fd494dc998..08696b5b61c 100644 --- a/packages/models-library/src/models_library/api_schemas_resource_usage_tracker/pricing_plans.py +++ b/packages/models-library/src/models_library/api_schemas_resource_usage_tracker/pricing_plans.py @@ -2,7 +2,7 @@ from decimal import Decimal from typing import NamedTuple -from pydantic import BaseModel, ConfigDict, PositiveInt +from pydantic import BaseModel, ConfigDict, PositiveInt, model_validator from ..resource_tracker import ( HardwareInfo, @@ -10,15 +10,16 @@ PricingPlanId, PricingUnitCostId, PricingUnitId, - UnitExtraInfo, + UnitExtraInfoLicense, + UnitExtraInfoTier, ) from ..services_types import ServiceKey, ServiceVersion -class PricingUnitGet(BaseModel): +class RutPricingUnitGet(BaseModel): pricing_unit_id: PricingUnitId unit_name: str - unit_extra_info: UnitExtraInfo + unit_extra_info: UnitExtraInfoTier | UnitExtraInfoLicense current_cost_per_unit: Decimal current_cost_per_unit_id: PricingUnitCostId default: bool @@ -30,30 +31,68 @@ class PricingUnitGet(BaseModel): { "pricing_unit_id": 1, "unit_name": "SMALL", - "unit_extra_info": UnitExtraInfo.model_config["json_schema_extra"]["examples"][0], # type: ignore [index] + "unit_extra_info": UnitExtraInfoTier.model_config["json_schema_extra"]["examples"][0], # type: ignore [index] "current_cost_per_unit": 5.7, "current_cost_per_unit_id": 1, "default": True, - "specific_info": 
hw_config_example, - } - for hw_config_example in HardwareInfo.model_config["json_schema_extra"][ - "examples" - ] # type: ignore[index,union-attr] + "specific_info": HardwareInfo.model_config["json_schema_extra"]["examples"][0], # type: ignore [index] + }, + { + "pricing_unit_id": 1, + "unit_name": "SMALL", + "unit_extra_info": UnitExtraInfoTier.model_config["json_schema_extra"]["examples"][0], # type: ignore [index] + "current_cost_per_unit": 5.7, + "current_cost_per_unit_id": 1, + "default": True, + "specific_info": HardwareInfo.model_config["json_schema_extra"]["examples"][1], # type: ignore [index] + }, + { + "pricing_unit_id": 2, + "unit_name": "5 seats", + "unit_extra_info": UnitExtraInfoLicense.model_config["json_schema_extra"]["examples"][0], # type: ignore [index] + "current_cost_per_unit": 10.5, + "current_cost_per_unit_id": 2, + "default": False, + "specific_info": HardwareInfo.model_config["json_schema_extra"]["examples"][1], # type: ignore [index] + }, ] } ) -class PricingPlanGet(BaseModel): +class RutPricingPlanGet(BaseModel): pricing_plan_id: PricingPlanId display_name: str description: str classification: PricingPlanClassification created_at: datetime pricing_plan_key: str - pricing_units: list[PricingUnitGet] | None + pricing_units: list[RutPricingUnitGet] | None is_active: bool + @model_validator(mode="after") + def ensure_classification_matches_extra_info(self): + """Enforce that each RutPricingUnitGet.unit_extra_info matches the plan's classification.""" + if not self.pricing_units: + return self # No units to check + + for unit in self.pricing_units: + if ( + self.classification == PricingPlanClassification.TIER + and not isinstance(unit.unit_extra_info, UnitExtraInfoTier) + ): + error_message = ( + "For TIER classification, unit_extra_info must be UnitExtraInfoTier" + ) + raise ValueError(error_message) + if ( + self.classification == PricingPlanClassification.LICENSE + and not isinstance(unit.unit_extra_info, UnitExtraInfoLicense) + ): + error_message = "For LICENSE classification, unit_extra_info must be UnitExtraInfoLicense" + raise ValueError(error_message) + return self + model_config = ConfigDict( json_schema_extra={ "examples": [ @@ -64,21 +103,48 @@ class PricingPlanGet(BaseModel): "classification": "TIER", "created_at": "2023-01-11 13:11:47.293595", "pricing_plan_key": "pricing-plan-sleeper", - "pricing_units": [pricing_unit_get_example], + "pricing_units": [ + RutPricingUnitGet.model_config["json_schema_extra"]["examples"][ # type: ignore [index] + 0 # type: ignore [index] + ] + ], "is_active": True, - } - for pricing_unit_get_example in PricingUnitGet.model_config[ - "json_schema_extra" - ][ - "examples" - ] # type: ignore[index,union-attr] + }, + { + "pricing_plan_id": 1, + "display_name": "Pricing Plan for Sleeper", + "description": "Special Pricing Plan for Sleeper", + "classification": "TIER", + "created_at": "2023-01-11 13:11:47.293595", + "pricing_plan_key": "pricing-plan-sleeper", + "pricing_units": [ + RutPricingUnitGet.model_config["json_schema_extra"]["examples"][ # type: ignore [index] + 1 # type: ignore [index] + ] + ], + "is_active": True, + }, + { + "pricing_plan_id": 2, + "display_name": "VIP model A", + "description": "Special Pricing Plan for VIP", + "classification": "LICENSE", + "created_at": "2023-01-11 13:11:47.293595", + "pricing_plan_key": "vip-model-a", + "pricing_units": [ + RutPricingUnitGet.model_config["json_schema_extra"]["examples"][ # type: ignore [index] + 2 # type: ignore [index] + ] + ], + "is_active": True, + }, ] } )
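An editorial sketch (not part of this change-set) of the contract the validator above enforces; it assumes the LICENSE example payload parses its unit_extra_info as UnitExtraInfoLicense rather than also satisfying UnitExtraInfoTier:

    from pydantic import ValidationError

    license_example = RutPricingPlanGet.model_config["json_schema_extra"]["examples"][2]
    plan = RutPricingPlanGet.model_validate(license_example)  # LICENSE plan with license units: accepted

    try:
        RutPricingPlanGet.model_validate({**license_example, "classification": "TIER"})
    except ValidationError:
        pass  # a TIER plan carrying UnitExtraInfoLicense units is rejected

-class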
PricingPlanPage(NamedTuple): - items: list[PricingPlanGet] +class RutPricingPlanPage(NamedTuple): + items: list[RutPricingPlanGet] total: PositiveInt diff --git a/services/datcore-adapter/src/simcore_service_datcore_adapter/api/routes/__init__.py b/packages/models-library/src/models_library/api_schemas_rpc_async_jobs/__init__.py similarity index 100% rename from services/datcore-adapter/src/simcore_service_datcore_adapter/api/routes/__init__.py rename to packages/models-library/src/models_library/api_schemas_rpc_async_jobs/__init__.py diff --git a/packages/models-library/src/models_library/api_schemas_rpc_async_jobs/async_jobs.py b/packages/models-library/src/models_library/api_schemas_rpc_async_jobs/async_jobs.py new file mode 100644 index 00000000000..3fb24ae952d --- /dev/null +++ b/packages/models-library/src/models_library/api_schemas_rpc_async_jobs/async_jobs.py @@ -0,0 +1,35 @@ +from typing import Any, TypeAlias +from uuid import UUID + +from models_library.users import UserID +from pydantic import BaseModel + +from ..progress_bar import ProgressReport + +AsyncJobId: TypeAlias = UUID + + +class AsyncJobStatus(BaseModel): + job_id: AsyncJobId + progress: ProgressReport + done: bool + + +class AsyncJobResult(BaseModel): + result: Any + + +class AsyncJobGet(BaseModel): + job_id: AsyncJobId + + +class AsyncJobAbort(BaseModel): + result: bool + job_id: AsyncJobId + + +class AsyncJobNameData(BaseModel): + """Data for controlling access to an async job""" + + user_id: UserID + product_name: str diff --git a/packages/models-library/src/models_library/api_schemas_rpc_async_jobs/exceptions.py b/packages/models-library/src/models_library/api_schemas_rpc_async_jobs/exceptions.py new file mode 100644 index 00000000000..8403bdd2ff0 --- /dev/null +++ b/packages/models-library/src/models_library/api_schemas_rpc_async_jobs/exceptions.py @@ -0,0 +1,31 @@ +from common_library.errors_classes import OsparcErrorMixin + + +class BaseAsyncjobRpcError(OsparcErrorMixin, RuntimeError): + pass + + +class JobSchedulerError(BaseAsyncjobRpcError): + msg_template: str = "Celery exception: {exc}" + + +class JobMissingError(BaseAsyncjobRpcError): + msg_template: str = "Job {job_id} does not exist" + + +class JobStatusError(BaseAsyncjobRpcError): + msg_template: str = "Could not get status of job {job_id}" + + +class JobNotDoneError(BaseAsyncjobRpcError): + msg_template: str = "Job {job_id} not done" + + +class JobAbortedError(BaseAsyncjobRpcError): + msg_template: str = "Job {job_id} aborted" + + +class JobError(BaseAsyncjobRpcError): + msg_template: str = ( + "Job {job_id} failed with exception type {exc_type} and message {exc_msg}" + ) diff --git a/packages/models-library/src/models_library/api_schemas_storage.py b/packages/models-library/src/models_library/api_schemas_storage.py deleted file mode 100644 index cdbbeebffcd..00000000000 --- a/packages/models-library/src/models_library/api_schemas_storage.py +++ /dev/null @@ -1,334 +0,0 @@ -""" - Models used in storage API: - - Specifically services/storage/src/simcore_service_storage/api/v0/openapi.yaml#/components/schemas - - IMPORTANT: DO NOT COUPLE these schemas until storage is refactored -""" - -from datetime import datetime -from enum import Enum -from typing import Annotated, Any, Literal, Self, TypeAlias -from uuid import UUID - -from pydantic import ( - BaseModel, - ByteSize, - ConfigDict, - Field, - PositiveInt, - RootModel, - StringConstraints, - field_validator, - model_validator, -) -from pydantic.networks import AnyUrl - -from .basic_regex import 
DATCORE_DATASET_NAME_RE, S3_BUCKET_NAME_RE -from .basic_types import SHA256Str -from .generics import ListModel -from .projects_nodes_io import ( - LocationID, - LocationName, - NodeID, - SimcoreS3FileID, - StorageFileID, -) - -ETag: TypeAlias = str - -S3BucketName: TypeAlias = Annotated[str, StringConstraints(pattern=S3_BUCKET_NAME_RE)] - -DatCoreDatasetName: TypeAlias = Annotated[ - str, StringConstraints(pattern=DATCORE_DATASET_NAME_RE) -] - - -# / -class HealthCheck(BaseModel): - name: str | None - status: str | None - api_version: str | None - version: str | None - - -# /locations -class FileLocation(BaseModel): - name: LocationName - id: LocationID - - model_config = ConfigDict( - extra="forbid", - json_schema_extra={ - "examples": [ - {"name": "simcore.s3", "id": 0}, - {"name": "datcore", "id": 1}, - ] - }, - ) - - -FileLocationArray: TypeAlias = ListModel[FileLocation] - - -# /locations/{location_id}/datasets -class DatasetMetaDataGet(BaseModel): - dataset_id: UUID | DatCoreDatasetName - display_name: str - model_config = ConfigDict( - extra="forbid", - from_attributes=True, - json_schema_extra={ - "examples": [ - # simcore dataset - { - "dataset_id": "74a84992-8c99-47de-b88a-311c068055ea", - "display_name": "api", - }, - { - "dataset_id": "1c46752c-b096-11ea-a3c4-02420a00392e", - "display_name": "Octave JupyterLab", - }, - { - "dataset_id": "2de04d1a-f346-11ea-9c22-02420a00085a", - "display_name": "Sleepers", - }, - # datcore datasets - { - "dataset_id": "N:dataset:be862eb8-861e-4b36-afc3-997329dd02bf", - "display_name": "simcore-testing-bucket", - }, - { - "dataset_id": "N:dataset:9ad8adb0-8ea2-4be6-bc45-ecbec7546393", - "display_name": "YetAnotherTest", - }, - ] - }, - ) - - -UNDEFINED_SIZE_TYPE: TypeAlias = Literal[-1] -UNDEFINED_SIZE: UNDEFINED_SIZE_TYPE = -1 - - -# /locations/{location_id}/files/metadata: -# /locations/{location_id}/files/{file_id}/metadata: -class FileMetaDataGet(BaseModel): - # Used by frontend - file_uuid: str = Field( - description="NOT a unique ID, like (api|uuid)/uuid/file_name or DATCORE folder structure", - ) - location_id: LocationID = Field(..., description="Storage location") - project_name: str | None = Field( - default=None, - description="optional project name, used by frontend to display path", - ) - node_name: str | None = Field( - default=None, - description="optional node name, used by frontend to display path", - ) - file_name: str = Field(..., description="Display name for a file") - file_id: StorageFileID = Field( - ..., - description="THIS IS the unique ID for the file. either (api|project_id)/node_id/file_name.ext for S3 and N:package:UUID for datcore", - ) - created_at: datetime - last_modified: datetime - file_size: UNDEFINED_SIZE_TYPE | ByteSize = Field( - default=UNDEFINED_SIZE, description="File size in bytes (-1 means invalid)" - ) - entity_tag: ETag | None = Field( - default=None, - description="Entity tag (or ETag), represents a specific version of the file, None if invalid upload or datcore", - ) - is_soft_link: bool = Field( - default=False, - description="If true, this file is a soft link." - "i.e. is another entry with the same object_name", - ) - is_directory: bool = Field(default=False, description="if True this is a directory") - sha256_checksum: SHA256Str | None = Field( - default=None, - description="SHA256 message digest of the file content. 
Main purpose: cheap lookup.", - ) - - model_config = ConfigDict( - extra="ignore", - from_attributes=True, - json_schema_extra={ - "examples": [ - # typical S3 entry - { - "created_at": "2020-06-17 12:28:55.705340", - "entity_tag": "8711cf258714b2de5498f5a5ef48cc7b", - "file_id": "1c46752c-b096-11ea-a3c4-02420a00392e/e603724d-4af1-52a1-b866-0d4b792f8c4a/work.zip", - "file_name": "work.zip", - "file_size": 17866343, - "file_uuid": "1c46752c-b096-11ea-a3c4-02420a00392e/e603724d-4af1-52a1-b866-0d4b792f8c4a/work.zip", - "is_soft_link": False, - "last_modified": "2020-06-22 13:48:13.398000+00:00", - "location_id": 0, - "node_name": "JupyterLab Octave", - "project_name": "Octave JupyterLab", - }, - # typical directory entry - { - "created_at": "2020-06-17 12:28:55.705340", - "entity_tag": "8711cf258714b2de5498f5a5ef48cc7b", - "file_id": "9a759caa-9890-4537-8c26-8edefb7a4d7c/be165f45-ddbf-4911-a04d-bc0b885914ef/workspace", - "file_name": "workspace", - "file_size": -1, - "file_uuid": "9a759caa-9890-4537-8c26-8edefb7a4d7c/be165f45-ddbf-4911-a04d-bc0b885914ef/workspace", - "is_soft_link": False, - "last_modified": "2020-06-22 13:48:13.398000+00:00", - "location_id": 0, - "node_name": None, - "project_name": None, - "is_directory": True, - }, - # api entry (not soft link) - { - "created_at": "2020-06-17 12:28:55.705340", - "entity_tag": "8711cf258714b2de5498f5a5ef48cc7b", - "file_id": "api/7b6b4e3d-39ae-3559-8765-4f815a49984e/tmpf_qatpzx", - "file_name": "tmpf_qatpzx", - "file_size": 86, - "file_uuid": "api/7b6b4e3d-39ae-3559-8765-4f815a49984e/tmpf_qatpzx", - "is_soft_link": False, - "last_modified": "2020-06-22 13:48:13.398000+00:00", - "location_id": 0, - "node_name": None, - "project_name": None, - }, - # api entry (soft link) - { - "created_at": "2020-06-17 12:28:55.705340", - "entity_tag": "36aa3644f526655a6f557207e4fd25b8", - "file_id": "api/6f788ad9-0ad8-3d0d-9722-72f08c24a212/output_data.json", - "file_name": "output_data.json", - "file_size": 183, - "file_uuid": "api/6f788ad9-0ad8-3d0d-9722-72f08c24a212/output_data.json", - "is_soft_link": True, - "last_modified": "2020-06-22 13:48:13.398000+00:00", - "location_id": 0, - "node_name": None, - "project_name": None, - }, - # datcore entry - { - "created_at": "2020-05-28T15:48:34.386302+00:00", - "entity_tag": None, - "file_id": "N:package:ce145b61-7e4f-470b-a113-033653e86d3d", - "file_name": "templatetemplate.json", - "file_size": 238, - "file_uuid": "Kember Cardiac Nerve Model/templatetemplate.json", - "is_soft_link": False, - "last_modified": "2020-05-28T15:48:37.507387+00:00", - "location_id": 1, - "node_name": None, - "project_name": None, - }, - ] - }, - ) - - @field_validator("location_id", mode="before") - @classmethod - def ensure_location_is_integer(cls, v): - if v is not None: - return int(v) - return v - - -class FileMetaDataArray(RootModel[list[FileMetaDataGet]]): - root: list[FileMetaDataGet] = Field(default_factory=list) - - -# /locations/{location_id}/files/{file_id} - - -class LinkType(str, Enum): - PRESIGNED = "PRESIGNED" - S3 = "S3" - - -class PresignedLink(BaseModel): - link: AnyUrl - - -class FileUploadLinks(BaseModel): - abort_upload: AnyUrl - complete_upload: AnyUrl - - -class FileUploadSchema(BaseModel): - chunk_size: ByteSize - urls: list[AnyUrl] - links: FileUploadLinks - - -class TableSynchronisation(BaseModel): - dry_run: bool | None = None - fire_and_forget: bool | None = None - removed: list[str] - - -# /locations/{location_id}/files/{file_id}:complete -class UploadedPart(BaseModel): - number: PositiveInt - e_tag: 
ETag - - -class FileUploadCompletionBody(BaseModel): - parts: list[UploadedPart] - - @field_validator("parts") - @classmethod - def ensure_sorted(cls, value: list[UploadedPart]) -> list[UploadedPart]: - return sorted(value, key=lambda uploaded_part: uploaded_part.number) - - -class FileUploadCompleteLinks(BaseModel): - state: AnyUrl - - -class FileUploadCompleteResponse(BaseModel): - links: FileUploadCompleteLinks - - -# /locations/{location_id}/files/{file_id}:complete/futures/{future_id} -class FileUploadCompleteState(Enum): - OK = "ok" - NOK = "nok" - - -class FileUploadCompleteFutureResponse(BaseModel): - state: FileUploadCompleteState - e_tag: ETag | None = Field(default=None) - - -# /simcore-s3/ - - -class FoldersBody(BaseModel): - source: Annotated[dict[str, Any], Field(default_factory=dict)] - destination: Annotated[dict[str, Any], Field(default_factory=dict)] - nodes_map: Annotated[dict[NodeID, NodeID], Field(default_factory=dict)] - - @model_validator(mode="after") - def ensure_consistent_entries(self: Self) -> Self: - source_node_keys = (NodeID(n) for n in self.source.get("workbench", {})) - if set(source_node_keys) != set(self.nodes_map.keys()): - msg = "source project nodes do not fit with nodes_map entries" - raise ValueError(msg) - destination_node_keys = ( - NodeID(n) for n in self.destination.get("workbench", {}) - ) - if set(destination_node_keys) != set(self.nodes_map.values()): - msg = "destination project nodes do not fit with nodes_map values" - raise ValueError(msg) - return self - - -class SoftCopyBody(BaseModel): - link_id: SimcoreS3FileID diff --git a/packages/models-library/src/models_library/api_schemas_storage/__init__.py b/packages/models-library/src/models_library/api_schemas_storage/__init__.py new file mode 100644 index 00000000000..912aa218e54 --- /dev/null +++ b/packages/models-library/src/models_library/api_schemas_storage/__init__.py @@ -0,0 +1,9 @@ +from typing import Final + +from pydantic import TypeAdapter + +from ..rabbitmq_basic_types import RPCNamespace + +STORAGE_RPC_NAMESPACE: Final[RPCNamespace] = TypeAdapter(RPCNamespace).validate_python( + "storage" +) diff --git a/packages/models-library/src/models_library/api_schemas_storage/data_export_async_jobs.py b/packages/models-library/src/models_library/api_schemas_storage/data_export_async_jobs.py new file mode 100644 index 00000000000..57a39c34ecb --- /dev/null +++ b/packages/models-library/src/models_library/api_schemas_storage/data_export_async_jobs.py @@ -0,0 +1,27 @@ +# pylint: disable=R6301 + +from common_library.errors_classes import OsparcErrorMixin +from models_library.projects_nodes_io import LocationID, StorageFileID +from pydantic import BaseModel, Field + + +class DataExportTaskStartInput(BaseModel): + location_id: LocationID + file_and_folder_ids: list[StorageFileID] = Field(..., min_length=1) + + +### Exceptions + + +class StorageRpcBaseError(OsparcErrorMixin, RuntimeError): + pass + + +class InvalidFileIdentifierError(StorageRpcBaseError): + msg_template: str = "Could not find the file {file_id}" + + +class AccessRightError(StorageRpcBaseError): + msg_template: str = ( + "User {user_id} does not have access to file {file_id} with location {location_id}" + ) diff --git a/packages/models-library/src/models_library/api_schemas_storage/storage_schemas.py b/packages/models-library/src/models_library/api_schemas_storage/storage_schemas.py new file mode 100644 index 00000000000..000db167a10 --- /dev/null +++ 
b/packages/models-library/src/models_library/api_schemas_storage/storage_schemas.py @@ -0,0 +1,492 @@ +""" +Models used in storage API: + +Specifically services/storage/src/simcore_service_storage/api/v0/openapi.yaml#/components/schemas + +IMPORTANT: DO NOT COUPLE these schemas until storage is refactored +""" + +from datetime import datetime +from enum import Enum +from pathlib import Path +from typing import Annotated, Any, Final, Literal, Self, TypeAlias +from uuid import UUID + +from pydantic import ( + BaseModel, + ByteSize, + ConfigDict, + Field, + PositiveInt, + RootModel, + StringConstraints, + field_validator, + model_validator, +) +from pydantic.config import JsonDict +from pydantic.networks import AnyUrl + +from ..basic_regex import ( + DATCORE_COLLECTION_NAME_RE, + DATCORE_DATASET_NAME_RE, + DATCORE_FILE_ID_RE, + S3_BUCKET_NAME_RE, +) +from ..basic_types import SHA256Str +from ..generics import ListModel +from ..projects import ProjectID +from ..projects_nodes_io import ( + LocationID, + LocationName, + NodeID, + SimcoreS3FileID, + StorageFileID, +) +from ..users import UserID + +ETag: TypeAlias = str + +S3BucketName: TypeAlias = Annotated[str, StringConstraints(pattern=S3_BUCKET_NAME_RE)] + +DatCoreDatasetName: TypeAlias = Annotated[ + str, StringConstraints(pattern=DATCORE_DATASET_NAME_RE) +] +DatCoreCollectionName: TypeAlias = Annotated[ + str, StringConstraints(pattern=DATCORE_COLLECTION_NAME_RE) +] +DatCorePackageName: TypeAlias = Annotated[ + str, StringConstraints(pattern=DATCORE_FILE_ID_RE) +] + + +# / +class HealthCheck(BaseModel): + name: str | None + status: str | None + api_version: str | None + version: str | None + + +# /locations +class FileLocation(BaseModel): + name: LocationName + id: LocationID + + @staticmethod + def _update_json_schema_extra(schema: JsonDict) -> None: + schema.update( + { + "examples": [ + {"name": "simcore.s3", "id": 0}, + {"name": "datcore", "id": 1}, + ] + } + ) + + model_config = ConfigDict( + extra="forbid", + json_schema_extra=_update_json_schema_extra, + ) + + +FileLocationArray: TypeAlias = ListModel[FileLocation] + + +# /locations/{location_id}/datasets +class DatasetMetaDataGet(BaseModel): + dataset_id: UUID | DatCoreDatasetName + display_name: str + + @staticmethod + def _update_json_schema_extra(schema: JsonDict) -> None: + schema.update( + { + "examples": [ + # simcore dataset + { + "dataset_id": "74a84992-8c99-47de-b88a-311c068055ea", + "display_name": "api", + }, + { + "dataset_id": "1c46752c-b096-11ea-a3c4-02420a00392e", + "display_name": "Octave JupyterLab", + }, + { + "dataset_id": "2de04d1a-f346-11ea-9c22-02420a00085a", + "display_name": "Sleepers", + }, + # datcore datasets + { + "dataset_id": "N:dataset:be862eb8-861e-4b36-afc3-997329dd02bf", + "display_name": "simcore-testing-bucket", + }, + { + "dataset_id": "N:dataset:9ad8adb0-8ea2-4be6-bc45-ecbec7546393", + "display_name": "YetAnotherTest", + }, + ] + } + ) + + model_config = ConfigDict( + extra="forbid", + from_attributes=True, + json_schema_extra=_update_json_schema_extra, + ) + + +UNDEFINED_SIZE_TYPE: TypeAlias = Literal[-1] +UNDEFINED_SIZE: UNDEFINED_SIZE_TYPE = -1 + + +class FileMetaDataGetv010(BaseModel): + file_uuid: str + location_id: LocationID + location: LocationName + bucket_name: str + object_name: str + project_id: ProjectID | None + project_name: str | None + node_id: NodeID | None + node_name: str | None + file_name: str + user_id: UserID | None + user_name: str | None + + model_config = ConfigDict(extra="forbid", frozen=True) + + +class 
FileMetaDataGet(BaseModel): + # Used by frontend + file_uuid: str = Field( + description="NOT a unique ID, like (api|uuid)/uuid/file_name or DATCORE folder structure", + ) + location_id: LocationID = Field(..., description="Storage location") + project_name: str | None = Field( + default=None, + description="optional project name, used by frontend to display path", + ) + node_name: str | None = Field( + default=None, + description="optional node name, used by frontend to display path", + ) + file_name: str = Field(..., description="Display name for a file") + file_id: StorageFileID = Field( + ..., + description="THIS IS the unique ID for the file: either (api|project_id)/node_id/file_name.ext for S3 or N:package:UUID for datcore", + ) + created_at: datetime + last_modified: datetime + file_size: UNDEFINED_SIZE_TYPE | ByteSize = Field( + default=UNDEFINED_SIZE, description="File size in bytes (-1 means invalid)" + ) + entity_tag: ETag | None = Field( + default=None, + description="Entity tag (or ETag), represents a specific version of the file, None if invalid upload or datcore", + ) + is_soft_link: bool = Field( + default=False, + description="If true, this file is a soft link, " + "i.e. another entry with the same object_name", + ) + is_directory: bool = Field(default=False, description="if True, this is a directory") + sha256_checksum: SHA256Str | None = Field( + default=None, + description="SHA256 message digest of the file content. Main purpose: cheap lookup.", + ) + + @staticmethod + def _update_json_schema_extra(schema: JsonDict) -> None: + schema.update( + { + "examples": [ + # typical S3 entry + { + "created_at": "2020-06-17 12:28:55.705340", + "entity_tag": "8711cf258714b2de5498f5a5ef48cc7b", + "file_id": "1c46752c-b096-11ea-a3c4-02420a00392e/e603724d-4af1-52a1-b866-0d4b792f8c4a/work.zip", + "file_name": "work.zip", + "file_size": 17866343, + "file_uuid": "1c46752c-b096-11ea-a3c4-02420a00392e/e603724d-4af1-52a1-b866-0d4b792f8c4a/work.zip", + "is_soft_link": False, + "last_modified": "2020-06-22 13:48:13.398000+00:00", + "location_id": 0, + "node_name": "JupyterLab Octave", + "project_name": "Octave JupyterLab", + }, + # typical directory entry + { + "created_at": "2020-06-17 12:28:55.705340", + "entity_tag": "8711cf258714b2de5498f5a5ef48cc7b", + "file_id": "9a759caa-9890-4537-8c26-8edefb7a4d7c/be165f45-ddbf-4911-a04d-bc0b885914ef/workspace", + "file_name": "workspace", + "file_size": -1, + "file_uuid": "9a759caa-9890-4537-8c26-8edefb7a4d7c/be165f45-ddbf-4911-a04d-bc0b885914ef/workspace", + "is_soft_link": False, + "last_modified": "2020-06-22 13:48:13.398000+00:00", + "location_id": 0, + "node_name": None, + "project_name": None, + "is_directory": True, + }, + # api entry (not soft link) + { + "created_at": "2020-06-17 12:28:55.705340", + "entity_tag": "8711cf258714b2de5498f5a5ef48cc7b", + "file_id": "api/7b6b4e3d-39ae-3559-8765-4f815a49984e/tmpf_qatpzx", + "file_name": "tmpf_qatpzx", + "file_size": 86, + "file_uuid": "api/7b6b4e3d-39ae-3559-8765-4f815a49984e/tmpf_qatpzx", + "is_soft_link": False, + "last_modified": "2020-06-22 13:48:13.398000+00:00", + "location_id": 0, + "node_name": None, + "project_name": None, + }, + # api entry (soft link) + { + "created_at": "2020-06-17 12:28:55.705340", + "entity_tag": "36aa3644f526655a6f557207e4fd25b8", + "file_id": "api/6f788ad9-0ad8-3d0d-9722-72f08c24a212/output_data.json", + "file_name": "output_data.json", + "file_size": 183, + "file_uuid": "api/6f788ad9-0ad8-3d0d-9722-72f08c24a212/output_data.json", + "is_soft_link": True,
"last_modified": "2020-06-22 13:48:13.398000+00:00", + "location_id": 0, + "node_name": None, + "project_name": None, + }, + # datcore entry + { + "created_at": "2020-05-28T15:48:34.386302+00:00", + "entity_tag": None, + "file_id": "N:package:ce145b61-7e4f-470b-a113-033653e86d3d", + "file_name": "templatetemplate.json", + "file_size": 238, + "file_uuid": "Kember Cardiac Nerve Model/templatetemplate.json", + "is_soft_link": False, + "last_modified": "2020-05-28T15:48:37.507387+00:00", + "location_id": 1, + "node_name": None, + "project_name": None, + }, + ] + } + ) + + model_config = ConfigDict( + extra="ignore", + from_attributes=True, + json_schema_extra=_update_json_schema_extra, + ) + + @field_validator("location_id", mode="before") + @classmethod + def ensure_location_is_integer(cls, v): + if v is not None: + return int(v) + return v + + +class FileMetaDataArray(RootModel[list[FileMetaDataGet]]): + root: list[FileMetaDataGet] = Field(default_factory=list) + + +class LinkType(str, Enum): + PRESIGNED = "PRESIGNED" + S3 = "S3" + + +class PresignedLink(BaseModel): + link: AnyUrl + + +class FileUploadLinks(BaseModel): + abort_upload: AnyUrl + complete_upload: AnyUrl + + +class FileUploadSchema(BaseModel): + chunk_size: ByteSize + urls: list[AnyUrl] + links: FileUploadLinks + + @staticmethod + def _update_json_schema_extra(schema: JsonDict) -> None: + schema.update( + { + "examples": [ + # typical S3 entry + { + "chunk_size": "10000000", + "urls": [ + "https://s3.amazonaws.com/bucket-name/key-name?AWSAccessKeyId=AKIAIOSFODNN7EXAMPLE&Expires=1698298164&Signature=WObYM%2F%2B4t7O3%2FZS3Kegb%2Bc4%3D", + ], + "links": { + "abort_upload": "https://storage.com:3021/bucket-name/key-name:abort", + "complete_upload": "https://storage.com:3021/bucket-name/key-name:complete", + }, + }, + ] + } + ) + + model_config = ConfigDict( + extra="forbid", json_schema_extra=_update_json_schema_extra + ) + + +class TableSynchronisation(BaseModel): + dry_run: bool | None = None + fire_and_forget: bool | None = None + removed: list[str] + + +# /locations/{location_id}/files/{file_id}:complete +class UploadedPart(BaseModel): + number: PositiveInt + e_tag: ETag + + +class FileUploadCompletionBody(BaseModel): + parts: list[UploadedPart] + + @field_validator("parts") + @classmethod + def ensure_sorted(cls, value: list[UploadedPart]) -> list[UploadedPart]: + return sorted(value, key=lambda uploaded_part: uploaded_part.number) + + +class FileUploadCompleteLinks(BaseModel): + state: AnyUrl + + +class FileUploadCompleteResponse(BaseModel): + links: FileUploadCompleteLinks + + +# /locations/{location_id}/files/{file_id}:complete/futures/{future_id} +class FileUploadCompleteState(Enum): + OK = "ok" + NOK = "nok" + + +class FileUploadCompleteFutureResponse(BaseModel): + state: FileUploadCompleteState + e_tag: ETag | None = Field(default=None) + + +# /simcore-s3/ + + +class FoldersBody(BaseModel): + source: Annotated[dict[str, Any], Field(default_factory=dict)] + destination: Annotated[dict[str, Any], Field(default_factory=dict)] + nodes_map: Annotated[dict[NodeID, NodeID], Field(default_factory=dict)] + + @model_validator(mode="after") + def ensure_consistent_entries(self: Self) -> Self: + source_node_keys = (NodeID(n) for n in self.source.get("workbench", {})) + if set(source_node_keys) != set(self.nodes_map.keys()): + msg = "source project nodes do not fit with nodes_map entries" + raise ValueError(msg) + destination_node_keys = ( + NodeID(n) for n in self.destination.get("workbench", {}) + ) + if set(destination_node_keys) 
!= set(self.nodes_map.values()): + msg = "destination project nodes do not fit with nodes_map values" + raise ValueError(msg) + return self + + +class SoftCopyBody(BaseModel): + link_id: SimcoreS3FileID + + +DEFAULT_NUMBER_OF_PATHS_PER_PAGE: Final[int] = 50 +MAX_NUMBER_OF_PATHS_PER_PAGE: Final[int] = 1000 + + +class PathMetaDataGet(BaseModel): + path: Annotated[Path, Field(description="the path to the current file or folder")] + display_path: Annotated[ + Path, + Field( + description="the path to display, with UUIDs replaced by display names (URL-encoded by parts, as names may contain '/')" + ), + ] + + file_meta_data: Annotated[ + FileMetaDataGet | None, + Field(description="if filled, this is the file meta data of the s3 object"), + ] = None + + @staticmethod + def _update_json_schema_extra(schema: JsonDict) -> None: + schema.update( + { + "examples": [ + # ls no filter + { + "path": "f8da77a9-24b9-4eab-aee7-1f0608da1e3e", + "display_path": "my amazing project", + }, + # ls f8da77a9-24b9-4eab-aee7-1f0608da1e3e + { + "path": "f8da77a9-24b9-4eab-aee7-1f0608da1e3e/2f94f80f-633e-4dfa-a983-226b7babe3d7", + "display_path": "my amazing project/awesome node", + }, + # ls f8da77a9-24b9-4eab-aee7-1f0608da1e3e/2f94f80f-633e-4dfa-a983-226b7babe3d7 + { + "path": "f8da77a9-24b9-4eab-aee7-1f0608da1e3e/2f94f80f-633e-4dfa-a983-226b7babe3d7/outputs", + "display_path": "my amazing project/awesome node/outputs", + }, + # ls f8da77a9-24b9-4eab-aee7-1f0608da1e3e/2f94f80f-633e-4dfa-a983-226b7babe3d7/outputs + { + "path": "f8da77a9-24b9-4eab-aee7-1f0608da1e3e/2f94f80f-633e-4dfa-a983-226b7babe3d7/outputs/output5", + "display_path": "my amazing project/awesome node/outputs/output5", + }, + # ls f8da77a9-24b9-4eab-aee7-1f0608da1e3e/2f94f80f-633e-4dfa-a983-226b7babe3d7/outputs/output5 + { + "path": f"f8da77a9-24b9-4eab-aee7-1f0608da1e3e/2f94f80f-633e-4dfa-a983-226b7babe3d7/outputs/output5/{FileMetaDataGet.model_json_schema()['examples'][0]['file_name']}", + "display_path": f"my amazing project/awesome node/outputs/output5/{FileMetaDataGet.model_json_schema()['examples'][0]['file_name']}", + "file_meta_data": FileMetaDataGet.model_json_schema()[ + "examples" + ][0], + }, + ] + } + ) + + model_config = ConfigDict( + extra="forbid", json_schema_extra=_update_json_schema_extra + ) + + +class PathTotalSizeCreate(BaseModel): + path: Path + size: ByteSize + + @staticmethod + def _update_json_schema_extra(schema: JsonDict) -> None: + schema.update( + { + "examples": [ + # a folder + { + "path": "f8da77a9-24b9-4eab-aee7-1f0608da1e3e", + "size": 15728640, + }, + # 1 file + { + "path": f"f8da77a9-24b9-4eab-aee7-1f0608da1e3e/2f94f80f-633e-4dfa-a983-226b7babe3d7/outputs/output5/{FileMetaDataGet.model_json_schema()['examples'][0]['file_name']}", + "size": 1024, + }, + ] + } + ) + + model_config = ConfigDict( + extra="forbid", json_schema_extra=_update_json_schema_extra + ) diff --git a/packages/models-library/src/models_library/api_schemas_webserver/_base.py b/packages/models-library/src/models_library/api_schemas_webserver/_base.py index a5eaa42c006..4dfcf1473dd 100644 --- a/packages/models-library/src/models_library/api_schemas_webserver/_base.py +++ b/packages/models-library/src/models_library/api_schemas_webserver/_base.py @@ -13,6 +13,13 @@ class EmptyModel(BaseModel): model_config = ConfigDict(extra="forbid") + def to_domain_model(self) -> dict[str, Any]: + return self.model_dump( + exclude_unset=True, + by_alias=True, + exclude_none=True, + ) + class InputSchemaWithoutCamelCase(BaseModel): model_config = ConfigDict(
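An editorial sketch (not part of this change-set) of what the new to_domain_model helper returns; the _PatchBody class here is hypothetical, only the model_dump flags come from the hunk above:

    from pydantic import BaseModel

    class _PatchBody(BaseModel):  # stand-in for an input schema exposing to_domain_model
        name: str | None = None
        thumbnail: str | None = None

        def to_domain_model(self) -> dict:
            return self.model_dump(exclude_unset=True, by_alias=True, exclude_none=True)

    assert _PatchBody(name="new").to_domain_model() == {"name": "new"}  # unset/None fields are dropped

diff --git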
a/packages/models-library/src/models_library/api_schemas_webserver/catalog.py b/packages/models-library/src/models_library/api_schemas_webserver/catalog.py index c6f56597327..7b490ad338e 100644 --- a/packages/models-library/src/models_library/api_schemas_webserver/catalog.py +++ b/packages/models-library/src/models_library/api_schemas_webserver/catalog.py @@ -1,6 +1,7 @@ -from typing import Any, TypeAlias +from typing import Annotated, TypeAlias from pydantic import ConfigDict, Field +from pydantic.config import JsonDict from pydantic.main import BaseModel from ..api_schemas_catalog import services as api_schemas_catalog_services @@ -31,235 +32,239 @@ class _BaseCommonApiExtension(BaseModel): class ServiceInputGet(ServiceInput, _BaseCommonApiExtension): """Extends fields of api_schemas_catalog.services.ServiceGet.outputs[*]""" - key_id: ServiceInputKey = Field( - ..., description="Unique name identifier for this input" - ) + key_id: Annotated[ + ServiceInputKey, Field(description="Unique name identifier for this input") + ] + + @staticmethod + def _update_json_schema_extra(schema: JsonDict) -> None: + schema.update( + { + "example": { + "displayOrder": 2, + "label": "Sleep Time", + "description": "Time to wait before completion", + "type": "number", + "defaultValue": 0, + "unit": "second", + "widget": {"type": "TextArea", "details": {"minHeight": 1}}, + "keyId": "input_2", + "unitLong": "seconds", + "unitShort": "sec", + }, + "examples": [ + { + "label": "Acceleration", + "description": "acceleration with units", + "type": "ref_contentSchema", + "contentSchema": { + "title": "Acceleration", + "type": "number", + "x_unit": "m/s**2", + }, + "keyId": "input_1", + "unitLong": "meter/second3", + "unitShort": "m/s3", + } + ], + } + ) model_config = ConfigDict( - json_schema_extra={ - "example": { - "displayOrder": 2, - "label": "Sleep Time", - "description": "Time to wait before completion", - "type": "number", - "defaultValue": 0, - "unit": "second", - "widget": {"type": "TextArea", "details": {"minHeight": 1}}, - "keyId": "input_2", - "unitLong": "seconds", - "unitShort": "sec", - }, - "examples": [ - # uses content-schema - { - "label": "Acceleration", - "description": "acceleration with units", - "type": "ref_contentSchema", - "contentSchema": { - "title": "Acceleration", - "type": "number", - "x_unit": "m/s**2", - }, - "keyId": "input_1", - "unitLong": "meter/second3", - "unitShort": "m/s3", - } - ], - } + json_schema_extra=_update_json_schema_extra, ) class ServiceOutputGet(ServiceOutput, _BaseCommonApiExtension): """Extends fields of api_schemas_catalog.services.ServiceGet.outputs[*]""" - key_id: ServiceOutputKey = Field( - ..., description="Unique name identifier for this input" - ) + key_id: Annotated[ + ServiceOutputKey, Field(description="Unique name identifier for this output") + ] - model_config = ConfigDict( - json_schema_extra={ - "example": { - "displayOrder": 2, - "label": "Time Slept", - "description": "Time the service waited before completion", - "type": "number", - "unit": "second", - "unitLong": "seconds", - "unitShort": "sec", - "keyId": "output_2", + @staticmethod + def _update_json_schema_extra(schema: JsonDict) -> None: + schema.update( + { + "example": { + "displayOrder": 2, + "label": "Time Slept", + "description": "Time the service waited before completion", + "type": "number", + "unit": "second", + "unitLong": "seconds", + "unitShort": "sec", + "keyId": "output_2", + } } - } + ) + + model_config = ConfigDict( + json_schema_extra=_update_json_schema_extra, )
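An editorial sketch (not part of this change-set) of how the relocated example feeds a round-trip check; it assumes the camelCase keys shown above (keyId, defaultValue, ...) are accepted through the schemas' aliases, as the repo's example-based tests rely on:

    from models_library.api_schemas_webserver.catalog import ServiceInputGet

    example = ServiceInputGet.model_json_schema()["example"]  # injected by _update_json_schema_extra
    port = ServiceInputGet.model_validate(example)
    assert port.key_id == "input_2"  # value taken from the example above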
ServiceInputsGetDict: TypeAlias = dict[ServicePortKey, ServiceInputGet] ServiceOutputsGetDict: TypeAlias = dict[ServicePortKey, ServiceOutputGet] +ServiceResourcesGet: TypeAlias = api_schemas_catalog_services.ServiceResourcesGet -_EXAMPLE_FILEPICKER: dict[str, Any] = { - **api_schemas_catalog_services.ServiceGet.model_config["json_schema_extra"]["examples"][1], # type: ignore [index,dict-item] - "inputs": {}, - "outputs": { - "outFile": { - "displayOrder": 0, - "label": "File", - "description": "Chosen File", - "type": "data:*/*", - "fileToKeyMap": None, - "keyId": "outFile", - } - }, -} - -_EXAMPLE_SLEEPER: dict[str, Any] = { - **api_schemas_catalog_services.ServiceGet.model_config["json_schema_extra"]["examples"][0], # type: ignore[index,dict-item] - "inputs": { - "input_1": { - "displayOrder": 1, - "label": "File with int number", - "description": "Pick a file containing only one integer", - "type": "data:text/plain", - "fileToKeyMap": {"single_number.txt": "input_1"}, - "keyId": "input_1", - }, - "input_2": { - "unitLong": "second", - "unitShort": "s", - "label": "Sleep interval", - "description": "Choose an amount of time to sleep in range [0:]", - "keyId": "input_2", - "displayOrder": 2, - "type": "ref_contentSchema", - "contentSchema": { - "title": "Sleep interval", - "type": "integer", - "x_unit": "second", - "minimum": 0, - }, - "defaultValue": 2, - }, - "input_3": { - "displayOrder": 3, - "label": "Fail after sleep", - "description": "If set to true will cause service to fail after it sleeps", - "type": "boolean", - "defaultValue": False, - "keyId": "input_3", - }, - "input_4": { - "unitLong": "meter", - "unitShort": "m", - "label": "Distance to bed", - "description": "It will first walk the distance to bed", - "keyId": "input_4", - "displayOrder": 4, - "type": "ref_contentSchema", - "contentSchema": { - "title": "Distance to bed", - "type": "integer", - "x_unit": "meter", - }, - "defaultValue": 0, - }, - "input_5": { - "unitLong": "byte", - "unitShort": "B", - "label": "Dream (or nightmare) of the night", - "description": "Defines the size of the dream that will be generated [0:]", - "keyId": "input_5", - "displayOrder": 5, - "type": "ref_contentSchema", - "contentSchema": { - "title": "Dream of the night", - "type": "integer", - "x_unit": "byte", - "minimum": 0, - }, - "defaultValue": 0, - }, - }, - "outputs": { - "output_1": { - "displayOrder": 1, - "label": "File containing one random integer", - "description": "Integer is generated in range [1-9]", - "type": "data:text/plain", - "fileToKeyMap": {"single_number.txt": "output_1"}, - "keyId": "output_1", - }, - "output_2": { - "unitLong": "second", - "unitShort": "s", - "label": "Random sleep interval", - "description": "Interval is generated in range [1-9]", - "keyId": "output_2", - "displayOrder": 2, - "type": "ref_contentSchema", - "contentSchema": { - "title": "Random sleep interval", - "type": "integer", - "x_unit": "second", - }, - }, - "output_3": { - "displayOrder": 3, - "label": "Dream output", - "description": "Contains some random data representing a dream", - "type": "data:text/plain", - "fileToKeyMap": {"dream.txt": "output_3"}, - "keyId": "output_3", - }, - }, -} +class CatalogLatestServiceGet(api_schemas_catalog_services.LatestServiceGet): + inputs: ServiceInputsGetDict # type: ignore[assignment] + outputs: ServiceOutputsGetDict # type: ignore[assignment] + @staticmethod + def _update_json_schema_extra(schema: JsonDict) -> None: + base_example = ( + 
api_schemas_catalog_services.LatestServiceGet.model_json_schema()[ + "examples" + ][0] + ) -class ServiceGet(api_schemas_catalog_services.ServiceGet): - # pylint: disable=too-many-ancestors - inputs: ServiceInputsGetDict = Field( # type: ignore[assignment] - ..., description="inputs with extended information" - ) - outputs: ServiceOutputsGetDict = Field( # type: ignore[assignment] - ..., description="outputs with extended information" - ) + schema.update( + { + "example": { + **base_example, + "inputs": { + "input_1": { + "displayOrder": 1, + "label": "File with int number", + "description": "Pick a file containing only one integer", + "type": "data:text/plain", + "fileToKeyMap": {"single_number.txt": "input_1"}, + "keyId": "input_1", + }, + "input_2": { + "unitLong": "second", + "unitShort": "s", + "label": "Sleep interval", + "description": "Choose an amount of time to sleep in range [0:]", + "keyId": "input_2", + "displayOrder": 2, + "type": "ref_contentSchema", + "contentSchema": { + "title": "Sleep interval", + "type": "integer", + "x_unit": "second", + "minimum": 0, + }, + "defaultValue": 2, + }, + "input_3": { + "displayOrder": 3, + "label": "Fail after sleep", + "description": "If set to true will cause service to fail after it sleeps", + "type": "boolean", + "defaultValue": False, + "keyId": "input_3", + }, + "input_4": { + "unitLong": "meter", + "unitShort": "m", + "label": "Distance to bed", + "description": "It will first walk the distance to bed", + "keyId": "input_4", + "displayOrder": 4, + "type": "ref_contentSchema", + "contentSchema": { + "title": "Distance to bed", + "type": "integer", + "x_unit": "meter", + }, + "defaultValue": 0, + }, + "input_5": { + "unitLong": "byte", + "unitShort": "B", + "label": "Dream (or nightmare) of the night", + "description": "Defines the size of the dream that will be generated [0:]", + "keyId": "input_5", + "displayOrder": 5, + "type": "ref_contentSchema", + "contentSchema": { + "title": "Dream of the night", + "type": "integer", + "x_unit": "byte", + "minimum": 0, + }, + "defaultValue": 0, + }, + }, + "outputs": { + "output_1": { + "displayOrder": 1, + "label": "File containing one random integer", + "description": "Integer is generated in range [1-9]", + "type": "data:text/plain", + "fileToKeyMap": {"single_number.txt": "output_1"}, + "keyId": "output_1", + }, + "output_2": { + "unitLong": "second", + "unitShort": "s", + "label": "Random sleep interval", + "description": "Interval is generated in range [1-9]", + "keyId": "output_2", + "displayOrder": 2, + "type": "ref_contentSchema", + "contentSchema": { + "title": "Random sleep interval", + "type": "integer", + "x_unit": "second", + }, + }, + "output_3": { + "displayOrder": 3, + "label": "Dream output", + "description": "Contains some random data representing a dream", + "type": "data:text/plain", + "fileToKeyMap": {"dream.txt": "output_3"}, + "keyId": "output_3", + }, + }, + } + } + ) model_config = ConfigDict( **OutputSchema.model_config, - json_schema_extra={"examples": [_EXAMPLE_FILEPICKER, _EXAMPLE_SLEEPER]}, + json_schema_extra=_update_json_schema_extra, ) -ServiceResourcesGet: TypeAlias = api_schemas_catalog_services.ServiceResourcesGet - - class CatalogServiceGet(api_schemas_catalog_services.ServiceGetV2): - # NOTE: will replace ServiceGet! 
- # pylint: disable=too-many-ancestors - inputs: ServiceInputsGetDict = Field( # type: ignore[assignment] - ..., description="inputs with extended information" - ) - outputs: ServiceOutputsGetDict = Field( # type: ignore[assignment] - ..., description="outputs with extended information" - ) + inputs: Annotated[ # type: ignore[assignment] + ServiceInputsGetDict, Field(description="inputs with extended information") + ] + outputs: Annotated[ # type: ignore[assignment] + ServiceOutputsGetDict, Field(description="outputs with extended information") + ] + + @staticmethod + def _update_json_schema_extra(schema: JsonDict) -> None: + schema.update( + { + "example": { + **api_schemas_catalog_services.ServiceGetV2.model_json_schema()[ + "examples" + ][0], + "inputs": { + f"input{i}": example + for i, example in enumerate( + ServiceInputGet.model_json_schema()["examples"] + ) + }, + "outputs": { + "outFile": ServiceOutputGet.model_json_schema()["example"] + }, + } + } + ) model_config = ConfigDict( **OutputSchema.model_config, - json_schema_extra={ - "example": { - **api_schemas_catalog_services.ServiceGetV2.model_config["json_schema_extra"]["examples"][0], # type: ignore [index,dict-item] - "inputs": { - f"input{i}": example - for i, example in enumerate( - ServiceInputGet.model_config["json_schema_extra"]["examples"] # type: ignore[index,arg-type] - ) - }, - "outputs": { - "outFile": ServiceOutputGet.model_config["json_schema_extra"][ - "example" - ] # type: ignore[index] - }, - } - }, + json_schema_extra=_update_json_schema_extra, ) diff --git a/packages/models-library/src/models_library/api_schemas_webserver/groups.py b/packages/models-library/src/models_library/api_schemas_webserver/groups.py index 4755e9c90af..643c66b817a 100644 --- a/packages/models-library/src/models_library/api_schemas_webserver/groups.py +++ b/packages/models-library/src/models_library/api_schemas_webserver/groups.py @@ -244,33 +244,31 @@ def from_domain_model( GroupGet.from_domain_model(*gi) for gi in groups_by_type.standard ], all=GroupGet.from_domain_model(*groups_by_type.everyone), - product=GroupGet.from_domain_model(*my_product_group) - if my_product_group - else None, + product=( + GroupGet.from_domain_model(*my_product_group) + if my_product_group + else None + ), ) class GroupUserGet(OutputSchemaWithoutCamelCase): - # Identifiers id: Annotated[UserID | None, Field(description="the user's id")] = None - user_name: Annotated[UserNameID, Field(alias="userName")] + user_name: Annotated[ + UserNameID | None, Field(alias="userName", description="None if private") + ] = None gid: Annotated[ GroupID | None, Field(description="the user primary gid"), ] = None - # Private Profile login: Annotated[ LowerCaseEmailStr | None, - Field(description="the user's email, if privacy settings allows"), - ] = None - first_name: Annotated[ - str | None, Field(description="If privacy settings allows") - ] = None - last_name: Annotated[ - str | None, Field(description="If privacy settings allows") + Field(description="the user's email or None if private"), ] = None + first_name: Annotated[str | None, Field(description="None if private")] = None + last_name: Annotated[str | None, Field(description="None if private")] = None gravatar_id: Annotated[ str | None, Field(description="the user gravatar id hash", deprecated=True) ] = None @@ -309,6 +307,11 @@ class GroupUserGet(OutputSchemaWithoutCamelCase): "userName": "mrprivate", "gid": "55", }, + # very private user + { + "id": "6", + "gid": "55", + }, { "id": "56", "userName": "mrpublic", diff 
--git a/packages/models-library/src/models_library/api_schemas_webserver/licensed_items.py b/packages/models-library/src/models_library/api_schemas_webserver/licensed_items.py index dec323199ff..616c81a1859 100644 --- a/packages/models-library/src/models_library/api_schemas_webserver/licensed_items.py +++ b/packages/models-library/src/models_library/api_schemas_webserver/licensed_items.py @@ -1,39 +1,89 @@ -from datetime import datetime -from typing import Any, NamedTuple, cast +from datetime import date, datetime +from typing import Literal, NamedTuple, NotRequired, Self, cast -from models_library.licensed_items import ( +from models_library.basic_types import IDStr +from models_library.resource_tracker import PricingPlanId +from pydantic import BaseModel, ConfigDict, HttpUrl, PositiveInt +from pydantic.config import JsonDict +from typing_extensions import TypedDict + +from ..licenses import ( VIP_DETAILS_EXAMPLE, + FeaturesDict, + LicensedItem, LicensedItemID, + LicensedItemKey, + LicensedItemVersion, LicensedResourceType, ) -from models_library.resource_tracker import PricingPlanId -from models_library.utils.common_validators import to_camel_recursive -from pydantic import AfterValidator, BaseModel, ConfigDict, PositiveInt -from pydantic.config import JsonDict -from typing_extensions import Annotated - from ._base import OutputSchema # RPC +class LicensedResourceSourceFeaturesDict(TypedDict): + age: NotRequired[str] + date: date + ethnicity: NotRequired[str] + functionality: NotRequired[str] + height: NotRequired[str] + name: NotRequired[str] + sex: NotRequired[str] + species: NotRequired[str] + version: NotRequired[str] + weight: NotRequired[str] + + +class LicensedResourceSource(BaseModel): + id: int + description: str + thumbnail: str + features: LicensedResourceSourceFeaturesDict + doi: str | None + license_key: str + license_version: str + protection: Literal["Code", "PayPal"] + available_from_url: HttpUrl | None + + +class LicensedResource(BaseModel): + source: LicensedResourceSource + category_id: IDStr + category_display: str + terms_of_use_url: HttpUrl | None = None + + class LicensedItemRpcGet(BaseModel): licensed_item_id: LicensedItemID + key: LicensedItemKey + version: LicensedItemVersion display_name: str licensed_resource_type: LicensedResourceType - licensed_resource_data: dict[str, Any] + licensed_resources: list[LicensedResource] pricing_plan_id: PricingPlanId + is_hidden_on_market: bool created_at: datetime modified_at: datetime + model_config = ConfigDict( json_schema_extra={ "examples": [ { "licensed_item_id": "0362b88b-91f8-4b41-867c-35544ad1f7a1", + "key": "Duke", + "version": "1.0.0", "display_name": "best-model", "licensed_resource_type": f"{LicensedResourceType.VIP_MODEL}", - "licensed_resource_data": cast(JsonDict, VIP_DETAILS_EXAMPLE), + "licensed_resources": [ + { + "source": cast(JsonDict, VIP_DETAILS_EXAMPLE), + "category_id": "HumanWholeBody", + "category_display": "Humans", + "terms_of_use_url": None, + } + ], "pricing_plan_id": "15", + "is_hidden_on_market": False, "created_at": "2024-12-12 09:59:26.422140", "modified_at": "2024-12-12 09:59:26.422140", } @@ -50,33 +100,96 @@ class LicensedItemRpcGetPage(NamedTuple): # Rest +class _ItisVipRestData(OutputSchema): + id: int + description: str + thumbnail: str + features: FeaturesDict # NOTE: here there is a bit of coupling with domain model + doi: str | None + license_version: str + + +class _ItisVipResourceRestData(OutputSchema): + source: _ItisVipRestData + + class LicensedItemRestGet(OutputSchema): 
licensed_item_id: LicensedItemID + key: LicensedItemKey + version: LicensedItemVersion + display_name: str licensed_resource_type: LicensedResourceType - licensed_resource_data: Annotated[ - dict[str, Any], AfterValidator(to_camel_recursive) - ] + licensed_resources: list[_ItisVipResourceRestData] pricing_plan_id: PricingPlanId + category_id: IDStr + category_display: str + category_icon: HttpUrl | None = None # NOTE: Placeholder until provided @odeimaiz + terms_of_use_url: HttpUrl | None = None # NOTE: Placeholder until provided @mguidon + created_at: datetime modified_at: datetime - model_config = ConfigDict( - json_schema_extra={ - "examples": [ - { - "licensed_item_id": "0362b88b-91f8-4b41-867c-35544ad1f7a1", - "display_name": "best-model", - "licensed_resource_type": f"{LicensedResourceType.VIP_MODEL}", - "licensed_resource_data": cast(JsonDict, VIP_DETAILS_EXAMPLE), - "pricing_plan_id": "15", - "created_at": "2024-12-12 09:59:26.422140", - "modified_at": "2024-12-12 09:59:26.422140", - } - ] - } - ) + @staticmethod + def _update_json_schema_extra(schema: JsonDict) -> None: + schema.update( + { + "examples": [ + { + "licensedItemId": "0362b88b-91f8-4b41-867c-35544ad1f7a1", + "key": "Duke", + "version": "1.0.0", + "displayName": "my best model", + "licensedResourceType": f"{LicensedResourceType.VIP_MODEL}", + "licensedResources": [ + cast( + JsonDict, + { + "source": {**VIP_DETAILS_EXAMPLE, "doi": doi}, + }, + ) + ], + "pricingPlanId": "15", + "categoryId": "HumanWholeBody", + "categoryDisplay": "Humans", + "createdAt": "2024-12-12 09:59:26.422140", + "modifiedAt": "2024-12-12 09:59:26.422140", + } + for doi in ["10.1000/xyz123", None] + ] + } + ) + + model_config = ConfigDict(json_schema_extra=_update_json_schema_extra) + + @classmethod + def from_domain_model(cls, item: LicensedItem) -> Self: + return cls.model_validate( + { + **item.model_dump( + include={ + "licensed_item_id", + "key", + "version", + "display_name", + "licensed_resource_type", + "pricing_plan_id", + "created_at", + "modified_at", + }, + exclude_unset=True, + ), + "licensed_resources": [ + _ItisVipResourceRestData(**x) for x in item.licensed_resources + ], + "category_id": item.licensed_resources[0]["category_id"], + "category_display": item.licensed_resources[0]["category_display"], + "terms_of_use_url": item.licensed_resources[0].get( + "terms_of_use_url", None + ), + } + ) class LicensedItemRestGetPage(NamedTuple): diff --git a/packages/models-library/src/models_library/api_schemas_webserver/licensed_items_checkouts.py b/packages/models-library/src/models_library/api_schemas_webserver/licensed_items_checkouts.py index 5b2c5e6464e..38e1f11ba28 100644 --- a/packages/models-library/src/models_library/api_schemas_webserver/licensed_items_checkouts.py +++ b/packages/models-library/src/models_library/api_schemas_webserver/licensed_items_checkouts.py @@ -4,7 +4,7 @@ from models_library.emails import LowerCaseEmailStr from pydantic import BaseModel, ConfigDict, PositiveInt -from ..licensed_items import LicensedItemID +from ..licenses import LicensedItemID, LicensedItemKey, LicensedItemVersion from ..products import ProductName from ..resource_tracker_licensed_items_checkouts import LicensedItemCheckoutID from ..users import UserID @@ -17,6 +17,8 @@ class LicensedItemCheckoutRpcGet(BaseModel): licensed_item_checkout_id: LicensedItemCheckoutID licensed_item_id: LicensedItemID + key: LicensedItemKey + version: LicensedItemVersion wallet_id: WalletID user_id: UserID product_name: ProductName @@ -29,6 +31,8 @@ class 
LicensedItemCheckoutRpcGet(BaseModel): { "licensed_item_checkout_id": "633ef980-6f3e-4b1a-989a-bd77bf9a5d6b", "licensed_item_id": "0362b88b-91f8-4b41-867c-35544ad1f7a1", + "key": "Duke", + "version": "1.0.0", "wallet_id": 6, "user_id": 27845, "product_name": "osparc", @@ -52,6 +56,8 @@ class LicensedItemCheckoutRpcGetPage(NamedTuple): class LicensedItemCheckoutRestGet(OutputSchema): licensed_item_checkout_id: LicensedItemCheckoutID licensed_item_id: LicensedItemID + key: str + version: str wallet_id: WalletID user_id: UserID user_email: LowerCaseEmailStr diff --git a/packages/models-library/src/models_library/api_schemas_webserver/licensed_items_purchases.py b/packages/models-library/src/models_library/api_schemas_webserver/licensed_items_purchases.py index 69e65577c90..139df916b25 100644 --- a/packages/models-library/src/models_library/api_schemas_webserver/licensed_items_purchases.py +++ b/packages/models-library/src/models_library/api_schemas_webserver/licensed_items_purchases.py @@ -5,7 +5,7 @@ from models_library.emails import LowerCaseEmailStr from pydantic import PositiveInt -from ..licensed_items import LicensedItemID +from ..licenses import LicensedItemID, LicensedItemKey, LicensedItemVersion from ..products import ProductName from ..resource_tracker import PricingUnitCostId from ..resource_tracker_licensed_items_purchases import LicensedItemPurchaseID @@ -18,6 +18,8 @@ class LicensedItemPurchaseGet(OutputSchema): licensed_item_purchase_id: LicensedItemPurchaseID product_name: ProductName licensed_item_id: LicensedItemID + key: LicensedItemKey + version: LicensedItemVersion wallet_id: WalletID pricing_unit_cost_id: PricingUnitCostId pricing_unit_cost: Decimal diff --git a/packages/models-library/src/models_library/api_schemas_webserver/product.py b/packages/models-library/src/models_library/api_schemas_webserver/product.py deleted file mode 100644 index 1e747c554fb..00000000000 --- a/packages/models-library/src/models_library/api_schemas_webserver/product.py +++ /dev/null @@ -1,130 +0,0 @@ -from datetime import datetime -from typing import Annotated, TypeAlias - -from pydantic import ( - ConfigDict, - Field, - HttpUrl, - NonNegativeFloat, - NonNegativeInt, - PlainSerializer, - PositiveInt, -) - -from ..basic_types import IDStr, NonNegativeDecimal -from ..emails import LowerCaseEmailStr -from ..products import ProductName -from ._base import InputSchema, OutputSchema - - -class GetCreditPrice(OutputSchema): - product_name: str - usd_per_credit: Annotated[ - NonNegativeDecimal, - PlainSerializer(float, return_type=NonNegativeFloat, when_used="json"), - ] | None = Field( - ..., - description="Price of a credit in USD. 
" - "If None, then this product's price is UNDEFINED", - ) - min_payment_amount_usd: NonNegativeInt | None = Field( - ..., - description="Minimum amount (included) in USD that can be paid for this product" - "Can be None if this product's price is UNDEFINED", - ) - - model_config = ConfigDict( - json_schema_extra={ - "examples": [ - { - "productName": "osparc", - "usdPerCredit": None, - "minPaymentAmountUsd": None, - }, - { - "productName": "osparc", - "usdPerCredit": "10", - "minPaymentAmountUsd": "10", - }, - ] - } - ) - - -class GetProductTemplate(OutputSchema): - id_: IDStr = Field(..., alias="id") - content: str - - -class UpdateProductTemplate(InputSchema): - content: str - - -class GetProduct(OutputSchema): - name: ProductName - display_name: str - short_name: str | None = Field( - default=None, description="Short display name for SMS" - ) - - vendor: dict | None = Field(default=None, description="vendor attributes") - issues: list[dict] | None = Field( - default=None, description="Reference to issues tracker" - ) - manuals: list[dict] | None = Field(default=None, description="List of manuals") - support: list[dict] | None = Field( - default=None, description="List of support resources" - ) - - login_settings: dict - max_open_studies_per_user: PositiveInt | None - is_payment_enabled: bool - credits_per_usd: NonNegativeDecimal | None - - templates: list[GetProductTemplate] = Field( - default_factory=list, - description="List of templates available to this product for communications (e.g. emails, sms, etc)", - ) - - -ExtraCreditsUsdRangeInt: TypeAlias = Annotated[int, Field(ge=0, lt=500)] - - -class GenerateInvitation(InputSchema): - guest: LowerCaseEmailStr - trial_account_days: PositiveInt | None = None - extra_credits_in_usd: ExtraCreditsUsdRangeInt | None = None - - -class InvitationGenerated(OutputSchema): - product_name: ProductName - issuer: str - guest: LowerCaseEmailStr - trial_account_days: PositiveInt | None = None - extra_credits_in_usd: PositiveInt | None = None - created: datetime - invitation_link: HttpUrl - - model_config = ConfigDict( - json_schema_extra={ - "examples": [ - { - "productName": "osparc", - "issuer": "john.doe", - "guest": "guest@example.com", - "trialAccountDays": 7, - "extraCreditsInUsd": 30, - "created": "2023-09-27T15:30:00", - "invitationLink": "https://example.com/invitation#1234", - }, - # w/o optional - { - "productName": "osparc", - "issuer": "john.doe@email.com", - "guest": "guest@example.com", - "created": "2023-09-27T15:30:00", - "invitationLink": "https://example.com/invitation#1234", - }, - ] - } - ) diff --git a/packages/models-library/src/models_library/api_schemas_webserver/products.py b/packages/models-library/src/models_library/api_schemas_webserver/products.py new file mode 100644 index 00000000000..61f03a2c5e9 --- /dev/null +++ b/packages/models-library/src/models_library/api_schemas_webserver/products.py @@ -0,0 +1,181 @@ +from datetime import datetime +from decimal import Decimal +from typing import Annotated, Any, TypeAlias + +from common_library.basic_types import DEFAULT_FACTORY +from pydantic import ( + BaseModel, + ConfigDict, + Field, + HttpUrl, + NonNegativeFloat, + NonNegativeInt, + PlainSerializer, + PositiveInt, +) +from pydantic.config import JsonDict + +from ..basic_types import IDStr, NonNegativeDecimal +from ..emails import LowerCaseEmailStr +from ..products import ProductName +from ._base import InputSchema, OutputSchema + + +class CreditResultRpcGet(BaseModel): + product_name: ProductName + credit_amount: Decimal + 
+ + @staticmethod + def _update_json_schema_extra(schema: JsonDict) -> None: + schema.update( + { + "examples": [ + { + "product_name": "s4l", + "credit_amount": Decimal("15.5"), # type: ignore[dict-item] + }, + ] + } + ) + + model_config = ConfigDict( + json_schema_extra=_update_json_schema_extra, + ) + + +class CreditPriceGet(OutputSchema): + product_name: str + usd_per_credit: Annotated[ + Annotated[ + NonNegativeDecimal, + PlainSerializer(float, return_type=NonNegativeFloat, when_used="json"), + ] + | None, + Field( + description="Price of a credit in USD. " + "If None, then this product's price is UNDEFINED", + ), + ] + + min_payment_amount_usd: Annotated[ + NonNegativeInt | None, + Field( + description="Minimum amount (included) in USD that can be paid for this product. " + "Can be None if this product's price is UNDEFINED", + ), + ] + + @staticmethod + def _update_json_schema_extra(schema: JsonDict) -> None: + schema.update( + { + "examples": [ + { + "productName": "osparc", + "usdPerCredit": None, + "minPaymentAmountUsd": None, + }, + { + "productName": "osparc", + "usdPerCredit": "10", + "minPaymentAmountUsd": "10", + }, + ] + } + ) + + model_config = ConfigDict( + json_schema_extra=_update_json_schema_extra, + ) + + +class ProductTemplateGet(OutputSchema): + id_: Annotated[IDStr, Field(alias="id")] + content: str + + +class ProductGet(OutputSchema): + name: ProductName + display_name: str + short_name: Annotated[ + str | None, Field(description="Short display name for SMS") + ] = None + + vendor: Annotated[dict | None, Field(description="vendor attributes")] = None + issues: Annotated[ + list[dict] | None, Field(description="Reference to issues tracker") + ] = None + manuals: Annotated[list[dict] | None, Field(description="List of manuals")] = None + support: Annotated[ + list[dict] | None, Field(description="List of support resources") + ] = None + + login_settings: dict + max_open_studies_per_user: PositiveInt | None + is_payment_enabled: bool + credits_per_usd: NonNegativeDecimal | None + + templates: Annotated[ + list[ProductTemplateGet], + Field( + description="List of templates available to this product for communications (e.g. 
emails, sms, etc)", + default_factory=list, + ), + ] = DEFAULT_FACTORY + + +class ProductUIGet(OutputSchema): + product_name: ProductName + ui: Annotated[ + dict[str, Any], + Field(description="Front-end owned ui product configuration"), + ] + + +ExtraCreditsUsdRangeInt: TypeAlias = Annotated[int, Field(ge=0, lt=500)] + + +class InvitationGenerate(InputSchema): + guest: LowerCaseEmailStr + trial_account_days: PositiveInt | None = None + extra_credits_in_usd: ExtraCreditsUsdRangeInt | None = None + + +class InvitationGenerated(OutputSchema): + product_name: ProductName + issuer: str + guest: LowerCaseEmailStr + trial_account_days: PositiveInt | None = None + extra_credits_in_usd: PositiveInt | None = None + created: datetime + invitation_link: HttpUrl + + @staticmethod + def _update_json_schema_extra(schema: JsonDict) -> None: + schema.update( + { + "examples": [ + { + "productName": "osparc", + "issuer": "john.doe", + "guest": "guest@example.com", + "trialAccountDays": 7, + "extraCreditsInUsd": 30, + "created": "2023-09-27T15:30:00", + "invitationLink": "https://example.com/invitation#1234", + }, + # w/o optional + { + "productName": "osparc", + "issuer": "john.doe@email.com", + "guest": "guest@example.com", + "created": "2023-09-27T15:30:00", + "invitationLink": "https://example.com/invitation#1234", + }, + ] + } + ) + + model_config = ConfigDict( + json_schema_extra=_update_json_schema_extra, + ) diff --git a/packages/models-library/src/models_library/api_schemas_webserver/projects.py b/packages/models-library/src/models_library/api_schemas_webserver/projects.py index 27e5e722d4a..586993a5ec4 100644 --- a/packages/models-library/src/models_library/api_schemas_webserver/projects.py +++ b/packages/models-library/src/models_library/api_schemas_webserver/projects.py @@ -9,6 +9,7 @@ from datetime import datetime from typing import Annotated, Any, Literal, Self, TypeAlias +from common_library.basic_types import DEFAULT_FACTORY from common_library.dict_tools import remap_keys from pydantic import ( BeforeValidator, @@ -27,7 +28,6 @@ from ..projects import ClassifierID, DateTimeStr, NodesDict, ProjectID from ..projects_access import AccessRights, GroupIDStr from ..projects_state import ProjectState -from ..projects_ui import StudyUI from ..utils._original_fastapi_encoders import jsonable_encoder from ..utils.common_validators import ( empty_str_to_none_pre_validator, @@ -37,18 +37,28 @@ from ..workspaces import WorkspaceID from ._base import EmptyModel, InputSchema, OutputSchema from .permalinks import ProjectPermalink +from .projects_ui import StudyUI class ProjectCreateNew(InputSchema): uuid: ProjectID | None = None # NOTE: suggested uuid! but could be different! 
+ + # display name: str description: str | None = None thumbnail: HttpUrl | None = None + workbench: NodesDict + access_rights: dict[GroupIDStr, AccessRights] - tags: list[int] = Field(default_factory=list) - classifiers: list[ClassifierID] = Field(default_factory=list) + + tags: Annotated[list[int], Field(default_factory=list)] = DEFAULT_FACTORY + classifiers: Annotated[ + list[ClassifierID], Field(default_factory=list) + ] = DEFAULT_FACTORY + ui: StudyUI | None = None + workspace_id: WorkspaceID | None = None folder_id: FolderID | None = None @@ -60,6 +70,13 @@ class ProjectCreateNew(InputSchema): null_or_none_str_to_none_validator ) + def to_domain_model(self) -> dict[str, Any]: + return self.model_dump( + exclude_unset=True, + by_alias=True, + exclude_none=True, + ) + # NOTE: based on OVERRIDABLE_DOCUMENT_KEYS class ProjectCopyOverride(InputSchema): @@ -72,36 +89,56 @@ class ProjectCopyOverride(InputSchema): empty_str_to_none_pre_validator ) + def to_domain_model(self) -> dict[str, Any]: + return self.model_dump( + exclude_unset=True, + by_alias=True, + exclude_none=True, + ) + class ProjectGet(OutputSchema): uuid: ProjectID + + # display name: str description: str thumbnail: HttpUrl | Literal[""] - creation_date: DateTimeStr - last_change_date: DateTimeStr + workbench: NodesDict + prj_owner: LowerCaseEmailStr access_rights: dict[GroupIDStr, AccessRights] - tags: list[int] - classifiers: list[ClassifierID] = Field( - default_factory=list, json_schema_extra={"default": []} - ) + + # state + creation_date: DateTimeStr + last_change_date: DateTimeStr state: ProjectState | None = None - ui: EmptyModel | StudyUI | None = None + trashed_at: datetime | None + trashed_by: Annotated[ + GroupID | None, Field(description="The primary gid of the user who trashed") + ] + + # labeling + tags: list[int] + classifiers: Annotated[ + list[ClassifierID], + Field(default_factory=list, json_schema_extra={"default": []}), + ] = DEFAULT_FACTORY + quality: Annotated[ dict[str, Any], Field(default_factory=dict, json_schema_extra={"default": {}}) - ] + ] = DEFAULT_FACTORY + + # front-end + ui: EmptyModel | StudyUI | None = None dev: dict | None + permalink: ProjectPermalink | None = None + workspace_id: WorkspaceID | None folder_id: FolderID | None - trashed_at: datetime | None - trashed_by: Annotated[ - GroupID | None, Field(description="The primary gid of the user who trashed") - ] - _empty_description = field_validator("description", mode="before")( none_to_empty_str_pre_validator ) @@ -110,9 +147,9 @@ class ProjectGet(OutputSchema): @classmethod def from_domain_model(cls, project_data: dict[str, Any]) -> Self: - trimmed_data = copy.copy(project_data) - # project_data["trashed_by"] is a UserID - # project_data["trashed_by_primary_gid"] is a GroupID + trimmed_data = copy.deepcopy(project_data) + # NOTE: project_data["trashed_by"] is a UserID + # NOTE: project_data["trashed_by_primary_gid"] is a GroupID trimmed_data.pop("trashed_by", None) trimmed_data.pop("trashedBy", None) @@ -137,43 +174,47 @@ class ProjectListItem(ProjectGet): class ProjectReplace(InputSchema): uuid: ProjectID + name: ShortTruncatedStr description: LongTruncatedStr thumbnail: Annotated[ HttpUrl | None, BeforeValidator(empty_str_to_none_pre_validator), - ] = Field(default=None) + ] = None + creation_date: DateTimeStr last_change_date: DateTimeStr workbench: NodesDict access_rights: dict[GroupIDStr, AccessRights] + tags: Annotated[ list[int] | None, Field(default_factory=list, json_schema_extra={"default": []}) - ] + ] = DEFAULT_FACTORY 
classifiers: Annotated[ list[ClassifierID] | None, Field(default_factory=list, json_schema_extra={"default": []}), - ] + ] = DEFAULT_FACTORY ui: StudyUI | None = None quality: Annotated[ dict[str, Any], Field(default_factory=dict, json_schema_extra={"default": {}}) - ] + ] = DEFAULT_FACTORY class ProjectPatch(InputSchema): - name: ShortTruncatedStr | None = Field(default=None) - description: LongTruncatedStr | None = Field(default=None) + name: ShortTruncatedStr | None = None + description: LongTruncatedStr | None = None thumbnail: Annotated[ HttpUrl | None, BeforeValidator(empty_str_to_none_pre_validator), PlainSerializer(lambda x: str(x) if x is not None else None), ] = None - access_rights: dict[GroupIDStr, AccessRights] | None = Field(default=None) - classifiers: list[ClassifierID] | None = Field(default=None) - dev: dict | None = Field(default=None) + + access_rights: dict[GroupIDStr, AccessRights] | None = None + classifiers: list[ClassifierID] | None = None + dev: dict | None = None ui: Annotated[ StudyUI | None, BeforeValidator(empty_str_to_none_pre_validator), @@ -182,8 +223,8 @@ class ProjectPatch(InputSchema): obj, exclude_unset=True, by_alias=False ) # For the sake of backward compatibility ), - ] = Field(default=None) - quality: dict[str, Any] | None = Field(default=None) + ] = None + quality: dict[str, Any] | None = None def to_domain_model(self) -> dict[str, Any]: return self.model_dump(exclude_unset=True, by_alias=False) diff --git a/packages/models-library/src/models_library/api_schemas_webserver/projects_nodes.py b/packages/models-library/src/models_library/api_schemas_webserver/projects_nodes.py index 69aafba0962..a8932553201 100644 --- a/packages/models-library/src/models_library/api_schemas_webserver/projects_nodes.py +++ b/packages/models-library/src/models_library/api_schemas_webserver/projects_nodes.py @@ -1,8 +1,12 @@ # mypy: disable-error-code=truthy-function from typing import Annotated, Any, Literal, TypeAlias +from models_library.groups import GroupID +from models_library.projects import ProjectID +from models_library.services_history import ServiceRelease from pydantic import ConfigDict, Field +from ..access_rights import ExecutableAccessRights from ..api_schemas_directorv2.dynamic_services import RetrieveDataOut from ..basic_types import PortInt from ..projects_nodes import InputID, InputsDict, PartialNode @@ -40,7 +44,7 @@ class NodePatch(InputSchemaWithoutCamelCase): ] inputs_required: Annotated[ list[InputID] | None, - Field(alias="inputsRequired"), + Field(alias="inputsRequired"), ] = None input_nodes: Annotated[ list[NodeID] | None, @@ -55,9 +59,9 @@ class NodePatch(InputSchemaWithoutCamelCase): ), ] = None boot_options: Annotated[BootOptions | None, Field(alias="bootOptions")] = None - outputs: dict[ - str, Any - ] | None = None # NOTE: it is used by frontend for File Picker + outputs: dict[str, Any] | None = ( + None # NOTE: it is used by frontend for File Picker + ) def to_domain_model(self) -> PartialNode: data = self.model_dump( @@ -197,3 +201,20 @@ class NodeRetrieve(InputSchemaWithoutCamelCase): class NodeRetrieved(RetrieveDataOut): model_config = OutputSchema.model_config + + +class NodeServiceGet(OutputSchema): + key: ServiceKey + release: ServiceRelease + owner: Annotated[ + GroupID | None, + Field( + description="Service owner's primary group id, or None if ownership is not yet defined" + ), + ] + my_access_rights: ExecutableAccessRights + + +class ProjectNodeServicesGet(OutputSchema): + project_uuid: ProjectID + services: list[NodeServiceGet] 
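The recurring pattern in these hunks — `tags: Annotated[list[int], Field(default_factory=list)] = DEFAULT_FACTORY` together with a `to_domain_model()` that dumps with `exclude_unset=True` — deserves a quick illustration. Below is a minimal, runnable sketch of the idea; `ProjectCreateDemo` is a hypothetical stand-in for `ProjectCreateNew`, and the local `DEFAULT_FACTORY` mimics the sentinel from `common_library.basic_types` (assumed here to be pydantic's `PydanticUndefined`):

```python
from typing import Annotated, Any

from pydantic import BaseModel, Field
from pydantic_core import PydanticUndefined

# Assumed stand-in for common_library.basic_types.DEFAULT_FACTORY: assigning
# PydanticUndefined gives static type-checkers a visible default, while
# pydantic ignores it and still calls the Field's default_factory.
DEFAULT_FACTORY: Any = PydanticUndefined


class ProjectCreateDemo(BaseModel):
    # hypothetical, reduced stand-in for ProjectCreateNew
    name: str
    tags: Annotated[list[int], Field(default_factory=list)] = DEFAULT_FACTORY

    def to_domain_model(self) -> dict[str, Any]:
        # exclude_unset drops fields the client never sent, so the domain
        # layer can tell "not provided" apart from "explicitly set"
        return self.model_dump(exclude_unset=True, exclude_none=True)


body = ProjectCreateDemo.model_validate({"name": "demo"})
assert body.tags == []  # default_factory still runs at validation time
assert body.to_domain_model() == {"name": "demo"}  # unset fields are dropped
```

The sentinel assignment exists only to satisfy type-checkers that flag a `Field(default_factory=...)` without an assigned default; at runtime the factory-built default behaves exactly as before, and `exclude_unset=True` keeps the PATCH/POST semantics intact.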
diff --git a/packages/models-library/src/models_library/api_schemas_webserver/projects_nodes_ui.py b/packages/models-library/src/models_library/api_schemas_webserver/projects_nodes_ui.py new file mode 100644 index 00000000000..a1e3b7755b1 --- /dev/null +++ b/packages/models-library/src/models_library/api_schemas_webserver/projects_nodes_ui.py @@ -0,0 +1,14 @@ +from typing import Annotated, TypeAlias + +from pydantic import BaseModel, ConfigDict, Field, PlainSerializer +from pydantic_extra_types.color import Color + +from ..projects_nodes_layout import Position + +PositionUI: TypeAlias = Position + + +class MarkerUI(BaseModel): + color: Annotated[Color, PlainSerializer(Color.as_hex), Field(...)] + + model_config = ConfigDict(extra="forbid") diff --git a/packages/models-library/src/models_library/api_schemas_webserver/projects_ui.py b/packages/models-library/src/models_library/api_schemas_webserver/projects_ui.py new file mode 100644 index 00000000000..5da6824560a --- /dev/null +++ b/packages/models-library/src/models_library/api_schemas_webserver/projects_ui.py @@ -0,0 +1,181 @@ +""" + Models Front-end UI +""" + +from typing import Annotated, Literal, NotRequired + +from pydantic import ( + BaseModel, + ConfigDict, + Field, + HttpUrl, + PlainSerializer, + field_validator, +) +from pydantic.config import JsonDict +from pydantic_extra_types.color import Color +from typing_extensions import ( # https://docs.pydantic.dev/latest/api/standard_library_types/#typeddict + TypedDict, +) + +from ..projects_nodes_io import NodeID, NodeIDStr +from ..utils.common_validators import empty_str_to_none_pre_validator +from .projects_nodes_ui import MarkerUI, PositionUI + + +class WorkbenchUI(BaseModel): + position: Annotated[ + PositionUI, + Field(description="The node position in the workbench"), + ] + marker: MarkerUI | None = None + + model_config = ConfigDict(extra="forbid") + + +class SlideshowUI(TypedDict): + position: int + instructions: NotRequired[str | None] # Instructions about what to do in this step + + +class AnnotationUI(BaseModel): + type: Literal["note", "rect", "text"] + color: Annotated[Color, PlainSerializer(Color.as_hex)] + attributes: Annotated[dict, Field(description="svg attributes")] + + @staticmethod + def _update_json_schema_extra(schema: JsonDict) -> None: + schema.update( + { + "examples": [ + { + "type": "note", + "color": "#FFFF00", + "attributes": { + "x": 415, + "y": 100, + "width": 117, + "height": 26, + "destinataryGid": 4, + "text": "ToDo", + }, + }, + { + "type": "rect", + "color": "#FF0000", + "attributes": {"x": 415, "y": 100, "width": 117, "height": 26}, + }, + { + "type": "text", + "color": "#0000FF", + "attributes": {"x": 415, "y": 100, "text": "Hey!"}, + }, + ] + }, + ) + + model_config = ConfigDict( + extra="forbid", json_schema_extra=_update_json_schema_extra + ) + + +class StudyUI(BaseModel): + # Model fully controlled by the UI and stored under `projects.ui` + icon: HttpUrl | None = None + + workbench: dict[NodeIDStr, WorkbenchUI] | None = None + slideshow: dict[NodeIDStr, SlideshowUI] | None = None + current_node_id: Annotated[NodeID | None, Field(alias="currentNodeId")] = None + annotations: dict[NodeIDStr, AnnotationUI] | None = None + + _empty_is_none = field_validator("*", mode="before")( + empty_str_to_none_pre_validator + ) + + @staticmethod + def _update_json_schema_extra(schema: JsonDict) -> None: + schema.update( + { + "examples": [ + { + "workbench": { + "801407c9-abb1-400d-ac49-35b0b2334a34": { + "position": {"x": 250, "y": 100} + } + } + }, + { + 
"icon": "https://cdn-icons-png.flaticon.com/512/25/25231.png", + "mode": "app", + "slideshow": { + "4b3345e5-861f-47b0-8b52-a4508449be79": { + "position": 1, + "instructions": None, + }, + "eaeee3dc-9ae1-4bf6-827e-798fd7cad848": { + "position": 0, + "instructions": None, + }, + }, + "workbench": { + "4b3345e5-861f-47b0-8b52-a4508449be79": { + "position": {"x": 460, "y": 260} + }, + "eaeee3dc-9ae1-4bf6-827e-798fd7cad848": { + "position": {"x": 220, "y": 600} + }, + }, + "annotations": { + "4375ae62-76ce-42a4-9cea-608a2ba74762": { + "type": "rect", + "color": "#650cff", + "attributes": { + "x": 79, + "y": 194, + "width": "320", + "height": "364", + }, + }, + "52567518-cedc-47e0-ad7f-6989fb8c5649": { + "type": "note", + "color": "#ffff01", + "attributes": { + "x": 151, + "y": 376, + "text": "ll", + "recipientGid": None, + }, + }, + "764a17c8-36d7-4865-a5cb-db9b4f82ce80": { + "type": "note", + "color": "#650cff", + "attributes": { + "x": 169, + "y": 19, + "text": "yeah m", + "recipientGid": 20630, + }, + }, + "cf94f068-259c-4192-89f9-b2a56d51249c": { + "type": "text", + "color": "#e9aeab", + "attributes": { + "x": 119, + "y": 223, + "text": "pppoo", + "color": "#E9AEAB", + "fontSize": 12, + }, + }, + }, + "current_node_id": "4b3345e5-861f-47b0-8b52-a4508449be79", + }, + ] + } + ) + + model_config = ConfigDict( + extra="allow", + populate_by_name=True, + json_schema_extra=_update_json_schema_extra, + ) diff --git a/packages/models-library/src/models_library/api_schemas_webserver/resource_usage.py b/packages/models-library/src/models_library/api_schemas_webserver/resource_usage.py index 253fce9f4bb..78e0c005abc 100644 --- a/packages/models-library/src/models_library/api_schemas_webserver/resource_usage.py +++ b/packages/models-library/src/models_library/api_schemas_webserver/resource_usage.py @@ -6,6 +6,7 @@ from ..projects import ProjectID from ..projects_nodes_io import NodeID from ..resource_tracker import ( + CreditTransactionStatus, HardwareInfo, PricingPlanClassification, PricingPlanId, @@ -13,7 +14,8 @@ PricingUnitId, ServiceRunStatus, SpecificInfo, - UnitExtraInfo, + UnitExtraInfoLicense, + UnitExtraInfoTier, ) from ..services import ServiceKey, ServiceVersion from ..services_types import ServiceRunID @@ -26,13 +28,15 @@ class ServiceRunGet( BaseModel -): # NOTE: this is already in use so I didnt modidy inheritance from OutputSchema +): # NOTE: this is already in use so I didnt modify inheritance from OutputSchema service_run_id: ServiceRunID wallet_id: WalletID | None wallet_name: str | None user_id: UserID + user_email: str project_id: ProjectID project_name: str + project_tags: list[str] node_id: NodeID node_name: str root_parent_project_id: ProjectID @@ -43,12 +47,15 @@ class ServiceRunGet( started_at: datetime stopped_at: datetime | None service_run_status: ServiceRunStatus + # Cost in credits + credit_cost: Decimal | None + transaction_status: CreditTransactionStatus | None class PricingUnitGet(OutputSchema): pricing_unit_id: PricingUnitId unit_name: str - unit_extra_info: UnitExtraInfo + unit_extra_info: UnitExtraInfoTier | UnitExtraInfoLicense current_cost_per_unit: Decimal default: bool @@ -114,7 +121,7 @@ class UpdatePricingPlanBodyParams(InputSchema): class CreatePricingUnitBodyParams(InputSchema): unit_name: str - unit_extra_info: UnitExtraInfo + unit_extra_info: UnitExtraInfoTier | UnitExtraInfoLicense default: bool specific_info: SpecificInfo cost_per_unit: Decimal @@ -128,7 +135,7 @@ class CreatePricingUnitBodyParams(InputSchema): class 
UpdatePricingUnitBodyParams(InputSchema): unit_name: str - unit_extra_info: UnitExtraInfo + unit_extra_info: UnitExtraInfoTier | UnitExtraInfoLicense default: bool specific_info: SpecificInfo pricing_unit_cost_update: PricingUnitCostUpdate | None = Field(default=None) diff --git a/packages/models-library/src/models_library/api_schemas_webserver/storage.py b/packages/models-library/src/models_library/api_schemas_webserver/storage.py new file mode 100644 index 00000000000..3049bf4d0bd --- /dev/null +++ b/packages/models-library/src/models_library/api_schemas_webserver/storage.py @@ -0,0 +1,46 @@ +from pathlib import Path +from typing import Annotated + +from pydantic import BaseModel, Field + +from ..api_schemas_storage.data_export_async_jobs import DataExportTaskStartInput +from ..api_schemas_storage.storage_schemas import ( + DEFAULT_NUMBER_OF_PATHS_PER_PAGE, + MAX_NUMBER_OF_PATHS_PER_PAGE, +) +from ..projects_nodes_io import LocationID, StorageFileID +from ..rest_pagination import ( + CursorQueryParameters, +) +from ._base import InputSchema + + +class StorageLocationPathParams(BaseModel): + location_id: LocationID + + +class StoragePathComputeSizeParams(StorageLocationPathParams): + path: Path + + +class ListPathsQueryParams(InputSchema, CursorQueryParameters): + file_filter: Path | None = None + + size: Annotated[ + int, + Field( + description="maximum number of items to return (pagination)", + ge=1, + lt=MAX_NUMBER_OF_PATHS_PER_PAGE, + ), + ] = DEFAULT_NUMBER_OF_PATHS_PER_PAGE + + +class DataExportPost(InputSchema): + paths: list[StorageFileID] + + def to_rpc_schema(self, location_id: LocationID) -> DataExportTaskStartInput: + return DataExportTaskStartInput( + file_and_folder_ids=self.paths, + location_id=location_id, + ) diff --git a/packages/models-library/src/models_library/api_schemas_webserver/users.py b/packages/models-library/src/models_library/api_schemas_webserver/users.py index 0f6e010a4a8..1facf8bb1e9 100644 --- a/packages/models-library/src/models_library/api_schemas_webserver/users.py +++ b/packages/models-library/src/models_library/api_schemas_webserver/users.py @@ -16,6 +16,7 @@ ValidationInfo, field_validator, ) +from pydantic.config import JsonDict from ..basic_types import IDStr from ..emails import LowerCaseEmailStr @@ -46,11 +47,13 @@ class MyProfilePrivacyGet(OutputSchema): + hide_username: bool hide_fullname: bool hide_email: bool class MyProfilePrivacyPatch(InputSchema): + hide_username: bool | None = None hide_fullname: bool | None = None hide_email: bool | None = None @@ -79,23 +82,33 @@ class MyProfileGet(OutputSchemaWithoutCamelCase): privacy: MyProfilePrivacyGet preferences: AggregatedPreferences + @staticmethod + def _update_json_schema_extra(schema: JsonDict) -> None: + schema.update( + { + "examples": [ + { + "id": 42, + "login": "bla@foo.com", + "userName": "bla42", + "role": "admin", # pre + "expirationDate": "2022-09-14", # optional + "preferences": {}, + "privacy": { + "hide_username": 0, + "hide_fullname": 0, + "hide_email": 1, + }, + }, + ] + } + ) + model_config = ConfigDict( # NOTE: old models have an hybrid between snake and camel cases! 
# Should be unified at some point populate_by_name=True, - json_schema_extra={ - "examples": [ - { - "id": 42, - "login": "bla@foo.com", - "userName": "bla42", - "role": "admin", # pre - "expirationDate": "2022-09-14", # optional - "preferences": {}, - "privacy": {"hide_fullname": 0, "hide_email": 1}, - }, - ] - }, + json_schema_extra=_update_json_schema_extra, ) @field_validator("role", mode="before") @@ -141,7 +154,7 @@ def from_domain_model( class MyProfilePatch(InputSchemaWithoutCamelCase): first_name: FirstNameStr | None = None last_name: LastNameStr | None = None - user_name: Annotated[IDStr | None, Field(alias="userName")] = None + user_name: Annotated[IDStr | None, Field(alias="userName", min_length=4)] = None privacy: MyProfilePrivacyPatch | None = None @@ -169,7 +182,7 @@ def _validate_user_name(cls, value: str): # Ensure it doesn't end with a special character if {value[0], value[-1]}.intersection({"_", "-", "."}): - msg = f"Username '{value}' cannot end or start with a special character." + msg = f"Username '{value}' cannot start or end with a special character." raise ValueError(msg) # Check reserved words (example list; extend as needed) @@ -215,7 +228,7 @@ class UserGet(OutputSchema): # Public profile of a user subject to its privacy settings user_id: UserID group_id: GroupID - user_name: UserNameID + user_name: UserNameID | None = None first_name: str | None = None last_name: str | None = None email: EmailStr | None = None diff --git a/packages/models-library/src/models_library/basic_regex.py b/packages/models-library/src/models_library/basic_regex.py index b65c0fd1fe1..33cff4a2f7d 100644 --- a/packages/models-library/src/models_library/basic_regex.py +++ b/packages/models-library/src/models_library/basic_regex.py @@ -1,8 +1,8 @@ -""" Regular expressions patterns to build pydantic contrained strings +"""Regular expression patterns to build pydantic constrained strings - - Variants of the patterns with 'Named Groups' captured are suffixed with NG_RE +- Variants of the patterns with 'Named Groups' captured are suffixed with NG_RE - SEE tests_basic_regex.py for examples +SEE tests_basic_regex.py for examples """ # TODO: for every pattern we should have a formatter function # NOTE: some sites to manualy check ideas @@ -56,6 +56,7 @@ # Datcore file ID DATCORE_FILE_ID_RE = rf"^N:package:{UUID_RE_BASE}$" DATCORE_DATASET_NAME_RE = rf"^N:dataset:{UUID_RE_BASE}$" +DATCORE_COLLECTION_NAME_RE = rf"^N:collection:{UUID_RE_BASE}$" TWILIO_ALPHANUMERIC_SENDER_ID_RE = r"(?!^\d+$)^[a-zA-Z0-9\s]{2,11}$" diff --git a/packages/models-library/src/models_library/bytes_iters.py b/packages/models-library/src/models_library/bytes_iters.py new file mode 100644 index 00000000000..5ec9bb961f3 --- /dev/null +++ b/packages/models-library/src/models_library/bytes_iters.py @@ -0,0 +1,9 @@ +from collections.abc import AsyncIterable, Callable +from typing import TypeAlias + +from pydantic import ByteSize + +BytesIter: TypeAlias = AsyncIterable[bytes] + +BytesIterCallable: TypeAlias = Callable[[], BytesIter] +DataSize: TypeAlias = ByteSize diff --git a/packages/models-library/src/models_library/errors.py b/packages/models-library/src/models_library/errors.py index 26b4aa0d91d..cec882e12b7 100644 --- a/packages/models-library/src/models_library/errors.py +++ b/packages/models-library/src/models_library/errors.py @@ -36,6 +36,7 @@ class ErrorDict(_ErrorDictRequired, total=False): RABBITMQ_CLIENT_UNHEALTHY_MSG = "RabbitMQ client is in a bad state!" REDIS_CLIENT_UNHEALTHY_MSG = "Redis cannot be reached!" 
+DOCKER_API_PROXY_UNHEALTHY_MSG = "docker-api-proxy service is not reachable!" # NOTE: Here we do not just import as 'from pydantic.error_wrappers import ErrorDict' diff --git a/packages/models-library/src/models_library/groups.py b/packages/models-library/src/models_library/groups.py index c0d8692b2e7..d35b1de7dcc 100644 --- a/packages/models-library/src/models_library/groups.py +++ b/packages/models-library/src/models_library/groups.py @@ -9,8 +9,7 @@ TypedDict, ) -from .basic_types import IDStr -from .users import UserID +from .users import UserID, UserNameID from .utils.common_validators import create_enums_pre_validator EVERYONE_GROUP_ID: Final[int] = 1 @@ -99,10 +98,10 @@ class GroupsByTypeTuple(NamedTuple): class GroupMember(BaseModel): # identifiers id: UserID - name: IDStr primary_gid: GroupID # private profile + name: UserNameID | None email: EmailStr | None first_name: str | None last_name: str | None diff --git a/packages/models-library/src/models_library/licensed_items.py b/packages/models-library/src/models_library/licensed_items.py deleted file mode 100644 index cb25687bf6e..00000000000 --- a/packages/models-library/src/models_library/licensed_items.py +++ /dev/null @@ -1,74 +0,0 @@ -from datetime import datetime -from enum import auto -from typing import Any, TypeAlias -from uuid import UUID - -from pydantic import BaseModel, ConfigDict - -from .products import ProductName -from .resource_tracker import PricingPlanId -from .utils.enums import StrAutoEnum - -LicensedItemID: TypeAlias = UUID - - -class LicensedResourceType(StrAutoEnum): - VIP_MODEL = auto() - - -VIP_FEAUTES_EXAMPLE = { - "name": "Duke", - "version": "V2.0", - "sex": "Male", - "age": "34 years", - "weight": "70.2 Kg", - "height": "1.77 m", - "data": "2015-03-01", - "ethnicity": "Caucasian", - "functionality": "Static", - "additional_field": "allowed", -} - -VIP_DETAILS_EXAMPLE = { - "id": 1, - "description": "custom description", - "thumbnail": "custom description", - "features": VIP_FEAUTES_EXAMPLE, - "doi": "custom value", - "license_key": "custom value", - "license_version": "custom value", - "protection": "custom value", - "available_from_url": "custom value", - "additional_field": "allowed", -} - - -# -# DB -# - - -class LicensedItemDB(BaseModel): - licensed_item_id: LicensedItemID - display_name: str - - licensed_resource_name: str - licensed_resource_type: LicensedResourceType - licensed_resource_data: dict[str, Any] | None - - pricing_plan_id: PricingPlanId | None - product_name: ProductName | None - - # states - created: datetime - modified: datetime - trashed: datetime | None - - model_config = ConfigDict(from_attributes=True) - - -class LicensedItemUpdateDB(BaseModel): - display_name: str | None = None - licensed_resource_name: str | None = None - pricing_plan_id: PricingPlanId | None = None - trash: bool | None = None diff --git a/packages/models-library/src/models_library/licenses.py b/packages/models-library/src/models_library/licenses.py new file mode 100644 index 00000000000..b65b7f9d6fe --- /dev/null +++ b/packages/models-library/src/models_library/licenses.py @@ -0,0 +1,172 @@ +from datetime import date, datetime +from enum import auto +from typing import Annotated, Any, NamedTuple, NewType, NotRequired, TypeAlias, cast +from uuid import UUID + +from models_library.resource_tracker import PricingPlanId +from pydantic import BaseModel, ConfigDict, PositiveInt, StringConstraints +from pydantic.config import JsonDict +from typing_extensions import TypedDict + +from .products import ProductName 
+from .utils.enums import StrAutoEnum + +LicensedItemID: TypeAlias = UUID +LicensedResourceID: TypeAlias = UUID + +LICENSED_ITEM_VERSION_RE = r"^\d+\.\d+\.\d+$" +LicensedItemKey = NewType("LicensedItemKey", str) +LicensedItemVersion = Annotated[ + str, StringConstraints(pattern=LICENSED_ITEM_VERSION_RE) +] + + +class LicensedResourceType(StrAutoEnum): + VIP_MODEL = auto() + + +_VIP_FEATURES_EXAMPLE = { + # NOTE: this view is how it would be after parsed and validated + "age": "34 years", + "date": "2015-03-01", + "ethnicity": "Caucasian", + "functionality": "Static", + "height": "1.77 m", + "name": "Duke", + "sex": "Male", + "version": "V2.0", + "weight": "70.2 Kg", + # other + "additional_field": "allowed", +} + + +class FeaturesDict(TypedDict): + # keep alphabetical + age: NotRequired[str] + date: date + ethnicity: NotRequired[str] + functionality: NotRequired[str] + height: NotRequired[str] + name: NotRequired[str] + sex: NotRequired[str] + species: NotRequired[str] + version: NotRequired[str] + weight: NotRequired[str] + + +VIP_DETAILS_EXAMPLE = { + "id": 1, + "description": "A detailed description of the VIP model", + "thumbnail": "https://example.com/thumbnail.jpg", + "features": _VIP_FEATURES_EXAMPLE, + "doi": "10.1000/xyz123", + "license_key": "ABC123XYZ", + "license_version": "1.0", + "protection": "Code", + "available_from_url": "https://example.com/download", + "additional_field": "trimmed if rest", +} + + +# +# DB +# + + +class LicensedItemDB(BaseModel): + licensed_item_id: LicensedItemID + display_name: str + + key: LicensedItemKey + version: LicensedItemVersion + licensed_resource_type: LicensedResourceType + + pricing_plan_id: PricingPlanId + product_name: ProductName + is_hidden_on_market: bool + + # states + created: datetime + modified: datetime + + model_config = ConfigDict(from_attributes=True) + + +class LicensedItemPatchDB(BaseModel): + display_name: str | None = None + pricing_plan_id: PricingPlanId | None = None + + +class LicensedResourceDB(BaseModel): + licensed_resource_id: LicensedResourceID + display_name: str + + licensed_resource_name: str + licensed_resource_type: LicensedResourceType + licensed_resource_data: dict[str, Any] | None + priority: int + + # states + created: datetime + modified: datetime + trashed: datetime | None + + model_config = ConfigDict(from_attributes=True) + + +class LicensedResourcePatchDB(BaseModel): + display_name: str | None = None + licensed_resource_name: str | None = None + trash: bool | None = None + + +class LicensedItem(BaseModel): + licensed_item_id: LicensedItemID + key: LicensedItemKey + version: LicensedItemVersion + display_name: str + licensed_resource_type: LicensedResourceType + licensed_resources: list[dict[str, Any]] + pricing_plan_id: PricingPlanId + is_hidden_on_market: bool + created_at: datetime + modified_at: datetime + + @staticmethod + def _update_json_schema_extra(schema: JsonDict) -> None: + schema.update( + { + "examples": [ + { + "licensed_item_id": "0362b88b-91f8-4b41-867c-35544ad1f7a1", + "key": "Duke", + "version": "1.0.0", + "display_name": "my best model", + "licensed_resource_type": f"{LicensedResourceType.VIP_MODEL}", + "licensed_resources": [ + cast( + JsonDict, + { + "category_id": "HumanWholeBody", + "category_display": "Humans", + "source": VIP_DETAILS_EXAMPLE, + }, + ) + ], + "pricing_plan_id": "15", + "is_hidden_on_market": False, + "created_at": "2024-12-12 09:59:26.422140", + "modified_at": "2024-12-12 09:59:26.422140", + } + ] + } + ) + + 
model_config = ConfigDict(json_schema_extra=_update_json_schema_extra) + + +class LicensedItemPage(NamedTuple): + total: PositiveInt + items: list[LicensedItem] diff --git a/packages/models-library/src/models_library/products.py b/packages/models-library/src/models_library/products.py index 51c44a83d47..d9f25a000f5 100644 --- a/packages/models-library/src/models_library/products.py +++ b/packages/models-library/src/models_library/products.py @@ -1,36 +1,5 @@ -from decimal import Decimal from typing import TypeAlias -from pydantic import BaseModel, ConfigDict, Field - ProductName: TypeAlias = str StripePriceID: TypeAlias = str StripeTaxRateID: TypeAlias = str - - -class CreditResultGet(BaseModel): - product_name: ProductName - credit_amount: Decimal = Field(..., description="") - - model_config = ConfigDict( - json_schema_extra={ - "examples": [ - {"product_name": "s4l", "credit_amount": Decimal(15.5)}, # type: ignore[dict-item] - ] - } - ) - - -class ProductStripeInfoGet(BaseModel): - stripe_price_id: StripePriceID - stripe_tax_rate_id: StripeTaxRateID - model_config = ConfigDict( - json_schema_extra={ - "examples": [ - { - "stripe_price_id": "stripe-price-id", - "stripe_tax_rate_id": "stripe-tax-rate-id", - }, - ] - } - ) diff --git a/packages/models-library/src/models_library/progress_bar.py b/packages/models-library/src/models_library/progress_bar.py index 90232847bbc..ad8130570e5 100644 --- a/packages/models-library/src/models_library/progress_bar.py +++ b/packages/models-library/src/models_library/progress_bar.py @@ -2,14 +2,12 @@ from pydantic import BaseModel, ConfigDict -from .basic_types import IDStr - # NOTE: keep a list of possible unit, and please use correct official unit names ProgressUnit: TypeAlias = Literal["Byte"] class ProgressStructuredMessage(BaseModel): - description: IDStr + description: str current: float total: int unit: str | None = None @@ -51,6 +49,7 @@ class ProgressStructuredMessage(BaseModel): class ProgressReport(BaseModel): actual_value: float total: float = 1.0 + attempt: int = 0 unit: ProgressUnit | None = UNITLESS message: ProgressStructuredMessage | None = None diff --git a/packages/models-library/src/models_library/projects.py b/packages/models-library/src/models_library/projects.py index 50f38673e4b..fd5e58849aa 100644 --- a/packages/models-library/src/models_library/projects.py +++ b/packages/models-library/src/models_library/projects.py @@ -20,7 +20,6 @@ from .projects_nodes import Node from .projects_nodes_io import NodeIDStr from .projects_state import ProjectState -from .projects_ui import StudyUI from .users import UserID from .utils.common_validators import ( empty_str_to_none_pre_validator, @@ -54,33 +53,41 @@ class ProjectType(str, Enum): class BaseProjectModel(BaseModel): # Description of the project - uuid: ProjectID = Field( - ..., - description="project unique identifier", - examples=[ - "07640335-a91f-468c-ab69-a374fa82078d", - "9bcf8feb-c1b1-41b6-b201-639cd6ccdba8", - ], - ) - name: str = Field( - ..., description="project name", examples=["Temporal Distortion Simulator"] - ) - description: str = Field( - ..., - description="longer one-line description about the project", - examples=["Dabbling in temporal transitions ..."], - ) - thumbnail: HttpUrl | None = Field( - ..., - description="url of the project thumbnail", - examples=["https://placeimg.com/171/96/tech/grayscale/?0.jpg"], - ) + uuid: Annotated[ + ProjectID, + Field( + description="project unique identifier", + examples=[ + "07640335-a91f-468c-ab69-a374fa82078d", + 
"9bcf8feb-c1b1-41b6-b201-639cd6ccdba8", + ], + ), + ] + + name: Annotated[ + str, + Field(description="project name", examples=["Temporal Distortion Simulator"]), + ] + description: Annotated[ + str, + Field( + description="longer one-line description about the project", + examples=["Dabbling in temporal transitions ..."], + ), + ] + thumbnail: Annotated[ + HttpUrl | None, + Field( + description="url of the project thumbnail", + examples=["https://placeimg.com/171/96/tech/grayscale/?0.jpg"], + ), + ] - creation_date: datetime = Field(...) - last_change_date: datetime = Field(...) + creation_date: datetime + last_change_date: datetime # Pipeline of nodes (SEE projects_nodes.py) - workbench: Annotated[NodesDict, Field(..., description="Project's pipeline")] + workbench: Annotated[NodesDict, Field(description="Project's pipeline")] # validators _empty_thumbnail_is_none = field_validator("thumbnail", mode="before")( @@ -95,15 +102,18 @@ class BaseProjectModel(BaseModel): class ProjectAtDB(BaseProjectModel): # Model used to READ from database - id: int = Field(..., description="The table primary index") + id: Annotated[int, Field(description="The table primary index")] - project_type: ProjectType = Field(..., alias="type", description="The project type") + project_type: Annotated[ + ProjectType, Field(alias="type", description="The project type") + ] - prj_owner: int | None = Field(..., description="The project owner id") + prj_owner: Annotated[int | None, Field(description="The project owner id")] - published: bool | None = Field( - default=False, description="Defines if a study is available publicly" - ) + published: Annotated[ + bool | None, + Field(default=False, description="Defines if a study is available publicly"), + ] @field_validator("project_type", mode="before") @classmethod @@ -122,68 +132,60 @@ class Project(BaseProjectModel): # NOT for usage with DB!! 
# Ownership and Access (SEE projects_access.py) - prj_owner: LowerCaseEmailStr = Field( - ..., description="user email", alias="prjOwner" - ) - - # Timestamps - creation_date: DateTimeStr = Field( # type: ignore[assignment] - ..., - description="project creation date", - examples=["2018-07-01T11:13:43Z"], - alias="creationDate", - ) - last_change_date: DateTimeStr = Field( # type: ignore[assignment] - ..., - description="last save date", - examples=["2018-07-01T11:13:43Z"], - alias="lastChangeDate", - ) - access_rights: dict[GroupIDStr, AccessRights] = Field( - ..., - description="object containing the GroupID as key and read/write/execution permissions as value", - alias="accessRights", - ) + prj_owner: Annotated[ + LowerCaseEmailStr, Field(description="user email", alias="prjOwner") + ] + access_rights: Annotated[ + dict[GroupIDStr, AccessRights], + Field( + description="object containing the GroupID as key and read/write/execution permissions as value", + alias="accessRights", + ), + ] - # Classification - tags: list[int] | None = [] - classifiers: Annotated[ - list[ClassifierID] | None, + # Lifecycle + creation_date: Annotated[ # type: ignore[assignment] + DateTimeStr, Field( - default_factory=list, - description="Contains the reference to the project classifiers", - examples=["some:id:to:a:classifier"], + description="project creation date", + examples=["2018-07-01T11:13:43Z"], + alias="creationDate", ), - ] = DEFAULT_FACTORY + ] + last_change_date: Annotated[ # type: ignore[assignment] + DateTimeStr, + Field( + description="last save date", + examples=["2018-07-01T11:13:43Z"], + alias="lastChangeDate", + ), + ] # Project state (SEE projects_state.py) state: ProjectState | None = None - # UI front-end setup (SEE projects_ui.py) - ui: StudyUI | None = None - - # Quality - quality: dict[str, Any] = Field( - default_factory=dict, - description="stores the study quality assessment", - ) + # UI front-end fields (SEE projects_ui.py) + ui: dict[str, Any] | None = None + dev: dict[str, Any] | None = None - # Dev only - dev: dict | None = Field( - default=None, description="object used for development purposes only" - ) + # Parenthood + workspace_id: Annotated[ + WorkspaceID | None, + Field( + description="To which workspace project belongs. If None, belongs to private user workspace.", + alias="workspaceId", + ), + ] = None - workspace_id: WorkspaceID | None = Field( - default=None, - description="To which workspace project belongs. If None, belongs to private user workspace.", - alias="workspaceId", - ) - folder_id: FolderID | None = Field( - default=None, - description="To which folder project belongs. If None, belongs to root folder.", - alias="folderId", - ) + folder_id: Annotated[ + FolderID | None, + Field( + description="To which folder project belongs. 
If None, belongs to root folder.", + alias="folderId", + ), + ] = None + # trash state trashed: datetime | None = None trashed_by: Annotated[UserID | None, Field(alias="trashedBy")] = None trashed_by_primary_gid: Annotated[ @@ -191,7 +193,26 @@ class Project(BaseProjectModel): ] = None trashed_explicitly: Annotated[bool, Field(alias="trashedExplicitly")] = False + # Labeling + tags: Annotated[list[int] | None, Field(default_factory=list)] = DEFAULT_FACTORY + classifiers: Annotated[ + list[ClassifierID] | None, + Field( + default_factory=list, + description="Contains the reference to the project classifiers", + examples=["some:id:to:a:classifier"], + ), + ] = DEFAULT_FACTORY + quality: Annotated[ + dict[str, Any], + Field( + default_factory=dict, + description="stores the study quality assessment", + ), + ] = DEFAULT_FACTORY + model_config = ConfigDict( # NOTE: this is a security measure until we get rid of the ProjectDict variants extra="forbid", + populate_by_name=True, ) diff --git a/packages/models-library/src/models_library/projects_access.py b/packages/models-library/src/models_library/projects_access.py index 29ca6c9f592..a1e4db0cc31 100644 --- a/packages/models-library/src/models_library/projects_access.py +++ b/packages/models-library/src/models_library/projects_access.py @@ -25,6 +25,7 @@ class AccessRights(BaseModel): read: bool = Field(..., description="has read access") write: bool = Field(..., description="has write access") delete: bool = Field(..., description="has deletion rights") + model_config = ConfigDict(extra="forbid") diff --git a/packages/models-library/src/models_library/projects_nodes.py b/packages/models-library/src/models_library/projects_nodes.py index 3fec1406c57..81ffd16c165 100644 --- a/packages/models-library/src/models_library/projects_nodes.py +++ b/packages/models-library/src/models_library/projects_nodes.py @@ -27,7 +27,7 @@ PortLink, SimCoreFileLink, ) -from .projects_nodes_ui import Position +from .projects_nodes_layout import Position from .projects_state import RunningState from .services import ServiceKey, ServiceVersion diff --git a/packages/models-library/src/models_library/projects_nodes_io.py b/packages/models-library/src/models_library/projects_nodes_io.py index 4d4637ac362..90fdf141278 100644 --- a/packages/models-library/src/models_library/projects_nodes_io.py +++ b/packages/models-library/src/models_library/projects_nodes_io.py @@ -1,9 +1,9 @@ """ - Link models used at i/o port nodes: - - Link to files: - - Generic: DownloadLink - - At Custom Service: SimCoreFileLink, DatCoreFileLink - - Link to another port: PortLink +Link models used at i/o port nodes: + - Link to files: + - Generic: DownloadLink + - At Custom Service: SimCoreFileLink, DatCoreFileLink + - Link to another port: PortLink """ from pathlib import Path @@ -36,8 +36,8 @@ NodeIDStr: TypeAlias = UUIDStr -LocationID = int -LocationName = str +LocationID: TypeAlias = int +LocationName: TypeAlias = str SimcoreS3FileID: TypeAlias = Annotated[ diff --git a/packages/models-library/src/models_library/projects_nodes_layout.py b/packages/models-library/src/models_library/projects_nodes_layout.py new file mode 100644 index 00000000000..774a2355c95 --- /dev/null +++ b/packages/models-library/src/models_library/projects_nodes_layout.py @@ -0,0 +1,8 @@ +from pydantic import BaseModel, ConfigDict, Field + + +class Position(BaseModel): + x: int = Field(..., description="The x position", examples=[["12"]]) + y: int = Field(..., description="The y position", examples=[["15"]]) + + model_config = 
ConfigDict(extra="forbid") diff --git a/packages/models-library/src/models_library/projects_nodes_ui.py b/packages/models-library/src/models_library/projects_nodes_ui.py deleted file mode 100644 index c5990598104..00000000000 --- a/packages/models-library/src/models_library/projects_nodes_ui.py +++ /dev/null @@ -1,21 +0,0 @@ -""" - Models node UI (legacy model, use instead projects.ui.py) -""" - -from typing import Annotated - -from pydantic import BaseModel, ConfigDict, Field, PlainSerializer -from pydantic_extra_types.color import Color - - -class Position(BaseModel): - x: int = Field(..., description="The x position", examples=[["12"]]) - y: int = Field(..., description="The y position", examples=[["15"]]) - - model_config = ConfigDict(extra="forbid") - - -class Marker(BaseModel): - color: Annotated[Color, PlainSerializer(Color.as_hex), Field(...)] - - model_config = ConfigDict(extra="forbid") diff --git a/packages/models-library/src/models_library/projects_ui.py b/packages/models-library/src/models_library/projects_ui.py deleted file mode 100644 index 170f3becc64..00000000000 --- a/packages/models-library/src/models_library/projects_ui.py +++ /dev/null @@ -1,77 +0,0 @@ -""" - Models Front-end UI -""" - -from typing import Annotated, Literal - -from pydantic import BaseModel, ConfigDict, Field, PlainSerializer, field_validator -from pydantic_extra_types.color import Color -from typing_extensions import ( # https://docs.pydantic.dev/latest/api/standard_library_types/#typeddict - TypedDict, -) - -from .projects_nodes_io import NodeID, NodeIDStr -from .projects_nodes_ui import Marker, Position -from .utils.common_validators import empty_str_to_none_pre_validator - - -class WorkbenchUI(BaseModel): - position: Position = Field(..., description="The node position in the workbench") - marker: Marker | None = None - model_config = ConfigDict(extra="forbid") - - -class _SlideshowRequired(TypedDict): - position: int - - -class Slideshow(_SlideshowRequired, total=False): - instructions: str | None # Instructions about what to do in this step - - -class Annotation(BaseModel): - type: Literal["note", "rect", "text"] = Field(...) 
- color: Annotated[Color, PlainSerializer(Color.as_hex), Field(...)] - attributes: dict = Field(..., description="svg attributes") - model_config = ConfigDict( - extra="forbid", - json_schema_extra={ - "examples": [ - { - "type": "note", - "color": "#FFFF00", - "attributes": { - "x": 415, - "y": 100, - "width": 117, - "height": 26, - "destinataryGid": 4, - "text": "ToDo", - }, - }, - { - "type": "rect", - "color": "#FF0000", - "attributes": {"x": 415, "y": 100, "width": 117, "height": 26}, - }, - { - "type": "text", - "color": "#0000FF", - "attributes": {"x": 415, "y": 100, "text": "Hey!"}, - }, - ] - }, - ) - - -class StudyUI(BaseModel): - workbench: dict[NodeIDStr, WorkbenchUI] | None = None - slideshow: dict[NodeIDStr, Slideshow] | None = None - current_node_id: NodeID | None = Field(default=None, alias="currentNodeId") - annotations: dict[NodeIDStr, Annotation] | None = None - - model_config = ConfigDict(extra="allow", populate_by_name=True) - - _empty_is_none = field_validator("*", mode="before")( - empty_str_to_none_pre_validator - ) diff --git a/packages/models-library/src/models_library/resource_tracker.py b/packages/models-library/src/models_library/resource_tracker.py index 953cb9b722d..a1d27cd74eb 100644 --- a/packages/models-library/src/models_library/resource_tracker.py +++ b/packages/models-library/src/models_library/resource_tracker.py @@ -232,7 +232,7 @@ class SpecificInfo(HardwareInfo): to store aws ec2 instance type.""" -class UnitExtraInfo(BaseModel): +class UnitExtraInfoTier(BaseModel): """Custom information that is propagated to the frontend. Defined fields are mandatory.""" CPU: NonNegativeInt @@ -256,10 +256,29 @@ class UnitExtraInfo(BaseModel): ) +class UnitExtraInfoLicense(BaseModel): + """Custom information that is propagated to the frontend. 
Defined fields are mandatory.""" + + num_of_seats: NonNegativeInt + + model_config = ConfigDict( + populate_by_name=True, + extra="allow", + json_schema_extra={ + "examples": [ + { + "num_of_seats": 5, + "custom key": "custom value", + } + ] + }, + ) + + class PricingUnitWithCostCreate(BaseModel): pricing_plan_id: PricingPlanId unit_name: str - unit_extra_info: UnitExtraInfo + unit_extra_info: UnitExtraInfoTier | UnitExtraInfoLicense default: bool specific_info: SpecificInfo cost_per_unit: Decimal @@ -271,7 +290,7 @@ class PricingUnitWithCostCreate(BaseModel): { "pricing_plan_id": 1, "unit_name": "My pricing plan", - "unit_extra_info": UnitExtraInfo.model_config["json_schema_extra"]["examples"][0], # type: ignore [index] + "unit_extra_info": UnitExtraInfoTier.model_config["json_schema_extra"]["examples"][0], # type: ignore [index] "default": True, "specific_info": {"aws_ec2_instances": ["t3.medium"]}, "cost_per_unit": 10, @@ -291,7 +310,7 @@ class PricingUnitWithCostUpdate(BaseModel): pricing_plan_id: PricingPlanId pricing_unit_id: PricingUnitId unit_name: str - unit_extra_info: UnitExtraInfo + unit_extra_info: UnitExtraInfoTier | UnitExtraInfoLicense default: bool specific_info: SpecificInfo pricing_unit_cost_update: PricingUnitCostUpdate | None @@ -303,7 +322,7 @@ class PricingUnitWithCostUpdate(BaseModel): "pricing_plan_id": 1, "pricing_unit_id": 1, "unit_name": "My pricing plan", - "unit_extra_info": UnitExtraInfo.model_config["json_schema_extra"]["examples"][0], # type: ignore [index] + "unit_extra_info": UnitExtraInfoTier.model_config["json_schema_extra"]["examples"][0], # type: ignore [index] "default": True, "specific_info": {"aws_ec2_instances": ["t3.medium"]}, "pricing_unit_cost_update": { @@ -315,7 +334,7 @@ class PricingUnitWithCostUpdate(BaseModel): "pricing_plan_id": 1, "pricing_unit_id": 1, "unit_name": "My pricing plan", - "unit_extra_info": UnitExtraInfo.model_config["json_schema_extra"]["examples"][0], # type: ignore [index] + "unit_extra_info": UnitExtraInfoTier.model_config["json_schema_extra"]["examples"][0], # type: ignore [index] "default": True, "specific_info": {"aws_ec2_instances": ["t3.medium"]}, "pricing_unit_cost_update": None, diff --git a/packages/models-library/src/models_library/resource_tracker_licensed_items_purchases.py b/packages/models-library/src/models_library/resource_tracker_licensed_items_purchases.py index 8cddc1d98aa..1ea79606965 100644 --- a/packages/models-library/src/models_library/resource_tracker_licensed_items_purchases.py +++ b/packages/models-library/src/models_library/resource_tracker_licensed_items_purchases.py @@ -5,7 +5,7 @@ from pydantic import BaseModel, ConfigDict -from .licensed_items import LicensedItemID +from .licenses import LicensedItemID, LicensedItemKey, LicensedItemVersion from .products import ProductName from .resource_tracker import PricingPlanId, PricingUnitCostId, PricingUnitId from .users import UserID @@ -17,6 +17,8 @@ class LicensedItemsPurchasesCreate(BaseModel): product_name: ProductName licensed_item_id: LicensedItemID + key: LicensedItemKey + version: LicensedItemVersion wallet_id: WalletID wallet_name: str pricing_plan_id: PricingPlanId diff --git a/packages/models-library/src/models_library/rest_error.py b/packages/models-library/src/models_library/rest_error.py index ce3b5ef5d56..71cc1b877b6 100644 --- a/packages/models-library/src/models_library/rest_error.py +++ b/packages/models-library/src/models_library/rest_error.py @@ -1,6 +1,7 @@ from dataclasses import dataclass from typing import Annotated +from 
common_library.basic_types import DEFAULT_FACTORY from models_library.generics import Envelope from pydantic import BaseModel, ConfigDict, Field @@ -75,17 +76,17 @@ class ErrorGet(BaseModel): IDStr | None, Field(description="ID to track the incident during support", alias="supportId"), ] = None + status: int # NOTE: The fields below are DEPRECATED. Still here to keep compatibility with front-end until updated - status: Annotated[int, Field(deprecated=True)] = 400 errors: Annotated[ list[ErrorItemType], Field(deprecated=True, default_factory=list, json_schema_extra={"default": []}), - ] + ] = DEFAULT_FACTORY logs: Annotated[ list[LogMessageType], Field(deprecated=True, default_factory=list, json_schema_extra={"default": []}), - ] + ] = DEFAULT_FACTORY model_config = ConfigDict( populate_by_name=True, @@ -94,11 +95,13 @@ class ErrorGet(BaseModel): json_schema_extra={ "examples": [ { - "message": "Sorry you do not have sufficient access rights for product" + "message": "Sorry you do not have sufficient access rights for product", + "status": 401, }, { "message": "Oops this error was unexpected. We are working on that!", "supportId": "OEC:12346789", + "status": 500, }, ] }, @@ -111,9 +114,13 @@ class EnvelopedError(Envelope[None]): model_config = ConfigDict( json_schema_extra={ "examples": [ - {"error": {"message": "display error message here"}}, + {"error": {"message": "display error message here", "status": 401}}, { - "error": {"message": "failure", "supportId": "OEC:123455"}, + "error": { + "message": "failure", + "supportId": "OEC:123455", + "status": 500, + }, "data": None, }, ] diff --git a/packages/models-library/src/models_library/rest_pagination.py b/packages/models-library/src/models_library/rest_pagination.py index b2c82726798..0b7e68e5222 100644 --- a/packages/models-library/src/models_library/rest_pagination.py +++ b/packages/models-library/src/models_library/rest_pagination.py @@ -26,23 +26,46 @@ int, Field(ge=1, lt=MAXIMUM_NUMBER_OF_ITEMS_PER_PAGE) ] +PageOffsetInt: TypeAlias = NonNegativeInt + DEFAULT_NUMBER_OF_ITEMS_PER_PAGE: Final[PageLimitInt] = TypeAdapter( PageLimitInt ).validate_python(20) -class PageQueryParameters(RequestParameters): +class CursorQueryParameters(RequestParameters): """Use as pagination options in query parameters""" - limit: PageLimitInt = Field( + size: PageLimitInt = Field( default=TypeAdapter(PageLimitInt).validate_python( DEFAULT_NUMBER_OF_ITEMS_PER_PAGE ), description="maximum number of items to return (pagination)", ) - offset: NonNegativeInt = Field( - default=0, description="index to the first item to return (pagination)" - ) + cursor: Annotated[ + str | None, + Field( + description="unique identifier that represents the position in the dataset" + ), + ] = None + + +class PageQueryParameters(RequestParameters): """Use as pagination options in query parameters""" + + limit: Annotated[ + PageLimitInt, + Field( + default=TypeAdapter(PageLimitInt).validate_python( + DEFAULT_NUMBER_OF_ITEMS_PER_PAGE + ), + description="maximum number of items to return (pagination)", + ), + ] + offset: Annotated[ + PageOffsetInt, + Field(default=0, description="index to the first item to return (pagination)"), + ] class PageMetaInfoLimitOffset(BaseModel): @@ -103,8 +126,7 @@ class PageLinks( BeforeValidator(lambda x: str(TypeAdapter(AnyHttpUrl).validate_python(x))), ] ] -): - ... +): ... 
ItemT = TypeVar("ItemT") diff --git a/packages/models-library/src/models_library/services_access.py b/packages/models-library/src/models_library/services_access.py index 248e8f41e85..4c450684700 100644 --- a/packages/models-library/src/models_library/services_access.py +++ b/packages/models-library/src/models_library/services_access.py @@ -2,6 +2,8 @@ """ +from typing import Annotated + from pydantic import BaseModel, ConfigDict, Field from .groups import GroupID @@ -9,13 +11,12 @@ class ServiceGroupAccessRights(BaseModel): - execute_access: bool = Field( - default=False, - description="defines whether the group can execute the service", - ) - write_access: bool = Field( - default=False, description="defines whether the group can modify the service" - ) + execute_access: Annotated[ + bool, Field(description="defines whether the group can execute the service") + ] = False + write_access: Annotated[ + bool, Field(description="defines whether the group can modify the service") + ] = False class ServiceGroupAccessRightsV2(BaseModel): @@ -23,13 +24,17 @@ class ServiceGroupAccessRightsV2(BaseModel): write: bool = False model_config = ConfigDict( - alias_generator=snake_to_camel, populate_by_name=True, extra="forbid" + alias_generator=snake_to_camel, + populate_by_name=True, + extra="forbid", ) class ServiceAccessRights(BaseModel): - access_rights: dict[GroupID, ServiceGroupAccessRights] | None = Field( - None, - alias="accessRights", - description="service access rights per group id", - ) + access_rights: Annotated[ + dict[GroupID, ServiceGroupAccessRights] | None, + Field( + alias="accessRights", + description="service access rights per group id", + ), + ] = None diff --git a/packages/models-library/src/models_library/services_base.py b/packages/models-library/src/models_library/services_base.py index 48afb0b6c04..8e989b39ec9 100644 --- a/packages/models-library/src/models_library/services_base.py +++ b/packages/models-library/src/models_library/services_base.py @@ -16,48 +16,60 @@ class ServiceKeyVersion(BaseModel): description="distinctive name for the node based on the docker registry path", ), ] - version: ServiceVersion = Field( - ..., - description="service version number", - ) + version: Annotated[ + ServiceVersion, + Field( + description="service version number", + ), + ] model_config = ConfigDict(frozen=True) class ServiceBaseDisplay(BaseModel): - name: str = Field( - ..., - description="Display name: short, human readable name for the node", - examples=["Fast Counter"], - ) - thumbnail: Annotated[str, HttpUrl] | None = Field( - None, - description="url to the thumbnail", - examples=[ - "https://user-images.githubusercontent.com/32800795/61083844-ff48fb00-a42c-11e9-8e63-fa2d709c8baf.png" - ], - validate_default=True, - ) - description: str = Field( - ..., - description="human readable description of the purpose of the node", - examples=[ - "Our best node type", - "The mother of all nodes, makes your numbers shine!", - ], - ) - description_ui: bool = Field( - default=False, - description="A flag to enable the `description` to be presented as a single web page (=true) or in another structured format (default=false).", - ) - - version_display: str | None = Field( - None, - description="A user-friendly or marketing name for the release." 
- " This can be used to reference the release in a more readable and recognizable format, such as 'Matterhorn Release,' 'Spring Update,' or 'Holiday Edition.'" - " This name is not used for version comparison but is useful for communication and documentation purposes.", - ) - - _empty_is_none = field_validator("thumbnail", mode="before")( - empty_str_to_none_pre_validator - ) + name: Annotated[ + str, + Field( + description="Display name: short, human readable name for the node", + examples=["Fast Counter"], + ), + ] + thumbnail: Annotated[ + str | None, + Field( + description="URL to the service thumbnail", + validate_default=True, + ), + ] = None + icon: Annotated[ + HttpUrl | None, + Field(description="URL to the service icon"), + ] = None + description: Annotated[ + str, + Field( + description="human readable description of the purpose of the node", + examples=[ + "Our best node type", + "The mother of all nodes, makes your numbers shine!", + ], + ), + ] + description_ui: Annotated[ + bool, + Field( + description="A flag to enable the `description` to be presented as a single web page (=true) or in another structured format (default=false)." + ), + ] = False + version_display: Annotated[ + str | None, + Field( + description="A user-friendly or marketing name for the release. " + "This can be used to reference the release in a more readable and recognizable format, such as 'Matterhorn Release,' 'Spring Update,' or 'Holiday Edition.' " + "This name is not used for version comparison but is useful for communication and documentation purposes." + ), + ] = None + + _empty_is_none = field_validator( + "icon", "thumbnail", "version_display", mode="before" + )(empty_str_to_none_pre_validator) diff --git a/packages/models-library/src/models_library/services_history.py b/packages/models-library/src/models_library/services_history.py index b38f5f2e783..91ed08fbe4b 100644 --- a/packages/models-library/src/models_library/services_history.py +++ b/packages/models-library/src/models_library/services_history.py @@ -1,5 +1,5 @@ from datetime import datetime -from typing import TypeAlias +from typing import Annotated, TypeAlias from pydantic import BaseModel, ConfigDict, Field @@ -8,41 +8,42 @@ class CompatibleService(BaseModel): - key: ServiceKey | None = Field( - default=None, - description="If None, it refer to current service. Used only for inter-service compatibility", - ) + key: Annotated[ + ServiceKey | None, + Field( + description="If None, it refers to the current service. Used only for inter-service compatibility" + ), + ] = None version: ServiceVersion class Compatibility(BaseModel): - # NOTE: as an object it is more maintainable than a list - can_update_to: CompatibleService = Field( - ..., description="Latest compatible service at this moment" - ) + can_update_to: Annotated[ + CompatibleService, Field(description="Latest compatible service at this moment") + ] model_config = ConfigDict(alias_generator=snake_to_camel, populate_by_name=True) class ServiceRelease(BaseModel): - # from ServiceMetaDataPublished version: ServiceVersion - version_display: str | None = Field( - default=None, description="If None, then display `version`" - ) - released: datetime | None = Field( - default=None, description="When provided, it indicates the release timestamp" - ) - retired: datetime | None = Field( - default=None, - description="whether this service is planned to be retired. " - "If None, the service is still active. 
" - "If now=deprecated, the service is retired", - ) + deprecated: Annotated[ + datetime | None, + Field( + description="Owner can set the date to retire the service. Three possibilities:" + "If None, the service is marked as `published`;" + "If now<deprecated, the service is marked as deprecated;" + "If now>=deprecated, the service is retired", + ), + ] = None classifiers: list[str] | None quality: Annotated[ dict[str, Any], Field(default_factory=dict, json_schema_extra={"default": {}}) - ] + ] = DEFAULT_FACTORY - model_config = ConfigDict( - json_schema_extra={ - "example": { - "key": "simcore/services/dynamic/sim4life", - "version": "1.0.9", - "name": "sim4life", - "description": "s4l web", - "thumbnail": "https://thumbnailit.org/image", - "quality": { - "enabled": True, - "tsr_target": { - f"r{n:02d}": {"level": 4, "references": ""} - for n in range(1, 11) + @staticmethod + def _update_json_schema_extra(schema: JsonDict) -> None: + schema.update( + { + "example": { + "key": "simcore/services/dynamic/sim4life", + "version": "1.0.9", + "name": "sim4life", + "description": "s4l web", + "thumbnail": "https://thumbnailit.org/image", + "icon": "https://cdn-icons-png.flaticon.com/512/25/25231.png", + "quality": { + "enabled": True, + "tsr_target": { + f"r{n:02d}": {"level": 4, "references": ""} + for n in range(1, 11) + }, + "annotations": { + "vandv": "", + "limitations": "", + "certificationLink": "", + "certificationStatus": "Uncertified", + }, + "tsr_current": { + f"r{n:02d}": {"level": 0, "references": ""} + for n in range(1, 11) + }, }, - "annotations": { - "vandv": "", - "limitations": "", - "certificationLink": "", - "certificationStatus": "Uncertified", - }, - "tsr_current": { - f"r{n:02d}": {"level": 0, "references": ""} - for n in range(1, 11) - }, - }, - "classifiers": [], + "classifiers": [], + } } - } - ) + ) + + model_config = ConfigDict(json_schema_extra=_update_json_schema_extra) diff --git a/packages/models-library/src/models_library/users.py b/packages/models-library/src/models_library/users.py index c8860171b64..3b8d4344b4b 100644 --- a/packages/models-library/src/models_library/users.py +++ b/packages/models-library/src/models_library/users.py @@ -25,13 +25,14 @@ class PrivacyDict(TypedDict): + hide_username: bool hide_fullname: bool hide_email: bool class MyProfile(BaseModel): id: UserID - user_name: IDStr + user_name: UserNameID first_name: str | None last_name: str | None email: LowerCaseEmailStr @@ -50,7 +51,11 @@ def _update_json_schema_extra(schema: JsonDict) -> None: "first_name": "PtN5Ab0uv", "last_name": "", "role": "GUEST", - "privacy": {"hide_email": True, "hide_fullname": False}, + "privacy": { + "hide_email": True, + "hide_fullname": False, + "hide_username": False, + }, } } ) diff --git a/packages/models-library/tests/test_api_schemas_webserver_users.py b/packages/models-library/tests/test_api_schemas_webserver_users.py new file mode 100644 index 00000000000..afefb91c481 --- /dev/null +++ b/packages/models-library/tests/test_api_schemas_webserver_users.py @@ -0,0 +1,81 @@ +# pylint: disable=redefined-outer-name +# pylint: disable=too-many-arguments +# pylint: disable=unused-argument +# pylint: disable=unused-variable + +from copy import deepcopy + +import pytest +from common_library.users_enums import UserRole +from models_library.api_schemas_webserver.users import ( + MyProfileGet, + MyProfilePatch, +) +from pydantic import ValidationError + + +@pytest.mark.parametrize("user_role", [u.name for u in UserRole]) +def test_profile_get_role(user_role: str): + for example in MyProfileGet.model_json_schema()["examples"]: 
data = deepcopy(example) + data["role"] = user_role + m1 = MyProfileGet(**data) + + data["role"] = UserRole(user_role) + m2 = MyProfileGet(**data) + assert m1 == m2 + + +def test_my_profile_patch_username_min_len(): + # minimum length username is 4 + with pytest.raises(ValidationError) as err_info: + MyProfilePatch.model_validate({"userName": "abc"}) + + assert err_info.value.error_count() == 1 + assert err_info.value.errors()[0]["type"] == "too_short" + + MyProfilePatch.model_validate({"userName": "abcd"}) # OK + + +def test_my_profile_patch_username_valid_characters(): + # Ensure valid characters (alphanumeric + . _ -) + with pytest.raises(ValidationError, match="start with a letter") as err_info: + MyProfilePatch.model_validate({"userName": "1234"}) + + assert err_info.value.error_count() == 1 + assert err_info.value.errors()[0]["type"] == "value_error" + + MyProfilePatch.model_validate({"userName": "u1234"}) # OK + + +def test_my_profile_patch_username_special_characters(): + # Ensure no consecutive special characters + with pytest.raises( + ValidationError, match="consecutive special characters" + ) as err_info: + MyProfilePatch.model_validate({"userName": "u1__234"}) + + assert err_info.value.error_count() == 1 + assert err_info.value.errors()[0]["type"] == "value_error" + + MyProfilePatch.model_validate({"userName": "u1_234"}) # OK + + # Ensure it doesn't end with a special character + with pytest.raises(ValidationError, match="end with") as err_info: + MyProfilePatch.model_validate({"userName": "u1234_"}) + + assert err_info.value.error_count() == 1 + assert err_info.value.errors()[0]["type"] == "value_error" + + MyProfilePatch.model_validate({"userName": "u1_234"}) # OK + + +def test_my_profile_patch_username_reserved_words(): + # Check reserved words (example list; extend as needed) + with pytest.raises(ValidationError, match="cannot be used") as err_info: + MyProfilePatch.model_validate({"userName": "admin"}) + + assert err_info.value.error_count() == 1 + assert err_info.value.errors()[0]["type"] == "value_error" + + MyProfilePatch.model_validate({"userName": "midas"}) # OK diff --git a/packages/models-library/tests/test_licenses.py b/packages/models-library/tests/test_licenses.py new file mode 100644 index 00000000000..67e4bff9c11 --- /dev/null +++ b/packages/models-library/tests/test_licenses.py @@ -0,0 +1,38 @@ +from models_library.api_schemas_webserver.licensed_items import LicensedItemRestGet +from models_library.licenses import LicensedItem +from pydantic import ConfigDict + + +def test_licensed_item_from_domain_model(): + for example in LicensedItem.model_json_schema()["examples"]: + item = LicensedItem.model_validate(example) + + got = LicensedItemRestGet.from_domain_model(item) + + assert item.display_name == got.display_name + + # nullable doi + assert ( + got.licensed_resources[0].source.doi + == item.licensed_resources[0]["source"]["doi"] + ) + + # date is required + assert got.licensed_resources[0].source.features["date"] + + # id is required + assert ( + got.licensed_resources[0].source.id + == item.licensed_resources[0]["source"]["id"] + ) + + # checks unset fields + assert "category_icon" not in got.licensed_resources[0].model_fields_set + + +def test_strict_check_of_examples(): + class TestLicensedItemRestGet(LicensedItemRestGet): + model_config = ConfigDict(extra="forbid") + + for example in LicensedItemRestGet.model_json_schema()["examples"]: + TestLicensedItemRestGet.model_validate(example) diff --git a/packages/models-library/tests/test_projects_nodes_ui.py 
b/packages/models-library/tests/test_projects_nodes_ui.py index 54a91c82832..25ce45b446d 100644 --- a/packages/models-library/tests/test_projects_nodes_ui.py +++ b/packages/models-library/tests/test_projects_nodes_ui.py @@ -1,5 +1,5 @@ import pytest -from models_library.projects_nodes_ui import Marker +from models_library.api_schemas_webserver.projects_nodes_ui import MarkerUI from pydantic_extra_types.color import Color @@ -7,5 +7,5 @@ "color_str,expected_color_str", [("#b7e28d", "#b7e28d"), ("Cyan", "#0ff")] ) def test_marker_color_serialized_to_hex(color_str, expected_color_str): - m = Marker(color=Color(color_str)) + m = MarkerUI(color=Color(color_str)) assert m.model_dump_json() == f'{{"color":"{expected_color_str}"}}' diff --git a/packages/models-library/tests/test_projects_ui.py b/packages/models-library/tests/test_projects_ui.py index 9cfdac1980c..99ede58e231 100644 --- a/packages/models-library/tests/test_projects_ui.py +++ b/packages/models-library/tests/test_projects_ui.py @@ -1,5 +1,5 @@ import pytest -from models_library.projects_ui import Annotation +from models_library.api_schemas_webserver.projects_ui import AnnotationUI from pydantic_extra_types.color import Color @@ -7,7 +7,7 @@ "color_str,expected_color_str", [("#b7e28d", "#b7e28d"), ("Cyan", "#0ff")] ) def test_annotation_color_serialized_to_hex(color_str, expected_color_str): - m = Annotation(type="text", color=Color(color_str), attributes={}) + m = AnnotationUI(type="text", color=Color(color_str), attributes={}) assert ( m.model_dump_json() == f'{{"type":"text","color":"{expected_color_str}","attributes":{{}}}}' diff --git a/packages/models-library/tests/test_service_settings_labels.py b/packages/models-library/tests/test_service_settings_labels.py index 119c5cfb2aa..e66f0cd911e 100644 --- a/packages/models-library/tests/test_service_settings_labels.py +++ b/packages/models-library/tests/test_service_settings_labels.py @@ -5,7 +5,6 @@ import json from copy import deepcopy -from pprint import pformat from typing import Any, Final, NamedTuple import pytest @@ -33,6 +32,10 @@ from models_library.services_resources import DEFAULT_SINGLE_SERVICE_NAME from models_library.utils.string_substitution import TextTemplate from pydantic import BaseModel, TypeAdapter, ValidationError +from pytest_simcore.pydantic_models import ( + assert_validation_model, + iter_model_examples_in_class, +) class _Parametrization(NamedTuple): @@ -89,17 +92,23 @@ def test_service_settings(): service_setting.set_destination_containers(["random_value1", "random_value2"]) -@pytest.mark.parametrize("model_cls", [SimcoreServiceLabels]) +@pytest.mark.parametrize( + "model_cls, example_name, example_data", + iter_model_examples_in_class(SimcoreServiceLabels), +) def test_correctly_detect_dynamic_sidecar_boot( - model_cls: type[BaseModel], model_cls_examples: dict[str, dict[str, Any]] + model_cls: type[BaseModel], example_name: str, example_data: Any ): - for name, example in model_cls_examples.items(): - print(name, ":", pformat(example)) - model_instance = TypeAdapter(model_cls).validate_python(example) - assert model_instance.callbacks_mapping is not None - assert model_instance.needs_dynamic_sidecar == ( - "simcore.service.paths-mapping" in example - ) + + model_instance = assert_validation_model( + model_cls, example_name=example_name, example_data=example_data + ) + + assert isinstance(model_instance, SimcoreServiceLabels) + assert model_instance.callbacks_mapping is not None + assert model_instance.needs_dynamic_sidecar == ( + 
"simcore.service.paths-mapping" in example_data + ) def test_raises_error_if_http_entrypoint_is_missing(): diff --git a/packages/notifications-library/requirements/_base.txt b/packages/notifications-library/requirements/_base.txt index 07e1972c35a..59793cd02df 100644 --- a/packages/notifications-library/requirements/_base.txt +++ b/packages/notifications-library/requirements/_base.txt @@ -1,6 +1,6 @@ aiofiles==24.1.0 # via -r requirements/_base.in -aiosmtplib==3.0.2 +aiosmtplib==4.0.0 # via -r requirements/_base.in alembic==1.14.1 # via -r requirements/../../../packages/postgres-database/requirements/_base.in @@ -16,7 +16,7 @@ attrs==25.1.0 # referencing click==8.1.8 # via typer -deprecated==1.2.17 +deprecated==1.2.18 # via # opentelemetry-api # opentelemetry-semantic-conventions @@ -47,7 +47,7 @@ jsonschema==4.23.0 # via -r requirements/../../../packages/models-library/requirements/_base.in jsonschema-specifications==2024.10.1 # via jsonschema -mako==1.3.8 +mako==1.3.9 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -68,16 +68,16 @@ mdurl==0.1.2 # via markdown-it-py multidict==6.1.0 # via yarl -opentelemetry-api==1.29.0 +opentelemetry-api==1.30.0 # via # opentelemetry-instrumentation # opentelemetry-instrumentation-asyncpg # opentelemetry-semantic-conventions -opentelemetry-instrumentation==0.50b0 +opentelemetry-instrumentation==0.51b0 # via opentelemetry-instrumentation-asyncpg -opentelemetry-instrumentation-asyncpg==0.50b0 +opentelemetry-instrumentation-asyncpg==0.51b0 # via -r requirements/../../../packages/postgres-database/requirements/_base.in -opentelemetry-semantic-conventions==0.50b0 +opentelemetry-semantic-conventions==0.51b0 # via # opentelemetry-instrumentation # opentelemetry-instrumentation-asyncpg @@ -98,7 +98,7 @@ orjson==3.10.15 # -r requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in packaging==24.2 # via opentelemetry-instrumentation -propcache==0.2.1 +propcache==0.3.0 # via yarl psycopg2-binary==2.9.10 # via sqlalchemy @@ -130,8 +130,16 @@ pydantic-extra-types==2.10.2 # -r requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in -pydantic-settings==2.7.1 +pydantic-settings==2.7.0 # via + # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # 
-c requirements/../../../requirements/constraints.txt # -r requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/settings-library/requirements/_base.in pygments==2.19.1 @@ -156,7 +164,7 @@ rich==13.9.4 # via # -r requirements/../../../packages/settings-library/requirements/_base.in # typer -rpds-py==0.22.3 +rpds-py==0.23.1 # via # jsonschema # referencing @@ -176,7 +184,7 @@ sqlalchemy==1.4.54 # -c requirements/../../../requirements/constraints.txt # -r requirements/../../../packages/postgres-database/requirements/_base.in # alembic -typer==0.15.1 +typer==0.15.2 # via -r requirements/../../../packages/settings-library/requirements/_base.in types-python-dateutil==2.9.0.20241206 # via arrow diff --git a/packages/notifications-library/requirements/_test.txt b/packages/notifications-library/requirements/_test.txt index c86445a93dc..53aa4b19cad 100644 --- a/packages/notifications-library/requirements/_test.txt +++ b/packages/notifications-library/requirements/_test.txt @@ -1,8 +1,8 @@ aiodocker==0.24.0 # via -r requirements/_test.in -aiohappyeyeballs==2.4.4 +aiohappyeyeballs==2.4.6 # via aiohttp -aiohttp==3.11.11 +aiohttp==3.11.13 # via # -c requirements/../../../requirements/constraints.txt # aiodocker @@ -12,19 +12,19 @@ attrs==25.1.0 # via # -c requirements/_base.txt # aiohttp -certifi==2024.12.14 +certifi==2025.1.31 # via # -c requirements/../../../requirements/constraints.txt # requests charset-normalizer==3.4.1 # via requests -coverage==7.6.10 +coverage==7.6.12 # via # -r requirements/_test.in # pytest-cov docker==7.1.0 # via -r requirements/_test.in -faker==35.0.0 +faker==36.1.1 # via -r requirements/_test.in frozenlist==1.5.0 # via @@ -48,7 +48,7 @@ multidict==6.1.0 # -c requirements/_base.txt # aiohttp # yarl -mypy==1.14.1 +mypy==1.15.0 # via sqlalchemy mypy-extensions==1.0.0 # via mypy @@ -61,12 +61,12 @@ pluggy==1.5.0 # via pytest pprintpp==0.4.0 # via pytest-icdiff -propcache==0.2.1 +propcache==0.3.0 # via # -c requirements/_base.txt # aiohttp # yarl -pytest==8.3.4 +pytest==8.3.5 # via # -r requirements/_test.in # pytest-asyncio @@ -91,10 +91,6 @@ pytest-runner==6.0.1 # via -r requirements/_test.in pytest-sugar==1.0.0 # via -r requirements/_test.in -python-dateutil==2.9.0.post0 - # via - # -c requirements/_base.txt - # faker python-dotenv==1.0.1 # via # -c requirements/_base.txt @@ -105,10 +101,6 @@ pyyaml==6.0.2 # -r requirements/_test.in requests==2.32.3 # via docker -six==1.17.0 - # via - # -c requirements/_base.txt - # python-dateutil sqlalchemy==1.4.54 # via # -c requirements/../../../requirements/constraints.txt @@ -125,9 +117,10 @@ types-aiofiles==24.1.0.20241221 typing-extensions==4.12.2 # via # -c requirements/_base.txt - # faker # mypy # sqlalchemy2-stubs +tzdata==2025.1 + # via faker urllib3==2.3.0 # via # -c requirements/../../../requirements/constraints.txt diff --git a/packages/notifications-library/requirements/_tools.txt b/packages/notifications-library/requirements/_tools.txt index 2b4c07752ef..b5ed94588ea 100644 --- a/packages/notifications-library/requirements/_tools.txt +++ b/packages/notifications-library/requirements/_tools.txt @@ -1,6 +1,6 @@ astroid==3.3.8 # via pylint -black==24.10.0 +black==25.1.0 # via -r requirements/../../../requirements/devenv.txt build==1.2.2.post1 # via pip-tools @@ -19,15 +19,15 @@ distlib==0.3.9 # via virtualenv filelock==3.17.0 # via virtualenv -identify==2.6.6 +identify==2.6.8 # via pre-commit -isort==5.13.2 +isort==6.0.1 # via # -r requirements/../../../requirements/devenv.txt # 
pylint mccabe==0.7.0 # via pylint -mypy==1.14.1 +mypy==1.15.0 # via # -c requirements/_test.txt # -r requirements/../../../requirements/devenv.txt @@ -46,7 +46,7 @@ packaging==24.2 # build pathspec==0.12.1 # via black -pip==25.0 +pip==25.0.1 # via pip-tools pip-tools==7.4.1 # via -r requirements/../../../requirements/devenv.txt @@ -57,7 +57,7 @@ platformdirs==4.3.6 # virtualenv pre-commit==4.1.0 # via -r requirements/../../../requirements/devenv.txt -pylint==3.3.3 +pylint==3.3.4 # via -r requirements/../../../requirements/devenv.txt pyproject-hooks==1.2.0 # via @@ -68,9 +68,9 @@ pyyaml==6.0.2 # -c requirements/../../../requirements/constraints.txt # -c requirements/_test.txt # pre-commit -ruff==0.9.3 +ruff==0.9.9 # via -r requirements/../../../requirements/devenv.txt -setuptools==75.8.0 +setuptools==75.8.2 # via pip-tools tomlkit==0.13.2 # via pylint @@ -79,7 +79,7 @@ typing-extensions==4.12.2 # -c requirements/_base.txt # -c requirements/_test.txt # mypy -virtualenv==20.29.1 +virtualenv==20.29.2 # via pre-commit wheel==0.45.1 # via pip-tools diff --git a/packages/postgres-database/requirements/_base.txt b/packages/postgres-database/requirements/_base.txt index 202c6363adb..b16bdd318cf 100644 --- a/packages/postgres-database/requirements/_base.txt +++ b/packages/postgres-database/requirements/_base.txt @@ -4,7 +4,7 @@ annotated-types==0.7.0 # via pydantic asyncpg==0.30.0 # via sqlalchemy -deprecated==1.2.17 +deprecated==1.2.18 # via # opentelemetry-api # opentelemetry-semantic-conventions @@ -14,7 +14,7 @@ idna==3.10 # via yarl importlib-metadata==8.5.0 # via opentelemetry-api -mako==1.3.8 +mako==1.3.9 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt @@ -23,16 +23,16 @@ markupsafe==3.0.2 # via mako multidict==6.1.0 # via yarl -opentelemetry-api==1.29.0 +opentelemetry-api==1.30.0 # via # opentelemetry-instrumentation # opentelemetry-instrumentation-asyncpg # opentelemetry-semantic-conventions -opentelemetry-instrumentation==0.50b0 +opentelemetry-instrumentation==0.51b0 # via opentelemetry-instrumentation-asyncpg -opentelemetry-instrumentation-asyncpg==0.50b0 +opentelemetry-instrumentation-asyncpg==0.51b0 # via -r requirements/_base.in -opentelemetry-semantic-conventions==0.50b0 +opentelemetry-semantic-conventions==0.51b0 # via # opentelemetry-instrumentation # opentelemetry-instrumentation-asyncpg @@ -43,7 +43,7 @@ orjson==3.10.15 # -r requirements/../../../packages/common-library/requirements/_base.in packaging==24.2 # via opentelemetry-instrumentation -propcache==0.2.1 +propcache==0.3.0 # via yarl psycopg2-binary==2.9.10 # via sqlalchemy diff --git a/packages/postgres-database/requirements/_migration.txt b/packages/postgres-database/requirements/_migration.txt index 45fe8da1598..a9f890849e7 100644 --- a/packages/postgres-database/requirements/_migration.txt +++ b/packages/postgres-database/requirements/_migration.txt @@ -2,7 +2,7 @@ alembic==1.14.1 # via # -c requirements/_base.txt # -r requirements/_migration.in -certifi==2024.12.14 +certifi==2025.1.31 # via # -c requirements/../../../requirements/constraints.txt # requests @@ -20,7 +20,7 @@ idna==3.10 # via # -c requirements/_base.txt # requests -mako==1.3.8 +mako==1.3.9 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt diff --git a/packages/postgres-database/requirements/_test.txt b/packages/postgres-database/requirements/_test.txt index 6a4bc1fec6f..1b1be316909 100644 --- 
a/packages/postgres-database/requirements/_test.txt +++ b/packages/postgres-database/requirements/_test.txt @@ -6,11 +6,11 @@ async-timeout==4.0.3 # via aiopg attrs==25.1.0 # via pytest-docker -coverage==7.6.10 +coverage==7.6.12 # via # -r requirements/_test.in # pytest-cov -faker==35.0.0 +faker==36.1.1 # via -r requirements/_test.in greenlet==3.1.1 # via @@ -19,7 +19,7 @@ greenlet==3.1.1 # sqlalchemy iniconfig==2.0.0 # via pytest -mypy==1.14.1 +mypy==1.15.0 # via sqlalchemy mypy-extensions==1.0.0 # via mypy @@ -34,7 +34,7 @@ psycopg2-binary==2.9.10 # -c requirements/_base.txt # aiopg # sqlalchemy -pytest==8.3.4 +pytest==8.3.5 # via # -r requirements/_test.in # pytest-asyncio @@ -47,16 +47,14 @@ pytest-asyncio==0.23.8 # -r requirements/_test.in pytest-cov==6.0.0 # via -r requirements/_test.in -pytest-docker==3.1.1 +pytest-docker==3.2.0 # via -r requirements/_test.in pytest-instafail==0.5.0 # via -r requirements/_test.in pytest-runner==6.0.1 # via -r requirements/_test.in python-dateutil==2.9.0.post0 - # via - # arrow - # faker + # via arrow pyyaml==6.0.2 # via # -c requirements/../../../requirements/constraints.txt @@ -78,15 +76,16 @@ types-psycopg2==2.9.21.20250121 # via -r requirements/_test.in types-python-dateutil==2.9.0.20241206 # via arrow -types-requests==2.32.0.20241016 +types-requests==2.32.0.20250301 # via types-docker typing-extensions==4.12.2 # via # -c requirements/_base.txt # -c requirements/_migration.txt - # faker # mypy # sqlalchemy2-stubs +tzdata==2025.1 + # via faker urllib3==2.3.0 # via # -c requirements/../../../requirements/constraints.txt diff --git a/packages/postgres-database/requirements/_tools.txt b/packages/postgres-database/requirements/_tools.txt index 69c62e6a632..f896126c0b0 100644 --- a/packages/postgres-database/requirements/_tools.txt +++ b/packages/postgres-database/requirements/_tools.txt @@ -1,6 +1,6 @@ astroid==3.3.8 # via pylint -black==24.10.0 +black==25.1.0 # via -r requirements/../../../requirements/devenv.txt build==1.2.2.post1 # via pip-tools @@ -18,15 +18,15 @@ distlib==0.3.9 # via virtualenv filelock==3.17.0 # via virtualenv -identify==2.6.6 +identify==2.6.8 # via pre-commit -isort==5.13.2 +isort==6.0.1 # via # -r requirements/../../../requirements/devenv.txt # pylint mccabe==0.7.0 # via pylint -mypy==1.14.1 +mypy==1.15.0 # via # -c requirements/_test.txt # -r requirements/../../../requirements/devenv.txt @@ -45,7 +45,7 @@ packaging==24.2 # build pathspec==0.12.1 # via black -pip==25.0 +pip==25.0.1 # via pip-tools pip-tools==7.4.1 # via -r requirements/../../../requirements/devenv.txt @@ -56,7 +56,7 @@ platformdirs==4.3.6 # virtualenv pre-commit==4.1.0 # via -r requirements/../../../requirements/devenv.txt -pylint==3.3.3 +pylint==3.3.4 # via -r requirements/../../../requirements/devenv.txt pyproject-hooks==1.2.0 # via @@ -67,9 +67,9 @@ pyyaml==6.0.2 # -c requirements/../../../requirements/constraints.txt # -c requirements/_test.txt # pre-commit -ruff==0.9.3 +ruff==0.9.9 # via -r requirements/../../../requirements/devenv.txt -setuptools==75.8.0 +setuptools==75.8.2 # via pip-tools tomlkit==0.13.2 # via pylint @@ -78,7 +78,7 @@ typing-extensions==4.12.2 # -c requirements/_base.txt # -c requirements/_test.txt # mypy -virtualenv==20.29.1 +virtualenv==20.29.2 # via pre-commit wheel==0.45.1 # via pip-tools diff --git a/packages/postgres-database/src/simcore_postgres_database/errors.py b/packages/postgres-database/src/simcore_postgres_database/aiopg_errors.py similarity index 53% rename from 
packages/postgres-database/src/simcore_postgres_database/errors.py rename to packages/postgres-database/src/simcore_postgres_database/aiopg_errors.py index 9c4fb417854..730d6f630ac 100644 --- a/packages/postgres-database/src/simcore_postgres_database/errors.py +++ b/packages/postgres-database/src/simcore_postgres_database/aiopg_errors.py @@ -1,25 +1,32 @@ -""" aiopg errors - StandardError - |__ Warning - |__ Error - |__ InterfaceError - |__ DatabaseError - |__ DataError - |__ OperationalError - |__ IntegrityError - |__ InternalError - |__ ProgrammingError - |__ NotSupportedError +"""aiopg errors +WARNING: these errors are not raised by asyncpg. Therefore, all code using the new sqlalchemy.ext.asyncio + MUST use the exceptions from sqlalchemy.exc instead!!!! - - aiopg reuses DBAPI exceptions - SEE https://aiopg.readthedocs.io/en/stable/core.html?highlight=Exception#exceptions - SEE http://initd.org/psycopg/docs/module.html#dbapi-exceptions - SEE https://www.postgresql.org/docs/current/errcodes-appendix.html +StandardError +|__ Warning +|__ Error + |__ InterfaceError + |__ DatabaseError + |__ DataError + |__ OperationalError + |__ IntegrityError + |__ InternalError + |__ ProgrammingError + |__ NotSupportedError + +- aiopg reuses DBAPI exceptions + SEE https://aiopg.readthedocs.io/en/stable/core.html?highlight=Exception#exceptions + SEE http://initd.org/psycopg/docs/module.html#dbapi-exceptions + SEE https://www.postgresql.org/docs/current/errcodes-appendix.html """ + # NOTE: psycopg2.errors are created dynamically # pylint: disable=no-name-in-module -from psycopg2 import DatabaseError, DataError +from psycopg2 import ( + DatabaseError, + DataError, ) from psycopg2 import Error as DBAPIError from psycopg2 import ( IntegrityError, diff --git a/packages/postgres-database/src/simcore_postgres_database/migration/versions/061607911a22_drop_projects_version_control.py b/packages/postgres-database/src/simcore_postgres_database/migration/versions/061607911a22_drop_projects_version_control.py new file mode 100644 index 00000000000..d7f6f4dc9ec --- /dev/null +++ b/packages/postgres-database/src/simcore_postgres_database/migration/versions/061607911a22_drop_projects_version_control.py @@ -0,0 +1,234 @@ +"""drop projects_version_control + +Revision ID: 061607911a22 +Revises: 3fe27ff48f73 +Create Date: 2025-02-06 19:28:49.918139+00:00 + +""" +import sqlalchemy as sa +from alembic import op +from sqlalchemy.dialects import postgresql + +# revision identifiers, used by Alembic. 
+revision = "061607911a22" +down_revision = "3fe27ff48f73" +branch_labels = None +depends_on = None + + +def upgrade(): + op.drop_table("projects_vc_heads") + op.drop_table("projects_vc_branches") + op.drop_table("projects_vc_tags") + op.drop_table("projects_vc_commits") + op.drop_table("projects_vc_snapshots") + op.drop_table("projects_vc_repos") + + +def downgrade(): + + op.create_table( + "projects_vc_snapshots", + sa.Column("checksum", sa.VARCHAR(), autoincrement=False, nullable=False), + sa.Column( + "content", + postgresql.JSONB(astext_type=sa.Text()), + server_default=sa.text("'{}'::jsonb"), + autoincrement=False, + nullable=False, + ), + sa.PrimaryKeyConstraint("checksum", name="projects_vc_snapshots_pkey"), + postgresql_ignore_search_path=False, + ) + + op.create_table( + "projects_vc_repos", + sa.Column( + "id", + sa.BIGINT(), + server_default=sa.text("nextval('projects_vc_repos_id_seq'::regclass)"), + autoincrement=True, + nullable=False, + ), + sa.Column("project_uuid", sa.VARCHAR(), autoincrement=False, nullable=False), + sa.Column("project_checksum", sa.VARCHAR(), autoincrement=False, nullable=True), + sa.Column( + "created", + postgresql.TIMESTAMP(), + server_default=sa.text("now()"), + autoincrement=False, + nullable=False, + ), + sa.Column( + "modified", + postgresql.TIMESTAMP(), + server_default=sa.text("now()"), + autoincrement=False, + nullable=False, + ), + sa.ForeignKeyConstraint( + ["project_uuid"], + ["projects.uuid"], + name="fk_projects_vc_repos_project_uuid", + onupdate="CASCADE", + ondelete="CASCADE", + ), + sa.PrimaryKeyConstraint("id", name="projects_vc_repos_pkey"), + sa.UniqueConstraint("project_uuid", name="projects_vc_repos_project_uuid_key"), + postgresql_ignore_search_path=False, + ) + + op.create_table( + "projects_vc_commits", + sa.Column( + "id", + sa.BIGINT(), + server_default=sa.text("nextval('projects_vc_commits_id_seq'::regclass)"), + autoincrement=True, + nullable=False, + ), + sa.Column("repo_id", sa.BIGINT(), autoincrement=False, nullable=False), + sa.Column("parent_commit_id", sa.BIGINT(), autoincrement=False, nullable=True), + sa.Column( + "snapshot_checksum", sa.VARCHAR(), autoincrement=False, nullable=False + ), + sa.Column("message", sa.VARCHAR(), autoincrement=False, nullable=True), + sa.Column( + "created", + postgresql.TIMESTAMP(), + server_default=sa.text("now()"), + autoincrement=False, + nullable=False, + ), + sa.ForeignKeyConstraint( + ["parent_commit_id"], + ["projects_vc_commits.id"], + name="fk_projects_vc_commits_parent_commit_id", + onupdate="CASCADE", + ), + sa.ForeignKeyConstraint( + ["repo_id"], + ["projects_vc_repos.id"], + name="fk_projects_vc_commits_repo_id", + onupdate="CASCADE", + ondelete="CASCADE", + ), + sa.ForeignKeyConstraint( + ["snapshot_checksum"], + ["projects_vc_snapshots.checksum"], + name="fk_projects_vc_commits_snapshot_checksum", + onupdate="CASCADE", + ondelete="RESTRICT", + ), + sa.PrimaryKeyConstraint("id", name="projects_vc_commits_pkey"), + postgresql_ignore_search_path=False, + ) + + op.create_table( + "projects_vc_branches", + sa.Column("id", sa.BIGINT(), autoincrement=True, nullable=False), + sa.Column("repo_id", sa.BIGINT(), autoincrement=False, nullable=False), + sa.Column("head_commit_id", sa.BIGINT(), autoincrement=False, nullable=True), + sa.Column("name", sa.VARCHAR(), autoincrement=False, nullable=True), + sa.Column( + "created", + postgresql.TIMESTAMP(), + server_default=sa.text("now()"), + autoincrement=False, + nullable=False, + ), + sa.Column( + "modified", + postgresql.TIMESTAMP(), + 
server_default=sa.text("now()"), + autoincrement=False, + nullable=False, + ), + sa.ForeignKeyConstraint( + ["head_commit_id"], + ["projects_vc_commits.id"], + name="fk_projects_vc_branches_head_commit_id", + onupdate="CASCADE", + ondelete="RESTRICT", + ), + sa.ForeignKeyConstraint( + ["repo_id"], + ["projects_vc_repos.id"], + name="projects_vc_branches_repo_id", + onupdate="CASCADE", + ondelete="CASCADE", + ), + sa.PrimaryKeyConstraint("id", name="projects_vc_branches_pkey"), + sa.UniqueConstraint("name", "repo_id", name="repo_branch_uniqueness"), + ) + + op.create_table( + "projects_vc_tags", + sa.Column("id", sa.BIGINT(), autoincrement=True, nullable=False), + sa.Column("repo_id", sa.BIGINT(), autoincrement=False, nullable=False), + sa.Column("commit_id", sa.BIGINT(), autoincrement=False, nullable=False), + sa.Column("name", sa.VARCHAR(), autoincrement=False, nullable=True), + sa.Column("message", sa.VARCHAR(), autoincrement=False, nullable=True), + sa.Column("hidden", sa.BOOLEAN(), autoincrement=False, nullable=True), + sa.Column( + "created", + postgresql.TIMESTAMP(), + server_default=sa.text("now()"), + autoincrement=False, + nullable=False, + ), + sa.Column( + "modified", + postgresql.TIMESTAMP(), + server_default=sa.text("now()"), + autoincrement=False, + nullable=False, + ), + sa.ForeignKeyConstraint( + ["commit_id"], + ["projects_vc_commits.id"], + name="fk_projects_vc_tags_commit_id", + onupdate="CASCADE", + ondelete="CASCADE", + ), + sa.ForeignKeyConstraint( + ["repo_id"], + ["projects_vc_repos.id"], + name="fk_projects_vc_tags_repo_id", + onupdate="CASCADE", + ondelete="CASCADE", + ), + sa.PrimaryKeyConstraint("id", name="projects_vc_tags_pkey"), + sa.UniqueConstraint("name", "repo_id", name="repo_tag_uniqueness"), + ) + + op.create_table( + "projects_vc_heads", + sa.Column("repo_id", sa.BIGINT(), autoincrement=False, nullable=False), + sa.Column("head_branch_id", sa.BIGINT(), autoincrement=False, nullable=True), + sa.Column( + "modified", + postgresql.TIMESTAMP(), + server_default=sa.text("now()"), + autoincrement=False, + nullable=False, + ), + sa.ForeignKeyConstraint( + ["head_branch_id"], + ["projects_vc_branches.id"], + name="fk_projects_vc_heads_head_branch_id", + onupdate="CASCADE", + ondelete="CASCADE", + ), + sa.ForeignKeyConstraint( + ["repo_id"], + ["projects_vc_repos.id"], + name="projects_vc_branches_repo_id", + onupdate="CASCADE", + ondelete="CASCADE", + ), + sa.PrimaryKeyConstraint("repo_id", name="projects_vc_heads_pkey"), + sa.UniqueConstraint( + "head_branch_id", name="projects_vc_heads_head_branch_id_key" + ), + ) diff --git a/packages/postgres-database/src/simcore_postgres_database/migration/versions/163b11424cb1_convert_empty_str_to_null.py b/packages/postgres-database/src/simcore_postgres_database/migration/versions/163b11424cb1_convert_empty_str_to_null.py new file mode 100644 index 00000000000..781aa64ab01 --- /dev/null +++ b/packages/postgres-database/src/simcore_postgres_database/migration/versions/163b11424cb1_convert_empty_str_to_null.py @@ -0,0 +1,52 @@ +"""enforce null + +Revision ID: 163b11424cb1 +Revises: a8d336ca9379 +Create Date: 2025-02-24 12:44:10.538469+00:00 + +""" +import sqlalchemy as sa +from alembic import op + +# revision identifiers, used by Alembic. 
+revision = "163b11424cb1" +down_revision = "a8d336ca9379" +branch_labels = None +depends_on = None + + +def upgrade(): + + # SEE https://github.com/ITISFoundation/osparc-simcore/pull/7268 + + op.execute( + sa.DDL( + """ + UPDATE services_meta_data + SET thumbnail = NULL + WHERE thumbnail = ''; + """ + ) + ) + op.execute( + sa.DDL( + """ + UPDATE services_meta_data + SET version_display = NULL + WHERE version_display = ''; + """ + ) + ) + op.execute( + """ + UPDATE services_meta_data + SET icon = NULL + WHERE icon = ''; + """ + ) + + +def downgrade(): + """ + Nothing to be done here + """ diff --git a/packages/postgres-database/src/simcore_postgres_database/migration/versions/381336fa8001_add_product_name_to_licensed_item_to_.py b/packages/postgres-database/src/simcore_postgres_database/migration/versions/381336fa8001_add_product_name_to_licensed_item_to_.py new file mode 100644 index 00000000000..c5a2137e0a3 --- /dev/null +++ b/packages/postgres-database/src/simcore_postgres_database/migration/versions/381336fa8001_add_product_name_to_licensed_item_to_.py @@ -0,0 +1,75 @@ +"""add product name to licensed item to resource table + +Revision ID: 381336fa8001 +Revises: d84edab53761 +Create Date: 2025-02-25 13:37:19.861701+00:00 + +""" + +import sqlalchemy as sa +from alembic import op + +# revision identifiers, used by Alembic. +revision = "381336fa8001" +down_revision = "d84edab53761" +branch_labels = None +depends_on = None + + +def upgrade(): + op.add_column( + "licensed_item_to_resource", + sa.Column("product_name", sa.String(), nullable=True), + ) + + ### Added Manually --> + op.execute( + """ + UPDATE licensed_item_to_resource + SET product_name = 's4l' + WHERE product_name IS NULL + """ + ) + op.alter_column("licensed_item_to_resource", "product_name", nullable=False) + ### <-- Added Manually + + op.drop_constraint( + "uq_licensed_item_to_resource_resource_id", + "licensed_item_to_resource", + type_="unique", + ) + op.create_unique_constraint( + "uq_licensed_item_to_resource_resource_id", + "licensed_item_to_resource", + ["product_name", "licensed_resource_id"], + ) + op.create_foreign_key( + "fk_licensed_item_to_resource_product_name", + "licensed_item_to_resource", + "products", + ["product_name"], + ["name"], + onupdate="CASCADE", + ondelete="CASCADE", + ) + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.drop_constraint( + "fk_licensed_item_to_resource_product_name", + "licensed_item_to_resource", + type_="foreignkey", + ) + op.drop_constraint( + "uq_licensed_item_to_resource_resource_id", + "licensed_item_to_resource", + type_="unique", + ) + op.create_unique_constraint( + "uq_licensed_item_to_resource_resource_id", + "licensed_item_to_resource", + ["licensed_resource_id"], + ) + op.drop_column("licensed_item_to_resource", "product_name") + # ### end Alembic commands ### diff --git a/packages/postgres-database/src/simcore_postgres_database/migration/versions/3fe27ff48f73_new_icon_table.py b/packages/postgres-database/src/simcore_postgres_database/migration/versions/3fe27ff48f73_new_icon_table.py new file mode 100644 index 00000000000..3899ddb9787 --- /dev/null +++ b/packages/postgres-database/src/simcore_postgres_database/migration/versions/3fe27ff48f73_new_icon_table.py @@ -0,0 +1,27 @@ +"""new icon table + +Revision ID: 3fe27ff48f73 +Revises: 611f956aa3e3 +Create Date: 2025-02-05 16:50:02.419293+00:00 + +""" +import sqlalchemy as sa +from alembic import op + +# revision identifiers, used by Alembic. 
+revision = "3fe27ff48f73" +down_revision = "611f956aa3e3" +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.add_column("services_meta_data", sa.Column("icon", sa.String(), nullable=True)) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.drop_column("services_meta_data", "icon") + # ### end Alembic commands ### diff --git a/packages/postgres-database/src/simcore_postgres_database/migration/versions/5e43b5ec7604_licensed_resources_add_priority_column.py b/packages/postgres-database/src/simcore_postgres_database/migration/versions/5e43b5ec7604_licensed_resources_add_priority_column.py new file mode 100644 index 00000000000..2505a68e37f --- /dev/null +++ b/packages/postgres-database/src/simcore_postgres_database/migration/versions/5e43b5ec7604_licensed_resources_add_priority_column.py @@ -0,0 +1,30 @@ +"""licensed_resources add priority column + +Revision ID: 5e43b5ec7604 +Revises: e8ffc0c96336 +Create Date: 2025-02-18 12:24:49.105989+00:00 + +""" +import sqlalchemy as sa +from alembic import op + +# revision identifiers, used by Alembic. +revision = "5e43b5ec7604" +down_revision = "e8ffc0c96336" +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.add_column( + "licensed_resources", + sa.Column("priority", sa.SmallInteger(), server_default="0", nullable=False), + ) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.drop_column("licensed_resources", "priority") + # ### end Alembic commands ### diff --git a/packages/postgres-database/src/simcore_postgres_database/migration/versions/611f956aa3e3_licensed_items_checkout_purchase_email_.py b/packages/postgres-database/src/simcore_postgres_database/migration/versions/611f956aa3e3_licensed_items_checkout_purchase_email_.py new file mode 100644 index 00000000000..9aab17c2231 --- /dev/null +++ b/packages/postgres-database/src/simcore_postgres_database/migration/versions/611f956aa3e3_licensed_items_checkout_purchase_email_.py @@ -0,0 +1,49 @@ +"""licensed items checkout/purchase email mandatory + +Revision ID: 611f956aa3e3 +Revises: e71ea59858f4 +Create Date: 2025-02-07 12:47:56.235193+00:00 + +""" +import sqlalchemy as sa +from alembic import op + +# revision identifiers, used by Alembic. +revision = "611f956aa3e3" +down_revision = "e71ea59858f4" +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.alter_column( + "resource_tracker_licensed_items_checkouts", + "user_email", + existing_type=sa.VARCHAR(), + nullable=False, + ) + op.alter_column( + "resource_tracker_licensed_items_purchases", + "user_email", + existing_type=sa.VARCHAR(), + nullable=False, + ) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! 
+    op.alter_column(
+        "resource_tracker_licensed_items_purchases",
+        "user_email",
+        existing_type=sa.VARCHAR(),
+        nullable=True,
+    )
+    op.alter_column(
+        "resource_tracker_licensed_items_checkouts",
+        "user_email",
+        existing_type=sa.VARCHAR(),
+        nullable=True,
+    )
+    # ### end Alembic commands ###
diff --git a/packages/postgres-database/src/simcore_postgres_database/migration/versions/68777fdf9539_add_licensed_resources.py b/packages/postgres-database/src/simcore_postgres_database/migration/versions/68777fdf9539_add_licensed_resources.py
new file mode 100644
index 00000000000..745e7a2e74d
--- /dev/null
+++ b/packages/postgres-database/src/simcore_postgres_database/migration/versions/68777fdf9539_add_licensed_resources.py
@@ -0,0 +1,95 @@
+"""add licensed resources
+
+Revision ID: 68777fdf9539
+Revises: 061607911a22
+Create Date: 2025-02-09 10:24:50.533653+00:00
+
+"""
+import sqlalchemy as sa
+from alembic import op
+from sqlalchemy.dialects import postgresql
+
+# revision identifiers, used by Alembic.
+revision = "68777fdf9539"
+down_revision = "061607911a22"
+branch_labels = None
+depends_on = None
+
+
+# Reuse the existing Enum type
+licensed_resource_type = postgresql.ENUM(
+    "VIP_MODEL", name="licensedresourcetype", create_type=False
+)
+
+
+def upgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.create_table(
+        "licensed_resources",
+        sa.Column(
+            "licensed_resource_id",
+            postgresql.UUID(as_uuid=True),
+            server_default=sa.text("gen_random_uuid()"),
+            nullable=False,
+        ),
+        sa.Column("display_name", sa.String(), nullable=False),
+        sa.Column("licensed_resource_name", sa.String(), nullable=False),
+        sa.Column(
+            "licensed_resource_type",
+            licensed_resource_type,  # Reuse existing Enum instead of redefining it
+            nullable=False,
+        ),
+        sa.Column(
+            "licensed_resource_data",
+            postgresql.JSONB(astext_type=sa.Text()),
+            nullable=True,
+        ),
+        sa.Column(
+            "created",
+            sa.DateTime(timezone=True),
+            server_default=sa.text("now()"),
+            nullable=False,
+        ),
+        sa.Column(
+            "modified",
+            sa.DateTime(timezone=True),
+            server_default=sa.text("now()"),
+            nullable=False,
+        ),
+        sa.Column(
+            "trashed",
+            sa.DateTime(timezone=True),
+            nullable=True,
+            comment="The date and time when the licensed_resources was marked as trashed. Null if the licensed_resources has not been trashed [default].",
+        ),
+        sa.PrimaryKeyConstraint("licensed_resource_id"),
+        sa.UniqueConstraint(
+            "licensed_resource_name",
+            "licensed_resource_type",
+            name="uq_licensed_resource_name_type2",
+        ),
+    )
+    # ### end Alembic commands ###
+
+    # Migration of licensed resources from licensed_items table to new licensed_resources table
+    op.execute(
+        sa.DDL(
+            """
+            INSERT INTO licensed_resources (display_name, licensed_resource_name, licensed_resource_type, licensed_resource_data, created, modified)
+            SELECT
+                display_name,
+                licensed_resource_name,
+                licensed_resource_type,
+                licensed_resource_data,
+                CURRENT_TIMESTAMP as created,
+                CURRENT_TIMESTAMP as modified
+            FROM licensed_items
+            """
+        )
+    )
+
+
+def downgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
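+    # NOTE: dropping the table below also discards the rows that upgrade()
+    # copied over from licensed_items; the data migration is one-way and is
+    # not reversed here.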
+    op.drop_table("licensed_resources")
+    # ### end Alembic commands ###
diff --git a/packages/postgres-database/src/simcore_postgres_database/migration/versions/78f24aaf3f78_new_products_ui_column.py b/packages/postgres-database/src/simcore_postgres_database/migration/versions/78f24aaf3f78_new_products_ui_column.py
new file mode 100644
index 00000000000..3c36394729d
--- /dev/null
+++ b/packages/postgres-database/src/simcore_postgres_database/migration/versions/78f24aaf3f78_new_products_ui_column.py
@@ -0,0 +1,36 @@
+"""new products ui column
+
+Revision ID: 78f24aaf3f78
+Revises: 68777fdf9539
+Create Date: 2025-02-12 16:06:09.815111+00:00
+
+"""
+import sqlalchemy as sa
+from alembic import op
+from sqlalchemy.dialects import postgresql
+
+# revision identifiers, used by Alembic.
+revision = "78f24aaf3f78"
+down_revision = "68777fdf9539"
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.add_column(
+        "products",
+        sa.Column(
+            "ui",
+            postgresql.JSONB(astext_type=sa.Text()),
+            server_default=sa.text("'{}'::jsonb"),
+            nullable=False,
+        ),
+    )
+    # ### end Alembic commands ###
+
+
+def downgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.drop_column("products", "ui")
+    # ### end Alembic commands ###
diff --git a/packages/postgres-database/src/simcore_postgres_database/migration/versions/7994074c4d98_remove_cluster_to_groups.py b/packages/postgres-database/src/simcore_postgres_database/migration/versions/7994074c4d98_remove_cluster_to_groups.py
new file mode 100644
index 00000000000..404e537d5fa
--- /dev/null
+++ b/packages/postgres-database/src/simcore_postgres_database/migration/versions/7994074c4d98_remove_cluster_to_groups.py
@@ -0,0 +1,85 @@
+"""remove cluster_to_groups
+
+Revision ID: 7994074c4d98
+Revises: 381336fa8001
+Create Date: 2025-03-17 14:19:54.675073+00:00
+
+"""
+
+import sqlalchemy as sa
+from alembic import op
+from sqlalchemy.dialects import postgresql
+
+# revision identifiers, used by Alembic.
+revision = "7994074c4d98"
+down_revision = "381336fa8001"
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.drop_table("cluster_to_groups")
+    # ### end Alembic commands ###
+
+
+def downgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
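+    # NOTE: this restores the cluster_to_groups schema only; rows dropped by
+    # upgrade() cannot be recovered from within this migration.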
+    op.create_table(
+        "cluster_to_groups",
+        sa.Column("cluster_id", sa.BIGINT(), autoincrement=False, nullable=True),
+        sa.Column("gid", sa.BIGINT(), autoincrement=False, nullable=True),
+        sa.Column(
+            "read",
+            sa.BOOLEAN(),
+            server_default=sa.text("false"),
+            autoincrement=False,
+            nullable=False,
+        ),
+        sa.Column(
+            "write",
+            sa.BOOLEAN(),
+            server_default=sa.text("false"),
+            autoincrement=False,
+            nullable=False,
+        ),
+        sa.Column(
+            "delete",
+            sa.BOOLEAN(),
+            server_default=sa.text("false"),
+            autoincrement=False,
+            nullable=False,
+        ),
+        sa.Column(
+            "created",
+            postgresql.TIMESTAMP(),
+            server_default=sa.text("now()"),
+            autoincrement=False,
+            nullable=False,
+        ),
+        sa.Column(
+            "modified",
+            postgresql.TIMESTAMP(),
+            server_default=sa.text("now()"),
+            autoincrement=False,
+            nullable=False,
+        ),
+        sa.ForeignKeyConstraint(
+            ["cluster_id"],
+            ["clusters.id"],
+            name="fk_cluster_to_groups_id_clusters",
+            onupdate="CASCADE",
+            ondelete="CASCADE",
+        ),
+        sa.ForeignKeyConstraint(
+            ["gid"],
+            ["groups.gid"],
+            name="fk_cluster_to_groups_gid_groups",
+            onupdate="CASCADE",
+            ondelete="CASCADE",
+        ),
+        sa.UniqueConstraint(
+            "cluster_id", "gid", name="cluster_to_groups_cluster_id_gid_key"
+        ),
+    )
+    # ### end Alembic commands ###
diff --git a/packages/postgres-database/src/simcore_postgres_database/migration/versions/8403acca8759_new_users_privacy_hide_username_column.py b/packages/postgres-database/src/simcore_postgres_database/migration/versions/8403acca8759_new_users_privacy_hide_username_column.py
new file mode 100644
index 00000000000..91e5a72207f
--- /dev/null
+++ b/packages/postgres-database/src/simcore_postgres_database/migration/versions/8403acca8759_new_users_privacy_hide_username_column.py
@@ -0,0 +1,36 @@
+"""new users.privacy_hide_username column
+
+Revision ID: 8403acca8759
+Revises: f7f3c835f38a
+Create Date: 2025-03-20 14:08:48.321587+00:00
+
+"""
+
+import sqlalchemy as sa
+from alembic import op
+
+# revision identifiers, used by Alembic.
+revision = "8403acca8759"
+down_revision = "f7f3c835f38a"
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.add_column(
+        "users",
+        sa.Column(
+            "privacy_hide_username",
+            sa.Boolean(),
+            server_default=sa.text("false"),
+            nullable=False,
+        ),
+    )
+    # ### end Alembic commands ###
+
+
+def downgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.drop_column("users", "privacy_hide_username")
+    # ### end Alembic commands ###
diff --git a/packages/postgres-database/src/simcore_postgres_database/migration/versions/a53c3c153bc8_modify_licensed_items_resources_db.py b/packages/postgres-database/src/simcore_postgres_database/migration/versions/a53c3c153bc8_modify_licensed_items_resources_db.py
new file mode 100644
index 00000000000..3f07cd80eba
--- /dev/null
+++ b/packages/postgres-database/src/simcore_postgres_database/migration/versions/a53c3c153bc8_modify_licensed_items_resources_db.py
@@ -0,0 +1,159 @@
+"""modify licensed items/resources DB
+
+Revision ID: a53c3c153bc8
+Revises: 78f24aaf3f78
+Create Date: 2025-02-13 10:13:32.817207+00:00
+
+"""
+import sqlalchemy as sa
+from alembic import op
+from sqlalchemy.dialects import postgresql
+
+# revision identifiers, used by Alembic.
+revision = "a53c3c153bc8"
+down_revision = "78f24aaf3f78"
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
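+    # NOTE: the NOT NULL "key"/"version" columns added below come without a
+    # server default or backfill, so this migration presumably expects the
+    # licensed_items and resource-tracker tables to be empty (or to be filled
+    # out-of-band) when it runs.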
+    op.create_table(
+        "licensed_item_to_resource",
+        sa.Column("licensed_item_id", postgresql.UUID(as_uuid=True), nullable=False),
+        sa.Column(
+            "licensed_resource_id", postgresql.UUID(as_uuid=True), nullable=False
+        ),
+        sa.Column(
+            "created",
+            sa.DateTime(timezone=True),
+            server_default=sa.text("now()"),
+            nullable=False,
+        ),
+        sa.Column(
+            "modified",
+            sa.DateTime(timezone=True),
+            server_default=sa.text("now()"),
+            nullable=False,
+        ),
+        sa.ForeignKeyConstraint(
+            ["licensed_item_id"],
+            ["licensed_items.licensed_item_id"],
+            name="fk_licensed_item_to_resource_licensed_item_id",
+            onupdate="CASCADE",
+            ondelete="CASCADE",
+        ),
+        sa.ForeignKeyConstraint(
+            ["licensed_resource_id"],
+            ["licensed_resources.licensed_resource_id"],
+            name="fk_licensed_item_to_resource_licensed_resource_id",
+            onupdate="CASCADE",
+            ondelete="CASCADE",
+        ),
+    )
+    op.add_column("licensed_items", sa.Column("key", sa.String(), nullable=False))
+    op.add_column("licensed_items", sa.Column("version", sa.String(), nullable=False))
+    op.alter_column(
+        "licensed_items", "pricing_plan_id", existing_type=sa.BIGINT(), nullable=False
+    )
+    op.alter_column(
+        "licensed_items", "product_name", existing_type=sa.VARCHAR(), nullable=False
+    )
+    op.drop_constraint(
+        "uq_licensed_resource_name_type", "licensed_items", type_="unique"
+    )
+    op.create_index(
+        "idx_licensed_items_key_version",
+        "licensed_items",
+        ["key", "version"],
+        unique=True,
+    )
+    op.drop_column("licensed_items", "licensed_resource_data")
+    op.drop_column("licensed_items", "trashed")
+    op.drop_column("licensed_items", "licensed_resource_name")
+    op.add_column(
+        "resource_tracker_licensed_items_checkouts",
+        sa.Column("key", sa.String(), nullable=False),
+    )
+    op.add_column(
+        "resource_tracker_licensed_items_checkouts",
+        sa.Column("version", sa.String(), nullable=False),
+    )
+    op.create_index(
+        "idx_licensed_items_checkouts_key_version",
+        "resource_tracker_licensed_items_checkouts",
+        ["key", "version"],
+        unique=False,
+    )
+    op.add_column(
+        "resource_tracker_licensed_items_purchases",
+        sa.Column("key", sa.String(), nullable=False),
+    )
+    op.add_column(
+        "resource_tracker_licensed_items_purchases",
+        sa.Column("version", sa.String(), nullable=False),
+    )
+    op.create_index(
+        "idx_licensed_items_purchases_key_version",
+        "resource_tracker_licensed_items_purchases",
+        ["key", "version"],
+        unique=False,
+    )
+    # ### end Alembic commands ###
+
+
+def downgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.drop_index(
+        "idx_licensed_items_purchases_key_version",
+        table_name="resource_tracker_licensed_items_purchases",
+    )
+    op.drop_column("resource_tracker_licensed_items_purchases", "version")
+    op.drop_column("resource_tracker_licensed_items_purchases", "key")
+    op.drop_index(
+        "idx_licensed_items_checkouts_key_version",
+        table_name="resource_tracker_licensed_items_checkouts",
+    )
+    op.drop_column("resource_tracker_licensed_items_checkouts", "version")
+    op.drop_column("resource_tracker_licensed_items_checkouts", "key")
+    op.add_column(
+        "licensed_items",
+        sa.Column(
+            "licensed_resource_name", sa.VARCHAR(), autoincrement=False, nullable=False
+        ),
+    )
+    op.add_column(
+        "licensed_items",
+        sa.Column(
+            "trashed",
+            postgresql.TIMESTAMP(timezone=True),
+            autoincrement=False,
+            nullable=True,
+            comment="The date and time when the licensed_item was marked as trashed. Null if the licensed_item has not been trashed [default].",
+        ),
+    )
+    op.add_column(
+        "licensed_items",
+        sa.Column(
+            "licensed_resource_data",
+            postgresql.JSONB(astext_type=sa.Text()),
+            autoincrement=False,
+            nullable=True,
+        ),
+    )
+    op.drop_index("idx_licensed_items_key_version", table_name="licensed_items")
+    op.create_unique_constraint(
+        "uq_licensed_resource_name_type",
+        "licensed_items",
+        ["licensed_resource_name", "licensed_resource_type"],
+    )
+    op.alter_column(
+        "licensed_items", "product_name", existing_type=sa.VARCHAR(), nullable=True
+    )
+    op.alter_column(
+        "licensed_items", "pricing_plan_id", existing_type=sa.BIGINT(), nullable=True
+    )
+    op.drop_column("licensed_items", "version")
+    op.drop_column("licensed_items", "key")
+    op.drop_table("licensed_item_to_resource")
+    # ### end Alembic commands ###
diff --git a/packages/postgres-database/src/simcore_postgres_database/migration/versions/a8d336ca9379_idx_licensed_items_key_version_product.py b/packages/postgres-database/src/simcore_postgres_database/migration/versions/a8d336ca9379_idx_licensed_items_key_version_product.py
new file mode 100644
index 00000000000..830b8221ec5
--- /dev/null
+++ b/packages/postgres-database/src/simcore_postgres_database/migration/versions/a8d336ca9379_idx_licensed_items_key_version_product.py
@@ -0,0 +1,38 @@
+"""idx licensed items - key/version/product
+
+Revision ID: a8d336ca9379
+Revises: 5e43b5ec7604
+Create Date: 2025-02-21 14:29:42.575724+00:00
+
+"""
+from alembic import op
+
+# revision identifiers, used by Alembic.
+revision = "a8d336ca9379"
+down_revision = "5e43b5ec7604"
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.drop_index("idx_licensed_items_key_version", table_name="licensed_items")
+    op.create_index(
+        "idx_licensed_items_key_version_product",
+        "licensed_items",
+        ["key", "version", "product_name"],
+        unique=True,
+    )
+    # ### end Alembic commands ###
+
+
+def downgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.drop_index("idx_licensed_items_key_version_product", table_name="licensed_items")
+    op.create_index(
+        "idx_licensed_items_key_version",
+        "licensed_items",
+        ["key", "version"],
+        unique=True,
+    )
+    # ### end Alembic commands ###
diff --git a/packages/postgres-database/src/simcore_postgres_database/migration/versions/d84edab53761_add_restriction_ondelete.py b/packages/postgres-database/src/simcore_postgres_database/migration/versions/d84edab53761_add_restriction_ondelete.py
new file mode 100644
index 00000000000..f8f79b258a8
--- /dev/null
+++ b/packages/postgres-database/src/simcore_postgres_database/migration/versions/d84edab53761_add_restriction_ondelete.py
@@ -0,0 +1,62 @@
+"""add restriction ondelete
+
+Revision ID: d84edab53761
+Revises: 163b11424cb1
+Create Date: 2025-02-25 09:18:14.541874+00:00
+
+"""
+from alembic import op
+
+# revision identifiers, used by Alembic.
+revision = "d84edab53761"
+down_revision = "163b11424cb1"
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
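+    # NOTE: switching the pricing-plan-to-service FK below from CASCADE to
+    # RESTRICT means a services_meta_data row can no longer be deleted while
+    # a pricing plan still references it.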
+    op.create_unique_constraint(
+        "uq_licensed_item_to_resource_resource_id",
+        "licensed_item_to_resource",
+        ["licensed_resource_id"],
+    )
+    op.drop_constraint(
+        "fk_rut_pricing_plan_to_service_key_and_version",
+        "resource_tracker_pricing_plan_to_service",
+        type_="foreignkey",
+    )
+    op.create_foreign_key(
+        "fk_rut_pricing_plan_to_service_key_and_version",
+        "resource_tracker_pricing_plan_to_service",
+        "services_meta_data",
+        ["service_key", "service_version"],
+        ["key", "version"],
+        onupdate="CASCADE",
+        ondelete="RESTRICT",
+    )
+    # ### end Alembic commands ###
+
+
+def downgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.drop_constraint(
+        "fk_rut_pricing_plan_to_service_key_and_version",
+        "resource_tracker_pricing_plan_to_service",
+        type_="foreignkey",
+    )
+    op.create_foreign_key(
+        "fk_rut_pricing_plan_to_service_key_and_version",
+        "resource_tracker_pricing_plan_to_service",
+        "services_meta_data",
+        ["service_key", "service_version"],
+        ["key", "version"],
+        onupdate="CASCADE",
+        ondelete="CASCADE",
+    )
+    op.drop_constraint(
+        "uq_licensed_item_to_resource_resource_id",
+        "licensed_item_to_resource",
+        type_="unique",
+    )
+    # ### end Alembic commands ###
diff --git a/packages/postgres-database/src/simcore_postgres_database/migration/versions/e71ea59858f4_add_uniqu_constraint_in_licensed_items.py b/packages/postgres-database/src/simcore_postgres_database/migration/versions/e71ea59858f4_add_uniqu_constraint_in_licensed_items.py
new file mode 100644
index 00000000000..3af7ff911f8
--- /dev/null
+++ b/packages/postgres-database/src/simcore_postgres_database/migration/versions/e71ea59858f4_add_uniqu_constraint_in_licensed_items.py
@@ -0,0 +1,32 @@
+"""add unique constraint in licensed_items
+
+Revision ID: e71ea59858f4
+Revises: 7d1c6425a51d
+Create Date: 2025-01-30 18:42:15.192968+00:00
+
+"""
+from alembic import op
+
+# revision identifiers, used by Alembic.
+revision = "e71ea59858f4"
+down_revision = "7d1c6425a51d"
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.create_unique_constraint(
+        "uq_licensed_resource_name_type",
+        "licensed_items",
+        ["licensed_resource_name", "licensed_resource_type"],
+    )
+    # ### end Alembic commands ###
+
+
+def downgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.drop_constraint(
+        "uq_licensed_resource_name_type", "licensed_items", type_="unique"
+    )
+    # ### end Alembic commands ###
diff --git a/packages/postgres-database/src/simcore_postgres_database/migration/versions/e8ffc0c96336_add_is_hidden_on_market_field.py b/packages/postgres-database/src/simcore_postgres_database/migration/versions/e8ffc0c96336_add_is_hidden_on_market_field.py
new file mode 100644
index 00000000000..6a32dec314d
--- /dev/null
+++ b/packages/postgres-database/src/simcore_postgres_database/migration/versions/e8ffc0c96336_add_is_hidden_on_market_field.py
@@ -0,0 +1,35 @@
+"""add is_hidden_on_market field
+
+Revision ID: e8ffc0c96336
+Revises: a53c3c153bc8
+Create Date: 2025-02-13 18:05:42.851252+00:00
+
+"""
+import sqlalchemy as sa
+from alembic import op
+
+# revision identifiers, used by Alembic.
+revision = "e8ffc0c96336"
+down_revision = "a53c3c153bc8"
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.add_column(
+        "licensed_items",
+        sa.Column(
+            "is_hidden_on_market",
+            sa.Boolean(),
+            server_default=sa.text("false"),
+            nullable=False,
+        ),
+    )
+    # ### end Alembic commands ###
+
+
+def downgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.drop_column("licensed_items", "is_hidden_on_market")
+    # ### end Alembic commands ###
diff --git a/packages/postgres-database/src/simcore_postgres_database/migration/versions/f7f3c835f38a_remove_clusters.py b/packages/postgres-database/src/simcore_postgres_database/migration/versions/f7f3c835f38a_remove_clusters.py
new file mode 100644
index 00000000000..875cdf21124
--- /dev/null
+++ b/packages/postgres-database/src/simcore_postgres_database/migration/versions/f7f3c835f38a_remove_clusters.py
@@ -0,0 +1,120 @@
+"""remove clusters
+
+Revision ID: f7f3c835f38a
+Revises: 7994074c4d98
+Create Date: 2025-03-17 14:26:58.117504+00:00
+
+"""
+
+import sqlalchemy as sa
+from alembic import op
+from sqlalchemy.dialects import postgresql
+
+# revision identifiers, used by Alembic.
+revision = "f7f3c835f38a"
+down_revision = "7994074c4d98"
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.drop_constraint(
+        "fk_comp_runs_cluster_id_clusters", "comp_runs", type_="foreignkey"
+    )
+    op.drop_column("comp_runs", "cluster_id")
+    op.drop_table("clusters")
+    op.execute("DROP TRIGGER IF EXISTS cluster_modification on clusters;")
+    op.execute("DROP FUNCTION set_cluster_to_owner_group() CASCADE")
+    op.execute("DROP TYPE IF EXISTS clustertype")
+    # ### end Alembic commands ###
+
+
+new_cluster_trigger = sa.DDL(
+    """
+DROP TRIGGER IF EXISTS cluster_modification on clusters;
+CREATE TRIGGER cluster_modification
+AFTER INSERT ON clusters
+    FOR EACH ROW
+    EXECUTE PROCEDURE set_cluster_to_owner_group();
+"""
+)
+assign_cluster_access_rights_to_owner_group_procedure_new = sa.DDL(
+    """
+CREATE OR REPLACE FUNCTION set_cluster_to_owner_group() RETURNS TRIGGER AS $$
+DECLARE
+    group_id BIGINT;
+BEGIN
+    IF TG_OP = 'INSERT' THEN
+        INSERT INTO "cluster_to_groups" ("gid", "cluster_id", "read", "write", "delete") VALUES (NEW.owner, NEW.id, TRUE, TRUE, TRUE);
+    END IF;
+    RETURN NULL;
+END; $$ LANGUAGE 'plpgsql';
+    """
+)
+
+
+def downgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.execute(sa.DDL("DROP TRIGGER IF EXISTS cluster_modification on clusters;"))
+    op.execute("DROP TYPE IF EXISTS clustertype")
+    op.create_table(
+        "clusters",
+        sa.Column("id", sa.BIGINT(), autoincrement=True, nullable=False),
+        sa.Column("name", sa.VARCHAR(), autoincrement=False, nullable=False),
+        sa.Column("description", sa.VARCHAR(), autoincrement=False, nullable=True),
+        sa.Column(
+            "type",
+            postgresql.ENUM("AWS", "ON_PREMISE", name="clustertype"),
+            autoincrement=False,
+            nullable=False,
+        ),
+        sa.Column("owner", sa.BIGINT(), autoincrement=False, nullable=False),
+        sa.Column("thumbnail", sa.VARCHAR(), autoincrement=False, nullable=True),
+        sa.Column(
+            "created",
+            postgresql.TIMESTAMP(),
+            server_default=sa.text("now()"),
+            autoincrement=False,
+            nullable=False,
+        ),
+        sa.Column(
+            "modified",
+            postgresql.TIMESTAMP(),
+            server_default=sa.text("now()"),
+            autoincrement=False,
+            nullable=False,
+        ),
+        sa.Column("endpoint", sa.VARCHAR(), autoincrement=False, nullable=False),
+        sa.Column(
+            "authentication",
+            postgresql.JSONB(astext_type=sa.Text()),
+            autoincrement=False,
+            nullable=False,
+        ),
+        sa.ForeignKeyConstraint(
+            ["owner"],
+            ["groups.gid"],
+            name="fk_clusters_gid_groups",
+            onupdate="CASCADE",
+            ondelete="RESTRICT",
+        ),
+        sa.PrimaryKeyConstraint("id", name="clusters_pkey"),
+    )
+
+    op.add_column(
+        "comp_runs",
+        sa.Column("cluster_id", sa.BIGINT(), autoincrement=False, nullable=True),
+    )
+    op.create_foreign_key(
+        "fk_comp_runs_cluster_id_clusters",
+        "comp_runs",
+        "clusters",
+        ["cluster_id"],
+        ["id"],
+        onupdate="CASCADE",
+        ondelete="SET NULL",
+    )
+    # ### end Alembic commands ###
+    op.execute(assign_cluster_access_rights_to_owner_group_procedure_new)
+    op.execute(new_cluster_trigger)
diff --git a/packages/postgres-database/src/simcore_postgres_database/models/cluster_to_groups.py b/packages/postgres-database/src/simcore_postgres_database/models/cluster_to_groups.py
deleted file mode 100644
index 63996c1404b..00000000000
--- a/packages/postgres-database/src/simcore_postgres_database/models/cluster_to_groups.py
+++ /dev/null
@@ -1,73 +0,0 @@
-import sqlalchemy as sa
-from sqlalchemy.sql import expression, func
-
-from ._common import RefActions
-from .base import metadata
-from .clusters import clusters
-from .groups import groups
-
-cluster_to_groups = sa.Table(
-    "cluster_to_groups",
-    metadata,
-    sa.Column(
-        "cluster_id",
-        sa.BigInteger,
-        sa.ForeignKey(
-            clusters.c.id,
-            name="fk_cluster_to_groups_id_clusters",
-            onupdate=RefActions.CASCADE,
-            ondelete=RefActions.CASCADE,
-        ),
-        doc="Cluster unique ID",
-    ),
-    sa.Column(
-        "gid",
-        sa.BigInteger,
-        sa.ForeignKey(
-            groups.c.gid,
-            name="fk_cluster_to_groups_gid_groups",
-            onupdate=RefActions.CASCADE,
-            ondelete=RefActions.CASCADE,
-        ),
-        doc="Group unique IDentifier",
-    ),
-    # Access Rights flags ---
-    sa.Column(
-        "read",
-        sa.Boolean,
-        nullable=False,
-        server_default=expression.false(),
-        doc="If true, group can use the cluster",
-    ),
-    sa.Column(
-        "write",
-        sa.Boolean,
-        nullable=False,
-        server_default=expression.false(),
-        doc="If true, group can modify the cluster",
-    ),
-    sa.Column(
-        "delete",
-        sa.Boolean,
-        nullable=False,
-        server_default=expression.false(),
-        doc="If true, group can delete the cluster",
-    ),
-    # -----
-    sa.Column(
-        "created",
-        sa.DateTime(),
-        nullable=False,
-        server_default=func.now(),
-        doc="Timestamp auto-generated upon creation",
-    ),
-    sa.Column(
-        "modified",
-        sa.DateTime(),
-        nullable=False,
-        server_default=func.now(),
-        onupdate=func.now(),
-        doc="Timestamp with last row update",
doc="Timestamp with last row update", - ), - sa.UniqueConstraint("cluster_id", "gid"), -) diff --git a/packages/postgres-database/src/simcore_postgres_database/models/clusters.py b/packages/postgres-database/src/simcore_postgres_database/models/clusters.py deleted file mode 100644 index 39536ae241b..00000000000 --- a/packages/postgres-database/src/simcore_postgres_database/models/clusters.py +++ /dev/null @@ -1,111 +0,0 @@ -from enum import Enum - -import sqlalchemy as sa -from sqlalchemy.dialects.postgresql import JSONB -from sqlalchemy.sql import func - -from ._common import RefActions -from .base import metadata - - -class ClusterType(Enum): - AWS = "AWS" - ON_PREMISE = "ON_PREMISE" - - -clusters = sa.Table( - "clusters", - metadata, - sa.Column( - "id", - sa.BigInteger, - nullable=False, - primary_key=True, - doc="Clusters index", - ), - sa.Column("name", sa.String, nullable=False, doc="Display name"), - sa.Column("description", sa.String, nullable=True, doc="Short description"), - sa.Column( - "type", - sa.Enum(ClusterType), - nullable=False, - doc="Classification of the cluster", - ), - sa.Column( - "owner", - sa.BigInteger, - sa.ForeignKey( - "groups.gid", - name="fk_clusters_gid_groups", - onupdate=RefActions.CASCADE, - ondelete=RefActions.RESTRICT, - ), - nullable=False, - doc="Identifier of the group that owns this cluster", - ), - sa.Column( - "thumbnail", - sa.String, - nullable=True, - doc="Link to image as to cluster thumbnail", - ), - sa.Column("endpoint", sa.String, nullable=False, doc="URL to access the cluster"), - sa.Column( - "authentication", - JSONB, - nullable=False, - doc="Authentication options (can be any of simple password, kerberos or jupyterhub" - ", for details see https://gateway.dask.org/authentication.html#", - ), - sa.Column( - "created", - sa.DateTime(), - nullable=False, - server_default=func.now(), - doc="Timestamp auto-generated upon creation", - ), - sa.Column( - "modified", - sa.DateTime(), - nullable=False, - server_default=func.now(), - onupdate=func.now(), # this will auto-update on modification - doc="Timestamp with last update", - ), -) - -# ------------------------ TRIGGERS -new_cluster_trigger = sa.DDL( - """ -DROP TRIGGER IF EXISTS cluster_modification on clusters; -CREATE TRIGGER cluster_modification -AFTER INSERT ON clusters - FOR EACH ROW - EXECUTE PROCEDURE set_cluster_to_owner_group(); -""" -) - - -# --------------------------- PROCEDURES -assign_cluster_access_rights_to_owner_group_procedure = sa.DDL( - """ -CREATE OR REPLACE FUNCTION set_cluster_to_owner_group() RETURNS TRIGGER AS $$ -DECLARE - group_id BIGINT; -BEGIN - IF TG_OP = 'INSERT' THEN - INSERT INTO "cluster_to_groups" ("gid", "cluster_id", "read", "write", "delete") VALUES (NEW.owner, NEW.id, TRUE, TRUE, TRUE); - END IF; - RETURN NULL; -END; $$ LANGUAGE 'plpgsql'; - """ -) - -sa.event.listen( - clusters, "after_create", assign_cluster_access_rights_to_owner_group_procedure -) -sa.event.listen( - clusters, - "after_create", - new_cluster_trigger, -) diff --git a/packages/postgres-database/src/simcore_postgres_database/models/comp_runs.py b/packages/postgres-database/src/simcore_postgres_database/models/comp_runs.py index d92227c07e2..26adb63077e 100644 --- a/packages/postgres-database/src/simcore_postgres_database/models/comp_runs.py +++ b/packages/postgres-database/src/simcore_postgres_database/models/comp_runs.py @@ -1,6 +1,4 @@ -""" Computational Runs Table - -""" +"""Computational Runs Table""" import sqlalchemy as sa from sqlalchemy.dialects.postgresql import JSONB @@ 
         nullable=False,
         doc="The user id with which the run entry is associated",
     ),
-    sa.Column(
-        "cluster_id",
-        sa.BigInteger(),
-        sa.ForeignKey(
-            "clusters.id",
-            name="fk_comp_runs_cluster_id_clusters",
-            onupdate=RefActions.CASCADE,
-            ondelete=RefActions.SET_NULL,
-        ),
-        nullable=True,
-        doc="The cluster id on which the run entry is associated, if NULL or 0 uses the default",
-    ),
     sa.Column(
         "iteration",
         sa.BigInteger,
diff --git a/packages/postgres-database/src/simcore_postgres_database/models/licensed_item_to_resource.py b/packages/postgres-database/src/simcore_postgres_database/models/licensed_item_to_resource.py
new file mode 100644
index 00000000000..81141bc132a
--- /dev/null
+++ b/packages/postgres-database/src/simcore_postgres_database/models/licensed_item_to_resource.py
@@ -0,0 +1,60 @@
+import sqlalchemy as sa
+from sqlalchemy.dialects import postgresql
+
+from ._common import RefActions, column_created_datetime, column_modified_datetime
+from .base import metadata
+
+licensed_item_to_resource = sa.Table(
+    "licensed_item_to_resource",
+    metadata,
+    sa.Column(
+        "licensed_item_id",
+        postgresql.UUID(as_uuid=True),
+        sa.ForeignKey(
+            "licensed_items.licensed_item_id",
+            name="fk_licensed_item_to_resource_licensed_item_id",
+            onupdate=RefActions.CASCADE,
+            ondelete=RefActions.CASCADE,
+        ),
+        nullable=False,
+    ),
+    sa.Column(
+        "licensed_resource_id",
+        postgresql.UUID(as_uuid=True),
+        sa.ForeignKey(
+            "licensed_resources.licensed_resource_id",
+            name="fk_licensed_item_to_resource_licensed_resource_id",
+            onupdate=RefActions.CASCADE,
+            ondelete=RefActions.CASCADE,
+        ),
+        nullable=False,
+    ),
+    sa.Column(
+        "product_name",
+        sa.String,
+        sa.ForeignKey(
+            "products.name",
+            onupdate=RefActions.CASCADE,
+            ondelete=RefActions.CASCADE,
+            name="fk_licensed_item_to_resource_product_name",
+        ),
+        nullable=False,
+    ),
+    column_created_datetime(timezone=True),
+    column_modified_datetime(timezone=True),
+    #########
+    sa.PrimaryKeyConstraint(
+        "licensed_item_id",
+        "licensed_resource_id",
+        name="pk_licensed_item_to_resource_item_and_resource_id",
+    ),
+    # NOTE: Currently, there is a constraint that a resource item ID cannot be in multiple licensed items.
+    # The reason is that the license key and license version coming from the internal license server are part of the licensed resource domain.
+    # Sim4Life performs a mapping on their side, where the license key and version are mapped to a licensed item.
+    # If this constraint is broken, the mapping logic in Sim4Life might break.
+    sa.UniqueConstraint(
+        "product_name",
+        "licensed_resource_id",
+        name="uq_licensed_item_to_resource_resource_id",
+    ),
+)
diff --git a/packages/postgres-database/src/simcore_postgres_database/models/licensed_items.py b/packages/postgres-database/src/simcore_postgres_database/models/licensed_items.py
index 5286a7eee5a..34003d9b042 100644
--- a/packages/postgres-database/src/simcore_postgres_database/models/licensed_items.py
+++ b/packages/postgres-database/src/simcore_postgres_database/models/licensed_items.py
@@ -6,12 +6,7 @@
 import sqlalchemy as sa
 from sqlalchemy.dialects import postgresql
 
-from ._common import (
-    RefActions,
-    column_created_datetime,
-    column_modified_datetime,
-    column_trashed_datetime,
-)
+from ._common import RefActions, column_created_datetime, column_modified_datetime
 from .base import metadata
 
 
@@ -30,16 +25,20 @@ class LicensedResourceType(str, enum.Enum):
         server_default=sa.text("gen_random_uuid()"),
     ),
     sa.Column(
-        "display_name",
+        "key",
         sa.String,
         nullable=False,
-        doc="Display name for front-end",
     ),
     sa.Column(
-        "licensed_resource_name",
+        "version",
         sa.String,
         nullable=False,
-        doc="Resource name identifier",
+    ),
+    sa.Column(
+        "display_name",
+        sa.String,
+        nullable=False,
+        doc="Display name for front-end",
     ),
     sa.Column(
         "licensed_resource_type",
@@ -47,12 +46,6 @@ class LicensedResourceType(str, enum.Enum):
         nullable=False,
         doc="Resource type, ex. VIP_MODEL",
     ),
-    sa.Column(
-        "licensed_resource_data",
-        postgresql.JSONB,
-        nullable=True,
-        doc="Resource metadata. Used for read-only purposes",
-    ),
     sa.Column(
         "pricing_plan_id",
         sa.BigInteger,
@@ -62,7 +55,7 @@ class LicensedResourceType(str, enum.Enum):
             onupdate=RefActions.CASCADE,
             ondelete=RefActions.RESTRICT,
         ),
-        nullable=True,
+        nullable=False,
     ),
     sa.Column(
         "product_name",
@@ -73,10 +66,23 @@ class LicensedResourceType(str, enum.Enum):
             ondelete=RefActions.CASCADE,
             name="fk_resource_tracker_license_packages_product_name",
         ),
-        nullable=True,
+        nullable=False,
         doc="Product name identifier. If None, then the item is not exposed",
     ),
+    sa.Column(
+        "is_hidden_on_market",
+        sa.Boolean(),
+        nullable=False,
+        server_default=sa.text("false"),
+        doc="If true, the item is not listed in the Market. (The Public API might still expose all items, even those not listed in the Market)",
+    ),
     column_created_datetime(timezone=True),
     column_modified_datetime(timezone=True),
-    column_trashed_datetime("licensed_item"),
+    sa.Index(
+        "idx_licensed_items_key_version_product",
+        "key",
+        "version",
+        "product_name",
+        unique=True,
+    ),
 )
diff --git a/packages/postgres-database/src/simcore_postgres_database/models/licensed_resources.py b/packages/postgres-database/src/simcore_postgres_database/models/licensed_resources.py
new file mode 100644
index 00000000000..52747e9668e
--- /dev/null
+++ b/packages/postgres-database/src/simcore_postgres_database/models/licensed_resources.py
@@ -0,0 +1,65 @@
+"""licensed_resources table
+"""
+
+
+import sqlalchemy as sa
+from sqlalchemy.dialects import postgresql
+
+from ._common import (
+    column_created_datetime,
+    column_modified_datetime,
+    column_trashed_datetime,
+)
+from .base import metadata
+from .licensed_items import LicensedResourceType
+
+licensed_resources = sa.Table(
+    "licensed_resources",
+    metadata,
+    sa.Column(
+        "licensed_resource_id",
+        postgresql.UUID(as_uuid=True),
+        nullable=False,
+        primary_key=True,
+        server_default=sa.text("gen_random_uuid()"),
+    ),
+    sa.Column(
+        "display_name",
+        sa.String,
+        nullable=False,
+        doc="Display name for front-end",
+    ),
+    sa.Column(
+        "licensed_resource_name",
+        sa.String,
+        nullable=False,
+        doc="Resource name identifier",
+    ),
+    sa.Column(
+        "licensed_resource_type",
+        sa.Enum(LicensedResourceType),
+        nullable=False,
+        doc="Resource type, ex. VIP_MODEL",
+    ),
+    sa.Column(
+        "licensed_resource_data",
+        postgresql.JSONB,
+        nullable=True,
+        doc="Resource metadata. Used for read-only purposes",
+    ),
+    sa.Column(
+        "priority",
+        sa.SmallInteger,
+        nullable=False,
+        server_default="0",
+        doc="Used for sorting: 0 (first) > 1 (second) > 2 (third) (e.g. to manually adjust how items are presented in the Market)",
+    ),
+    column_created_datetime(timezone=True),
+    column_modified_datetime(timezone=True),
+    column_trashed_datetime("licensed_resources"),
+    sa.UniqueConstraint(
+        "licensed_resource_name",
+        "licensed_resource_type",
+        name="uq_licensed_resource_name_type2",
+    ),
+)
diff --git a/packages/postgres-database/src/simcore_postgres_database/models/products.py b/packages/postgres-database/src/simcore_postgres_database/models/products.py
index 8a1dd8cf29e..a065b82946f 100644
--- a/packages/postgres-database/src/simcore_postgres_database/models/products.py
+++ b/packages/postgres-database/src/simcore_postgres_database/models/products.py
@@ -146,6 +146,7 @@ class ProductLoginSettingsDict(TypedDict, total=False):
         nullable=False,
         doc="Regular expression that matches product hostname from an url string",
     ),
+    # EMAILS --------------------
     sa.Column(
         "support_email",
         sa.String,
@@ -200,6 +201,13 @@ class ProductLoginSettingsDict(TypedDict, total=False):
         doc="Overrides simcore_service_webserver.login.settings.LoginSettings."
"SEE LoginSettingsForProduct", ), + sa.Column( + "ui", + JSONB, + nullable=False, + server_default=sa.text("'{}'::jsonb"), + doc="Front-end owned UI configuration", + ), sa.Column( "registration_email_template", sa.String, @@ -212,6 +220,7 @@ class ProductLoginSettingsDict(TypedDict, total=False): nullable=True, doc="Custom jinja2 template for registration email", ), + # lifecycle sa.Column( "created", sa.DateTime(), diff --git a/packages/postgres-database/src/simcore_postgres_database/models/projects.py b/packages/postgres-database/src/simcore_postgres_database/models/projects.py index d72cfd9b74e..abf85373571 100644 --- a/packages/postgres-database/src/simcore_postgres_database/models/projects.py +++ b/packages/postgres-database/src/simcore_postgres_database/models/projects.py @@ -13,11 +13,6 @@ class ProjectType(enum.Enum): - """ - template: template project - standard: standard project - """ - TEMPLATE = "TEMPLATE" STANDARD = "STANDARD" @@ -42,6 +37,7 @@ class ProjectType(enum.Enum): unique=True, doc="Unique global identifier", ), + # DISPLAY ---------------------------- sa.Column( "name", sa.String, @@ -60,6 +56,7 @@ class ProjectType(enum.Enum): nullable=True, doc="Link to thumbnail image", ), + # OWNERSHIP ---------------------------- sa.Column( "prj_owner", sa.BigInteger, @@ -73,34 +70,27 @@ class ProjectType(enum.Enum): doc="Project's owner", index=True, ), + # PARENTHOOD ---------------------------- sa.Column( - "creation_date", - sa.DateTime(), - nullable=False, - server_default=func.now(), - doc="Timestamp on creation", - ), - sa.Column( - "last_change_date", - sa.DateTime(), - nullable=False, - server_default=func.now(), - onupdate=func.now(), - doc="Timestamp with last update", - ), - sa.Column( - "access_rights", - JSONB, - nullable=False, - server_default=sa.text("'{}'::jsonb"), - doc="DEPRECATED: Read/write/delete access rights of each group (gid) on this project", + "workspace_id", + sa.BigInteger, + sa.ForeignKey( + "workspaces.workspace_id", + onupdate=RefActions.CASCADE, + ondelete=RefActions.CASCADE, + name="fk_projects_to_workspaces_id", + ), + nullable=True, + default=None, ), + # CHILDREN/CONTENT-------------------------- sa.Column( "workbench", sa.JSON, nullable=False, doc="Pipeline with the project's workflow. Schema in models_library.projects.Workbench", ), + # FRONT-END ---------------------------- sa.Column( "ui", JSONB, @@ -108,13 +98,6 @@ class ProjectType(enum.Enum): server_default=sa.text("'{}'::jsonb"), doc="UI components. Schema in models_library.projects_ui", ), - sa.Column( - "classifiers", - ARRAY(sa.String, dimensions=1), - nullable=False, - server_default="{}", # NOTE: I found this strange but https://stackoverflow.com/questions/30933266/empty-array-as-postgresql-array-column-default-value - doc="A list of standard labels to classify this project", - ), sa.Column( "dev", JSONB, @@ -122,13 +105,7 @@ class ProjectType(enum.Enum): server_default=sa.text("'{}'::jsonb"), doc="Free JSON to use as sandbox. 
     ),
-    sa.Column(
-        "quality",
-        JSONB,
-        nullable=False,
-        server_default=sa.text("'{}'::jsonb"),
-        doc="Free JSON with quality assesment based on TSR",
-    ),
+    # FLAGS ----------------------------
     sa.Column(
         "published",
         sa.Boolean,
@@ -143,6 +120,22 @@ class ProjectType(enum.Enum):
         default=False,
         doc="If true, the project is by default not listed in the API",
     ),
+    # LIFECYCLE ----------------------------
+    sa.Column(
+        "creation_date",
+        sa.DateTime(),
+        nullable=False,
+        server_default=func.now(),
+        doc="Timestamp on creation",
+    ),
+    sa.Column(
+        "last_change_date",
+        sa.DateTime(),
+        nullable=False,
+        server_default=func.now(),
+        onupdate=func.now(),
+        doc="Timestamp with last update",
+    ),
     column_trashed_datetime("projects"),
     column_trashed_by_user("projects", users_table=users),
     sa.Column(
@@ -153,17 +146,30 @@ class ProjectType(enum.Enum):
         comment="Indicates whether the project was explicitly trashed by the user (true)"
         " or inherited its trashed status from a parent (false) [default].",
     ),
+    # TAGGING ----------------------------
     sa.Column(
-        "workspace_id",
-        sa.BigInteger,
-        sa.ForeignKey(
-            "workspaces.workspace_id",
-            onupdate=RefActions.CASCADE,
-            ondelete=RefActions.CASCADE,
-            name="fk_projects_to_workspaces_id",
-        ),
-        nullable=True,
-        default=None,
+        "classifiers",
+        ARRAY(sa.String, dimensions=1),
+        nullable=False,
+        server_default="{}",
+        # NOTE: I found this strange but
+        # https://stackoverflow.com/questions/30933266/empty-array-as-postgresql-array-column-default-value
+        doc="A list of standard labels to classify this project",
+    ),
+    sa.Column(
+        "quality",
+        JSONB,
+        nullable=False,
+        server_default=sa.text("'{}'::jsonb"),
+        doc="Free JSON with quality assessment based on TSR",
+    ),
+    # DEPRECATED ----------------------------
+    sa.Column(
+        "access_rights",
+        JSONB,
+        nullable=False,
+        server_default=sa.text("'{}'::jsonb"),
+        doc="DEPRECATED: Read/write/delete access rights of each group (gid) on this project",
     ),
 )
diff --git a/packages/postgres-database/src/simcore_postgres_database/models/projects_version_control.py b/packages/postgres-database/src/simcore_postgres_database/models/projects_version_control.py
deleted file mode 100644
index 7d183f03942..00000000000
--- a/packages/postgres-database/src/simcore_postgres_database/models/projects_version_control.py
+++ /dev/null
@@ -1,332 +0,0 @@
-#
-# TODO: create template to produce these tables over another table other than project
-#
-
-import sqlalchemy as sa
-from sqlalchemy.dialects.postgresql import JSONB
-from sqlalchemy.sql import func
-
-from ._common import RefActions
-from .base import metadata
-from .projects import projects
-
-# REPOSITORES
-#
-# Projects under version-control are assigned a repository
-# - keeps information of the current branch to recover HEAD ref
-# - when repo is deleted, all project_vc_* get deleted
-#
-
-projects_vc_repos = sa.Table(
-    "projects_vc_repos",
-    metadata,
-    sa.Column(
-        "id",
-        sa.BigInteger,
-        nullable=False,
-        primary_key=True,
-        doc="Global vc repo identifier index",
-    ),
-    sa.Column(
-        "project_uuid",
-        sa.String,
-        sa.ForeignKey(
-            projects.c.uuid,
-            name="fk_projects_vc_repos_project_uuid",
-            ondelete=RefActions.CASCADE,  # if project is deleted, all references in project_vc_* tables are deleted except for projects_vc_snapshots.
-            onupdate=RefActions.CASCADE,
-        ),
-        nullable=False,
-        unique=True,
-        doc="Project under version control"
-        "Used as a working copy (WC) to produce/checkout snapshots.",
-    ),
-    sa.Column(
-        "project_checksum",
-        sa.String,
-        nullable=True,
-        doc="SHA-1 checksum of current working copy."
-        "Used as a cache mechanism stored at 'modified'"
-        "or to detect changes in state due to race conditions",
-    ),
-    sa.Column(
-        "created",
-        sa.DateTime(),
-        nullable=False,
-        server_default=func.now(),
-        doc="Creation timestamp for this row",
-    ),
-    sa.Column(
-        "modified",
-        sa.DateTime(),
-        nullable=False,
-        server_default=func.now(),
-        onupdate=func.now(),
-        doc="Timestamp for last changes",
-    ),
-)
-
-
-projects_vc_snapshots = sa.Table(
-    "projects_vc_snapshots",
-    metadata,
-    sa.Column(
-        "checksum",
-        sa.String,
-        primary_key=True,
-        nullable=False,
-        doc="SHA-1 checksum of snapshot."
-        "The columns projects_vc_repos.project_checksum and projects_vc_repos.snapshot_checksum "
-        "are both checksums of the same entity (i.e. a project) in two different states, "
-        "namely the project's WC and some snapshot respectively.",
-    ),
-    sa.Column(
-        "content",
-        JSONB,
-        nullable=False,
-        server_default=sa.text("'{}'::jsonb"),
-        doc="snapshot content",
-    ),
-)
-
-
-#
-# COMMITS
-#
-# - should NEVER be modified explicitly after creation
-# - commits are inter-related. WARNING with deletion
-#
-# SEE https://git-scm.com/book/en/v2/Git-Internals-Git-References
-
-projects_vc_commits = sa.Table(
-    "projects_vc_commits",
-    metadata,
-    sa.Column(
-        "id",
-        sa.BigInteger,
-        nullable=False,
-        primary_key=True,
-        doc="Global identifier throughout all repository's commits",
-    ),
-    sa.Column(
-        "repo_id",
-        sa.BigInteger,
-        sa.ForeignKey(
-            projects_vc_repos.c.id,
-            name="fk_projects_vc_commits_repo_id",
-            ondelete=RefActions.CASCADE,
-            onupdate=RefActions.CASCADE,
-        ),
-        nullable=False,
-        doc="Repository to which this commit belongs",
-    ),
-    sa.Column(
-        "parent_commit_id",
-        sa.BigInteger,
-        sa.ForeignKey(
-            "projects_vc_commits.id",
-            name="fk_projects_vc_commits_parent_commit_id",
-            onupdate=RefActions.CASCADE,
-        ),
-        nullable=True,
-        doc="Preceding commit",
-    ),
-    sa.Column(
-        "snapshot_checksum",
-        sa.String,
-        sa.ForeignKey(
-            projects_vc_snapshots.c.checksum,
-            name="fk_projects_vc_commits_snapshot_checksum",
-            ondelete=RefActions.RESTRICT,
-            onupdate=RefActions.CASCADE,
-        ),
-        nullable=False,
-        doc="SHA-1 checksum of snapshot."
- "Used as revision/commit identifier since it is unique per repo", - ), - sa.Column("message", sa.String, doc="Commit message"), - sa.Column( - "created", - sa.DateTime(), - nullable=False, - server_default=func.now(), - doc="Timestamp for this snapshot", - ), -) - - -# -# head/TAGS -# -# SEE https://git-scm.com/book/en/v2/Git-Internals-Git-References - -projects_vc_tags = sa.Table( - "projects_vc_tags", - metadata, - sa.Column( - "id", - sa.BigInteger, - nullable=False, - primary_key=True, - doc="Global identifier throughout all repositories tags", - ), - sa.Column( - "repo_id", - sa.BigInteger, - sa.ForeignKey( - projects_vc_repos.c.id, - name="fk_projects_vc_tags_repo_id", - ondelete=RefActions.CASCADE, - onupdate=RefActions.CASCADE, - ), - nullable=False, - doc="Repository to which this commit belongs", - ), - sa.Column( - "commit_id", - sa.BigInteger, - sa.ForeignKey( - projects_vc_commits.c.id, - name="fk_projects_vc_tags_commit_id", - ondelete=RefActions.CASCADE, - onupdate=RefActions.CASCADE, - ), - nullable=False, - doc="Points to the tagged commit", - ), - sa.Column("name", sa.String, doc="Tag display name"), - sa.Column("message", sa.String, doc="Tag annotation"), - sa.Column( - "hidden", - sa.Boolean, - default=False, - doc="Skipped by default from tag listings." - "Normally intended for internal use tags", - ), - sa.Column( - "created", - sa.DateTime(), - nullable=False, - server_default=func.now(), - doc="Creation timestamp", - ), - sa.Column( - "modified", - sa.DateTime(), - nullable=False, - server_default=func.now(), - onupdate=func.now(), - doc="Timestamp for last changes", - ), - # CONSTRAINTS -------------- - sa.UniqueConstraint("name", "repo_id", name="repo_tag_uniqueness"), -) - - -# -# head/BRANCHES -# -# SEE https://git-scm.com/book/en/v2/Git-Internals-Git-References - -projects_vc_branches = sa.Table( - "projects_vc_branches", - metadata, - sa.Column( - "id", - sa.BigInteger, - nullable=False, - primary_key=True, - doc="Global identifier throughout all repositories branches", - ), - sa.Column( - "repo_id", - sa.BigInteger, - sa.ForeignKey( - projects_vc_repos.c.id, - name="projects_vc_branches_repo_id", - ondelete=RefActions.CASCADE, - onupdate=RefActions.CASCADE, - ), - nullable=False, - doc="Repository to which this branch belongs", - ), - sa.Column( - "head_commit_id", - sa.BigInteger, - sa.ForeignKey( - projects_vc_commits.c.id, - name="fk_projects_vc_branches_head_commit_id", - ondelete=RefActions.RESTRICT, - onupdate=RefActions.CASCADE, - ), - nullable=True, - doc="Points to the head commit of this branchNull heads are detached", - ), - sa.Column("name", sa.String, default="main", doc="Branch display name"), - sa.Column( - "created", - sa.DateTime(), - nullable=False, - server_default=func.now(), - doc="Creation timestamp", - ), - sa.Column( - "modified", - sa.DateTime(), - nullable=False, - server_default=func.now(), - onupdate=func.now(), - doc="Timestamp for last changes", - ), - # CONSTRAINTS -------------- - sa.UniqueConstraint("name", "repo_id", name="repo_branch_uniqueness"), -) - - -# -# HEADS -# -# - the last commit in a given repo, also called the HEAD reference -# - added in an association table to avoid circular dependency between projects_vc_repos and projects_vc_branches -# -# SEE https://git-scm.com/book/en/v2/Git-Internals-Git-References - -projects_vc_heads = sa.Table( - "projects_vc_heads", - metadata, - sa.Column( - "repo_id", - sa.BigInteger, - sa.ForeignKey( - projects_vc_repos.c.id, - name="projects_vc_branches_repo_id", - 
-            onupdate=RefActions.CASCADE,
-        ),
-        primary_key=True,
-        nullable=False,
-        doc="Repository to which this branch belongs",
-    ),
-    sa.Column(
-        "head_branch_id",
-        sa.BigInteger,
-        sa.ForeignKey(
-            projects_vc_branches.c.id,
-            name="fk_projects_vc_heads_head_branch_id",
-            ondelete=RefActions.CASCADE,
-            onupdate=RefActions.CASCADE,
-        ),
-        unique=True,
-        nullable=True,
-        doc="Points to the current branch that holds the HEAD"
-        "Null is used for detached head",
-    ),
-    sa.Column(
-        "modified",
-        sa.DateTime(),
-        nullable=False,
-        server_default=func.now(),
-        onupdate=func.now(),
-        doc="Timestamp for last changes on head branch",
-    ),
-)
diff --git a/packages/postgres-database/src/simcore_postgres_database/models/resource_tracker_licensed_items_checkouts.py b/packages/postgres-database/src/simcore_postgres_database/models/resource_tracker_licensed_items_checkouts.py
index e3cabb899f7..91da1539372 100644
--- a/packages/postgres-database/src/simcore_postgres_database/models/resource_tracker_licensed_items_checkouts.py
+++ b/packages/postgres-database/src/simcore_postgres_database/models/resource_tracker_licensed_items_checkouts.py
@@ -22,6 +22,16 @@
         UUID(as_uuid=True),
         nullable=True,
     ),
+    sa.Column(
+        "key",
+        sa.String,
+        nullable=False,
+    ),
+    sa.Column(
+        "version",
+        sa.String,
+        nullable=False,
+    ),
     sa.Column(
         "wallet_id",
         sa.BigInteger,
@@ -36,7 +46,7 @@
     sa.Column(
         "user_email",
         sa.String,
-        nullable=True,
+        nullable=False,
     ),
     sa.Column("product_name", sa.String, nullable=False, doc="Product name"),
     sa.Column(
@@ -73,4 +83,5 @@
         onupdate=RefActions.CASCADE,
         ondelete=RefActions.RESTRICT,
     ),
+    sa.Index("idx_licensed_items_checkouts_key_version", "key", "version"),
 )
diff --git a/packages/postgres-database/src/simcore_postgres_database/models/resource_tracker_licensed_items_purchases.py b/packages/postgres-database/src/simcore_postgres_database/models/resource_tracker_licensed_items_purchases.py
index 51944f7089e..8e09f322c73 100644
--- a/packages/postgres-database/src/simcore_postgres_database/models/resource_tracker_licensed_items_purchases.py
+++ b/packages/postgres-database/src/simcore_postgres_database/models/resource_tracker_licensed_items_purchases.py
@@ -29,6 +29,16 @@
         UUID(as_uuid=True),
         nullable=False,
     ),
+    sa.Column(
+        "key",
+        sa.String,
+        nullable=False,
+    ),
+    sa.Column(
+        "version",
+        sa.String,
+        nullable=False,
+    ),
     sa.Column(
         "wallet_id",
         sa.BigInteger,
@@ -75,7 +85,7 @@
     sa.Column(
         "user_email",
         sa.String,
-        nullable=True,
+        nullable=False,
     ),
     sa.Column(
         "purchased_at",
@@ -84,4 +94,5 @@
         server_default=sa.sql.func.now(),
     ),
     column_modified_datetime(timezone=True),
+    sa.Index("idx_licensed_items_purchases_key_version", "key", "version"),
 )
diff --git a/packages/postgres-database/src/simcore_postgres_database/models/resource_tracker_pricing_plan_to_service.py b/packages/postgres-database/src/simcore_postgres_database/models/resource_tracker_pricing_plan_to_service.py
index 5fd77bbbaad..b802b6724c4 100644
--- a/packages/postgres-database/src/simcore_postgres_database/models/resource_tracker_pricing_plan_to_service.py
+++ b/packages/postgres-database/src/simcore_postgres_database/models/resource_tracker_pricing_plan_to_service.py
@@ -50,6 +50,6 @@
         ["services_meta_data.key", "services_meta_data.version"],
         name="fk_rut_pricing_plan_to_service_key_and_version",
         onupdate=RefActions.CASCADE,
-        ondelete=RefActions.CASCADE,
+        ondelete=RefActions.RESTRICT,
     ),
 )
diff --git a/packages/postgres-database/src/simcore_postgres_database/models/services.py b/packages/postgres-database/src/simcore_postgres_database/models/services.py
index 30fbf5af696..ec12f0f3ca8 100644
--- a/packages/postgres-database/src/simcore_postgres_database/models/services.py
+++ b/packages/postgres-database/src/simcore_postgres_database/models/services.py
@@ -68,6 +68,12 @@
         nullable=True,
         doc="Link to image to us as service thumbnail (editable)",
     ),
+    sa.Column(
+        "icon",
+        sa.String,
+        nullable=True,
+        doc="Link to icon (editable)",
+    ),
     sa.Column(
         "version_display",
         sa.String,
diff --git a/packages/postgres-database/src/simcore_postgres_database/models/users.py b/packages/postgres-database/src/simcore_postgres_database/models/users.py
index b8ff7a455cd..7be2161ff86 100644
--- a/packages/postgres-database/src/simcore_postgres_database/models/users.py
+++ b/packages/postgres-database/src/simcore_postgres_database/models/users.py
@@ -95,6 +95,13 @@
     #
     # User Privacy Rules ------------------
     #
+    sa.Column(
+        "privacy_hide_username",
+        sa.Boolean,
+        nullable=False,
+        server_default=expression.false(),
+        doc="If true, it hides users.name to others",
+    ),
     sa.Column(
         "privacy_hide_fullname",
         sa.Boolean,
diff --git a/packages/postgres-database/src/simcore_postgres_database/utils_payments.py b/packages/postgres-database/src/simcore_postgres_database/utils_payments.py
index 7202eb21d74..de4db3abe11 100644
--- a/packages/postgres-database/src/simcore_postgres_database/utils_payments.py
+++ b/packages/postgres-database/src/simcore_postgres_database/utils_payments.py
@@ -8,7 +8,7 @@
 from aiopg.sa.connection import SAConnection
 from aiopg.sa.result import ResultProxy, RowProxy
 
-from . import errors
+from . import aiopg_errors
 from .models.payments_transactions import PaymentTransactionState, payments_transactions
 
 _logger = logging.getLogger(__name__)
@@ -29,16 +29,13 @@ def __bool__(self):
         return False
 
 
-class PaymentAlreadyExists(PaymentFailure):
-    ...
+class PaymentAlreadyExists(PaymentFailure): ...
 
 
-class PaymentNotFound(PaymentFailure):
-    ...
+class PaymentNotFound(PaymentFailure): ...
 
 
-class PaymentAlreadyAcked(PaymentFailure):
-    ...
+class PaymentAlreadyAcked(PaymentFailure): ...
 
 
 async def insert_init_payment_transaction(
@@ -69,7 +66,7 @@ async def insert_init_payment_transaction(
                 initiated_at=initiated_at,
             )
         )
-    except errors.UniqueViolation:
+    except aiopg_errors.UniqueViolation:
         return PaymentAlreadyExists(payment_id)
     return payment_id
diff --git a/packages/postgres-database/src/simcore_postgres_database/utils_products.py b/packages/postgres-database/src/simcore_postgres_database/utils_products.py
index 33e877c21d0..dba8caf074b 100644
--- a/packages/postgres-database/src/simcore_postgres_database/utils_products.py
+++ b/packages/postgres-database/src/simcore_postgres_database/utils_products.py
@@ -1,12 +1,8 @@
-""" Common functions to access products table
-
-"""
-
-import warnings
+"""Common functions to access products table"""
 
 import sqlalchemy as sa
+from sqlalchemy.ext.asyncio import AsyncConnection
 
-from ._protocols import AiopgConnection, DBConnection
 from .models.groups import GroupType, groups
 from .models.products import products
 
@@ -14,7 +10,10 @@
 _GroupID = int
 
 
-async def get_default_product_name(conn: DBConnection) -> str:
+class EmptyProductsError(ValueError): ...
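+# Usage sketch (illustrative only): the default product is the first row by
+# priority, and an empty products table is treated as a deployment error, e.g.
+#
+#   async with engine.connect() as conn:
+#       try:
+#           name = await get_default_product_name(conn)
+#       except EmptyProductsError:
+#           ...  # the deployment has no products configured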
+
+
+async def get_default_product_name(conn: AsyncConnection) -> str:
     """The first row in the table is considered as the default product
 
     :: raises ValueError if undefined
@@ -23,15 +22,15 @@ async def get_default_product_name(conn: DBConnection) -> str:
         sa.select(products.c.name).order_by(products.c.priority)
     )
     if not product_name:
-        msg = "No product defined in database"
-        raise ValueError(msg)
+        msg = "No product was defined in the database. At least one product should have been added on construction, but none was found."
+        raise EmptyProductsError(msg)
 
     assert isinstance(product_name, str)  # nosec
     return product_name
 
 
-async def get_product_group_id(
-    connection: DBConnection, product_name: str
+async def get_product_group_id_or_none(
+    connection: AsyncConnection, product_name: str
 ) -> _GroupID | None:
     group_id = await connection.scalar(
         sa.select(products.c.group_id).where(products.c.name == product_name)
@@ -39,7 +38,9 @@ async def get_product_group_id(
     return None if group_id is None else _GroupID(group_id)
 
 
-async def execute_get_or_create_product_group(conn, product_name: str) -> int:
+async def get_or_create_product_group(
+    conn: AsyncConnection, product_name: str
+) -> _GroupID:
     #
     # NOTE: Separated so it can be used in asyncpg and aiopg environs while both
     # coexist
@@ -70,23 +71,3 @@ async def execute_get_or_create_product_group(conn, product_name: str) -> int:
     )
 
     return group_id
-
-
-async def get_or_create_product_group(
-    connection: AiopgConnection, product_name: str
-) -> _GroupID:
-    """
-    Returns group_id of a product. Creates it if undefined
-    """
-    warnings.warn(
-        f"{__name__}.get_or_create_product_group uses aiopg which has been deprecated in this repo. Please use the asyncpg equivalent version instead"
-        "See https://github.com/ITISFoundation/osparc-simcore/issues/4529",
-        DeprecationWarning,
-        stacklevel=1,
-    )
-
-    async with connection.begin():
-        group_id = await execute_get_or_create_product_group(
-            connection, product_name=product_name
-        )
-        return _GroupID(group_id)
diff --git a/packages/postgres-database/src/simcore_postgres_database/utils_products_prices.py b/packages/postgres-database/src/simcore_postgres_database/utils_products_prices.py
index 7d39de55d00..549bcd116e2 100644
--- a/packages/postgres-database/src/simcore_postgres_database/utils_products_prices.py
+++ b/packages/postgres-database/src/simcore_postgres_database/utils_products_prices.py
@@ -2,7 +2,7 @@
 from typing import NamedTuple, TypeAlias
 
 import sqlalchemy as sa
-from aiopg.sa.connection import SAConnection
+from sqlalchemy.ext.asyncio import AsyncConnection
 
 from .constants import QUANTIZE_EXP_ARG
 from .models.products_prices import products_prices
@@ -17,9 +17,9 @@ class ProductPriceInfo(NamedTuple):
 
 
 async def get_product_latest_price_info_or_none(
-    conn: SAConnection, product_name: str
+    conn: AsyncConnection, product_name: str
 ) -> ProductPriceInfo | None:
-    """None menans the product is not billable"""
+    """Returns None if the product is not billable"""
     # newest price of a product
     result = await conn.execute(
         sa.select(
@@ -30,7 +30,7 @@ async def get_product_latest_price_info_or_none(
         .order_by(sa.desc(products_prices.c.valid_from))
         .limit(1)
     )
-    row = await result.first()
+    row = result.one_or_none()
 
     if row and row.usd_per_credit is not None:
         assert row.min_payment_amount_usd is not None  # nosec
@@ -43,27 +43,24 @@ async def get_product_latest_price_info_or_none(
     return None
 
 
-async def get_product_latest_stripe_info(
-    conn: SAConnection, product_name: str
-) -> tuple[StripePriceID, StripeTaxRateID]:
StripeTaxRateID]: +async def get_product_latest_stripe_info_or_none( + conn: AsyncConnection, product_name: str +) -> tuple[StripePriceID, StripeTaxRateID] | None: # Stripe info of a product for latest price - row = await ( - await conn.execute( - sa.select( - products_prices.c.stripe_price_id, - products_prices.c.stripe_tax_rate_id, - ) - .where(products_prices.c.product_name == product_name) - .order_by(sa.desc(products_prices.c.valid_from)) - .limit(1) + result = await conn.execute( + sa.select( + products_prices.c.stripe_price_id, + products_prices.c.stripe_tax_rate_id, ) - ).fetchone() - if row is None: - msg = "No product Stripe info defined in database" - raise ValueError(msg) - return (row.stripe_price_id, row.stripe_tax_rate_id) + .where(products_prices.c.product_name == product_name) + .order_by(sa.desc(products_prices.c.valid_from)) + .limit(1) + ) + + row = result.one_or_none() + return (row.stripe_price_id, row.stripe_tax_rate_id) if row else None -async def is_payment_enabled(conn: SAConnection, product_name: str) -> bool: +async def is_payment_enabled(conn: AsyncConnection, product_name: str) -> bool: p = await get_product_latest_price_info_or_none(conn, product_name=product_name) return bool(p) # zero or None is disabled diff --git a/packages/postgres-database/src/simcore_postgres_database/utils_projects_metadata.py b/packages/postgres-database/src/simcore_postgres_database/utils_projects_metadata.py index 149bb50b6a1..9140dd5e43e 100644 --- a/packages/postgres-database/src/simcore_postgres_database/utils_projects_metadata.py +++ b/packages/postgres-database/src/simcore_postgres_database/utils_projects_metadata.py @@ -9,7 +9,7 @@ from pydantic import BaseModel, ConfigDict from sqlalchemy.dialects.postgresql import insert as pg_insert -from .errors import ForeignKeyViolation +from .aiopg_errors import ForeignKeyViolation from .models.projects import projects from .models.projects_metadata import projects_metadata @@ -33,11 +33,15 @@ class DBProjectInvalidAncestorsError(BaseProjectsMetadataError): class DBProjectInvalidParentProjectError(BaseProjectsMetadataError): - msg_template: str = "Project project_uuid={project_uuid!r} has invalid parent project uuid={parent_project_uuid!r}" + msg_template: str = ( + "Project project_uuid={project_uuid!r} has invalid parent project uuid={parent_project_uuid!r}" + ) class DBProjectInvalidParentNodeError(BaseProjectsMetadataError): - msg_template: str = "Project project_uuid={project_uuid!r} has invalid parent project uuid={parent_node_id!r}" + msg_template: str = ( + "Project project_uuid={project_uuid!r} has invalid parent project uuid={parent_node_id!r}" + ) # diff --git a/packages/postgres-database/src/simcore_postgres_database/utils_projects_nodes.py b/packages/postgres-database/src/simcore_postgres_database/utils_projects_nodes.py index 9cab49d27fa..6ad87315183 100644 --- a/packages/postgres-database/src/simcore_postgres_database/utils_projects_nodes.py +++ b/packages/postgres-database/src/simcore_postgres_database/utils_projects_nodes.py @@ -9,7 +9,7 @@ from pydantic import BaseModel, ConfigDict, Field from sqlalchemy.dialects.postgresql import insert as pg_insert -from .errors import ForeignKeyViolation, UniqueViolation +from .aiopg_errors import ForeignKeyViolation, UniqueViolation from .models.projects_node_to_pricing_unit import projects_node_to_pricing_unit from .models.projects_nodes import projects_nodes @@ -30,11 +30,15 @@ class ProjectNodesNodeNotFoundError(BaseProjectNodesError): class 
ProjectNodesNonUniqueNodeFoundError(BaseProjectNodesError): - msg_template: str = "Multiple project found containing node {node_id}. TIP: misuse, the same node ID was found in several projects." + msg_template: str = ( + "Multiple projects found containing node {node_id}. TIP: misuse, the same node ID was found in several projects." + ) class ProjectNodesDuplicateNodeError(BaseProjectNodesError): - msg_template: str = "Project node already exists, you cannot have 2x the same node in the same project." + msg_template: str = ( + "Project node already exists, you cannot have 2x the same node in the same project." + ) class ProjectNodeCreate(BaseModel): diff --git a/packages/postgres-database/src/simcore_postgres_database/utils_repos.py b/packages/postgres-database/src/simcore_postgres_database/utils_repos.py index a00adb1455d..7f3fd9283a8 100644 --- a/packages/postgres-database/src/simcore_postgres_database/utils_repos.py +++ b/packages/postgres-database/src/simcore_postgres_database/utils_repos.py @@ -42,7 +42,7 @@ async def pass_or_acquire_connection( @asynccontextmanager async def transaction_context( engine: AsyncEngine, connection: AsyncConnection | None = None -): +) -> AsyncIterator[AsyncConnection]: """ When to use: For WRITE operations! This function manages the database connection and ensures that a transaction context is established for write operations. diff --git a/packages/postgres-database/src/simcore_postgres_database/utils_users.py b/packages/postgres-database/src/simcore_postgres_database/utils_users.py index ac5426bafde..c35123c9545 100644 --- a/packages/postgres-database/src/simcore_postgres_database/utils_users.py +++ b/packages/postgres-database/src/simcore_postgres_database/utils_users.py @@ -1,18 +1,19 @@ -""" Free functions, repository pattern, errors and data structures for the users resource - i.e. models.users main table and all its relations +"""Free functions, repository pattern, errors and data structures for the users resource +i.e. 
models.users main table and all its relations """ import re import secrets import string from datetime import datetime +from typing import Any, Final import sqlalchemy as sa from aiopg.sa.connection import SAConnection from aiopg.sa.result import RowProxy from sqlalchemy import Column -from .errors import UniqueViolation +from .aiopg_errors import UniqueViolation from .models.users import UserRole, UserStatus, users from .models.users_details import users_pre_registration_details @@ -25,19 +26,29 @@ class UserNotFoundInRepoError(BaseUserRepoError): pass +# NOTE: see MyProfilePatch.user_name +MIN_USERNAME_LEN: Final[int] = 4 + + +def _generate_random_chars(length: int = MIN_USERNAME_LEN) -> str: + """returns `length` random digit characters""" + return "".join(secrets.choice(string.digits) for _ in range(length)) + + def _generate_username_from_email(email: str) -> str: username = email.split("@")[0] # Remove any non-alphanumeric characters and convert to lowercase - return re.sub(r"[^a-zA-Z0-9]", "", username).lower() + username = re.sub(r"[^a-zA-Z0-9]", "", username).lower() + # Ensure the username is at least 4 characters long + if len(username) < MIN_USERNAME_LEN: + username += _generate_random_chars(length=MIN_USERNAME_LEN - len(username)) -def _generate_random_chars(length=5) -> str: - """returns `length` random digit character""" - return "".join(secrets.choice(string.digits) for _ in range(length - 1)) + return username -def generate_alternative_username(username) -> str: +def generate_alternative_username(username: str) -> str: return f"{username}_{_generate_random_chars()}" @@ -50,7 +61,7 @@ async def new_user( status: UserStatus, expires_at: datetime | None, ) -> RowProxy: - data = { + data: dict[str, Any] = { "name": _generate_username_from_email(email), "email": email, "password_hash": password_hash, @@ -65,7 +76,7 @@ user_id = await conn.scalar( users.insert().values(**data).returning(users.c.id) ) - except UniqueViolation: # noqa: PERF203 + except UniqueViolation: data["name"] = generate_alternative_username(data["name"]) result = await conn.execute( @@ -78,7 +89,7 @@ ).where(users.c.id == user_id) ) row = await result.first() - assert row # nosec + assert isinstance(row, RowProxy) # nosec return row @staticmethod @@ -231,9 +242,16 @@ def is_public(hide_attribute: Column, caller_id: int): return hide_attribute.is_(False) | (users.c.id == caller_id) -def visible_user_profile_cols(caller_id: int): +def visible_user_profile_cols(caller_id: int, *, username_label: str): """Returns user profile columns with visibility constraints applied based on privacy settings.""" return ( + sa.case( + ( + is_private(users.c.privacy_hide_username, caller_id), + None, + ), + else_=users.c.name, + ).label(username_label), sa.case( ( is_private(users.c.privacy_hide_email, caller_id), diff --git a/packages/postgres-database/tests/conftest.py b/packages/postgres-database/tests/conftest.py index f89b9cbcc8b..1d2dde70f97 100644 --- a/packages/postgres-database/tests/conftest.py +++ b/packages/postgres-database/tests/conftest.py @@ -25,8 +25,6 @@ random_project, random_user, ) -from simcore_postgres_database.models.cluster_to_groups import cluster_to_groups -from simcore_postgres_database.models.clusters import ClusterType, clusters from simcore_postgres_database.models.products import products from simcore_postgres_database.models.projects import projects from simcore_postgres_database.utils_projects_nodes import ( @@ -201,7 +199,6 @@ async def asyncpg_engine( # 
<-- WE SHOULD USE THIS ONE pg_sa_engine: sqlalchemy.engine.Engine, _make_asyncpg_engine: Callable[[bool], AsyncEngine], ) -> AsyncIterator[AsyncEngine]: - assert ( pg_sa_engine ), "Ensures pg db up, responsive, init (w/ tables) and/or migrated" @@ -280,37 +277,6 @@ async def _creator(conn, group: RowProxy | None = None, **overrides) -> RowProxy conn.execute(users.delete().where(users.c.id.in_(created_ids))) -@pytest.fixture -async def create_fake_cluster( - aiopg_engine: Engine, faker: Faker -) -> AsyncIterator[Callable[..., Awaitable[int]]]: - cluster_ids = [] - assert cluster_to_groups is not None - - async def _creator(**overrides) -> int: - insert_values = { - "name": "default cluster name", - "type": ClusterType.ON_PREMISE, - "description": None, - "endpoint": faker.domain_name(), - "authentication": faker.pydict(value_types=[str]), - } - insert_values.update(overrides) - async with aiopg_engine.acquire() as conn: - cluster_id = await conn.scalar( - clusters.insert().values(**insert_values).returning(clusters.c.id) - ) - cluster_ids.append(cluster_id) - assert cluster_id - return cluster_id - - yield _creator - - # cleanup - async with aiopg_engine.acquire() as conn: - await conn.execute(clusters.delete().where(clusters.c.id.in_(cluster_ids))) - - @pytest.fixture async def create_fake_project( aiopg_engine: Engine, diff --git a/packages/postgres-database/tests/docker-compose.yml b/packages/postgres-database/tests/docker-compose.yml index cfaa1c2b9ba..66d2f6468b4 100644 --- a/packages/postgres-database/tests/docker-compose.yml +++ b/packages/postgres-database/tests/docker-compose.yml @@ -1,6 +1,6 @@ services: postgres: - image: "postgres:14.8-alpine@sha256:150dd39ccb7ae6c7ba6130c3582c39a30bb5d3d22cb08ad0ba37001e3f829abc" + image: "postgres:17.2-alpine3.21@sha256:17143ad87797f511036cf8f50ada164aeb371f0d8068a172510549fb5d2cd65f" init: true environment: POSTGRES_USER: test diff --git a/packages/postgres-database/tests/products/conftest.py b/packages/postgres-database/tests/products/conftest.py index eb3d213c249..168ba260e18 100644 --- a/packages/postgres-database/tests/products/conftest.py +++ b/packages/postgres-database/tests/products/conftest.py @@ -7,7 +7,6 @@ from collections.abc import Callable import pytest -from aiopg.sa.exc import ResourceClosedError from faker import Faker from pytest_simcore.helpers.faker_factories import random_product from simcore_postgres_database.webserver_models import products @@ -15,7 +14,7 @@ @pytest.fixture -def products_regex() -> dict: +def products_regex() -> dict[str, str]: return { "s4l": r"(^s4l[\.-])|(^sim4life\.)", "osparc": r"^osparc.", @@ -24,12 +23,12 @@ @pytest.fixture -def products_names(products_regex: dict) -> list[str]: +def products_names(products_regex: dict[str, str]) -> list[str]: return list(products_regex) @pytest.fixture -def make_products_table(products_regex: dict, faker: Faker) -> Callable: +def make_products_table(products_regex: dict[str, str], faker: Faker) -> Callable: async def _make(conn) -> None: for n, (name, regex) in enumerate(products_regex.items()): @@ -37,6 +36,7 @@ async def _make(conn) -> None: pg_insert(products) .values( **random_product( + fake=faker, name=name, display_name=f"Product {name.capitalize()}", short_name=name[:3].lower(), @@ -45,6 +45,7 @@ ) ) .on_conflict_do_update( + # osparc might already be injected as default! 
index_elements=[products.c.name], set_={ "display_name": f"Product {name.capitalize()}", @@ -55,9 +56,7 @@ async def _make(conn) -> None: ) ) - assert result.closed + assert not result.closed assert not result.returns_rows - with pytest.raises(ResourceClosedError): - await result.scalar() return _make diff --git a/packages/postgres-database/tests/products/test_models_products.py b/packages/postgres-database/tests/products/test_models_products.py index c385cd7e734..1f34fab7aa4 100644 --- a/packages/postgres-database/tests/products/test_models_products.py +++ b/packages/postgres-database/tests/products/test_models_products.py @@ -3,15 +3,10 @@ # pylint: disable=redefined-outer-name # pylint: disable=unused-argument - -import json from collections.abc import Callable from pathlib import Path -from pprint import pprint import sqlalchemy as sa -from aiopg.sa.engine import Engine -from aiopg.sa.result import ResultProxy, RowProxy from simcore_postgres_database.models.jinja2_templates import jinja2_templates from simcore_postgres_database.models.products import ( EmailFeedback, @@ -23,40 +18,37 @@ ) from simcore_postgres_database.webserver_models import products from sqlalchemy.dialects.postgresql import insert as pg_insert +from sqlalchemy.ext.asyncio import AsyncEngine async def test_load_products( - aiopg_engine: Engine, make_products_table: Callable, products_regex: dict + asyncpg_engine: AsyncEngine, make_products_table: Callable, products_regex: dict ): exclude = { products.c.created, products.c.modified, } - async with aiopg_engine.acquire() as conn: + async with asyncpg_engine.connect() as conn: await make_products_table(conn) stmt = sa.select(*[c for c in products.columns if c not in exclude]) - result: ResultProxy = await conn.execute(stmt) - assert result.returns_rows - - rows: list[RowProxy] = await result.fetchall() + result = await conn.execute(stmt) + rows = result.fetchall() assert rows - assert { - row[products.c.name]: row[products.c.host_regex] for row in rows - } == products_regex + assert {row.name: row.host_regex for row in rows} == products_regex async def test_jinja2_templates_table( - aiopg_engine: Engine, osparc_simcore_services_dir: Path + asyncpg_engine: AsyncEngine, osparc_simcore_services_dir: Path ): templates_common_dir = ( osparc_simcore_services_dir / "web/server/src/simcore_service_webserver/templates/common" ) - async with aiopg_engine.acquire() as conn: + async with asyncpg_engine.connect() as conn: templates = [] # templates table for p in templates_common_dir.glob("*.jinja2"): @@ -105,10 +97,9 @@ async def test_jinja2_templates_table( products.c.name, jinja2_templates.c.name, products.c.short_name ).select_from(j) - result: ResultProxy = await conn.execute(stmt) - assert result.rowcount == 2 - rows = await result.fetchall() - assert sorted(r.as_tuple() for r in rows) == sorted( + result = await conn.execute(stmt) + rows = result.fetchall() + assert sorted(tuple(r) for r in rows) == sorted( [ ("osparc", "registration_email.jinja2", "osparc"), ("s4l", "registration_email.jinja2", "s4l web"), @@ -135,7 +126,7 @@ async def test_jinja2_templates_table( async def test_insert_select_product( - aiopg_engine: Engine, + asyncpg_engine: AsyncEngine, ): osparc_product = { "name": "osparc", @@ -172,9 +163,7 @@ async def test_insert_select_product( ], } - print(json.dumps(osparc_product)) - - async with aiopg_engine.acquire() as conn: + async with asyncpg_engine.begin() as conn: # writes stmt = ( pg_insert(products) @@ -188,12 +177,9 @@ async def 
test_insert_select_product( # reads stmt = sa.select(products).where(products.c.name == name) - row = await (await conn.execute(stmt)).fetchone() - print(row) + row = (await conn.execute(stmt)).one_or_none() assert row - pprint(dict(**row)) - assert row.manuals assert row.manuals == osparc_product["manuals"] diff --git a/packages/postgres-database/tests/products/test_products_to_templates.py b/packages/postgres-database/tests/products/test_products_to_templates.py index b1245b597d8..9a78aaba94c 100644 --- a/packages/postgres-database/tests/products/test_products_to_templates.py +++ b/packages/postgres-database/tests/products/test_products_to_templates.py @@ -10,12 +10,12 @@ import pytest import sqlalchemy as sa -from aiopg.sa.connection import SAConnection from faker import Faker from simcore_postgres_database.models.jinja2_templates import jinja2_templates from simcore_postgres_database.models.products import products from simcore_postgres_database.models.products_to_templates import products_to_templates from sqlalchemy.dialects.postgresql import insert as pg_insert +from sqlalchemy.ext.asyncio import AsyncEngine @pytest.fixture def templates_dir( @@ -48,54 +48,58 @@ @pytest.fixture async def product_templates_in_db( - connection: SAConnection, + asyncpg_engine: AsyncEngine, make_products_table: Callable, products_names: list[str], templates_names: list[str], ): - await make_products_table(connection) - - # one version of all tempaltes - for template_name in templates_names: - await connection.execute( - jinja2_templates.insert().values( - name=template_name, content="fake template in database" + async with asyncpg_engine.begin() as conn: + await make_products_table(conn) + + # one version of all templates + for template_name in templates_names: + await conn.execute( + jinja2_templates.insert().values( + name=template_name, content="fake template in database" + ) ) - ) - # only even products have templates - for product_name in products_names[0::2]: - await connection.execute( - products_to_templates.insert().values( - template_name=template_name, product_name=product_name + # only even products have templates + for product_name in products_names[0::2]: + await conn.execute( + products_to_templates.insert().values( + template_name=template_name, product_name=product_name + ) ) - ) async def test_export_and_import_table( - connection: SAConnection, + asyncpg_engine: AsyncEngine, product_templates_in_db: None, ): - exported_values = [] - excluded_names = {"created", "modified", "group_id"} - async for row in connection.execute( - sa.select(*(c for c in products.c if c.name not in excluded_names)) - ): - assert row - exported_values.append(dict(row)) - - # now just upsert them - for values in exported_values: - values["display_name"] += "-changed" - await connection.execute( - pg_insert(products) - .values(**values) - .on_conflict_do_update(index_elements=[products.c.name], set_=values) + + async with asyncpg_engine.connect() as connection: + exported_values = [] + excluded_names = {"created", "modified", "group_id"} + result = await connection.stream( + sa.select(*(c for c in products.c if c.name not in excluded_names)) ) + async for row in result: + assert row + exported_values.append(row._asdict()) + + # now just upsert them + for values in exported_values: + values["display_name"] += "-changed" + await connection.execute( + pg_insert(products) + .values(**values) + .on_conflict_do_update(index_elements=[products.c.name], set_=values) + ) async def test_create_templates_products_folder( 
- connection: SAConnection, + asyncpg_engine: AsyncEngine, templates_dir: Path, products_names: list[str], tmp_path: Path, @@ -121,20 +125,22 @@ async def test_create_templates_products_folder( shutil.copy(p, product_folder / p.name, follow_symlinks=False) # overrides if with files in database - async for row in connection.execute( - sa.select( - products_to_templates.c.product_name, - jinja2_templates.c.name, - jinja2_templates.c.content, + async with asyncpg_engine.connect() as conn: + result = await conn.stream( + sa.select( + products_to_templates.c.product_name, + jinja2_templates.c.name, + jinja2_templates.c.content, + ) + .select_from(products_to_templates.join(jinja2_templates)) + .where(products_to_templates.c.product_name == product_name) ) - .select_from(products_to_templates.join(jinja2_templates)) - .where(products_to_templates.c.product_name == product_name) - ): - assert row - template_path = product_folder / row.name - template_path.write_text(row.content) + async for row in result: + assert row + template_path = product_folder / row.name + template_path.write_text(row.content) - assert sorted( - product_folder / template_name for template_name in templates_names - ) == sorted(product_folder.rglob("*.*")) + assert sorted( + product_folder / template_name for template_name in templates_names + ) == sorted(product_folder.rglob("*.*")) diff --git a/packages/postgres-database/tests/products/test_utils_products.py b/packages/postgres-database/tests/products/test_utils_products.py index a1b84fe96dd..b25ffbc0ccf 100644 --- a/packages/postgres-database/tests/products/test_utils_products.py +++ b/packages/postgres-database/tests/products/test_utils_products.py @@ -3,43 +3,44 @@ # pylint: disable=redefined-outer-name # pylint: disable=unused-argument - -import asyncio from collections.abc import Callable import pytest import sqlalchemy as sa -from aiopg.sa.engine import Engine from simcore_postgres_database.models.groups import GroupType, groups from simcore_postgres_database.models.products import products from simcore_postgres_database.utils_products import ( + EmptyProductsError, get_default_product_name, get_or_create_product_group, - get_product_group_id, + get_product_group_id_or_none, ) +from sqlalchemy.ext.asyncio import AsyncEngine -async def test_default_product(aiopg_engine: Engine, make_products_table: Callable): - async with aiopg_engine.acquire() as conn: +async def test_default_product( + asyncpg_engine: AsyncEngine, make_products_table: Callable +): + async with asyncpg_engine.begin() as conn: await make_products_table(conn) default_product = await get_default_product_name(conn) assert default_product == "s4l" @pytest.mark.parametrize("pg_sa_engine", ["sqlModels"], indirect=True) -async def test_default_product_undefined(aiopg_engine: Engine): - async with aiopg_engine.acquire() as conn: - with pytest.raises(ValueError): +async def test_default_product_undefined(asyncpg_engine: AsyncEngine): + async with asyncpg_engine.connect() as conn: + with pytest.raises(EmptyProductsError): await get_default_product_name(conn) async def test_get_or_create_group_product( - aiopg_engine: Engine, make_products_table: Callable + asyncpg_engine: AsyncEngine, make_products_table: Callable ): - async with aiopg_engine.acquire() as conn: + async with asyncpg_engine.connect() as conn: await make_products_table(conn) - async for product_row in await conn.execute( + async for product_row in await conn.stream( sa.select(products.c.name, products.c.group_id).order_by( products.c.priority ) @@ 
-57,8 +58,7 @@ async def test_get_or_create_group_product( result = await conn.execute( groups.select().where(groups.c.gid == product_group_id) ) - assert result.rowcount == 1 - product_group = await result.first() + product_group = result.one() # check product's group assert product_group.type == GroupType.STANDARD @@ -78,9 +78,9 @@ async def test_get_or_create_group_product( result = await conn.execute( groups.select().where(groups.c.name == product_row.name) ) - assert result.rowcount == 1 + assert result.one() - assert product_group_id == await get_product_group_id( + assert product_group_id == await get_product_group_id_or_none( conn, product_name=product_row.name ) @@ -88,43 +88,14 @@ async def test_get_or_create_group_product( await conn.execute( groups.update().where(groups.c.gid == product_group_id).values(gid=1000) ) - product_group_id = await get_product_group_id( + product_group_id = await get_product_group_id_or_none( conn, product_name=product_row.name ) assert product_group_id == 1000 # if group is DELETED -> product.group_id=null await conn.execute(groups.delete().where(groups.c.gid == product_group_id)) - product_group_id = await get_product_group_id( + product_group_id = await get_product_group_id_or_none( conn, product_name=product_row.name ) assert product_group_id is None - - -@pytest.mark.skip( - reason="Not relevant. Will review in https://github.com/ITISFoundation/osparc-simcore/issues/3754" -) -async def test_get_or_create_group_product_concurrent( - aiopg_engine: Engine, make_products_table: Callable -): - async with aiopg_engine.acquire() as conn: - await make_products_table(conn) - - async def _auto_create_products_groups(): - async with aiopg_engine.acquire() as conn: - async for product_row in await conn.execute( - sa.select(products.c.name, products.c.group_id).order_by( - products.c.priority - ) - ): - # get or create - return await get_or_create_product_group( - conn, product_name=product_row.name - ) - return None - - tasks = [asyncio.create_task(_auto_create_products_groups()) for _ in range(5)] - - results = await asyncio.gather(*tasks) - - assert all(res == results[0] for res in results[1:]) diff --git a/packages/postgres-database/tests/projects/conftest.py b/packages/postgres-database/tests/projects/conftest.py deleted file mode 100644 index 2a1b9c99f24..00000000000 --- a/packages/postgres-database/tests/projects/conftest.py +++ /dev/null @@ -1,55 +0,0 @@ -# pylint: disable=no-value-for-parameter -# pylint: disable=redefined-outer-name -# pylint: disable=unused-argument -# pylint: disable=unused-variable - - -from collections.abc import AsyncIterable - -import pytest -from aiopg.sa.connection import SAConnection -from aiopg.sa.engine import Engine -from aiopg.sa.result import ResultProxy, RowProxy -from pytest_simcore.helpers.faker_factories import random_project, random_user -from simcore_postgres_database.models.projects import projects -from simcore_postgres_database.models.users import users - - -@pytest.fixture -async def user(aiopg_engine: Engine) -> RowProxy: - _USERNAME = f"{__name__}.me" - # some user - async with aiopg_engine.acquire() as conn: - result: ResultProxy | None = await conn.execute( - users.insert().values(**random_user(name=_USERNAME)).returning(users) - ) - assert result.rowcount == 1 - - _user: RowProxy | None = await result.first() - assert _user - assert _user.name == _USERNAME - return _user - - -@pytest.fixture -async def project(aiopg_engine: Engine, user: RowProxy) -> RowProxy: - _PARENT_PROJECT_NAME = 
f"{__name__}.parent" - # a user's project - async with aiopg_engine.acquire() as conn: - result: ResultProxy | None = await conn.execute( - projects.insert() - .values(**random_project(prj_owner=user.id, name=_PARENT_PROJECT_NAME)) - .returning(projects) - ) - assert result.rowcount == 1 - - _project: RowProxy | None = await result.first() - assert _project - assert _project.name == _PARENT_PROJECT_NAME - return _project - - -@pytest.fixture -async def conn(aiopg_engine: Engine) -> AsyncIterable[SAConnection]: - async with aiopg_engine.acquire() as conn: - yield conn diff --git a/packages/postgres-database/tests/projects/test_projects_version_control.py b/packages/postgres-database/tests/projects/test_projects_version_control.py deleted file mode 100644 index be075c67928..00000000000 --- a/packages/postgres-database/tests/projects/test_projects_version_control.py +++ /dev/null @@ -1,287 +0,0 @@ -# pylint: disable=redefined-outer-name -# pylint: disable=unused-argument -# pylint: disable=unused-variable -# pylint: disable=too-many-statements - -import hashlib -import json -from typing import Any -from uuid import UUID, uuid3 - -import pytest -from aiopg.sa.connection import SAConnection -from aiopg.sa.result import RowProxy -from simcore_postgres_database.models.projects import projects -from simcore_postgres_database.models.projects_version_control import ( - projects_vc_branches, - projects_vc_commits, - projects_vc_heads, - projects_vc_repos, - projects_vc_snapshots, - projects_vc_tags, -) -from simcore_postgres_database.utils_aiopg_orm import BaseOrm - - -class ReposOrm(BaseOrm[int]): - def __init__(self, connection: SAConnection): - super().__init__( - projects_vc_repos, - connection, - readonly={"id", "created", "modified"}, - ) - - -class BranchesOrm(BaseOrm[int]): - def __init__(self, connection: SAConnection): - super().__init__( - projects_vc_branches, - connection, - readonly={"id", "created", "modified"}, - ) - - -class CommitsOrm(BaseOrm[int]): - def __init__(self, connection: SAConnection): - super().__init__( - projects_vc_commits, - connection, - readonly={"id", "created", "modified"}, - # pylint: disable=no-member - writeonce=set(projects_vc_commits.columns.keys()), - ) - - -class TagsOrm(BaseOrm[int]): - def __init__(self, connection: SAConnection): - super().__init__( - projects_vc_tags, - connection, - readonly={"id", "created", "modified"}, - ) - - -class ProjectsOrm(BaseOrm[str]): - def __init__(self, connection: SAConnection): - super().__init__( - projects, - connection, - readonly={"id", "creation_date", "last_change_date"}, - writeonce={"uuid"}, - ) - - -class SnapshotsOrm(BaseOrm[str]): - def __init__(self, connection: SAConnection): - super().__init__( - projects_vc_snapshots, - connection, - writeonce={"checksum"}, # TODO: all? cannot delete snapshots? 
- ) - - -class HeadsOrm(BaseOrm[int]): - def __init__(self, connection: SAConnection): - super().__init__( - projects_vc_heads, - connection, - writeonce={"repo_id"}, - ) - - -# ------------- - - -def eval_checksum(workbench: dict[str, Any]): - # FIXME: prototype - block_string = json.dumps(workbench, sort_keys=True).encode("utf-8") - raw_hash = hashlib.sha256(block_string) - return raw_hash.hexdigest() - - -def eval_snapshot_uuid(repo: RowProxy, commit: RowProxy) -> UUID: - assert repo.id == commit.repo_id # nosec - return uuid3(UUID(repo.project_uuid), f"{repo.id}.{commit.snapshot_checksum}") - - -async def add_snapshot( - project_wc: RowProxy, checksum: str, repo: RowProxy, conn: SAConnection -) -> str: - snapshot_orm = SnapshotsOrm(conn) - row_id = await snapshot_orm.insert( - checksum=checksum, - content={"workbench": project_wc.workbench, "ui": project_wc.ui}, - ) - assert row_id == checksum - return checksum - - -async def test_basic_workflow(project: RowProxy, conn: SAConnection): - - # git init - async with conn.begin(): - # create repo - repo_orm = ReposOrm(conn) - repo_id = await repo_orm.insert(project_uuid=project.uuid) - assert repo_id is not None - assert isinstance(repo_id, int) - - repo_orm.set_filter(rowid=repo_id) - repo = await repo_orm.fetch() - assert repo - assert repo.project_uuid == project.uuid - assert repo.project_checksum is None - assert repo.created == repo.modified - - # create main branch - branches_orm = BranchesOrm(conn) - branch_id = await branches_orm.insert(repo_id=repo.id) - assert branch_id is not None - assert isinstance(branch_id, int) - - branches_orm.set_filter(rowid=branch_id) - main_branch: RowProxy | None = await branches_orm.fetch() - assert main_branch - assert main_branch.name == "main", "Expected 'main' as default branch" - assert main_branch.head_commit_id is None, "still not assigned" - assert main_branch.created == main_branch.modified - - # assign head branch - heads_orm = HeadsOrm(conn) - await heads_orm.insert(repo_id=repo.id, head_branch_id=branch_id) - - heads_orm.set_filter(rowid=repo.id) - head = await heads_orm.fetch() - assert head - - # - # create first commit -- TODO: separate tests - - # fetch a *full copy* of the project (WC) - repo = await repo_orm.fetch("id project_uuid project_checksum") - assert repo - - project_orm = ProjectsOrm(conn).set_filter(uuid=repo.project_uuid) - project_wc = await project_orm.fetch() - assert project_wc - assert project == project_wc - - # call external function to compute checksum - checksum = eval_checksum(project_wc.workbench) - assert repo.project_checksum != checksum - - # take snapshot <=> git add & commit - async with conn.begin(): - - snapshot_checksum = await add_snapshot(project_wc, checksum, repo, conn) - - # get HEAD = repo.branch_id -> .head_commit_id - assert head.repo_id == repo.id - branches_orm.set_filter(head.head_branch_id) - branch = await branches_orm.fetch("head_commit_id name") - assert branch - assert branch.name == "main" - assert branch.head_commit_id is None, "First commit" - - # create commit - commits_orm = CommitsOrm(conn) - commit_id = await commits_orm.insert( - repo_id=repo.id, - parent_commit_id=branch.head_commit_id, - snapshot_checksum=snapshot_checksum, - message="first commit", - ) - assert commit_id - assert isinstance(commit_id, int) - - # update branch head - await branches_orm.update(head_commit_id=commit_id) - - # update checksum cache - await repo_orm.update(project_checksum=snapshot_checksum) - - # log history - commits = await 
commits_orm.fetch_all() - assert len(commits) == 1 - assert commits[0].id == commit_id - - # tag - tag_orm = TagsOrm(conn) - tag_id = await tag_orm.insert( - repo_id=repo.id, - commit_id=commit_id, - name="v1", - ) - assert tag_id is not None - assert isinstance(tag_id, int) - - tag = await tag_orm.fetch(rowid=tag_id) - assert tag - assert tag.name == "v1" - - ############# NEW COMMIT ##################### - - # user add some changes - repo = await repo_orm.fetch() - assert repo - - project_orm.set_filter(uuid=repo.project_uuid) - assert project_orm.is_filter_set() - - await project_orm.update( - workbench={ - "node": { - "input": 3, - } - } - ) - - project_wc = await project_orm.fetch("workbench ui") - assert project_wc - assert project.workbench != project_wc.workbench - - # get HEAD = repo.branch_id -> .head_commit_id - head = await heads_orm.fetch() - assert head - branch = await branches_orm.fetch("head_commit_id", rowid=head.head_branch_id) - assert branch - # TODO: get subquery ... and compose - head_commit = await commits_orm.fetch(rowid=branch.head_commit_id) - assert head_commit - - # compare checksums between wc and HEAD - checksum = eval_checksum(project_wc.workbench) - assert head_commit.snapshot_checksum != checksum - - # updates wc checksum cache - await repo_orm.update(project_checksum=checksum) - - # take snapshot = add & commit - async with conn.begin(): - await add_snapshot(project_wc, checksum, repo, conn) - - commit_id = await commits_orm.insert( - repo_id=head_commit.repo_id, - parent_commit_id=head_commit.id, - snapshot_checksum=checksum, - message="second commit", - ) - assert commit_id - assert isinstance(commit_id, int) - - # update branch head - await branches_orm.update(head_commit_id=commit_id) - - # log history - commits = await commits_orm.fetch_all() - assert len(commits) == 2 - assert commits[1].id == commit_id - - ############# CHECKOUT TO TAG ##################### - - -@pytest.mark.skip(reason="DEV") -def test_concurrency(): - # several repos - # several threads - raise AssertionError diff --git a/packages/postgres-database/tests/test_clusters.py b/packages/postgres-database/tests/test_clusters.py deleted file mode 100644 index 95cd8492965..00000000000 --- a/packages/postgres-database/tests/test_clusters.py +++ /dev/null @@ -1,107 +0,0 @@ -# pylint: disable=no-value-for-parameter -# pylint: disable=redefined-outer-name -# pylint: disable=unused-argument -# pylint: disable=unused-variable -from collections.abc import AsyncIterable, Awaitable, Callable - -import pytest -import sqlalchemy as sa -from aiopg.sa.engine import Engine -from aiopg.sa.result import ResultProxy -from pytest_simcore.helpers.faker_factories import random_user -from simcore_postgres_database.errors import ForeignKeyViolation, NotNullViolation -from simcore_postgres_database.models.cluster_to_groups import cluster_to_groups -from simcore_postgres_database.models.clusters import ClusterType, clusters -from simcore_postgres_database.models.users import users - - -@pytest.fixture -async def user_id(aiopg_engine: Engine) -> AsyncIterable[int]: - async with aiopg_engine.acquire() as conn: - # a 'me' user - uid = await conn.scalar( - users.insert().values(**(random_user())).returning(users.c.id) - ) - assert uid is not None - yield uid - # cleanup - async with aiopg_engine.acquire() as conn: - # a 'me' user - uid = await conn.execute(users.delete().where(users.c.id == uid)) - - -@pytest.fixture -async def user_group_id(aiopg_engine: Engine, user_id: int) -> int: - async with 
aiopg_engine.acquire() as conn: - primary_gid = await conn.scalar( - sa.select(users.c.primary_gid).where(users.c.id == user_id) - ) - assert primary_gid is not None - return primary_gid - - -async def test_cluster_without_owner_forbidden( - create_fake_cluster: Callable[..., Awaitable[int]] -): - with pytest.raises(NotNullViolation): - await create_fake_cluster() - - -async def test_can_create_cluster_with_owner( - user_group_id: int, create_fake_cluster: Callable[..., Awaitable[int]] -): - aws_cluster_id = await create_fake_cluster( - name="test AWS cluster", type=ClusterType.AWS, owner=user_group_id - ) - assert aws_cluster_id > 0 - on_premise_cluster = await create_fake_cluster( - name="test on premise cluster", - type=ClusterType.ON_PREMISE, - owner=user_group_id, - ) - assert on_premise_cluster > 0 - assert on_premise_cluster != aws_cluster_id - - -async def test_cannot_remove_owner_that_owns_cluster( - aiopg_engine: Engine, - user_id: int, - user_group_id: int, - create_fake_cluster: Callable[..., Awaitable[int]], -): - cluster_id = await create_fake_cluster(owner=user_group_id) - # now try removing the user - async with aiopg_engine.acquire() as conn: - with pytest.raises(ForeignKeyViolation): - await conn.execute(users.delete().where(users.c.id == user_id)) - - # now remove the cluster - async with aiopg_engine.acquire() as conn: - await conn.execute(clusters.delete().where(clusters.c.id == cluster_id)) - - # removing the user should work now - async with aiopg_engine.acquire() as conn: - await conn.execute(users.delete().where(users.c.id == user_id)) - - -async def test_cluster_owner_has_all_rights( - aiopg_engine: Engine, - user_group_id: int, - create_fake_cluster: Callable[..., Awaitable[int]], -): - cluster_id = await create_fake_cluster(owner=user_group_id) - - async with aiopg_engine.acquire() as conn: - result: ResultProxy = await conn.execute( - cluster_to_groups.select().where( - cluster_to_groups.c.cluster_id == cluster_id - ) - ) - - assert result.rowcount == 1 - row = await result.fetchone() - assert row is not None - - assert row.read is True - assert row.write is True - assert row.delete is True diff --git a/packages/postgres-database/tests/test_models_payments_methods.py b/packages/postgres-database/tests/test_models_payments_methods.py index 100c0e5431b..cb5b14ee70e 100644 --- a/packages/postgres-database/tests/test_models_payments_methods.py +++ b/packages/postgres-database/tests/test_models_payments_methods.py @@ -10,7 +10,7 @@ from aiopg.sa.result import RowProxy from faker import Faker from pytest_simcore.helpers.faker_factories import random_payment_method -from simcore_postgres_database.errors import UniqueViolation +from simcore_postgres_database.aiopg_errors import UniqueViolation from simcore_postgres_database.models.payments_methods import ( InitPromptAckFlowState, payments_methods, diff --git a/packages/postgres-database/tests/test_models_products_prices.py b/packages/postgres-database/tests/test_models_products_prices.py index 7112f31b612..406158af0bf 100644 --- a/packages/postgres-database/tests/test_models_products_prices.py +++ b/packages/postgres-database/tests/test_models_products_prices.py @@ -4,58 +4,102 @@ # pylint: disable=too-many-arguments +from collections.abc import AsyncIterator + import pytest import sqlalchemy as sa -from aiopg.sa.connection import SAConnection -from aiopg.sa.result import RowProxy +import sqlalchemy.exc from faker import Faker from pytest_simcore.helpers.faker_factories import random_product -from 
simcore_postgres_database.errors import CheckViolation, ForeignKeyViolation from simcore_postgres_database.models.products import products from simcore_postgres_database.models.products_prices import products_prices from simcore_postgres_database.utils_products_prices import ( get_product_latest_price_info_or_none, - get_product_latest_stripe_info, + get_product_latest_stripe_info_or_none, is_payment_enabled, ) +from sqlalchemy.engine.row import Row +from sqlalchemy.ext.asyncio import AsyncConnection, AsyncEngine + + +@pytest.fixture +async def connection(asyncpg_engine: AsyncEngine) -> AsyncIterator[AsyncConnection]: + async with asyncpg_engine.connect() as conn: + isolation_level = await conn.get_isolation_level() + assert isolation_level == "READ COMMITTED" + yield conn @pytest.fixture -async def fake_product(connection: SAConnection) -> RowProxy: +async def fake_product(connection: AsyncConnection) -> Row: result = await connection.execute( products.insert() - .values(random_product(group_id=None)) - .returning(sa.literal_column("*")) + .values(random_product(name="tip", group_id=None)) + .returning(sa.literal_column("*")), ) - product = await result.first() - assert product is not None - return product + await connection.commit() + + async with connection.begin(): + result = await connection.execute( + products.insert() + .values(random_product(name="s4l", group_id=None)) + .returning(sa.literal_column("*")), + ) + + return result.one() async def test_creating_product_prices( - connection: SAConnection, fake_product: RowProxy, faker: Faker + asyncpg_engine: AsyncEngine, + connection: AsyncConnection, + fake_product: Row, + faker: Faker, ): # a price per product - result = await connection.execute( - products_prices.insert() - .values( - product_name=fake_product.name, - usd_per_credit=100, - comment="PO Mr X", - stripe_price_id=faker.word(), - stripe_tax_rate_id=faker.word(), + async with connection.begin(): + result = await connection.execute( + products_prices.insert() + .values( + product_name=fake_product.name, + usd_per_credit=100, + comment="PO Mr X", + stripe_price_id=faker.word(), + stripe_tax_rate_id=faker.word(), + ) + .returning(sa.literal_column("*")), ) - .returning(sa.literal_column("*")) - ) - product_prices = await result.first() - assert product_prices + got = result.one() + assert got + + # insert still NOT committed but can read from this connection + read_query = sa.select(products_prices).where( + products_prices.c.product_name == fake_product.name + ) + result = await connection.execute(read_query) + assert result.one()._asdict() == got._asdict() + + assert connection.in_transaction() is True + + # cannot read from other connection though + async with asyncpg_engine.connect() as other_connection: + result = await other_connection.execute(read_query) + assert result.one_or_none() is None + + # AFTER commit + assert connection.in_transaction() is False + async with asyncpg_engine.connect() as yet_another_connection: + result = await yet_another_connection.execute(read_query) + assert result.one()._asdict() == got._asdict() async def test_non_negative_price_not_allowed( - connection: SAConnection, fake_product: RowProxy, faker: Faker + connection: AsyncConnection, fake_product: Row, faker: Faker ): - # negative price not allowed - with pytest.raises(CheckViolation) as exc_info: + + assert not connection.in_transaction() + + # WRITE: negative price not allowed + with pytest.raises(sqlalchemy.exc.IntegrityError) as exc_info: await connection.execute( 
products_prices.insert().values( product_name=fake_product.name, @@ -67,46 +111,76 @@ async def test_non_negative_price_not_allowed( ) assert exc_info.value + assert connection.in_transaction() + await connection.rollback() + assert not connection.in_transaction() - # zero price is allowed - await connection.execute( - products_prices.insert().values( + # WRITE: zero price is allowed + result = await connection.execute( + products_prices.insert() + .values( product_name=fake_product.name, usd_per_credit=0, # <----- ZERO comment="PO Mr X", stripe_price_id=faker.word(), stripe_tax_rate_id=faker.word(), ) + .returning("*") ) + assert result.one() + + assert connection.in_transaction() + await connection.commit() + assert not connection.in_transaction() + + with pytest.raises(sqlalchemy.exc.ResourceClosedError): + # can only get result once! + assert result.one() + + # READ + result = await connection.execute(sa.select(products_prices)) + assert connection.in_transaction() + + assert result.one() + with pytest.raises(sqlalchemy.exc.ResourceClosedError): + # can only get result once! + assert result.one() + async def test_delete_price_constraints( - connection: SAConnection, fake_product: RowProxy, faker: Faker + connection: AsyncConnection, fake_product: Row, faker: Faker ): # products_prices - await connection.execute( - products_prices.insert().values( - product_name=fake_product.name, - usd_per_credit=10, - comment="PO Mr X", - stripe_price_id=faker.word(), - stripe_tax_rate_id=faker.word(), + async with connection.begin(): + await connection.execute( + products_prices.insert().values( + product_name=fake_product.name, + usd_per_credit=10, + comment="PO Mr X", + stripe_price_id=faker.word(), + stripe_tax_rate_id=faker.word(), + ) ) - ) + # BAD DELETE: # should not be able to delete a product w/o deleting price first - with pytest.raises(ForeignKeyViolation) as exc_info: - await connection.execute(products.delete()) + async with connection.begin(): + with pytest.raises(sqlalchemy.exc.IntegrityError, match="delete") as exc_info: + await connection.execute(products.delete()) - assert exc_info.match("delete") + # NOTE: that asyncpg.exceptions are converted to sqlalchemy.exc + # sqlalchemy.exc.IntegrityError: (sqlalchemy.dialects.postgresql.asyncpg.IntegrityError) : + assert "asyncpg.exceptions.ForeignKeyViolationError" in exc_info.value.args[0] - # this is the correct way to delete - await connection.execute(products_prices.delete()) - await connection.execute(products.delete()) + # GOOD DELETE: this is the correct way to delete + async with connection.begin(): + await connection.execute(products_prices.delete()) + await connection.execute(products.delete()) async def test_get_product_latest_price_or_none( - connection: SAConnection, fake_product: RowProxy, faker: Faker + connection: AsyncConnection, fake_product: Row, faker: Faker ): # undefined product assert ( @@ -130,29 +204,31 @@ async def test_get_product_latest_price_or_none( async def test_price_history_of_a_product( - connection: SAConnection, fake_product: RowProxy, faker: Faker + connection: AsyncConnection, fake_product: Row, faker: Faker ): # initial price - await connection.execute( - products_prices.insert().values( - product_name=fake_product.name, - usd_per_credit=1, - comment="PO Mr X", - stripe_price_id=faker.word(), - stripe_tax_rate_id=faker.word(), + async with connection.begin(): + await connection.execute( + products_prices.insert().values( + product_name=fake_product.name, + usd_per_credit=1, + comment="PO Mr X", + 
stripe_price_id=faker.word(), + stripe_tax_rate_id=faker.word(), + ) ) - ) # new price - await connection.execute( - products_prices.insert().values( - product_name=fake_product.name, - usd_per_credit=2, - comment="Update by Mr X", - stripe_price_id=faker.word(), - stripe_tax_rate_id=faker.word(), + async with connection.begin(): + await connection.execute( + products_prices.insert().values( + product_name=fake_product.name, + usd_per_credit=2, + comment="Update by Mr X", + stripe_price_id=faker.word(), + stripe_tax_rate_id=faker.word(), + ) ) - ) # latest is 2 USD! assert await get_product_latest_price_info_or_none( @@ -163,29 +239,33 @@ async def test_price_history_of_a_product( async def test_get_product_latest_stripe_info( - connection: SAConnection, fake_product: RowProxy, faker: Faker + connection: AsyncConnection, fake_product: Row, faker: Faker ): stripe_price_id_value = faker.word() stripe_tax_rate_id_value = faker.word() # products_prices - await connection.execute( - products_prices.insert().values( - product_name=fake_product.name, - usd_per_credit=10, - comment="PO Mr X", - stripe_price_id=stripe_price_id_value, - stripe_tax_rate_id=stripe_tax_rate_id_value, + async with connection.begin(): + await connection.execute( + products_prices.insert().values( + product_name=fake_product.name, + usd_per_credit=10, + comment="PO Mr X", + stripe_price_id=stripe_price_id_value, + stripe_tax_rate_id=stripe_tax_rate_id_value, + ) ) + + # undefined product + undefined_product_stripe_info = await get_product_latest_stripe_info_or_none( + connection, product_name="undefined" ) + assert undefined_product_stripe_info is None # defined product - product_stripe_info = await get_product_latest_stripe_info( + product_stripe_info = await get_product_latest_stripe_info_or_none( connection, product_name=fake_product.name ) + assert product_stripe_info assert product_stripe_info[0] == stripe_price_id_value assert product_stripe_info[1] == stripe_tax_rate_id_value - - # undefined product - with pytest.raises(ValueError) as exc_info: - await get_product_latest_stripe_info(connection, product_name="undefined") diff --git a/packages/postgres-database/tests/test_services_consume_filetypes.py b/packages/postgres-database/tests/test_services_consume_filetypes.py index f7279929907..efe0a083c6f 100644 --- a/packages/postgres-database/tests/test_services_consume_filetypes.py +++ b/packages/postgres-database/tests/test_services_consume_filetypes.py @@ -15,7 +15,7 @@ FAKE_FILE_CONSUMER_SERVICES, list_supported_filetypes, ) -from simcore_postgres_database.errors import CheckViolation +from simcore_postgres_database.aiopg_errors import CheckViolation from simcore_postgres_database.models.services import services_meta_data from simcore_postgres_database.models.services_consume_filetypes import ( services_consume_filetypes, diff --git a/packages/postgres-database/tests/test_users.py b/packages/postgres-database/tests/test_users.py index 1c10636e772..8bfe2814ada 100644 --- a/packages/postgres-database/tests/test_users.py +++ b/packages/postgres-database/tests/test_users.py @@ -11,12 +11,15 @@ from aiopg.sa.result import ResultProxy, RowProxy from faker import Faker from pytest_simcore.helpers.faker_factories import random_user -from simcore_postgres_database.errors import InvalidTextRepresentation, UniqueViolation +from simcore_postgres_database.aiopg_errors import ( + InvalidTextRepresentation, + UniqueViolation, +) from simcore_postgres_database.models.users import UserRole, UserStatus, users from 
simcore_postgres_database.utils_users import ( UsersRepo, - _generate_random_chars, _generate_username_from_email, + generate_alternative_username, ) from sqlalchemy.sql import func @@ -89,7 +92,7 @@ async def test_unique_username( faker, status=UserStatus.ACTIVE, name="pcrespov", - email="some-fanky-name@email.com", + email="p@email.com", first_name="Pedro", last_name="Crespo Valero", ) @@ -113,7 +116,7 @@ async def test_unique_username( await connection.scalar(users.insert().values(data).returning(users.c.id)) # and another one - data["name"] += _generate_random_chars() + data["name"] = generate_alternative_username(data["name"]) data["email"] = faker.email() await connection.scalar(users.insert().values(data).returning(users.c.id)) diff --git a/packages/pytest-simcore/src/pytest_simcore/disk_usage_monitoring.py b/packages/pytest-simcore/src/pytest_simcore/disk_usage_monitoring.py index 37ab7de0a0d..c4ebece8cb6 100644 --- a/packages/pytest-simcore/src/pytest_simcore/disk_usage_monitoring.py +++ b/packages/pytest-simcore/src/pytest_simcore/disk_usage_monitoring.py @@ -7,7 +7,7 @@ _DEFAULT_THREADHOLD_MB = 512 -def pytest_addoption(parser): +def pytest_addoption(parser: pytest.Parser): simcore_group = parser.getgroup("simcore") simcore_group.addoption( "--disk-usage", action="store_true", help="Enable disk usage monitoring" ) diff --git a/packages/pytest-simcore/src/pytest_simcore/docker_api_proxy.py b/packages/pytest-simcore/src/pytest_simcore/docker_api_proxy.py new file mode 100644 index 00000000000..1871eefdfed --- /dev/null +++ b/packages/pytest-simcore/src/pytest_simcore/docker_api_proxy.py @@ -0,0 +1,52 @@ +import logging + +import pytest +from aiohttp import ClientSession, ClientTimeout +from pydantic import TypeAdapter +from settings_library.docker_api_proxy import DockerApiProxysettings +from tenacity import before_sleep_log, retry, stop_after_delay, wait_fixed + +from .helpers.docker import get_service_published_port +from .helpers.host import get_localhost_ip +from .helpers.typing_env import EnvVarsDict + +_logger = logging.getLogger(__name__) + + +@retry( + wait=wait_fixed(1), + stop=stop_after_delay(10), + before_sleep=before_sleep_log(_logger, logging.INFO), + reraise=True, +) +async def _wait_till_docker_api_proxy_is_responsive( + settings: DockerApiProxysettings, +) -> None: + async with ClientSession(timeout=ClientTimeout(1, 1, 1, 1, 1)) as client: + response = await client.get(f"{settings.base_url}/version") + assert response.status == 200, await response.text() + + +@pytest.fixture
async def docker_api_proxy_settings( + docker_stack: dict, env_vars_for_docker_compose: EnvVarsDict +) -> DockerApiProxysettings: + """Returns the settings of a docker-api-proxy service that is up and responsive""" + + prefix = env_vars_for_docker_compose["SWARM_STACK_NAME"] + assert f"{prefix}_docker-api-proxy" in docker_stack["services"] + + published_port = get_service_published_port( + "docker-api-proxy", int(env_vars_for_docker_compose["DOCKER_API_PROXY_PORT"]) + ) + + settings = TypeAdapter(DockerApiProxysettings).validate_python( + { + "DOCKER_API_PROXY_HOST": get_localhost_ip(), + "DOCKER_API_PROXY_PORT": published_port, + } + ) + + await _wait_till_docker_api_proxy_is_responsive(settings) + + return settings diff --git a/packages/pytest-simcore/src/pytest_simcore/docker_compose.py index 2085e62a365..61207aa61a5 100644 --- a/packages/pytest-simcore/src/pytest_simcore/docker_compose.py +++ 
b/packages/pytest-simcore/src/pytest_simcore/docker_compose.py @@ -2,10 +2,10 @@ # pylint: disable=unused-argument # pylint: disable=unused-variable -""" Fixtures to create docker-compose.yaml configuration files (as in Makefile) - - Basically runs `docker compose config - - Services in stack can be selected using 'core_services_selection', 'ops_services_selection' fixtures +"""Fixtures to create docker-compose.yaml configuration files (as in Makefile) - - Basically runs `docker compose config - - Services in stack can be selected using 'core_services_selection', 'ops_services_selection' fixtures """ @@ -391,6 +391,10 @@ def _filter_services_and_dump( if "environment" in service: service["environment"] = _minio_fix(service["environment"]) + if name == "postgres": + # NOTE: -c fsync=off is not recommended for production as it disables fsync-ing to disk https://pythonspeed.com/articles/faster-db-tests/ + service["command"] += ["-c", "fsync=off"] + # updates current docker-compose (also versioned ... do not change by hand) with docker_compose_path.open("wt") as fh: yaml.dump(content, fh, default_flow_style=False) diff --git a/packages/pytest-simcore/src/pytest_simcore/file_extra.py b/packages/pytest-simcore/src/pytest_simcore/file_extra.py index 99ba8cc031c..b50e96d8f6c 100644 --- a/packages/pytest-simcore/src/pytest_simcore/file_extra.py +++ b/packages/pytest-simcore/src/pytest_simcore/file_extra.py @@ -1,16 +1,31 @@ import logging -from collections.abc import Callable +from collections.abc import Callable, Iterable, Iterator from pathlib import Path import pytest from faker import Faker -from pydantic import ByteSize -from pytest_simcore.helpers.logging_tools import log_context +from pydantic import ByteSize, NonNegativeInt + +from .helpers.logging_tools import log_context @pytest.fixture -def create_file_of_size(tmp_path: Path, faker: Faker) -> Callable[[ByteSize], Path]: - # NOTE: cleanup is done by tmp_path fixture +def fake_file_name(tmp_path: Path, faker: Faker) -> Iterable[Path]: + file = tmp_path / faker.file_name() + + yield file + + if file.exists(): + file.unlink() + assert not file.exists() + + +@pytest.fixture +def create_file_of_size( + tmp_path: Path, faker: Faker +) -> Iterator[Callable[[ByteSize], Path]]: + created_files = [] + def _creator(size: ByteSize, name: str | None = None) -> Path: file: Path = tmp_path / (name or faker.file_name()) if not file.parent.exists(): @@ -21,58 +36,83 @@ def _creator(size: ByteSize, name: str | None = None) -> Path: assert file.exists() assert file.stat().st_size == size + created_files.append(file) return file - return _creator + yield _creator + + for file in created_files: + if file.exists(): + file.unlink() + assert not file.exists() + + +def _create_random_content( + faker: Faker, + *, + base_dir: Path, + file_min_size: ByteSize, + file_max_size: ByteSize, + remaining_size: ByteSize, + depth: NonNegativeInt | None, +) -> ByteSize: + if remaining_size <= 0: + return remaining_size + + file_size = ByteSize( + faker.pyint( + min_value=min(file_min_size, remaining_size), + max_value=min(remaining_size, file_max_size), + ) + ) + if depth is None: + depth = faker.pyint(0, 5) + file_path = base_dir / f"{faker.unique.file_path(depth=depth, absolute=False)}" + file_path.parent.mkdir(parents=True, exist_ok=True) + assert not file_path.exists() + with file_path.open("wb") as fp: + fp.write(f"I am a {file_size.human_readable()} file".encode()) + fp.truncate(file_size) + assert file_path.exists() + + return ByteSize(remaining_size - file_size) 
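# --- illustration only, not part of the patch: a minimal sketch of a test that
# consumes the folder fixture defined below; the test name, sizes and assertion
# are hypothetical.
#
# from pydantic import ByteSize, TypeAdapter
#
# def test_folder_is_filled(create_folder_of_size_with_multiple_files):
#     folder = create_folder_of_size_with_multiple_files(
#         directory_size=TypeAdapter(ByteSize).validate_python("1MiB"),
#         file_min_size=TypeAdapter(ByteSize).validate_python("1KiB"),
#         file_max_size=TypeAdapter(ByteSize).validate_python("64KiB"),
#         working_directory=None,  # falls back to the fixture's tmp_path
#     )
#     # _create_random_content subtracts each file's size from the remaining
#     # budget, so the folder adds up to exactly the requested directory_size
#     total = sum(f.stat().st_size for f in folder.rglob("*") if f.is_file())
#     assert total == TypeAdapter(ByteSize).validate_python("1MiB")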
@pytest.fixture def create_folder_of_size_with_multiple_files( tmp_path: Path, faker: Faker -) -> Callable[[ByteSize, ByteSize, ByteSize], Path]: +) -> Callable[[ByteSize, ByteSize, ByteSize, Path | None], Path]: def _create_folder_of_size_with_multiple_files( directory_size: ByteSize, file_min_size: ByteSize, file_max_size: ByteSize, + working_directory: Path | None, + depth: NonNegativeInt | None = None, ) -> Path: # Helper function to create random files and directories assert file_min_size > 0 assert file_min_size <= file_max_size - def create_random_content(base_dir: Path, remaining_size: ByteSize) -> ByteSize: - if remaining_size <= 0: - return remaining_size - - # Decide to create a file or a subdirectory - # Create a file - file_size = ByteSize( - faker.pyint( - min_value=min(file_min_size, remaining_size), - max_value=min(remaining_size, file_max_size), - ) - ) # max file size 1MB - file_path = base_dir / f"{faker.file_path(depth=4, absolute=False)}" - file_path.parent.mkdir(parents=True, exist_ok=True) - assert not file_path.exists() - with file_path.open("wb") as fp: - fp.write(f"I am a {file_size.human_readable()} file".encode()) - fp.truncate(file_size) - assert file_path.exists() - - return ByteSize(remaining_size - file_size) - # Recursively create content in the temporary directory + folder_path = working_directory or tmp_path remaining_size = directory_size with log_context( logging.INFO, msg=f"creating {directory_size.human_readable()} of random files " - f"(up to {file_max_size.human_readable()}) in {tmp_path}", + f"(up to {file_max_size.human_readable()}) in {folder_path}", ) as ctx: num_files_created = 0 while remaining_size > 0: - remaining_size = create_random_content(tmp_path, remaining_size) + remaining_size = _create_random_content( + faker, + base_dir=folder_path, + file_min_size=file_min_size, + file_max_size=file_max_size, + remaining_size=remaining_size, + depth=depth, + ) num_files_created += 1 ctx.logger.info("created %s files", num_files_created) - return tmp_path + return folder_path return _create_folder_of_size_with_multiple_files diff --git a/packages/pytest-simcore/src/pytest_simcore/helpers/assert_checks.py b/packages/pytest-simcore/src/pytest_simcore/helpers/assert_checks.py index ba92081b9e4..fc931cbebd5 100644 --- a/packages/pytest-simcore/src/pytest_simcore/helpers/assert_checks.py +++ b/packages/pytest-simcore/src/pytest_simcore/helpers/assert_checks.py @@ -1,12 +1,11 @@ -""" Extends assertions for testing +"""Extends assertions for testing""" -""" from http import HTTPStatus from pprint import pformat from aiohttp import ClientResponse from servicelib.aiohttp import status -from servicelib.aiohttp.rest_responses import unwrap_envelope +from servicelib.rest_responses import unwrap_envelope from servicelib.status_codes_utils import get_code_display_name, is_error @@ -30,8 +29,10 @@ async def assert_status( data, error = unwrap_envelope(json_response) assert response.status == expected_status_code, ( - f"received {response.status}: ({data},{error})" - f", expected {get_code_display_name(expected_status_code)} : {expected_msg or ''}" + f"Expected: {get_code_display_name(expected_status_code)} : {expected_msg or ''}\n" + f"Got: {response.status}:\n" + f" - data :{pformat(data)}\n" + f" - error:{pformat(error)}\n" ) if is_error(expected_status_code): diff --git a/packages/pytest-simcore/src/pytest_simcore/helpers/catalog_services.py b/packages/pytest-simcore/src/pytest_simcore/helpers/catalog_services.py new file mode 100644 index 
00000000000..90a2508111f --- /dev/null +++ b/packages/pytest-simcore/src/pytest_simcore/helpers/catalog_services.py @@ -0,0 +1,41 @@ +# pylint: disable=not-context-manager +# pylint: disable=protected-access +# pylint: disable=redefined-outer-name +# pylint: disable=unused-argument +# pylint: disable=unused-variable + +from datetime import datetime +from typing import Any, Protocol + +from models_library.products import ProductName + + +class CreateFakeServiceDataCallable(Protocol): + """Signature for services/catalog/tests/unit/with_dbs/conftest.py::create_fake_service_data""" + + def __call__( + self, + key, + version, + team_access: str | None = None, + everyone_access: str | None = None, + product: ProductName = "osparc", + deprecated: datetime | None = None, # DB column + ) -> tuple[dict[str, Any], ...]: # type: ignore + """ + Returns a fake factory that creates catalog DATA that can be used to fill + both services_meta_data and services_access_rights tables + + + Example: + fake_service, *fake_access_rights = create_fake_service_data( + "simcore/services/dynamic/jupyterlab", + "0.0.1", + team_access="xw", + everyone_access="x", + product=target_product, + ), + + owner_access, team_access, everyone_access = fake_access_rights + + """ diff --git a/packages/pytest-simcore/src/pytest_simcore/helpers/comparing.py b/packages/pytest-simcore/src/pytest_simcore/helpers/comparing.py new file mode 100644 index 00000000000..31d2e986806 --- /dev/null +++ b/packages/pytest-simcore/src/pytest_simcore/helpers/comparing.py @@ -0,0 +1,61 @@ +import asyncio +import hashlib +from concurrent.futures import ProcessPoolExecutor +from pathlib import Path +from typing import TypeAlias + +import aiofiles +from servicelib.file_utils import create_sha256_checksum + +_FilesInfo: TypeAlias = dict[str, Path] + + +def get_relative_to(folder: Path, file: Path) -> str: + return f"{file.relative_to(folder)}" + + +async def assert_same_file_content(path_1: Path, path_2: Path) -> None: + async with aiofiles.open(path_1, "rb") as f1, aiofiles.open(path_2, "rb") as f2: + checksum_1 = await create_sha256_checksum(f1) + checksum_2 = await create_sha256_checksum(f2) + assert checksum_1 == checksum_2 + + +def get_files_info_from_path(folder: Path) -> _FilesInfo: + return {get_relative_to(folder, f): f for f in folder.rglob("*") if f.is_file()} + + +def compute_hash(file_path: Path) -> tuple[Path, str]: + with Path.open(file_path, "rb") as file_to_hash: + file_hash = hashlib.md5() # noqa: S324 + chunk = file_to_hash.read(8192) + while chunk: + file_hash.update(chunk) + chunk = file_to_hash.read(8192) + + return file_path, file_hash.hexdigest() + + +async def compute_hashes(file_paths: list[Path]) -> dict[Path, str]: + """given a list of files computes hashes for the files on a process pool""" + + loop = asyncio.get_event_loop() + + with ProcessPoolExecutor() as process_pool_executor: + tasks = [ + loop.run_in_executor(process_pool_executor, compute_hash, file_path) + for file_path in file_paths + ] + # pylint: disable=unnecessary-comprehension + # see return value of compute_hash: it is a tuple, mapping list[tuple[Path, str]] to dict[Path, str] here + return dict(await asyncio.gather(*tasks)) + + +async def assert_same_contents(file_info1: _FilesInfo, file_info2: _FilesInfo) -> None: + assert set(file_info1.keys()) == set(file_info2.keys()) + + hashes_1 = await compute_hashes(list(file_info1.values())) + hashes_2 = await compute_hashes(list(file_info2.values())) + + for key in file_info1: + assert hashes_1[file_info1[key]] == 
hashes_2[file_info2[key]] diff --git a/packages/pytest-simcore/src/pytest_simcore/helpers/faker_catalog.py b/packages/pytest-simcore/src/pytest_simcore/helpers/faker_catalog.py index c0cc6488b76..ac27ca18cb0 100644 --- a/packages/pytest-simcore/src/pytest_simcore/helpers/faker_catalog.py +++ b/packages/pytest-simcore/src/pytest_simcore/helpers/faker_catalog.py @@ -1,19 +1,25 @@ +from simcore_service_api_server.services_http.catalog import TruncatedCatalogServiceOut + + def create_service_out(**overrides): # FIXME: should change when schema changes obj = { "name": "Fast Counter", - "key": "simcore/service/dynanic/itis/sim4life" - if overrides.get("type") == "dynamic" - else "simcore/services/comp/itis/sleeper", + "description": "Counts fast", + "key": ( + "simcore/services/dynamic/itis/sim4life" + if overrides.get("type") == "dynamic" + else "simcore/services/comp/itis/sleeper" + ), "version": "1.0.0", "integration-version": "1.0.0", "type": "computational", "authors": [ { "name": "Jim Knopf", - "email": ["sun@sense.eight", "deleen@minbar.bab"], - "affiliation": ["Sense8", "Babylon 5"], + "email": "sun@sense.eight", + "affiliation": "Sense8", } ], "contact": "lab@net.flix", @@ -22,91 +28,6 @@ def create_service_out(**overrides): "owner": "user@example.com", } obj.update(**overrides) - return obj - -def create_service_out2(**overrides): - # - # Creates fake from here - # - # https://github.com/ITISFoundation/osparc-simcore/blob/master/services/catalog/src/simcore_service_catalog/models/schemas/services.py - # - # docker exec -it $(docker ps --filter="ancestor=local/catalog:development" -q) - # put file in https://json-schema-faker.js.org/ and get fake output - # - - DATA = { - "name": "officia", - "description": "sunt elit", - "key": "simcore/services/dynamic/xO/WAn/1-/$meZpaVN)/t_&[Q0/TC7Wn#y'j/MilxW/kTtV_{ str: try: # 'passlib' will be used only if already installed. 
@@ -135,6 +142,11 @@ def random_project(fake: Faker = DEFAULT_FAKER, **overrides) -> dict[str, Any]: "workbench": {}, "published": False, } + + icon = fake.random_element([random_icon_url(fake), None]) # nullable + if icon: + data["ui"] = {"icon": icon} + assert set(data.keys()).issubset({c.name for c in projects.columns}) data.update(overrides) @@ -169,16 +181,19 @@ def _get_comp_pipeline_test_states(): ] -def fake_pipeline(**overrides) -> dict[str, Any]: +def fake_pipeline(fake: Faker = DEFAULT_FAKER, **overrides) -> dict[str, Any]: data = { "dag_adjacency_list": json.dumps({}), - "state": random.choice(_get_comp_pipeline_test_states()), + "state": fake.random_element(_get_comp_pipeline_test_states()), } data.update(overrides) return data -def fake_task_factory(first_internal_id=1) -> Callable: +def fake_task_factory( + first_internal_id=1, + fake: Faker = DEFAULT_FAKER, +) -> Callable: # Each new instance of fake_task will get a copy _index_in_sequence = itertools.count(start=first_internal_id) @@ -193,7 +208,7 @@ def fake_task(**overrides) -> dict[str, Any]: "inputs": json.dumps({}), "outputs": json.dumps({}), "image": json.dumps({}), - "state": random.choice(_get_comp_pipeline_test_states()), + "state": fake.random_element(_get_comp_pipeline_test_states()), "start": t0 + timedelta(seconds=1), "end": t0 + timedelta(minutes=5), } @@ -205,6 +220,7 @@ def fake_task(**overrides) -> dict[str, Any]: def random_product( + *, group_id: int | None = None, registration_email_template: str | None = None, fake: Faker = DEFAULT_FAKER, @@ -250,11 +266,52 @@ def random_product( "group_id": group_id, } + if ui := fake.random_element( + [ + None, + # Examples from https://github.com/itisfoundation/osparc-simcore/blob/1dcd369717959348099cc6241822a1f0aff0382c/services/static-webserver/client/source/resource/osparc/new_studies.json + { + "categories": [ + {"id": "precomputed", "title": "Precomputed"}, + { + "id": "personalized", + "title": "Personalized", + "description": fake.sentence(), + }, + ] + }, + ] + ): + data.update(ui=ui) + assert set(data.keys()).issubset({c.name for c in products.columns}) data.update(overrides) return data +def random_product_price( + *, product_name: str, fake: Faker = DEFAULT_FAKER, **overrides +) -> dict[str, Any]: + from simcore_postgres_database.models.products_prices import products_prices + + data = { + "product_name": product_name, + "usd_per_credit": fake.pydecimal(left_digits=2, right_digits=2, positive=True), + "min_payment_amount_usd": fake.pydecimal( + left_digits=2, right_digits=2, positive=True + ), + "comment": fake.sentence(), + "valid_from": fake.date_time_this_decade(), + "stripe_price_id": fake.uuid4(), + "stripe_tax_rate_id": fake.uuid4(), + } + + assert set(data.keys()).issubset({c.name for c in products_prices.columns}) + + data.update(overrides) + return data + + def utcnow() -> datetime: return datetime.now(tz=UTC) @@ -319,7 +376,7 @@ def random_payment_transaction( def random_payment_autorecharge( - primary_payment_method_id: str = DEFAULT_FAKER.uuid4(), + primary_payment_method_id: str = "UNDEFINED__", fake: Faker = DEFAULT_FAKER, **overrides, ) -> dict[str, Any]: @@ -327,6 +384,9 @@ def random_payment_autorecharge( payments_autorecharge, ) + if primary_payment_method_id == "UNDEFINED__": + primary_payment_method_id = fake.uuid4() + data = { "wallet_id": fake.pyint(), "enabled": True, @@ -383,21 +443,23 @@ def random_service_meta_data( ) -> dict[str, Any]: from simcore_postgres_database.models.services import services_meta_data - _pick_from = 
random.choice _version = ".".join([str(fake.pyint()) for _ in range(3)]) _name = fake.name() data: dict[str, Any] = { # required - "key": f"simcore/services/{_pick_from(['dynamic', 'computational'])}/{_name}", + "key": f"simcore/services/{fake.random_element(['dynamic', 'computational'])}/{_name}", "version": _version, "name": f"the-{_name}-service", # display "description": fake.sentence(), # optional "description_ui": fake.pybool(), "owner": owner_primary_gid, - "thumbnail": _pick_from([fake.image_url(), None]), # nullable - "version_display": _pick_from([f"v{_version}", None]), # nullable + "thumbnail": fake.random_element( + [random_thumbnail_url(fake), None] + ), # nullable + "icon": fake.random_element([random_icon_url(fake), None]), # nullable + "version_display": fake.random_element([f"v{_version}", None]), # nullable "classifiers": [], # has default "quality": {}, # has default "deprecated": None, # nullable @@ -437,3 +499,35 @@ def random_service_access_rights( data.update(**overrides) return data + + +def random_itis_vip_available_download_item( + identifier: int, + fake: Faker = DEFAULT_FAKER, + features_functionality: str = "Posable", + **overrides, +): + features_str = ( + "{" + f"name: {fake.name()} Right Hand," # w/o spaces + f" version: V{fake.pyint()}.0, " # w/ x2 spaces + f"sex: Male, age: 8 years," # w/o spaces + f"date: {fake.date()} , " # w/ x2 spaces prefix, x1 space suffix + f"ethnicity: Caucasian, functionality: {features_functionality} " + "}" + ) + + data = { + "ID": identifier, + "Description": fake.sentence(), + "Thumbnail": fake.image_url(), + "Features": features_str, + "DOI": fake.bothify(text="10.####/ViP#####-##-#"), + "LicenseKey": fake.bothify(text="MODEL_????_V#"), + "LicenseVersion": fake.bothify(text="V#.0"), + "Protection": fake.random_element(elements=["Code", "PayPal"]), + "AvailableFromURL": fake.random_element(elements=[None, fake.url()]), + } + + data.update(**overrides) + return data diff --git a/packages/pytest-simcore/src/pytest_simcore/helpers/fastapi.py b/packages/pytest-simcore/src/pytest_simcore/helpers/fastapi.py new file mode 100644 index 00000000000..02e0d2d4ad5 --- /dev/null +++ b/packages/pytest-simcore/src/pytest_simcore/helpers/fastapi.py @@ -0,0 +1,11 @@ +import httpx +from fastapi import FastAPI +from yarl import URL + + +def url_from_operation_id( + client: httpx.AsyncClient, app: FastAPI, operation_id: str, **path_params +) -> URL: + return URL(f"{client.base_url}").with_path( + app.url_path_for(operation_id, **path_params) + ) diff --git a/packages/pytest-simcore/src/pytest_simcore/helpers/httpx_assert_checks.py b/packages/pytest-simcore/src/pytest_simcore/helpers/httpx_assert_checks.py new file mode 100644 index 00000000000..4443241192e --- /dev/null +++ b/packages/pytest-simcore/src/pytest_simcore/helpers/httpx_assert_checks.py @@ -0,0 +1,89 @@ +"""Extends assertions for testing""" + +import re +from http import HTTPStatus +from pprint import pformat +from typing import Any, TypeVar + +import httpx +from models_library.generics import Envelope +from pydantic import TypeAdapter +from servicelib.aiohttp import status +from servicelib.status_codes_utils import get_code_display_name, is_error + +T = TypeVar("T") + + +def assert_status( + response: httpx.Response, + expected_status_code: int, + response_model: type[T] | None, + *, + expected_msg: str | None = None, + expect_envelope: bool = True, +) -> tuple[T | None, Any]: + """ + Asserts for enveloped responses + """ + # raises ValueError if cannot be converted + 
expected_status_code = HTTPStatus(expected_status_code) + + assert ( + response.status_code == expected_status_code + ), f"received {response.status_code}: {response.text}, expected {get_code_display_name(expected_status_code)}" + + # response + if expected_status_code == status.HTTP_204_NO_CONTENT: + assert response.text == "" + return None, None + if expect_envelope: + validated_response = TypeAdapter(Envelope[response_model]).validate_json( + response.text + ) + data = validated_response.data + error = validated_response.error + if is_error(expected_status_code): + _do_assert_error( + data, + error, + expected_status_code, + expected_msg, + ) + else: + assert data is not None + return data, error + + if is_error(expected_status_code): + msg = "If you need it implement it" + raise NotImplementedError(msg) + + data = TypeAdapter(response_model).validate_json(response.text) + return data, None + + +def _do_assert_error( + data, + error, + expected_status_code: int, + expected_msg: list[str] | str | list[re.Pattern[str]] | re.Pattern[str] | None, +) -> None: + assert not data, pformat(data) + assert error, pformat(error) + + assert is_error(expected_status_code) + + details = error.get("errors", []) + assert isinstance(details, list) + + if expected_msg: + assert details is not None + # check that the expected messages are found in the details + if isinstance(expected_msg, list): + list_expected_msg = expected_msg + else: + list_expected_msg = [expected_msg] + + for msg in list_expected_msg: + assert any( + re.search(msg, e) for e in details + ), f"could not find {msg=} in {details=}" diff --git a/packages/pytest-simcore/src/pytest_simcore/helpers/postgres_tools.py b/packages/pytest-simcore/src/pytest_simcore/helpers/postgres_tools.py index 1621713acb3..b3a01381b7a 100644 --- a/packages/pytest-simcore/src/pytest_simcore/helpers/postgres_tools.py +++ b/packages/pytest-simcore/src/pytest_simcore/helpers/postgres_tools.py @@ -6,7 +6,7 @@ import sqlalchemy as sa from psycopg2 import OperationalError from simcore_postgres_database.models.base import metadata -from sqlalchemy.ext.asyncio import AsyncEngine +from sqlalchemy.ext.asyncio import AsyncConnection, AsyncEngine class PostgresTestConfig(TypedDict): @@ -76,25 +76,46 @@ def migrated_pg_tables_context( def is_postgres_responsive(url) -> bool: """Check if something responds to ``url``""" try: - engine = sa.create_engine(url) - conn = engine.connect() + sync_engine = sa.create_engine(url) + conn = sync_engine.connect() conn.close() except OperationalError: return False return True -async def _insert_and_get_row( - conn, table: sa.Table, values: dict[str, Any], pk_col: sa.Column, pk_value: Any +async def _async_insert_and_get_row( + conn: AsyncConnection, + table: sa.Table, + values: dict[str, Any], + pk_col: sa.Column, + pk_value: Any, ): result = await conn.execute(table.insert().values(**values).returning(pk_col)) - row = result.first() + row = result.one() # NOTE: DO NO USE row[pk_col] since you will get a deprecation error (Background on SQLAlchemy 2.0 at: https://sqlalche.me/e/b8d9) assert getattr(row, pk_col.name) == pk_value result = await conn.execute(sa.select(table).where(pk_col == pk_value)) - return result.first() + return result.one() + + +def _sync_insert_and_get_row( + conn: sa.engine.Connection, + table: sa.Table, + values: dict[str, Any], + pk_col: sa.Column, + pk_value: Any, +): + result = conn.execute(table.insert().values(**values).returning(pk_col)) + row = result.one() + + # NOTE: DO NOT USE row[pk_col] since you will get a deprecation 
error (Background on SQLAlchemy 2.0 at: https://sqlalche.me/e/b8d9) + assert getattr(row, pk_col.name) == pk_value + + result = conn.execute(sa.select(table).where(pk_col == pk_value)) + return result.one() @@ -108,10 +129,12 @@ async def insert_and_get_row_lifespan( ) -> AsyncIterator[dict[str, Any]]: # insert & get async with sqlalchemy_async_engine.begin() as conn: - row = await _insert_and_get_row( + row = await _async_insert_and_get_row( conn, table=table, values=values, pk_col=pk_col, pk_value=pk_value ) + + assert row + # NOTE: DO NO USE dict(row) since you will get a deprecation error (Background on SQLAlchemy 2.0 at: https://sqlalche.me/e/b8d9) # pylint: disable=protected-access yield row._asdict() @@ -119,3 +142,35 @@ # delete row async with sqlalchemy_async_engine.begin() as conn: await conn.execute(table.delete().where(pk_col == pk_value)) + + +@contextmanager +def sync_insert_and_get_row_lifespan( + sqlalchemy_sync_engine: sa.engine.Engine, + *, + table: sa.Table, + values: dict[str, Any], + pk_col: sa.Column, + pk_value: Any, +) -> Iterator[dict[str, Any]]: + """sync version of insert_and_get_row_lifespan. + + TIP: more convenient for **module-scope fixtures** that set up the + database tables before the app starts since it does not require an `event_loop` + fixture (which is function-scoped) + """ + # insert & get + with sqlalchemy_sync_engine.begin() as conn: + row = _sync_insert_and_get_row( + conn, table=table, values=values, pk_col=pk_col, pk_value=pk_value + ) + + assert row + + # NOTE: DO NOT USE dict(row) since you will get a deprecation error (Background on SQLAlchemy 2.0 at: https://sqlalche.me/e/b8d9) + # pylint: disable=protected-access + yield row._asdict() + + # delete row + with sqlalchemy_sync_engine.begin() as conn: + conn.execute(table.delete().where(pk_col == pk_value)) diff --git a/packages/pytest-simcore/src/pytest_simcore/helpers/s3.py b/packages/pytest-simcore/src/pytest_simcore/helpers/s3.py index 2f0a03b575d..61d630d994c 100644 --- a/packages/pytest-simcore/src/pytest_simcore/helpers/s3.py +++ b/packages/pytest-simcore/src/pytest_simcore/helpers/s3.py @@ -4,12 +4,16 @@ from typing import Final import aiofiles +import httpx import orjson -from aiohttp import ClientSession from aws_library.s3 import MultiPartUploadLinks -from models_library.api_schemas_storage import ETag, FileUploadSchema, UploadedPart +from fastapi import status +from models_library.api_schemas_storage.storage_schemas import ( + ETag, + FileUploadSchema, + UploadedPart, +) from pydantic import AnyUrl, ByteSize, TypeAdapter -from servicelib.aiohttp import status from servicelib.utils import limited_as_completed, logged_gather from types_aiobotocore_s3 import S3Client @@ -37,7 +41,7 @@ async def _file_sender( async def upload_file_part( - session: ClientSession, + session: httpx.AsyncClient, file: Path, part_index: int, file_offset: int, @@ -48,11 +52,11 @@ raise_while_uploading: bool = False, ) -> tuple[int, ETag]: print( - f"--> uploading {this_file_chunk_size=} of {file=}, [{part_index+1}/{num_parts}]..." + f"--> uploading {this_file_chunk_size=} of {file=}, [{part_index + 1}/{num_parts}]..." 
) response = await session.put( str(upload_url), - data=_file_sender( + content=_file_sender( file, offset=file_offset, bytes_to_send=this_file_chunk_size, @@ -64,12 +68,12 @@ async def upload_file_part( ) response.raise_for_status() # NOTE: the response from minio does not contain a json body - assert response.status == status.HTTP_200_OK + assert response.status_code == status.HTTP_200_OK assert response.headers assert "Etag" in response.headers received_e_tag = orjson.loads(response.headers["Etag"]) print( - f"--> completed upload {this_file_chunk_size=} of {file=}, [{part_index+1}/{num_parts}], {received_e_tag=}" + f"--> completed upload {this_file_chunk_size=} of {file=}, [{part_index + 1}/{num_parts}], {received_e_tag=}" ) return (part_index, received_e_tag) @@ -80,7 +84,7 @@ async def upload_file_to_presigned_link( file_size = file.stat().st_size with log_context(logging.INFO, msg=f"uploading {file} via {file_upload_link=}"): - async with ClientSession() as session: + async with httpx.AsyncClient() as session: file_chunk_size = int(file_upload_link.chunk_size) num_urls = len(file_upload_link.urls) last_chunk_size = file_size - file_chunk_size * (num_urls - 1) @@ -100,7 +104,7 @@ async def upload_file_to_presigned_link( upload_url, ) ) - results = await logged_gather(*upload_tasks, max_concurrency=0) + results = await logged_gather(*upload_tasks, max_concurrency=20) return [UploadedPart(number=index + 1, e_tag=e_tag) for index, e_tag in results] diff --git a/packages/pytest-simcore/src/pytest_simcore/helpers/storage_utils.py b/packages/pytest-simcore/src/pytest_simcore/helpers/storage_utils.py new file mode 100644 index 00000000000..39c8e2d91d7 --- /dev/null +++ b/packages/pytest-simcore/src/pytest_simcore/helpers/storage_utils.py @@ -0,0 +1,47 @@ +import logging +from dataclasses import dataclass +from pathlib import Path +from typing import Any, TypedDict + +import sqlalchemy as sa +from faker import Faker +from models_library.basic_types import SHA256Str +from pydantic import ByteSize +from simcore_postgres_database.storage_models import projects +from sqlalchemy.ext.asyncio import AsyncEngine + +log = logging.getLogger(__name__) + + +async def get_updated_project( + sqlalchemy_async_engine: AsyncEngine, project_id: str +) -> dict[str, Any]: + async with sqlalchemy_async_engine.connect() as conn: + result = await conn.execute( + sa.select(projects).where(projects.c.uuid == project_id) + ) + row = result.one() + return row._asdict() + + +class FileIDDict(TypedDict): + path: Path + sha256_checksum: SHA256Str + + +@dataclass(frozen=True, kw_only=True, slots=True) +class ProjectWithFilesParams: + num_nodes: int + allowed_file_sizes: tuple[ByteSize, ...] + workspace_files_count: int + allowed_file_checksums: tuple[SHA256Str, ...] 
= None # type: ignore # NOTE: OK for testing + + def __post_init__(self): + if self.allowed_file_checksums is None: + # generate some random checksums for the corresponding file sizes + faker = Faker() + checksums = tuple(faker.sha256() for _ in self.allowed_file_sizes) + object.__setattr__(self, "allowed_file_checksums", checksums) + + def __repr__(self) -> str: + return f"ProjectWithFilesParams: #nodes={self.num_nodes}, file sizes={[_.human_readable() for _ in self.allowed_file_sizes]}" diff --git a/services/storage/tests/helpers/utils_file_meta_data.py b/packages/pytest-simcore/src/pytest_simcore/helpers/storage_utils_file_meta_data.py similarity index 64% rename from services/storage/tests/helpers/utils_file_meta_data.py rename to packages/pytest-simcore/src/pytest_simcore/helpers/storage_utils_file_meta_data.py index f6b133bbdda..c5566d7030e 100644 --- a/services/storage/tests/helpers/utils_file_meta_data.py +++ b/packages/pytest-simcore/src/pytest_simcore/helpers/storage_utils_file_meta_data.py @@ -1,12 +1,12 @@ -from aiopg.sa.engine import Engine from aws_library.s3 import UploadID from models_library.basic_types import SHA256Str from models_library.projects_nodes_io import StorageFileID from simcore_postgres_database.storage_models import file_meta_data +from sqlalchemy.ext.asyncio import AsyncEngine async def assert_file_meta_data_in_db( - aiopg_engine: Engine, + sqlalchemy_async_engine: AsyncEngine, *, file_id: StorageFileID, expected_entry_exists: bool, @@ -18,11 +18,11 @@ async def assert_file_meta_data_in_db( if expected_entry_exists and expected_file_size is None: assert True, "Invalid usage of assertion, expected_file_size cannot be None" - async with aiopg_engine.acquire() as conn: + async with sqlalchemy_async_engine.connect() as conn: result = await conn.execute( file_meta_data.select().where(file_meta_data.c.file_id == f"{file_id}") ) - db_data = await result.fetchall() + db_data = result.fetchall() assert db_data is not None assert len(db_data) == (1 if expected_entry_exists else 0), ( f"{file_id} was not found!" @@ -33,32 +33,25 @@ async def assert_file_meta_data_in_db( if expected_entry_exists: row = db_data[0] assert ( - row[file_meta_data.c.file_size] == expected_file_size + row.file_size == expected_file_size ), f"entry in file_meta_data was not initialized correctly, size should be set to {expected_file_size}" if expected_upload_id: assert ( - row[file_meta_data.c.upload_id] is not None + row.upload_id is not None ), "multipart upload shall have an upload_id, it is missing!" else: assert ( - row[file_meta_data.c.upload_id] is None + row.upload_id is None ), "single file upload should not have an upload_id" if expected_upload_expiration_date: - assert row[ - file_meta_data.c.upload_expires_at - ], "no upload expiration date!" + assert row.upload_expires_at, "no upload expiration date!" 
else: - assert ( - row[file_meta_data.c.upload_expires_at] is None - ), "expiration date should be NULL" + assert row.upload_expires_at is None, "expiration date should be NULL" if expected_sha256_checksum: assert ( - SHA256Str(row[file_meta_data.c.sha256_checksum]) - == expected_sha256_checksum + SHA256Str(row.sha256_checksum) == expected_sha256_checksum ), "invalid sha256_checksum" else: - assert ( - row[file_meta_data.c.sha256_checksum] is None - ), "expected sha256_checksum was None" - upload_id = row[file_meta_data.c.upload_id] + assert row.sha256_checksum is None, "expected sha256_checksum was None" + upload_id = row.upload_id return upload_id diff --git a/services/storage/tests/helpers/utils_project.py b/packages/pytest-simcore/src/pytest_simcore/helpers/storage_utils_project.py similarity index 100% rename from services/storage/tests/helpers/utils_project.py rename to packages/pytest-simcore/src/pytest_simcore/helpers/storage_utils_project.py diff --git a/packages/pytest-simcore/src/pytest_simcore/helpers/webserver_login.py b/packages/pytest-simcore/src/pytest_simcore/helpers/webserver_login.py index 062a33d693a..a9d7b3fcdd7 100644 --- a/packages/pytest-simcore/src/pytest_simcore/helpers/webserver_login.py +++ b/packages/pytest-simcore/src/pytest_simcore/helpers/webserver_login.py @@ -1,4 +1,6 @@ +import contextlib import re +from collections.abc import AsyncIterator from datetime import datetime from typing import Any, TypedDict @@ -11,7 +13,7 @@ from simcore_service_webserver.login._constants import MSG_LOGGED_IN from simcore_service_webserver.login._registration import create_invitation_token from simcore_service_webserver.login.storage import AsyncpgStorage, get_plugin_storage -from simcore_service_webserver.products.api import list_products +from simcore_service_webserver.products.products_service import list_products from simcore_service_webserver.security.api import clean_auth_policy_cache from yarl import URL @@ -186,6 +188,30 @@ async def __aexit__(self, *args): return await super().__aexit__(*args) +@contextlib.asynccontextmanager +async def switch_client_session_to( + client: TestClient, user: UserInfoDict +) -> AsyncIterator[TestClient]: + assert client.app + + await client.post(f'{client.app.router["auth_logout"].url_for()}') + # sometimes 4xx if user already logged out. 
Ignore + + resp = await client.post( + f'{client.app.router["auth_login"].url_for()}', + json={ + "email": user["email"], + "password": user["raw_password"], + }, + ) + await assert_status(resp, status.HTTP_200_OK) + + yield client + + resp = await client.post(f'{client.app.router["auth_logout"].url_for()}') + await assert_status(resp, status.HTTP_200_OK) + + class NewInvitation(NewUser): def __init__( self, diff --git a/packages/pytest-simcore/src/pytest_simcore/helpers/webserver_projects.py b/packages/pytest-simcore/src/pytest_simcore/helpers/webserver_projects.py index fbcfaa7b474..99ee393f394 100644 --- a/packages/pytest-simcore/src/pytest_simcore/helpers/webserver_projects.py +++ b/packages/pytest-simcore/src/pytest_simcore/helpers/webserver_projects.py @@ -1,6 +1,4 @@ -""" helpers to manage the projects's database and produce fixtures/mockup data for testing - -""" +"""helpers to manage the projects' database and produce fixtures/mockup data for testing""" # pylint: disable=no-value-for-parameter @@ -16,9 +14,16 @@ from models_library.projects_nodes_io import NodeID from models_library.services_resources import ServiceResourcesDictHelpers from simcore_postgres_database.utils_projects_nodes import ProjectNodeCreate -from simcore_service_webserver.projects._db_utils import DB_EXCLUSIVE_COLUMNS -from simcore_service_webserver.projects._groups_db import update_or_insert_project_group -from simcore_service_webserver.projects.db import APP_PROJECT_DBAPI, ProjectDBAPI +from simcore_service_webserver.projects._groups_repository import ( + update_or_insert_project_group, +) +from simcore_service_webserver.projects._projects_repository_legacy import ( + APP_PROJECT_DBAPI, + ProjectDBAPI, +) +from simcore_service_webserver.projects._projects_repository_legacy_utils import ( + DB_EXCLUSIVE_COLUMNS, +) from simcore_service_webserver.projects.models import ProjectDict from simcore_service_webserver.utils import now_str diff --git a/packages/pytest-simcore/src/pytest_simcore/pydantic_models.py b/packages/pytest-simcore/src/pytest_simcore/pydantic_models.py index cfe0a62b7d0..14cbb3a22ec 100644 --- a/packages/pytest-simcore/src/pytest_simcore/pydantic_models.py +++ b/packages/pytest-simcore/src/pytest_simcore/pydantic_models.py @@ -2,14 +2,15 @@ import importlib import inspect import itertools -import json import pkgutil +import warnings from collections.abc import Iterator from contextlib import suppress from types import ModuleType -from typing import Any, NamedTuple +from typing import Any, NamedTuple, TypeVar import pytest +from common_library.json_serialization import json_dumps from pydantic import BaseModel, ValidationError @@ -63,18 +64,20 @@ def walk_model_examples_in_package(package: ModuleType) -> Iterator[ModelExample def iter_model_examples_in_module(module: object) -> Iterator[ModelExample]: """Iterates on all examples defined as BaseModelClass.model_config["json_schema_extra"]["example"] - Usage: + import some_package.some_module @pytest.mark.parametrize( "model_cls, example_name, example_data", - iter_model_examples_in_module(simcore_service_webserver.storage_schemas), + iter_model_examples_in_module(some_package.some_module), ) def test_model_examples( - model_cls: type[BaseModel], example_name: int, example_data: Any + model_cls: type[BaseModel], example_name: str, example_data: Any ): - print(example_name, ":", json.dumps(example_data)) - assert model_cls.model_validate(example_data) + assert_validation_model( + model_cls, example_name=example_name, example_data=example_data + ) 
+ """ def _is_model_cls(obj) -> bool: @@ -95,36 +98,69 @@ def _is_model_cls(obj) -> bool: for model_name, model_cls in inspect.getmembers(module, _is_model_cls): - schema = model_cls.model_json_schema() + yield from iter_model_examples_in_class(model_cls, model_name) + + +def iter_model_examples_in_class( + model_cls: type[BaseModel], model_name: str | None = None +) -> Iterator[ModelExample]: + """Iterates on all examples within a base model class + + Usage: + + @pytest.mark.parametrize( + "model_cls, example_name, example_data", + iter_model_examples_in_class(SomeModelClass), + ) + def test_model_examples( + model_cls: type[BaseModel], example_name: str, example_data: Any + ): + assert_validation_model( + model_cls, example_name=example_name, example_data=example_data + ) + + """ + assert issubclass(model_cls, BaseModel) # nosec - if example := schema.get("example"): + if model_name is None: + model_name = f"{model_cls.__module__}.{model_cls.__name__}" + + schema = model_cls.model_json_schema() + + if example := schema.get("example"): + yield ModelExample( + model_cls=model_cls, + example_name=f"{model_name}_example", + example_data=example, + ) + + if many_examples := schema.get("examples"): + for index, example in enumerate(many_examples): yield ModelExample( model_cls=model_cls, - example_name=f"{model_name}_example", + example_name=f"{model_name}_examples_{index}", example_data=example, ) - if many_examples := schema.get("examples"): - for index, example in enumerate(many_examples): - yield ModelExample( - model_cls=model_cls, - example_name=f"{model_name}_examples_{index}", - example_data=example, - ) + +TBaseModel = TypeVar("TBaseModel", bound=BaseModel) def assert_validation_model( - model_cls: type[BaseModel], example_name: int, example_data: Any -): + model_cls: type[TBaseModel], example_name: str, example_data: Any +) -> TBaseModel: try: - assert model_cls.model_validate(example_data) is not None + model_instance = model_cls.model_validate(example_data) except ValidationError as err: pytest.fail( f"{example_name} is invalid {model_cls.__module__}.{model_cls.__name__}:" - f"\n{json.dumps(example_data, indent=1)}" + f"\n{json_dumps(example_data, indent=1)}" f"\nError: {err}" ) + assert isinstance(model_instance, model_cls) + return model_instance + ## PYDANTIC MODELS & SCHEMAS ----------------------------------------------------- @@ -134,7 +170,12 @@ def model_cls_examples(model_cls: type[BaseModel]) -> dict[str, dict[str, Any]]: """ Extracts examples from pydantic model class Config """ - + warnings.warn( + "The 'model_cls_examples' fixture is deprecated and will be removed in a future version. " + "Please use 'iter_model_example_in_class' or 'iter_model_examples_in_module' as an alternative.", + DeprecationWarning, + stacklevel=2, + ) # Use by defining model_cls as test parametrization assert model_cls, ( f"Testing against a {model_cls} model that has NO examples. Add them in Config class. 
" diff --git a/packages/pytest-simcore/src/pytest_simcore/services_api_mocks_for_aiohttp_clients.py b/packages/pytest-simcore/src/pytest_simcore/services_api_mocks_for_aiohttp_clients.py index a2b46d06679..5ad9ed0d671 100644 --- a/packages/pytest-simcore/src/pytest_simcore/services_api_mocks_for_aiohttp_clients.py +++ b/packages/pytest-simcore/src/pytest_simcore/services_api_mocks_for_aiohttp_clients.py @@ -12,7 +12,7 @@ from aioresponses.core import CallbackResult from faker import Faker from models_library.api_schemas_directorv2.comp_tasks import ComputationGet -from models_library.api_schemas_storage import ( +from models_library.api_schemas_storage.storage_schemas import ( FileMetaDataGet, FileUploadCompleteFutureResponse, FileUploadCompleteResponse, @@ -93,7 +93,7 @@ def create_computation_cb(url, **kwargs) -> CallbackResult: "62237c33-8d6c-4709-aa92-c3cf693dd6d2", "0bdf824f-57cb-4e38-949e-fd12c184f000", ] - node_states[node_id] = {"state": {"modified": True, "dependencies": []}} + node_states[node_id] = {"modified": True, "dependencies": []} node_states["62237c33-8d6c-4709-aa92-c3cf693dd6d2"] = { "modified": True, "dependencies": ["2f493631-30b4-4ad8-90f2-a74e4b46fe73"], @@ -105,10 +105,15 @@ def create_computation_cb(url, **kwargs) -> CallbackResult: "62237c33-8d6c-4709-aa92-c3cf693dd6d2", ], } - returned_computation = ComputationTask.model_validate( - ComputationTask.model_config["json_schema_extra"]["examples"][0] - ).model_copy( - update={ + + json_schema = ComputationTask.model_json_schema() + assert isinstance(json_schema["examples"], list) + assert isinstance( + json_schema["examples"][0], dict + ) + computation: dict[str, Any] = json_schema["examples"][0].copy() + computation.update( + { "id": f"{kwargs['json']['project_id']}", "state": state, "pipeline_details": { @@ -118,6 +123,10 @@ def create_computation_cb(url, **kwargs) -> CallbackResult: }, } ) + returned_computation = ComputationTask.model_validate( + computation + ) + return CallbackResult( status=201, # NOTE: aioresponses uses json.dump which does NOT encode serialization of UUIDs @@ -129,15 +138,20 @@ def get_computation_cb(url, **kwargs) -> CallbackResult: state = RunningState.NOT_STARTED pipeline: dict[str, list[str]] = FULL_PROJECT_PIPELINE_ADJACENCY node_states = FULL_PROJECT_NODE_STATES - assert "json_schema_extra" in ComputationGet.model_config - assert isinstance(ComputationGet.model_config["json_schema_extra"], dict) + + json_schema = ComputationGet.model_json_schema() assert isinstance( - ComputationGet.model_config["json_schema_extra"]["examples"], list + json_schema["examples"], list ) - returned_computation = ComputationGet.model_validate( - ComputationGet.model_config["json_schema_extra"]["examples"][0] - ).model_copy( - update={ + assert isinstance( + json_schema["examples"][0], dict + ) + + computation: dict[str, Any] = json_schema[ + "examples" + ][0].copy() + computation.update( + { "id": Path(url.path).name, "state": state, "pipeline_details": { @@ -147,6 +161,7 @@ def get_computation_cb(url, **kwargs) -> CallbackResult: }, } ) + returned_computation = ComputationGet.model_validate(computation) return CallbackResult( status=200, @@ -297,9 +312,7 @@ async def storage_v0_service_mock( aioresponses_mocker.get( get_file_metadata_pattern, status=status.HTTP_200_OK, - payload={ - "data": FileMetaDataGet.model_config["json_schema_extra"]["examples"][0] - }, + payload={"data": FileMetaDataGet.model_json_schema()["examples"][0]}, repeat=True, ) aioresponses_mocker.get( diff --git 
a/packages/pytest-simcore/src/pytest_simcore/simcore_services.py b/packages/pytest-simcore/src/pytest_simcore/simcore_services.py index 11dd165a963..749bbc04230 100644 --- a/packages/pytest-simcore/src/pytest_simcore/simcore_services.py +++ b/packages/pytest-simcore/src/pytest_simcore/simcore_services.py @@ -37,6 +37,7 @@ "static-webserver", "traefik", "whoami", + "sto-worker", } # TODO: unify healthcheck policies see https://github.com/ITISFoundation/osparc-simcore/pull/2281 SERVICE_PUBLISHED_PORT = {} @@ -52,10 +53,12 @@ "invitations": "/", "payments": "/", "resource-usage-tracker": "/", + "docker-api-proxy": "/version", } AIOHTTP_BASED_SERVICE_PORT: int = 8080 FASTAPI_BASED_SERVICE_PORT: int = 8000 DASK_SCHEDULER_SERVICE_PORT: int = 8787 +DOCKER_API_PROXY_SERVICE_PORT: int = 8888 _SERVICE_NAME_REPLACEMENTS: dict[str, str] = { "dynamic-scheduler": "dynamic-schdlr", @@ -133,6 +136,7 @@ def services_endpoint( AIOHTTP_BASED_SERVICE_PORT, FASTAPI_BASED_SERVICE_PORT, DASK_SCHEDULER_SERVICE_PORT, + DOCKER_API_PROXY_SERVICE_PORT, ] endpoint = URL( f"http://{get_localhost_ip()}:{get_service_published_port(full_service_name, target_ports)}" ) diff --git a/packages/pytest-simcore/src/pytest_simcore/simcore_storage_data_models.py b/packages/pytest-simcore/src/pytest_simcore/simcore_storage_data_models.py new file mode 100644 index 00000000000..4ca55f24bd6 --- /dev/null +++ b/packages/pytest-simcore/src/pytest_simcore/simcore_storage_data_models.py @@ -0,0 +1,253 @@ +# pylint: disable=redefined-outer-name +# pylint: disable=unused-argument +# pylint: disable=unused-variable + +from collections.abc import AsyncIterator, Awaitable, Callable +from contextlib import asynccontextmanager +from typing import Any + +import pytest +import sqlalchemy as sa +from faker import Faker +from models_library.projects import ProjectID +from models_library.projects_nodes_io import NodeID +from models_library.users import UserID +from pydantic import TypeAdapter +from simcore_postgres_database.models.project_to_groups import project_to_groups +from simcore_postgres_database.storage_models import projects, users +from sqlalchemy.dialects.postgresql import insert as pg_insert +from sqlalchemy.ext.asyncio import AsyncConnection, AsyncEngine + +from .helpers.faker_factories import random_project, random_user + + +@asynccontextmanager +async def _user_context( + sqlalchemy_async_engine: AsyncEngine, *, name: str +) -> AsyncIterator[UserID]: + # inject a random user in db + + # NOTE: Ideally this (and next fixture) should be done via webserver API but at this point + # in time, the webserver service would bring more dependencies to other services + # which would make this test too complex. 
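+ # The RETURNING clause below hands back the generated primary key, so this
+ # context manager can yield a validated UserID and delete exactly that row
+ # on teardown.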
+ + # pylint: disable=no-value-for-parameter + stmt = users.insert().values(**random_user(name=name)).returning(users.c.id) + async with sqlalchemy_async_engine.begin() as conn: + result = await conn.execute(stmt) + row = result.one() + assert isinstance(row.id, int) + + try: + yield TypeAdapter(UserID).validate_python(row.id) + finally: + async with sqlalchemy_async_engine.begin() as conn: + await conn.execute(users.delete().where(users.c.id == row.id)) + + +@pytest.fixture +async def user_id(sqlalchemy_async_engine: AsyncEngine) -> AsyncIterator[UserID]: + async with _user_context(sqlalchemy_async_engine, name="test-user") as new_user_id: + yield new_user_id + + +@pytest.fixture +async def other_user_id(sqlalchemy_async_engine: AsyncEngine) -> AsyncIterator[UserID]: + async with _user_context( + sqlalchemy_async_engine, name="test-other-user" + ) as new_user_id: + yield new_user_id + + +@pytest.fixture +async def create_project( + user_id: UserID, sqlalchemy_async_engine: AsyncEngine +) -> AsyncIterator[Callable[[], Awaitable[dict[str, Any]]]]: + created_project_uuids = [] + + async def _creator(**kwargs) -> dict[str, Any]: + prj_config = {"prj_owner": user_id} + prj_config.update(kwargs) + async with sqlalchemy_async_engine.begin() as conn: + result = await conn.execute( + projects.insert() + .values(**random_project(**prj_config)) + .returning(sa.literal_column("*")) + ) + row = result.one() + created_project_uuids.append(row.uuid) + return dict(row._asdict()) + + yield _creator + # cleanup + async with sqlalchemy_async_engine.begin() as conn: + await conn.execute( + projects.delete().where(projects.c.uuid.in_(created_project_uuids)) + ) + + +@pytest.fixture +async def create_project_access_rights( + sqlalchemy_async_engine: AsyncEngine, +) -> AsyncIterator[Callable[[ProjectID, UserID, bool, bool, bool], Awaitable[None]]]: + _created = [] + + async def _creator( + project_id: ProjectID, user_id: UserID, read: bool, write: bool, delete: bool + ) -> None: + async with sqlalchemy_async_engine.begin() as conn: + result = await conn.execute( + project_to_groups.insert() + .values( + project_uuid=f"{project_id}", + gid=sa.select(users.c.primary_gid) + .where(users.c.id == user_id) + .scalar_subquery(), + read=read, + write=write, + delete=delete, + ) + .returning(sa.literal_column("*")) + ) + row = result.one() + _created.append((row.project_uuid, row.gid)) + + yield _creator + + # cleanup + async with sqlalchemy_async_engine.begin() as conn: + await conn.execute( + project_to_groups.delete().where( + sa.or_( + *( + (project_to_groups.c.project_uuid == pid) + & (project_to_groups.c.gid == gid) + for pid, gid in _created + ) + ) + ) + ) + + +@pytest.fixture +async def project_id( + create_project: Callable[[], Awaitable[dict[str, Any]]], +) -> ProjectID: + project = await create_project() + return ProjectID(project["uuid"]) + + +@pytest.fixture +async def collaborator_id( + sqlalchemy_async_engine: AsyncEngine, +) -> AsyncIterator[UserID]: + async with _user_context( + sqlalchemy_async_engine, name="collaborator" + ) as new_user_id: + yield TypeAdapter(UserID).validate_python(new_user_id) + + +@pytest.fixture +def share_with_collaborator( + sqlalchemy_async_engine: AsyncEngine, + collaborator_id: UserID, + user_id: UserID, + project_id: ProjectID, +) -> Callable[[], Awaitable[None]]: + async def _get_user_group(conn: AsyncConnection, query_user: int) -> int: + result = await conn.execute( + sa.select(users.c.primary_gid).where(users.c.id == query_user) + ) + row = result.fetchone() + assert 
row + primary_gid: int = row.primary_gid + return primary_gid + + async def _() -> None: + async with sqlalchemy_async_engine.begin() as conn: + result = await conn.execute( + sa.select(projects.c.access_rights).where( + projects.c.uuid == f"{project_id}" + ) + ) + row = result.fetchone() + assert row + access_rights: dict[str | int, Any] = row.access_rights + + access_rights[await _get_user_group(conn, user_id)] = { + "read": True, + "write": True, + "delete": True, + } + access_rights[await _get_user_group(conn, collaborator_id)] = { + "read": True, + "write": True, + "delete": False, + } + + await conn.execute( + projects.update() + .where(projects.c.uuid == f"{project_id}") + .values(access_rights=access_rights) + ) + + # project_to_groups needs to be updated + for group_id, permissions in access_rights.items(): + insert_stmt = pg_insert(project_to_groups).values( + project_uuid=f"{project_id}", + gid=int(group_id), + read=permissions["read"], + write=permissions["write"], + delete=permissions["delete"], + created=sa.func.now(), + modified=sa.func.now(), + ) + on_update_stmt = insert_stmt.on_conflict_do_update( + index_elements=[ + project_to_groups.c.project_uuid, + project_to_groups.c.gid, + ], + set_={ + "read": insert_stmt.excluded.read, + "write": insert_stmt.excluded.write, + "delete": insert_stmt.excluded.delete, + "modified": sa.func.now(), + }, + ) + await conn.execute(on_update_stmt) + + return _ + + +@pytest.fixture +async def create_project_node( + user_id: UserID, sqlalchemy_async_engine: AsyncEngine, faker: Faker +) -> Callable[..., Awaitable[NodeID]]: + async def _creator( + project_id: ProjectID, node_id: NodeID | None = None, **kwargs + ) -> NodeID: + async with sqlalchemy_async_engine.begin() as conn: + result = await conn.execute( + sa.select(projects.c.workbench).where( + projects.c.uuid == f"{project_id}" + ) + ) + row = result.fetchone() + assert row + project_workbench: dict[str, Any] = row.workbench + new_node_id = node_id or NodeID(f"{faker.uuid4()}") + node_data = { + "key": "simcore/services/frontend/file-picker", + "version": "1.0.0", + "label": "pytest_fake_node", + } + node_data.update(**kwargs) + project_workbench.update({f"{new_node_id}": node_data}) + await conn.execute( + projects.update() + .where(projects.c.uuid == f"{project_id}") + .values(workbench=project_workbench) + ) + return new_node_id + + return _creator diff --git a/packages/pytest-simcore/src/pytest_simcore/simcore_storage_datcore_adapter.py b/packages/pytest-simcore/src/pytest_simcore/simcore_storage_datcore_adapter.py new file mode 100644 index 00000000000..892d63060e0 --- /dev/null +++ b/packages/pytest-simcore/src/pytest_simcore/simcore_storage_datcore_adapter.py @@ -0,0 +1,66 @@ +# pylint:disable=unused-variable +# pylint:disable=unused-argument +# pylint:disable=redefined-outer-name + +import re +from collections.abc import Iterator + +import httpx +import pytest +import respx +from faker import Faker +from fastapi_pagination import Page, Params +from pytest_simcore.helpers.host import get_localhost_ip +from servicelib.aiohttp import status +from simcore_service_storage.modules.datcore_adapter.datcore_adapter_settings import ( + DatcoreAdapterSettings, +) + + +@pytest.fixture +def datcore_adapter_service_mock(faker: Faker) -> Iterator[respx.MockRouter]: + dat_core_settings = DatcoreAdapterSettings.create_from_envs() + datcore_adapter_base_url = dat_core_settings.endpoint + # mock base endpoint + with respx.mock( + base_url=datcore_adapter_base_url, + assert_all_called=False, + 
assert_all_mocked=True, ) as respx_mocker: + # NOTE: pass through localhost and the local IP + respx_mocker.route(host="127.0.0.1").pass_through() + respx_mocker.route(host=get_localhost_ip()).pass_through() + + respx_mocker.get("/user/profile", name="get_user_profile").respond( + status.HTTP_200_OK, json=faker.pydict(allowed_types=(str,)) + ) + respx_mocker.get( + re.compile(r"/datasets/(?P<dataset_id>[^/]+)/files_legacy") + ).respond(status.HTTP_200_OK, json=[]) + list_datasets_re = re.compile(r"/datasets") + respx_mocker.get(list_datasets_re, name="list_datasets").respond( + status.HTTP_200_OK, + json=Page.create(items=[], params=Params(size=10), total=0).model_dump( + mode="json" + ), + ) + + def _create_download_link(request, file_id): + return httpx.Response( + status.HTTP_404_NOT_FOUND, + json={"error": f"{file_id} not found!"}, + ) + + respx_mocker.get( + re.compile(r"/files/(?P<file_id>[^/]+)"), name="get_file_dowload_link" + ).mock(side_effect=_create_download_link) + + respx_mocker.get( + "/", + name="healthcheck", + ).respond(status.HTTP_200_OK, json={"message": "ok"}) + respx_mocker.get("", name="base_endpoint").respond( + status.HTTP_200_OK, json={"message": "root entrypoint"} + ) + + yield respx_mocker diff --git a/packages/pytest-simcore/src/pytest_simcore/simcore_storage_service.py b/packages/pytest-simcore/src/pytest_simcore/simcore_storage_service.py index a14e61a1ba5..02e3ddbc167 100644 --- a/packages/pytest-simcore/src/pytest_simcore/simcore_storage_service.py +++ b/packages/pytest-simcore/src/pytest_simcore/simcore_storage_service.py @@ -4,6 +4,7 @@ import os from collections.abc import Callable, Iterable from copy import deepcopy +from pathlib import Path import aiohttp import pytest @@ -12,7 +13,6 @@ from models_library.projects_nodes_io import NodeID, SimcoreS3FileID from pydantic import TypeAdapter from pytest_mock import MockerFixture -from servicelib.minio_utils import ServiceRetryPolicyUponInitialization from yarl import URL from .helpers.docker import get_service_published_port @@ -53,31 +53,41 @@ async def storage_service( assert storage_endpoint.host is not None assert storage_endpoint.port is not None mocker.patch( - "simcore_sdk.node_ports_common._filemanager._get_https_link_if_storage_secure", + "simcore_sdk.node_ports_common._filemanager_utils._get_https_link_if_storage_secure", replace_storage_endpoint(storage_endpoint.host, storage_endpoint.port), ) return storage_endpoint -# TODO: this can be used by ANY of the simcore services! 
-@tenacity.retry(**ServiceRetryPolicyUponInitialization().kwargs) +@tenacity.retry( + wait=tenacity.wait_fixed(1), + stop=tenacity.stop_after_delay(30), + reraise=True, +) async def wait_till_storage_responsive(storage_endpoint: URL): - async with aiohttp.ClientSession() as session: - async with session.get(storage_endpoint.with_path("/v0/")) as resp: - assert resp.status == 200 - data = await resp.json() - assert "data" in data - assert data["data"] is not None + async with ( + aiohttp.ClientSession() as session, + session.get(storage_endpoint.with_path("/v0/")) as resp, + ): + assert resp.status == 200 + data = await resp.json() + assert "data" in data + assert data["data"] is not None @pytest.fixture def create_simcore_file_id() -> Callable[[ProjectID, NodeID, str], SimcoreS3FileID]: def _creator( - project_id: ProjectID, node_id: NodeID, file_name: str + project_id: ProjectID, + node_id: NodeID, + file_name: str, + file_base_path: Path | None = None, ) -> SimcoreS3FileID: - return TypeAdapter(SimcoreS3FileID).validate_python( - f"{project_id}/{node_id}/{file_name}" - ) + s3_file_name = file_name + if file_base_path: + s3_file_name = f"{file_base_path / file_name}" + clean_path = Path(f"{project_id}/{node_id}/{s3_file_name}") + return TypeAdapter(SimcoreS3FileID).validate_python(f"{clean_path}") return _creator diff --git a/packages/service-integration/requirements/_base.txt b/packages/service-integration/requirements/_base.txt index 9f2a79caef1..332dcc97001 100644 --- a/packages/service-integration/requirements/_base.txt +++ b/packages/service-integration/requirements/_base.txt @@ -11,7 +11,7 @@ attrs==25.1.0 # referencing binaryornot==0.4.4 # via cookiecutter -certifi==2024.12.14 +certifi==2025.1.31 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -79,7 +79,7 @@ packaging==24.2 # via pytest pluggy==1.5.0 # via pytest -propcache==0.2.1 +propcache==0.3.0 # via yarl pydantic==2.10.6 # via @@ -99,11 +99,16 @@ pydantic-extra-types==2.10.2 # -r requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/models-library/requirements/_base.in -pydantic-settings==2.7.1 - # via -r requirements/../../../packages/models-library/requirements/_base.in +pydantic-settings==2.7.0 + # via + # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt + # -r requirements/../../../packages/models-library/requirements/_base.in pygments==2.19.1 # via rich -pytest==8.3.4 +pytest==8.3.5 # via -r requirements/_base.in python-dateutil==2.9.0.post0 # via arrow @@ -135,7 +140,7 @@ rich==13.9.4 # via # cookiecutter # typer -rpds-py==0.22.3 +rpds-py==0.23.1 # via # jsonschema # referencing @@ -145,7 +150,7 @@ six==1.17.0 # via python-dateutil text-unidecode==1.3 # via python-slugify -typer==0.15.1 +typer==0.15.2 # via -r requirements/_base.in types-python-dateutil==2.9.0.20241206 # via arrow diff --git 
a/packages/service-integration/requirements/_test.txt b/packages/service-integration/requirements/_test.txt index a9181a5e595..19f48613efa 100644 --- a/packages/service-integration/requirements/_test.txt +++ b/packages/service-integration/requirements/_test.txt @@ -2,7 +2,7 @@ attrs==25.1.0 # via # -c requirements/_base.txt # referencing -coverage==7.6.10 +coverage==7.6.12 # via # -r requirements/_test.in # pytest-cov @@ -19,7 +19,7 @@ pluggy==1.5.0 # via # -c requirements/_base.txt # pytest -pytest==8.3.4 +pytest==8.3.5 # via # -c requirements/_base.txt # -r requirements/_test.in @@ -39,7 +39,7 @@ referencing==0.35.1 # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt # types-jsonschema -rpds-py==0.22.3 +rpds-py==0.23.1 # via # -c requirements/_base.txt # referencing @@ -51,7 +51,7 @@ types-jsonschema==4.23.0.20241208 # via -r requirements/_test.in types-pyyaml==6.0.12.20241230 # via -r requirements/_test.in -types-requests==2.32.0.20241016 +types-requests==2.32.0.20250301 # via types-docker urllib3==2.3.0 # via diff --git a/packages/service-integration/requirements/_tools.txt b/packages/service-integration/requirements/_tools.txt index a3959933dfd..3b167383938 100644 --- a/packages/service-integration/requirements/_tools.txt +++ b/packages/service-integration/requirements/_tools.txt @@ -1,6 +1,6 @@ astroid==3.3.8 # via pylint -black==24.10.0 +black==25.1.0 # via -r requirements/../../../requirements/devenv.txt build==1.2.2.post1 # via pip-tools @@ -19,15 +19,15 @@ distlib==0.3.9 # via virtualenv filelock==3.17.0 # via virtualenv -identify==2.6.6 +identify==2.6.8 # via pre-commit -isort==5.13.2 +isort==6.0.1 # via # -r requirements/../../../requirements/devenv.txt # pylint mccabe==0.7.0 # via pylint -mypy==1.14.1 +mypy==1.15.0 # via -r requirements/../../../requirements/devenv.txt mypy-extensions==1.0.0 # via @@ -43,7 +43,7 @@ packaging==24.2 # build pathspec==0.12.1 # via black -pip==25.0 +pip==25.0.1 # via pip-tools pip-tools==7.4.1 # via -r requirements/../../../requirements/devenv.txt @@ -54,7 +54,7 @@ platformdirs==4.3.6 # virtualenv pre-commit==4.1.0 # via -r requirements/../../../requirements/devenv.txt -pylint==3.3.3 +pylint==3.3.4 # via -r requirements/../../../requirements/devenv.txt pyproject-hooks==1.2.0 # via @@ -65,9 +65,9 @@ pyyaml==6.0.2 # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt # pre-commit -ruff==0.9.3 +ruff==0.9.9 # via -r requirements/../../../requirements/devenv.txt -setuptools==75.8.0 +setuptools==75.8.2 # via pip-tools tomlkit==0.13.2 # via pylint @@ -75,7 +75,7 @@ typing-extensions==4.12.2 # via # -c requirements/_base.txt # mypy -virtualenv==20.29.1 +virtualenv==20.29.2 # via pre-commit wheel==0.45.1 # via pip-tools diff --git a/packages/service-integration/tests/test_versioning.py b/packages/service-integration/tests/test_versioning.py index 01c36e49082..26fe2962a61 100644 --- a/packages/service-integration/tests/test_versioning.py +++ b/packages/service-integration/tests/test_versioning.py @@ -2,10 +2,17 @@ # pylint: disable=unused-argument # pylint: disable=unused-variable -import json + +import itertools +from typing import Any import pytest from packaging.version import Version +from pydantic import BaseModel +from pytest_simcore.pydantic_models import ( + assert_validation_model, + iter_model_examples_in_class, +) from service_integration.versioning import ( ExecutableVersionInfo, ServiceVersionInfo, @@ -45,11 +52,15 @@ def test_bump_version_string( @pytest.mark.parametrize( - 
"model_cls", - [ExecutableVersionInfo, ServiceVersionInfo], + "model_cls, example_name, example_data", + itertools.chain( + iter_model_examples_in_class(ExecutableVersionInfo), + iter_model_examples_in_class(ServiceVersionInfo), + ), ) -def test_version_info_model_examples(model_cls, model_cls_examples): - for name, example in model_cls_examples.items(): - print(name, ":", json.dumps(example, indent=1)) - model_instance = model_cls(**example) - assert model_instance, f"Failed with {name}" +def test_version_info_model_examples( + model_cls: type[BaseModel], example_name: str, example_data: Any +): + assert_validation_model( + model_cls, example_name=example_name, example_data=example_data + ) diff --git a/packages/service-library/requirements/_aiohttp.txt b/packages/service-library/requirements/_aiohttp.txt index f649b58d694..345626fc06b 100644 --- a/packages/service-library/requirements/_aiohttp.txt +++ b/packages/service-library/requirements/_aiohttp.txt @@ -1,6 +1,6 @@ -aiohappyeyeballs==2.4.4 +aiohappyeyeballs==2.4.6 # via aiohttp -aiohttp==3.11.11 +aiohttp==3.11.13 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -21,7 +21,7 @@ attrs==25.1.0 # aiohttp # jsonschema # referencing -deprecated==1.2.17 +deprecated==1.2.18 # via # opentelemetry-api # opentelemetry-semantic-conventions @@ -45,7 +45,7 @@ multidict==6.1.0 # via # aiohttp # yarl -opentelemetry-api==1.29.0 +opentelemetry-api==1.30.0 # via # opentelemetry-instrumentation # opentelemetry-instrumentation-aiohttp-client @@ -53,27 +53,27 @@ opentelemetry-api==1.29.0 # opentelemetry-instrumentation-aiopg # opentelemetry-instrumentation-dbapi # opentelemetry-semantic-conventions -opentelemetry-instrumentation==0.50b0 +opentelemetry-instrumentation==0.51b0 # via # opentelemetry-instrumentation-aiohttp-client # opentelemetry-instrumentation-aiohttp-server # opentelemetry-instrumentation-aiopg # opentelemetry-instrumentation-dbapi -opentelemetry-instrumentation-aiohttp-client==0.50b0 +opentelemetry-instrumentation-aiohttp-client==0.51b0 # via -r requirements/_aiohttp.in -opentelemetry-instrumentation-aiohttp-server==0.50b0 +opentelemetry-instrumentation-aiohttp-server==0.51b0 # via -r requirements/_aiohttp.in -opentelemetry-instrumentation-aiopg==0.50b0 +opentelemetry-instrumentation-aiopg==0.51b0 # via -r requirements/_aiohttp.in -opentelemetry-instrumentation-dbapi==0.50b0 +opentelemetry-instrumentation-dbapi==0.51b0 # via opentelemetry-instrumentation-aiopg -opentelemetry-semantic-conventions==0.50b0 +opentelemetry-semantic-conventions==0.51b0 # via # opentelemetry-instrumentation # opentelemetry-instrumentation-aiohttp-client # opentelemetry-instrumentation-aiohttp-server # opentelemetry-instrumentation-dbapi -opentelemetry-util-http==0.50b0 +opentelemetry-util-http==0.51b0 # via # opentelemetry-instrumentation-aiohttp-client # opentelemetry-instrumentation-aiohttp-server @@ -81,7 +81,7 @@ packaging==24.2 # via opentelemetry-instrumentation prometheus-client==0.21.1 # via -r requirements/_aiohttp.in -propcache==0.2.1 +propcache==0.3.0 # via # aiohttp # yarl @@ -99,7 +99,7 @@ referencing==0.35.1 # -c requirements/../../../requirements/constraints.txt # jsonschema # jsonschema-specifications -rpds-py==0.22.3 +rpds-py==0.23.1 # via # jsonschema # referencing diff --git a/packages/service-library/requirements/_base.in 
b/packages/service-library/requirements/_base.in index 7961b097098..027a631287e 100644 --- a/packages/service-library/requirements/_base.in +++ b/packages/service-library/requirements/_base.in @@ -18,15 +18,16 @@ arrow # date/time faststream opentelemetry-api opentelemetry-exporter-otlp -opentelemetry-instrumentation-requests -opentelemetry-instrumentation-redis opentelemetry-instrumentation-logging +opentelemetry-instrumentation-redis +opentelemetry-instrumentation-requests opentelemetry-sdk psutil pydantic pyinstrument pyyaml redis +stream-zip tenacity toolz tqdm diff --git a/packages/service-library/requirements/_base.txt b/packages/service-library/requirements/_base.txt index e22224d901a..598db33e994 100644 --- a/packages/service-library/requirements/_base.txt +++ b/packages/service-library/requirements/_base.txt @@ -1,4 +1,4 @@ -aio-pika==9.5.4 +aio-pika==9.5.5 # via -r requirements/_base.in aiocache==0.12.3 # via -r requirements/_base.in @@ -8,9 +8,9 @@ aiodocker==0.24.0 # via -r requirements/_base.in aiofiles==24.1.0 # via -r requirements/_base.in -aiohappyeyeballs==2.4.4 +aiohappyeyeballs==2.4.6 # via aiohttp -aiohttp==3.11.11 +aiohttp==3.11.13 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -38,7 +38,7 @@ attrs==25.1.0 # aiohttp # jsonschema # referencing -certifi==2024.12.14 +certifi==2025.1.31 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -51,7 +51,7 @@ charset-normalizer==3.4.1 # via requests click==8.1.8 # via typer -deprecated==1.2.17 +deprecated==1.2.18 # via # opentelemetry-api # opentelemetry-exporter-otlp-proto-grpc @@ -65,13 +65,13 @@ exceptiongroup==1.2.2 # via aio-pika fast-depends==2.4.12 # via faststream -faststream==0.5.34 +faststream==0.5.35 # via -r requirements/_base.in frozenlist==1.5.0 # via # aiohttp # aiosignal -googleapis-common-protos==1.66.0 +googleapis-common-protos==1.68.0 # via # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http @@ -97,7 +97,7 @@ multidict==6.1.0 # via # aiohttp # yarl -opentelemetry-api==1.29.0 +opentelemetry-api==1.30.0 # via # -r requirements/_base.in # opentelemetry-exporter-otlp-proto-grpc @@ -108,44 +108,44 @@ opentelemetry-api==1.29.0 # opentelemetry-instrumentation-requests # opentelemetry-sdk # opentelemetry-semantic-conventions -opentelemetry-exporter-otlp==1.29.0 +opentelemetry-exporter-otlp==1.30.0 # via -r requirements/_base.in -opentelemetry-exporter-otlp-proto-common==1.29.0 +opentelemetry-exporter-otlp-proto-common==1.30.0 # via # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http -opentelemetry-exporter-otlp-proto-grpc==1.29.0 +opentelemetry-exporter-otlp-proto-grpc==1.30.0 # via opentelemetry-exporter-otlp -opentelemetry-exporter-otlp-proto-http==1.29.0 +opentelemetry-exporter-otlp-proto-http==1.30.0 # via opentelemetry-exporter-otlp -opentelemetry-instrumentation==0.50b0 +opentelemetry-instrumentation==0.51b0 # via # opentelemetry-instrumentation-logging # opentelemetry-instrumentation-redis # opentelemetry-instrumentation-requests -opentelemetry-instrumentation-logging==0.50b0 +opentelemetry-instrumentation-logging==0.51b0 # via -r 
requirements/_base.in -opentelemetry-instrumentation-redis==0.50b0 +opentelemetry-instrumentation-redis==0.51b0 # via -r requirements/_base.in -opentelemetry-instrumentation-requests==0.50b0 +opentelemetry-instrumentation-requests==0.51b0 # via -r requirements/_base.in -opentelemetry-proto==1.29.0 +opentelemetry-proto==1.30.0 # via # opentelemetry-exporter-otlp-proto-common # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http -opentelemetry-sdk==1.29.0 +opentelemetry-sdk==1.30.0 # via # -r requirements/_base.in # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http -opentelemetry-semantic-conventions==0.50b0 +opentelemetry-semantic-conventions==0.51b0 # via # opentelemetry-instrumentation # opentelemetry-instrumentation-redis # opentelemetry-instrumentation-requests # opentelemetry-sdk -opentelemetry-util-http==0.50b0 +opentelemetry-util-http==0.51b0 # via opentelemetry-instrumentation-requests orjson==3.10.15 # via @@ -163,7 +163,7 @@ packaging==24.2 # via opentelemetry-instrumentation pamqp==3.3.0 # via aiormq -propcache==0.2.1 +propcache==0.3.0 # via # aiohttp # yarl @@ -171,8 +171,10 @@ protobuf==5.29.3 # via # googleapis-common-protos # opentelemetry-proto -psutil==6.1.1 +psutil==7.0.0 # via -r requirements/_base.in +pycryptodome==3.21.0 + # via stream-zip pydantic==2.10.6 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -198,8 +200,14 @@ pydantic-extra-types==2.10.2 # -r requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in -pydantic-settings==2.7.1 +pydantic-settings==2.7.0 # via + # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt # -r requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/settings-library/requirements/_base.in pygments==2.19.1 @@ -244,7 +252,7 @@ rich==13.9.4 # via # -r requirements/../../../packages/settings-library/requirements/_base.in # typer -rpds-py==0.22.3 +rpds-py==0.23.1 # via # jsonschema # referencing @@ -254,13 +262,15 @@ six==1.17.0 # via python-dateutil sniffio==1.3.1 # via anyio +stream-zip==0.0.83 + # via -r requirements/_base.in tenacity==9.0.0 # via -r requirements/_base.in toolz==1.0.0 # via -r requirements/_base.in tqdm==4.67.1 # via -r requirements/_base.in -typer==0.15.1 +typer==0.15.2 # via -r requirements/../../../packages/settings-library/requirements/_base.in types-python-dateutil==2.9.0.20241206 # via arrow diff --git a/packages/service-library/requirements/_fastapi.in b/packages/service-library/requirements/_fastapi.in index e11871af331..1ea0f1c0477 100644 --- a/packages/service-library/requirements/_fastapi.in +++ 
b/packages/service-library/requirements/_fastapi.in @@ -7,6 +7,7 @@ fastapi +fastapi-lifespan-manager httpx opentelemetry-instrumentation-fastapi opentelemetry-instrumentation-httpx diff --git a/packages/service-library/requirements/_fastapi.txt b/packages/service-library/requirements/_fastapi.txt index 23d7a0ec7ee..48615d9819a 100644 --- a/packages/service-library/requirements/_fastapi.txt +++ b/packages/service-library/requirements/_fastapi.txt @@ -6,7 +6,7 @@ anyio==4.8.0 # starlette asgiref==3.8.1 # via opentelemetry-instrumentation-asgi -certifi==2024.12.14 +certifi==2025.1.31 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -18,11 +18,15 @@ certifi==2024.12.14 # httpx click==8.1.8 # via uvicorn -deprecated==1.2.17 +deprecated==1.2.18 # via # opentelemetry-api # opentelemetry-semantic-conventions -fastapi==0.115.7 +fastapi==0.115.11 + # via + # -r requirements/_fastapi.in + # fastapi-lifespan-manager +fastapi-lifespan-manager==0.1.4 # via -r requirements/_fastapi.in h11==0.14.0 # via @@ -45,31 +49,31 @@ idna==3.10 # httpx importlib-metadata==8.5.0 # via opentelemetry-api -opentelemetry-api==1.29.0 +opentelemetry-api==1.30.0 # via # opentelemetry-instrumentation # opentelemetry-instrumentation-asgi # opentelemetry-instrumentation-fastapi # opentelemetry-instrumentation-httpx # opentelemetry-semantic-conventions -opentelemetry-instrumentation==0.50b0 +opentelemetry-instrumentation==0.51b0 # via # opentelemetry-instrumentation-asgi # opentelemetry-instrumentation-fastapi # opentelemetry-instrumentation-httpx -opentelemetry-instrumentation-asgi==0.50b0 +opentelemetry-instrumentation-asgi==0.51b0 # via opentelemetry-instrumentation-fastapi -opentelemetry-instrumentation-fastapi==0.50b0 +opentelemetry-instrumentation-fastapi==0.51b0 # via -r requirements/_fastapi.in -opentelemetry-instrumentation-httpx==0.50b0 +opentelemetry-instrumentation-httpx==0.51b0 # via -r requirements/_fastapi.in -opentelemetry-semantic-conventions==0.50b0 +opentelemetry-semantic-conventions==0.51b0 # via # opentelemetry-instrumentation # opentelemetry-instrumentation-asgi # opentelemetry-instrumentation-fastapi # opentelemetry-instrumentation-httpx -opentelemetry-util-http==0.50b0 +opentelemetry-util-http==0.51b0 # via # opentelemetry-instrumentation-asgi # opentelemetry-instrumentation-fastapi @@ -95,7 +99,7 @@ pydantic-core==2.27.2 # via pydantic sniffio==1.3.1 # via anyio -starlette==0.45.3 +starlette==0.46.0 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt diff --git a/packages/service-library/requirements/_test.txt b/packages/service-library/requirements/_test.txt index 62ff9a22618..e2286213c7f 100644 --- a/packages/service-library/requirements/_test.txt +++ b/packages/service-library/requirements/_test.txt @@ -1,9 +1,9 @@ -aiohappyeyeballs==2.4.4 +aiohappyeyeballs==2.4.6 # via # -c requirements/_aiohttp.txt # -c requirements/_base.txt # aiohttp -aiohttp==3.11.11 +aiohttp==3.11.13 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_aiohttp.txt @@ -29,9 +29,9 @@ attrs==25.1.0 # jsonschema # pytest-docker # referencing -botocore==1.36.6 +botocore==1.37.4 # via -r 
requirements/_test.in -certifi==2024.12.14 +certifi==2025.1.31 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt @@ -43,7 +43,7 @@ charset-normalizer==3.4.1 # via # -c requirements/_base.txt # requests -coverage==7.6.10 +coverage==7.6.12 # via # -r requirements/_test.in # pytest-cov @@ -51,7 +51,7 @@ docker==7.1.0 # via -r requirements/_test.in execnet==2.1.1 # via pytest-xdist -faker==35.0.0 +faker==36.1.1 # via -r requirements/_test.in flaky==3.8.1 # via -r requirements/_test.in @@ -115,11 +115,11 @@ multidict==6.1.0 # -c requirements/_base.txt # aiohttp # yarl -mypy==1.14.1 +mypy==1.15.0 # via sqlalchemy mypy-extensions==1.0.0 # via mypy -numpy==2.2.2 +numpy==2.2.3 # via -r requirements/_test.in openapi-schema-validator==0.6.3 # via openapi-spec-validator @@ -136,13 +136,13 @@ pathable==0.4.4 # via jsonschema-path pillow==11.1.0 # via -r requirements/_test.in -pip==25.0 +pip==25.0.1 # via -r requirements/_test.in pluggy==1.5.0 # via pytest pprintpp==0.4.0 # via pytest-icdiff -propcache==0.2.1 +propcache==0.3.0 # via # -c requirements/_aiohttp.txt # -c requirements/_base.txt @@ -150,7 +150,7 @@ propcache==0.2.1 # yarl py-cpuinfo==9.0.0 # via pytest-benchmark -pytest==8.3.4 +pytest==8.3.5 # via # -r requirements/_test.in # pytest-aiohttp @@ -174,7 +174,7 @@ pytest-benchmark==5.1.0 # via -r requirements/_test.in pytest-cov==6.0.0 # via -r requirements/_test.in -pytest-docker==3.1.1 +pytest-docker==3.2.0 # via -r requirements/_test.in pytest-icdiff==0.9 # via -r requirements/_test.in @@ -192,7 +192,6 @@ python-dateutil==2.9.0.post0 # via # -c requirements/_base.txt # botocore - # faker python-dotenv==1.0.1 # via # -c requirements/_base.txt @@ -219,7 +218,7 @@ respx==0.22.0 # via -r requirements/_test.in rfc3339-validator==0.1.4 # via openapi-schema-validator -rpds-py==0.22.3 +rpds-py==0.23.1 # via # -c requirements/_aiohttp.txt # -c requirements/_base.txt @@ -247,22 +246,23 @@ termcolor==2.5.0 # via pytest-sugar types-aiofiles==24.1.0.20241221 # via -r requirements/_test.in -types-psutil==6.1.0.20241221 +types-psutil==7.0.0.20250218 # via -r requirements/_test.in types-psycopg2==2.9.21.20250121 # via -r requirements/_test.in -types-requests==2.32.0.20241016 +types-requests==2.32.0.20250301 # via types-tqdm -types-tqdm==4.67.0.20241221 +types-tqdm==4.67.0.20250301 # via -r requirements/_test.in typing-extensions==4.12.2 # via # -c requirements/_base.txt # -c requirements/_fastapi.txt # anyio - # faker # mypy # sqlalchemy2-stubs +tzdata==2025.1 + # via faker urllib3==2.3.0 # via # -c requirements/../../../requirements/constraints.txt diff --git a/packages/service-library/requirements/_tools.txt b/packages/service-library/requirements/_tools.txt index d7a9629ae3f..985c2c3bc85 100644 --- a/packages/service-library/requirements/_tools.txt +++ b/packages/service-library/requirements/_tools.txt @@ -1,6 +1,6 @@ astroid==3.3.8 # via pylint -black==24.10.0 +black==25.1.0 # via -r requirements/../../../requirements/devenv.txt build==1.2.2.post1 # via pip-tools @@ -19,15 +19,15 @@ distlib==0.3.9 # via virtualenv filelock==3.17.0 # via virtualenv -identify==2.6.6 +identify==2.6.8 # via pre-commit -isort==5.13.2 +isort==6.0.1 # via # -r requirements/../../../requirements/devenv.txt # pylint mccabe==0.7.0 # via pylint -mypy==1.14.1 +mypy==1.15.0 # via # -c requirements/_test.txt # -r requirements/../../../requirements/devenv.txt @@ -46,7 +46,7 @@ packaging==24.2 # build pathspec==0.12.1 # via black -pip==25.0 +pip==25.0.1 # via # -c requirements/_test.txt # 
pip-tools @@ -59,7 +59,7 @@ platformdirs==4.3.6 # virtualenv pre-commit==4.1.0 # via -r requirements/../../../requirements/devenv.txt -pylint==3.3.3 +pylint==3.3.4 # via -r requirements/../../../requirements/devenv.txt pyproject-hooks==1.2.0 # via @@ -71,9 +71,9 @@ pyyaml==6.0.2 # -c requirements/_base.txt # -c requirements/_test.txt # pre-commit -ruff==0.9.3 +ruff==0.9.9 # via -r requirements/../../../requirements/devenv.txt -setuptools==75.8.0 +setuptools==75.8.2 # via pip-tools tomlkit==0.13.2 # via pylint @@ -82,7 +82,7 @@ typing-extensions==4.12.2 # -c requirements/_base.txt # -c requirements/_test.txt # mypy -virtualenv==20.29.1 +virtualenv==20.29.2 # via pre-commit wheel==0.45.1 # via pip-tools diff --git a/packages/service-library/src/servicelib/aiohttp/long_running_tasks/client.py b/packages/service-library/src/servicelib/aiohttp/long_running_tasks/client.py index 04071d5d07c..e29fabc87fe 100644 --- a/packages/service-library/src/servicelib/aiohttp/long_running_tasks/client.py +++ b/packages/service-library/src/servicelib/aiohttp/long_running_tasks/client.py @@ -1,30 +1,32 @@ import asyncio -from collections.abc import AsyncGenerator, Coroutine -from dataclasses import dataclass -from typing import Any, Final, TypeAlias +import logging +from collections.abc import AsyncGenerator +from typing import Any from aiohttp import ClientConnectionError, ClientSession -from servicelib.aiohttp import status from tenacity import TryAgain, retry from tenacity.asyncio import AsyncRetrying +from tenacity.before_sleep import before_sleep_log from tenacity.retry import retry_if_exception_type from tenacity.stop import stop_after_delay from tenacity.wait import wait_random_exponential from yarl import URL -from ..rest_responses import unwrap_envelope +from ...long_running_tasks._constants import DEFAULT_POLL_INTERVAL_S, HOUR +from ...long_running_tasks._models import LRTask, RequestBody +from ...rest_responses import unwrap_envelope_if_required +from .. 
import status from .server import TaskGet, TaskId, TaskProgress, TaskStatus -RequestBody: TypeAlias = Any +_logger = logging.getLogger(__name__) + -_MINUTE: Final[int] = 60 # in secs -_HOUR: Final[int] = 60 * _MINUTE # in secs -_DEFAULT_POLL_INTERVAL_S: Final[float] = 1 _DEFAULT_AIOHTTP_RETRY_POLICY: dict[str, Any] = { "retry": retry_if_exception_type(ClientConnectionError), "wait": wait_random_exponential(max=20), "stop": stop_after_delay(60), "reraise": True, + "before_sleep": before_sleep_log(_logger, logging.INFO), } @@ -32,9 +34,7 @@ async def _start(session: ClientSession, url: URL, json: RequestBody | None) -> TaskGet: async with session.post(url, json=json) as response: response.raise_for_status() - data, error = unwrap_envelope(await response.json()) - assert not error # nosec - assert data is not None # nosec + data = unwrap_envelope_if_required(await response.json()) return TaskGet.model_validate(data) @@ -50,21 +50,18 @@ async def _wait_for_completion( stop=stop_after_delay(client_timeout), reraise=True, retry=retry_if_exception_type(TryAgain), + before_sleep=before_sleep_log(_logger, logging.DEBUG), ): with attempt: async with session.get(status_url) as response: response.raise_for_status() - data, error = unwrap_envelope(await response.json()) - assert not error # nosec - assert data is not None # nosec + data = unwrap_envelope_if_required(await response.json()) task_status = TaskStatus.model_validate(data) yield task_status.task_progress if not task_status.done: await asyncio.sleep( float( - response.headers.get( - "retry-after", _DEFAULT_POLL_INTERVAL_S - ) + response.headers.get("retry-after", DEFAULT_POLL_INTERVAL_S) ) ) msg = f"{task_id=}, {task_status.started=} has status: '{task_status.task_progress.message}' {task_status.task_progress.percent}%" @@ -73,7 +70,7 @@ async def _wait_for_completion( except TryAgain as exc: # this is a timeout msg = f"Long running task {task_id}, calling to {status_url} timed-out after {client_timeout} seconds" - raise asyncio.TimeoutError(msg) from exc + raise TimeoutError(msg) from exc @retry(**_DEFAULT_AIOHTTP_RETRY_POLICY) @@ -81,10 +78,7 @@ async def _task_result(session: ClientSession, result_url: URL) -> Any: async with session.get(result_url) as response: response.raise_for_status() if response.status != status.HTTP_204_NO_CONTENT: - data, error = unwrap_envelope(await response.json()) - assert not error # nosec - assert data # nosec - return data + return unwrap_envelope_if_required(await response.json()) return None @@ -92,31 +86,13 @@ async def _task_result(session: ClientSession, result_url: URL) -> Any: async def _abort_task(session: ClientSession, abort_url: URL) -> None: async with session.delete(abort_url) as response: response.raise_for_status() - data, error = unwrap_envelope(await response.json()) - assert not error # nosec - assert not data # nosec - - -@dataclass(frozen=True) -class LRTask: - progress: TaskProgress - _result: Coroutine[Any, Any, Any] | None = None - - def done(self) -> bool: - return self._result is not None - - async def result(self) -> Any: - if not self._result: - msg = "No result ready!" - raise ValueError(msg) - return await self._result async def long_running_task_request( session: ClientSession, url: URL, json: RequestBody | None = None, - client_timeout: int = 1 * _HOUR, + client_timeout: int = 1 * HOUR, ) -> AsyncGenerator[LRTask, None]: """Will use the passed `ClientSession` to call an oSparc long running task `url` passing `json` as request body.
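A minimal consumer of this generator-based client could look like the sketch below (the endpoint URL and the timeout are illustrative assumptions, not part of this change):

    import aiohttp
    from yarl import URL

    from servicelib.aiohttp.long_running_tasks.client import long_running_task_request

    async def consume_long_running_task() -> None:
        async with aiohttp.ClientSession() as session:
            url = URL("http://webserver:8080/v0/some-long-running-operation")  # assumed endpoint
            async for lr_task in long_running_task_request(
                session, url, json=None, client_timeout=60
            ):
                # intermediate items report the polling progress...
                print(f"progress: {lr_task.progress.percent}")
                # ...the final item is done() and carries the awaitable result
                if lr_task.done():
                    print("result:", await lr_task.result())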
@@ -143,7 +119,10 @@ async def long_running_task_request( _result=_task_result(session, URL(task.result_href)), ) - except (asyncio.CancelledError, asyncio.TimeoutError): + except (TimeoutError, asyncio.CancelledError): if task: await _abort_task(session, URL(task.abort_href)) raise + + +__all__: tuple[str, ...] = ("LRTask",) diff --git a/packages/service-library/src/servicelib/aiohttp/requests_validation.py b/packages/service-library/src/servicelib/aiohttp/requests_validation.py index a61d8d538cd..d2141dee6a3 100644 --- a/packages/service-library/src/servicelib/aiohttp/requests_validation.py +++ b/packages/service-library/src/servicelib/aiohttp/requests_validation.py @@ -1,4 +1,4 @@ -""" Parses and validation aiohttp requests against pydantic models +"""Parses and validates aiohttp requests against pydantic models Rationale: These functions follow an interface analogous to ``pydantic.tools``'s @@ -10,7 +10,7 @@ import json.decoder from collections.abc import Iterator from contextlib import contextmanager -from typing import TypeAlias, TypeVar, Union +from typing import TypeVar from aiohttp import web from common_library.json_serialization import json_dumps @@ -21,7 +21,6 @@ ModelClass = TypeVar("ModelClass", bound=BaseModel) ModelOrListOrDictType = TypeVar("ModelOrListOrDictType", bound=BaseModel | list | dict) -UnionOfModelTypes: TypeAlias = Union[type[ModelClass], type[ModelClass]] # noqa: UP007 @contextmanager @@ -132,7 +131,7 @@ def parse_request_path_parameters_as( def parse_request_query_parameters_as( - parameters_schema_cls: type[ModelClass] | UnionOfModelTypes, + parameters_schema_cls: type[ModelClass], request: web.Request, *, use_enveloped_error_v1: bool = True, diff --git a/packages/service-library/src/servicelib/aiohttp/rest_middlewares.py b/packages/service-library/src/servicelib/aiohttp/rest_middlewares.py index 5bdaf3bf6cf..d40abae5669 100644 --- a/packages/service-library/src/servicelib/aiohttp/rest_middlewares.py +++ b/packages/service-library/src/servicelib/aiohttp/rest_middlewares.py @@ -1,9 +1,8 @@ -""" rest - middlewares for error, enveloping and validation +"""rest - middlewares for error, enveloping and validation - SEE https://gist.github.com/amitripshtos/854da3f4217e3441e8fceea85b0cbd91 +SEE https://gist.github.com/amitripshtos/854da3f4217e3441e8fceea85b0cbd91 """ -import asyncio import json import logging from collections.abc import Awaitable, Callable @@ -18,22 +17,16 @@ from ..logging_errors import create_troubleshotting_log_kwargs from ..mimetype_constants import MIMETYPE_APPLICATION_JSON +from ..rest_responses import is_enveloped_from_map, is_enveloped_from_text from ..utils import is_production_environ -from .rest_responses import ( - create_data_response, - create_http_error, - is_enveloped_from_map, - is_enveloped_from_text, - wrap_as_envelope, -) +from .rest_responses import create_data_response, create_http_error, wrap_as_envelope from .rest_utils import EnvelopeFactory from .typing_extension import Handler, Middleware DEFAULT_API_VERSION = "v0" -_FMSG_INTERNAL_ERROR_USER_FRIENDLY_WITH_OEC = ( - "We apologize for the inconvenience." - " Our team has recorded the issue [SupportID={error_code}] and is working to resolve it as quickly as possible." - " Thank you for your patience" +_FMSG_INTERNAL_ERROR_USER_FRIENDLY = ( + "We apologize for the inconvenience. " + "The issue has been recorded; please report it if it persists.
) @@ -51,7 +44,6 @@ def error_middleware_factory( _is_prod: bool = is_production_environ() def _process_and_raise_unexpected_error(request: web.BaseRequest, err: Exception): - error_code = create_error_code(err) error_context: dict[str, Any] = { "request.remote": f"{request.remote}", @@ -59,14 +51,13 @@ def _process_and_raise_unexpected_error(request: web.BaseRequest, err: Exception "request.path": f"{request.path}", } - user_error_msg = _FMSG_INTERNAL_ERROR_USER_FRIENDLY_WITH_OEC.format( - error_code=error_code - ) + user_error_msg = _FMSG_INTERNAL_ERROR_USER_FRIENDLY http_error = create_http_error( err, user_error_msg, web.HTTPInternalServerError, skip_internal_error_details=_is_prod, + error_code=error_code, ) _logger.exception( **create_troubleshotting_log_kwargs( @@ -137,7 +128,7 @@ async def _middleware_handler(request: web.Request, handler: Handler): ) raise http_error from err - except asyncio.TimeoutError as err: + except TimeoutError as err: http_error = create_http_error( err, f"{err}", diff --git a/packages/service-library/src/servicelib/aiohttp/rest_responses.py b/packages/service-library/src/servicelib/aiohttp/rest_responses.py index d16f33b9e57..8ddf5090b5a 100644 --- a/packages/service-library/src/servicelib/aiohttp/rest_responses.py +++ b/packages/service-library/src/servicelib/aiohttp/rest_responses.py @@ -1,44 +1,20 @@ -""" Utils to check, convert and compose server responses for the RESTApi - -""" +"""Utils to check, convert and compose server responses for the RESTApi""" import inspect -import json -from collections.abc import Mapping from typing import Any from aiohttp import web, web_exceptions from aiohttp.web_exceptions import HTTPError, HTTPException +from common_library.error_codes import ErrorCodeStr from common_library.json_serialization import json_dumps from models_library.rest_error import ErrorGet, ErrorItemType -from servicelib.aiohttp.status import HTTP_200_OK +from servicelib.rest_constants import RESPONSE_MODEL_POLICY +from ..aiohttp.status import HTTP_200_OK from ..mimetype_constants import MIMETYPE_APPLICATION_JSON +from ..rest_responses import is_enveloped from ..status_codes_utils import get_code_description -_ENVELOPE_KEYS = ("data", "error") - - -def is_enveloped_from_map(payload: Mapping) -> bool: - return all(k in _ENVELOPE_KEYS for k in payload if not f"{k}".startswith("_")) - - -def is_enveloped_from_text(text: str) -> bool: - try: - payload = json.loads(text) - except json.decoder.JSONDecodeError: - return False - return is_enveloped_from_map(payload) - - -def is_enveloped(payload: Mapping | str) -> bool: - # pylint: disable=isinstance-second-argument-not-valid-type - if isinstance(payload, Mapping): - return is_enveloped_from_map(payload) - if isinstance(payload, str): - return is_enveloped_from_text(text=payload) - return False - def wrap_as_envelope( data: Any = None, @@ -47,13 +23,6 @@ def wrap_as_envelope( return {"data": data, "error": error} -def unwrap_envelope(payload: dict[str, Any]) -> tuple: - """ - Safe returns (data, error) tuple from a response payload - """ - return tuple(payload.get(k) for k in _ENVELOPE_KEYS) if payload else (None, None) - - # RESPONSES FACTORIES ------------------------------- @@ -85,6 +54,7 @@ def create_http_error( http_error_cls: type[HTTPError] = web.HTTPInternalServerError, *, skip_internal_error_details: bool = False, + error_code: ErrorCodeStr | None = None ) -> HTTPError: """ - Response body conforms OAS schema model @@ -94,33 +64,38 @@ def create_http_error( if not isinstance(errors, list): errors 
= [errors] - # TODO: guarantee no throw! - is_internal_error: bool = http_error_cls == web.HTTPInternalServerError default_message = reason or get_code_description(http_error_cls.status_code) if is_internal_error and skip_internal_error_details: - error = ErrorGet( - errors=[], - logs=[], - status=http_error_cls.status_code, - message=default_message, + error = ErrorGet.model_validate( + { + "status": http_error_cls.status_code, + "message": default_message, + "support_id": error_code, + } ) else: items = [ErrorItemType.from_error(err) for err in errors] - error = ErrorGet( - errors=items, - logs=[], - status=http_error_cls.status_code, - message=default_message, + error = ErrorGet.model_validate( + { + "errors": items, # NOTE: deprecated! + "status": http_error_cls.status_code, + "message": default_message, + "support_id": error_code, + } ) assert not http_error_cls.empty_body # nosec - payload = wrap_as_envelope(error=error) + payload = wrap_as_envelope( + error=error.model_dump(mode="json", **RESPONSE_MODEL_POLICY) + ) return http_error_cls( reason=reason, - text=json_dumps(payload), + text=json_dumps( + payload, + ), content_type=MIMETYPE_APPLICATION_JSON, ) diff --git a/packages/service-library/src/servicelib/archiving_utils/_interface_7zip.py b/packages/service-library/src/servicelib/archiving_utils/_interface_7zip.py index c14b25726a5..1e642895f1d 100644 --- a/packages/service-library/src/servicelib/archiving_utils/_interface_7zip.py +++ b/packages/service-library/src/servicelib/archiving_utils/_interface_7zip.py @@ -10,7 +10,6 @@ from typing import Final import tqdm -from models_library.basic_types import IDStr from pydantic import NonNegativeInt from servicelib.logging_utils import log_catch from tqdm.contrib.logging import tqdm_logging_redirect @@ -199,7 +198,7 @@ async def archive_dir( ) -> None: if progress_bar is None: progress_bar = ProgressBarData( - num_steps=1, description=IDStr(f"compressing {dir_to_compress.name}") + num_steps=1, description=f"compressing {dir_to_compress.name}" ) options = " ".join( @@ -223,7 +222,7 @@ async def archive_dir( async with AsyncExitStack() as exit_stack: sub_progress = await exit_stack.enter_async_context( - progress_bar.sub_progress(folder_size_bytes, description=IDStr("...")) + progress_bar.sub_progress(folder_size_bytes, description="...") ) tqdm_progress = exit_stack.enter_context( @@ -290,7 +289,7 @@ async def unarchive_dir( ) -> set[Path]: if progress_bar is None: progress_bar = ProgressBarData( - num_steps=1, description=IDStr(f"extracting {archive_to_extract.name}") + num_steps=1, description=f"extracting {archive_to_extract.name}" ) # get archive information @@ -304,7 +303,7 @@ async def unarchive_dir( async with AsyncExitStack() as exit_stack: sub_prog = await exit_stack.enter_async_context( - progress_bar.sub_progress(steps=total_bytes, description=IDStr("...")) + progress_bar.sub_progress(steps=total_bytes, description="...") ) tqdm_progress = exit_stack.enter_context( diff --git a/packages/service-library/src/servicelib/background_task.py b/packages/service-library/src/servicelib/background_task.py index 793d05b1f9b..feeb06ff475 100644 --- a/packages/service-library/src/servicelib/background_task.py +++ b/packages/service-library/src/servicelib/background_task.py @@ -60,6 +60,11 @@ def periodic( def _decorator( func: Callable[P, Coroutine[Any, Any, None]], ) -> Callable[P, Coroutine[Any, Any, None]]: + class _InternalTryAgain(TryAgain): + # Local exception to prevent reacting to similar TryAgain exceptions raised by the wrapped
func + # e.g. when this decorator is used twice on the same function + ... + nap = ( asyncio.sleep if early_wake_up_event is None @@ -71,7 +76,7 @@ def _decorator( wait=wait_fixed(interval.total_seconds()), reraise=True, retry=( - retry_if_exception_type(TryAgain) + retry_if_exception_type(_InternalTryAgain) if raise_on_error else retry_if_exception_type() ), @@ -80,7 +85,7 @@ def _decorator( @functools.wraps(func) async def _wrapper(*args: P.args, **kwargs: P.kwargs) -> None: await func(*args, **kwargs) - raise TryAgain + raise _InternalTryAgain return _wrapper diff --git a/packages/service-library/src/servicelib/bytes_iters/__init__.py b/packages/service-library/src/servicelib/bytes_iters/__init__.py new file mode 100644 index 00000000000..9d4fb6704df --- /dev/null +++ b/packages/service-library/src/servicelib/bytes_iters/__init__.py @@ -0,0 +1,15 @@ +from ._constants import DEFAULT_READ_CHUNK_SIZE +from ._input import DiskStreamReader +from ._models import BytesStreamer +from ._output import DiskStreamWriter +from ._stream_zip import ArchiveEntries, ArchiveFileEntry, get_zip_bytes_iter + +__all__: tuple[str, ...] = ( + "ArchiveEntries", + "ArchiveFileEntry", + "BytesStreamer", + "DEFAULT_READ_CHUNK_SIZE", + "DiskStreamReader", + "DiskStreamWriter", + "get_zip_bytes_iter", +) diff --git a/packages/service-library/src/servicelib/bytes_iters/_constants.py b/packages/service-library/src/servicelib/bytes_iters/_constants.py new file mode 100644 index 00000000000..d7259d34b7a --- /dev/null +++ b/packages/service-library/src/servicelib/bytes_iters/_constants.py @@ -0,0 +1,5 @@ +from typing import Final + +from pydantic import ByteSize, TypeAdapter + +DEFAULT_READ_CHUNK_SIZE: Final[int] = TypeAdapter(ByteSize).validate_python("1MiB") diff --git a/packages/service-library/src/servicelib/bytes_iters/_input.py b/packages/service-library/src/servicelib/bytes_iters/_input.py new file mode 100644 index 00000000000..becec0981fc --- /dev/null +++ b/packages/service-library/src/servicelib/bytes_iters/_input.py @@ -0,0 +1,25 @@ +from pathlib import Path + +import aiofiles +from models_library.bytes_iters import BytesIter, DataSize + +from ._constants import DEFAULT_READ_CHUNK_SIZE +from ._models import BytesStreamer + + +class DiskStreamReader: + def __init__(self, file_path: Path, *, chunk_size=DEFAULT_READ_CHUNK_SIZE): + self.file_path = file_path + self.chunk_size = chunk_size + + def get_bytes_streamer(self) -> BytesStreamer: + async def _() -> BytesIter: + async with aiofiles.open(self.file_path, "rb") as f: + while True: + chunk = await f.read(self.chunk_size) + if not chunk: + break + + yield chunk + + return BytesStreamer(DataSize(self.file_path.stat().st_size), _) diff --git a/packages/service-library/src/servicelib/bytes_iters/_models.py b/packages/service-library/src/servicelib/bytes_iters/_models.py new file mode 100644 index 00000000000..9eeec804b5b --- /dev/null +++ b/packages/service-library/src/servicelib/bytes_iters/_models.py @@ -0,0 +1,18 @@ +from dataclasses import dataclass + +from models_library.bytes_iters import BytesIter, BytesIterCallable, DataSize + +from ..progress_bar import ProgressBarData + + +@dataclass(frozen=True) +class BytesStreamer: + data_size: DataSize + bytes_iter_callable: BytesIterCallable + + async def with_progress_bytes_iter( + self, progress_bar: ProgressBarData + ) -> BytesIter: + async for chunk in self.bytes_iter_callable(): + await progress_bar.update(len(chunk)) + yield chunk diff --git a/packages/service-library/src/servicelib/bytes_iters/_output.py
b/packages/service-library/src/servicelib/bytes_iters/_output.py new file mode 100644 index 00000000000..9995ce4d33b --- /dev/null +++ b/packages/service-library/src/servicelib/bytes_iters/_output.py @@ -0,0 +1,29 @@ +from pathlib import Path + +import aiofiles +from models_library.bytes_iters import BytesIter + +from ..s3_utils import FileLikeBytesIterReader + + +class DiskStreamWriter: + def __init__(self, destination_path: Path): + self.destination_path = destination_path + + async def write_from_bytes_iter(self, stream: BytesIter) -> None: + async with aiofiles.open(self.destination_path, "wb") as f: + async for chunk in stream: + await f.write(chunk) + await f.flush() + + async def write_from_file_like( + self, file_like_reader: FileLikeBytesIterReader + ) -> None: + async with aiofiles.open(self.destination_path, "wb") as f: + while True: + chunk = await file_like_reader.read(100) + if not chunk: + break + + await f.write(chunk) + await f.flush() diff --git a/packages/service-library/src/servicelib/bytes_iters/_stream_zip.py b/packages/service-library/src/servicelib/bytes_iters/_stream_zip.py new file mode 100644 index 00000000000..3f1f89a0e49 --- /dev/null +++ b/packages/service-library/src/servicelib/bytes_iters/_stream_zip.py @@ -0,0 +1,54 @@ +from collections.abc import AsyncIterable +from datetime import UTC, datetime +from stat import S_IFREG +from typing import TypeAlias + +from models_library.bytes_iters import BytesIter, DataSize +from stream_zip import ZIP_32, AsyncMemberFile, async_stream_zip + +from ..progress_bar import ProgressBarData +from ._models import BytesStreamer + +FileNameInArchive: TypeAlias = str +ArchiveFileEntry: TypeAlias = tuple[FileNameInArchive, BytesStreamer] +ArchiveEntries: TypeAlias = list[ArchiveFileEntry] + + +async def _member_files_iter( + archive_entries: ArchiveEntries, progress_bar: ProgressBarData +) -> AsyncIterable[AsyncMemberFile]: + for file_name, byte_streamer in archive_entries: + yield ( + file_name, + datetime.now(UTC), + S_IFREG | 0o600, + ZIP_32, + byte_streamer.with_progress_bytes_iter(progress_bar=progress_bar), + ) + + +async def get_zip_bytes_iter( + archive_entries: ArchiveEntries, + *, + progress_bar: ProgressBarData | None = None, + chunk_size: int, +) -> BytesIter: + # NOTE: this is a CPU-bound task; even though the loop is not blocked, + # the CPU is still used for compressing the content.
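+ # Illustrative usage, a sketch only (the file names below are assumed,
+ # they are not part of this module): stream a zip of on-disk files to
+ # disk without ever holding the whole archive in memory:
+ #
+ #     entries: ArchiveEntries = [
+ #         ("data/a.bin", DiskStreamReader(Path("a.bin")).get_bytes_streamer()),
+ #         ("data/b.bin", DiskStreamReader(Path("b.bin")).get_bytes_streamer()),
+ #     ]
+ #     await DiskStreamWriter(Path("out.zip")).write_from_bytes_iter(
+ #         get_zip_bytes_iter(entries, chunk_size=DEFAULT_READ_CHUNK_SIZE)
+ #     )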
+ if progress_bar is None: + progress_bar = ProgressBarData(num_steps=1, description="zip archive stream") + + total_stream_length = DataSize( + sum(bytes_streamer.data_size for _, bytes_streamer in archive_entries) + ) + description = f"files: count={len(archive_entries)}, size={total_stream_length.human_readable()}" + + async with progress_bar.sub_progress( + steps=total_stream_length, description=description, progress_unit="Byte" + ) as sub_progress: + # NOTE: do not disable compression or the streams will be + # loaded fully in memory before yielding their content + async for chunk in async_stream_zip( + _member_files_iter(archive_entries, sub_progress), chunk_size=chunk_size + ): + yield chunk diff --git a/packages/service-library/src/servicelib/docker_utils.py b/packages/service-library/src/servicelib/docker_utils.py index 2ce1fab2fb5..98c897e3eca 100644 --- a/packages/service-library/src/servicelib/docker_utils.py +++ b/packages/service-library/src/servicelib/docker_utils.py @@ -1,3 +1,4 @@ +import asyncio import logging from collections.abc import Awaitable, Callable from contextlib import AsyncExitStack @@ -11,8 +12,21 @@ from models_library.docker import DockerGenericTag from models_library.generated_models.docker_rest_api import ProgressDetail from models_library.utils.change_case import snake_to_camel -from pydantic import BaseModel, ByteSize, ConfigDict, TypeAdapter, ValidationError +from pydantic import ( + BaseModel, + ByteSize, + ConfigDict, + NonNegativeInt, + TypeAdapter, + ValidationError, +) from settings_library.docker_registry import RegistrySettings +from tenacity import ( + retry, + retry_if_exception_type, + stop_after_attempt, + wait_random_exponential, +) from yarl import URL from .logging_utils import LogLevelInt @@ -209,6 +223,8 @@ async def pull_image( progress_bar: ProgressBarData, log_cb: LogCB, image_information: DockerImageManifestsV2 | None, + *, + retry_upon_error_count: NonNegativeInt = 10, ) -> None: """pull a docker image to the host machine. @@ -219,7 +235,9 @@ async def pull_image( progress_bar -- the current progress bar log_cb -- a callback function to send logs to image_information -- the image layer information. If this is None, then no fine progress will be retrieved. + retry_upon_error_count -- number of tries if there is a TimeoutError. Usually caused by networking issues.
""" + registry_auth = None if registry_settings.REGISTRY_URL and registry_settings.REGISTRY_URL in image: registry_auth = { @@ -245,39 +263,64 @@ async def pull_image( client = await exit_stack.enter_async_context(aiodocker.Docker()) - reported_progress = 0.0 - async for pull_progress in client.images.pull( - image, stream=True, auth=registry_auth - ): - try: - parsed_progress = TypeAdapter(_DockerPullImage).validate_python( - pull_progress + def _reset_progress_from_previous_attempt() -> None: + for pulled_status in layer_id_to_size.values(): + pulled_status.downloaded = 0 + pulled_status.extracted = 0 + + attempt: NonNegativeInt = 1 + + @retry( + wait=wait_random_exponential(), + stop=stop_after_attempt(retry_upon_error_count), + reraise=True, + retry=retry_if_exception_type(asyncio.TimeoutError), + ) + async def _pull_image_with_retry() -> None: + nonlocal attempt + if attempt > 1: + # for each attempt rest the progress + progress_bar.reset() + _reset_progress_from_previous_attempt() + attempt += 1 + + _logger.info("attempt '%s' trying to pull image='%s'", attempt, image) + + reported_progress = 0.0 + async for pull_progress in client.images.pull( + image, stream=True, auth=registry_auth + ): + try: + parsed_progress = TypeAdapter(_DockerPullImage).validate_python( + pull_progress + ) + except ValidationError: + _logger.exception( + "Unexpected error while validating '%s'. " + "TIP: This is probably an unforeseen pull status text that shall be added to the code. " + "The pulling process will still continue.", + f"{pull_progress=}", + ) + else: + await _parse_pull_information( + parsed_progress, layer_id_to_size=layer_id_to_size + ) + + # compute total progress + total_downloaded_size = sum( + layer.downloaded for layer in layer_id_to_size.values() ) - except ValidationError: - _logger.exception( - "Unexpected error while validating '%s'. " - "TIP: This is probably an unforeseen pull status text that shall be added to the code. 
" - "The pulling process will still continue.", - f"{pull_progress=}", + total_extracted_size = sum( + layer.extracted for layer in layer_id_to_size.values() ) - else: - await _parse_pull_information( - parsed_progress, layer_id_to_size=layer_id_to_size + total_progress = (total_downloaded_size + total_extracted_size) / 2.0 + progress_to_report = total_progress - reported_progress + await progress_bar.update(progress_to_report) + reported_progress = total_progress + + await log_cb( + f"pulling {image_short_name}: {pull_progress}...", + logging.DEBUG, ) - # compute total progress - total_downloaded_size = sum( - layer.downloaded for layer in layer_id_to_size.values() - ) - total_extracted_size = sum( - layer.extracted for layer in layer_id_to_size.values() - ) - total_progress = (total_downloaded_size + total_extracted_size) / 2.0 - progress_to_report = total_progress - reported_progress - await progress_bar.update(progress_to_report) - reported_progress = total_progress - - await log_cb( - f"pulling {image_short_name}: {pull_progress}...", - logging.DEBUG, - ) + await _pull_image_with_retry() diff --git a/packages/service-library/src/servicelib/fastapi/cancellation_middleware.py b/packages/service-library/src/servicelib/fastapi/cancellation_middleware.py new file mode 100644 index 00000000000..8116869af5d --- /dev/null +++ b/packages/service-library/src/servicelib/fastapi/cancellation_middleware.py @@ -0,0 +1,80 @@ +import asyncio +import logging +from typing import NoReturn + +from starlette.requests import Request +from starlette.types import ASGIApp, Message, Receive, Scope, Send + +from ..logging_utils import log_context + +_logger = logging.getLogger(__name__) + + +class _TerminateTaskGroupError(Exception): + pass + + +async def _message_poller( + request: Request, queue: asyncio.Queue, receive: Receive +) -> NoReturn: + while True: + message = await receive() + if message["type"] == "http.disconnect": + _logger.debug( + "client disconnected, terminating request to %s!", request.url + ) + raise _TerminateTaskGroupError + + # Puts the message in the queue + await queue.put(message) + + +async def _handler( + app: ASGIApp, scope: Scope, queue: asyncio.Queue[Message], send: Send +) -> None: + return await app(scope, queue.get, send) + + +class RequestCancellationMiddleware: + """ASGI Middleware to cancel server requests in case of client disconnection. + Reason: FastAPI-based (e.g. starlette) servers do not automatically cancel + server requests in case of client disconnection. This middleware will cancel + the server request in case of client disconnection via asyncio.CancelledError. + + WARNING: FastAPI BackgroundTasks will also get cancelled. Use with care. 
+ TIP: use asyncio.Task in that case + """ + + def __init__(self, app: ASGIApp) -> None: + self.app = app + _logger.warning( + "CancellationMiddleware is in use, in case of client disconnection, " + "FastAPI BackgroundTasks will be cancelled too!", + ) + + async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None: + if scope["type"] != "http": + await self.app(scope, receive, send) + return + + # Let's make a shared queue for the request messages + queue: asyncio.Queue[Message] = asyncio.Queue() + request = Request(scope) + + with log_context(_logger, logging.DEBUG, f"cancellable request {request.url}"): + try: + async with asyncio.TaskGroup() as tg: + handler_task = tg.create_task( + _handler(self.app, scope, queue, send) + ) + poller_task = tg.create_task( + _message_poller(request, queue, receive) + ) + await handler_task + poller_task.cancel() + except* _TerminateTaskGroupError: + if not handler_task.done(): + _logger.info( + "The client disconnected. Request to %s was cancelled.", + request.url, + ) diff --git a/services/director/src/simcore_service_director/client_session.py b/packages/service-library/src/servicelib/fastapi/client_session.py similarity index 68% rename from services/director/src/simcore_service_director/client_session.py rename to packages/service-library/src/servicelib/fastapi/client_session.py index de700737399..c3bc2728c64 100644 --- a/services/director/src/simcore_service_director/client_session.py +++ b/packages/service-library/src/servicelib/fastapi/client_session.py @@ -2,9 +2,12 @@ from fastapi import FastAPI -def setup_client_session(app: FastAPI) -> None: +def setup_client_session(app: FastAPI, *, max_keepalive_connections: int = 20) -> None: async def on_startup() -> None: - session = httpx.AsyncClient(transport=httpx.AsyncHTTPTransport(http2=True)) + session = httpx.AsyncClient( + transport=httpx.AsyncHTTPTransport(http2=True), + limits=httpx.Limits(max_keepalive_connections=max_keepalive_connections), + ) app.state.aiohttp_client_session = session async def on_shutdown() -> None: diff --git a/packages/service-library/src/servicelib/fastapi/db_asyncpg_engine.py b/packages/service-library/src/servicelib/fastapi/db_asyncpg_engine.py index a45e5dc2145..920f68008ae 100644 --- a/packages/service-library/src/servicelib/fastapi/db_asyncpg_engine.py +++ b/packages/service-library/src/servicelib/fastapi/db_asyncpg_engine.py @@ -5,6 +5,7 @@ from simcore_postgres_database.utils_aiosqlalchemy import ( # type: ignore[import-not-found] # this on is unclear get_pg_engine_stateinfo, ) +from sqlalchemy.ext.asyncio import AsyncEngine from ..db_asyncpg_utils import create_async_engine_and_pg_database_ready from ..logging_utils import log_context @@ -31,3 +32,8 @@ async def close_db_connection(app: FastAPI) -> None: with log_context(_logger, logging.DEBUG, f"db disconnect of {app.state.engine}"): if engine := app.state.engine: await engine.dispose() + + +def get_engine(app: FastAPI) -> AsyncEngine: + assert isinstance(app.state.engine, AsyncEngine) # nosec + return app.state.engine diff --git a/packages/service-library/src/servicelib/fastapi/docker.py b/packages/service-library/src/servicelib/fastapi/docker.py new file mode 100644 index 00000000000..058c0c676b5 --- /dev/null +++ b/packages/service-library/src/servicelib/fastapi/docker.py @@ -0,0 +1,83 @@ +import asyncio +import logging +from collections.abc import AsyncIterator +from contextlib import AsyncExitStack +from typing import Final + +import aiodocker +import aiohttp +import tenacity +from aiohttp
import ClientSession +from fastapi import FastAPI +from fastapi_lifespan_manager import State +from pydantic import NonNegativeInt +from servicelib.fastapi.lifespan_utils import LifespanGenerator +from settings_library.docker_api_proxy import DockerApiProxysettings + +_logger = logging.getLogger(__name__) + +_DEFAULT_DOCKER_API_PROXY_HEALTH_TIMEOUT: Final[NonNegativeInt] = 5 + + +def get_lifespan_remote_docker_client( + settings: DockerApiProxysettings, +) -> LifespanGenerator: + async def _(app: FastAPI) -> AsyncIterator[State]: + async with AsyncExitStack() as exit_stack: + # create the (optionally authenticated) session only once and register + # it on the exit stack so it is closed again on teardown + session: ClientSession | None = None + if settings.DOCKER_API_PROXY_USER and settings.DOCKER_API_PROXY_PASSWORD: + session = await exit_stack.enter_async_context( + ClientSession( + auth=aiohttp.BasicAuth( + login=settings.DOCKER_API_PROXY_USER, + password=settings.DOCKER_API_PROXY_PASSWORD.get_secret_value(), + ) + ) + ) + + client = await exit_stack.enter_async_context( + aiodocker.Docker(url=settings.base_url, session=session) + ) + + app.state.remote_docker_client = client + + await wait_till_docker_api_proxy_is_responsive(app) + + # NOTE this has to be inside exit_stack scope + yield {} + + return _ + + +@tenacity.retry( + wait=tenacity.wait_fixed(5), + stop=tenacity.stop_after_delay(60), + before_sleep=tenacity.before_sleep_log(_logger, logging.WARNING), + reraise=True, +) +async def wait_till_docker_api_proxy_is_responsive(app: FastAPI) -> None: + await is_docker_api_proxy_ready(app) + + +async def is_docker_api_proxy_ready( + app: FastAPI, *, timeout=_DEFAULT_DOCKER_API_PROXY_HEALTH_TIMEOUT # noqa: ASYNC109 +) -> bool: + try: + await asyncio.wait_for(get_remote_docker_client(app).version(), timeout=timeout) + except (aiodocker.DockerError, TimeoutError): + return False + return True + + +def get_remote_docker_client(app: FastAPI) -> aiodocker.Docker: + assert isinstance(app.state.remote_docker_client, aiodocker.Docker) # nosec + return app.state.remote_docker_client diff --git a/packages/service-library/src/servicelib/fastapi/docker_utils.py b/packages/service-library/src/servicelib/fastapi/docker_utils.py index 420c1418873..20900916963 100644 --- a/packages/service-library/src/servicelib/fastapi/docker_utils.py +++ b/packages/service-library/src/servicelib/fastapi/docker_utils.py @@ -3,7 +3,6 @@ from typing import Final import httpx -from models_library.basic_types import IDStr from models_library.docker import DockerGenericTag from pydantic import ByteSize, TypeAdapter, ValidationError from settings_library.docker_registry import RegistrySettings @@ -129,9 +128,7 @@ async def pull_images( num_steps=images_total_size, progress_report_cb=progress_cb, progress_unit="Byte", - description=IDStr( - f"pulling {len(images)} images", - ), + description=f"pulling {len(images)} images", ) as pbar: await asyncio.gather( diff --git a/packages/service-library/src/servicelib/fastapi/http_error.py b/packages/service-library/src/servicelib/fastapi/http_error.py new file mode 100644 index 00000000000..8640fbf2dbb --- /dev/null +++ b/packages/service-library/src/servicelib/fastapi/http_error.py @@ -0,0 +1,115 @@ +from collections.abc import Awaitable, Callable +from typing import TypeVar + +from fastapi import FastAPI, HTTPException, status +from fastapi.encoders import jsonable_encoder +from
fastapi.exceptions import RequestValidationError +from fastapi.openapi.constants import REF_PREFIX +from fastapi.openapi.utils import validation_error_response_definition +from fastapi.requests import Request +from fastapi.responses import JSONResponse +from pydantic import ValidationError + +validation_error_response_definition["properties"] = { + "errors": { + "title": "Validation errors", + "type": "array", + "items": {"$ref": f"{REF_PREFIX}ValidationError"}, + }, +} + + +TException = TypeVar("TException") + + +def make_http_error_handler_for_exception( + status_code: int, + exception_cls: type[TException], + *, + envelope_error: bool, + error_extractor: Callable[[TException], list[str]] | None = None, +) -> Callable[[Request, Exception], Awaitable[JSONResponse]]: + """ + Produces a handler for BaseException-type exceptions which converts them + into an error JSON response with a given status code + + SEE https://docs.python.org/3/library/exceptions.html#concrete-exceptions + """ + + async def _http_error_handler(_: Request, exc: Exception) -> JSONResponse: + assert isinstance(exc, exception_cls) # nosec + error_content = { + "errors": error_extractor(exc) if error_extractor else [f"{exc}"] + } + + return JSONResponse( + content=jsonable_encoder( + {"error": error_content} if envelope_error else error_content + ), + status_code=status_code, + ) + + return _http_error_handler + + +def _request_validation_error_extractor( + validation_error: RequestValidationError, +) -> list[str]: + return [f"{e}" for e in validation_error.errors()] + + +def _make_default_http_error_handler( + *, envelope_error: bool +) -> Callable[[Request, Exception], Awaitable[JSONResponse]]: + async def _http_error_handler(_: Request, exc: Exception) -> JSONResponse: + assert isinstance(exc, HTTPException) + + error_content = {"errors": [exc.detail]} + + return JSONResponse( + content=jsonable_encoder( + {"error": error_content} if envelope_error else error_content + ), + status_code=exc.status_code, + ) + + return _http_error_handler + + +def set_app_default_http_error_handlers(app: FastAPI) -> None: + app.add_exception_handler( + HTTPException, _make_default_http_error_handler(envelope_error=True) + ) + + app.add_exception_handler( + RequestValidationError, + make_http_error_handler_for_exception( + status.HTTP_422_UNPROCESSABLE_ENTITY, + RequestValidationError, + envelope_error=True, + error_extractor=_request_validation_error_extractor, + ), + ) + + app.add_exception_handler( + ValidationError, + make_http_error_handler_for_exception( + status.HTTP_500_INTERNAL_SERVER_ERROR, + ValidationError, + envelope_error=True, + ), + ) + + # SEE https://docs.python.org/3/library/exceptions.html#exception-hierarchy + app.add_exception_handler( + NotImplementedError, + make_http_error_handler_for_exception( + status.HTTP_501_NOT_IMPLEMENTED, NotImplementedError, envelope_error=True + ), + ) + app.add_exception_handler( + Exception, + make_http_error_handler_for_exception( + status.HTTP_500_INTERNAL_SERVER_ERROR, Exception, envelope_error=True + ), + ) diff --git a/packages/service-library/src/servicelib/fastapi/lifespan_utils.py b/packages/service-library/src/servicelib/fastapi/lifespan_utils.py new file mode 100644 index 00000000000..ee2808078e6 --- /dev/null +++ b/packages/service-library/src/servicelib/fastapi/lifespan_utils.py @@ -0,0 +1,20 @@ +from collections.abc import AsyncIterator +from typing import Protocol + +from fastapi import FastAPI +from fastapi_lifespan_manager import LifespanManager, State + + +class 
LifespanGenerator(Protocol): + def __call__(self, app: FastAPI) -> AsyncIterator["State"]: + ... + + +def combine_lifespans(*generators: LifespanGenerator) -> LifespanManager: + + manager = LifespanManager() + + for generator in generators: + manager.add(generator) + + return manager diff --git a/packages/service-library/src/servicelib/fastapi/long_running_tasks/_routes.py b/packages/service-library/src/servicelib/fastapi/long_running_tasks/_routes.py index 80d6fb5ceab..260a9e9d0b3 100644 --- a/packages/service-library/src/servicelib/fastapi/long_running_tasks/_routes.py +++ b/packages/service-library/src/servicelib/fastapi/long_running_tasks/_routes.py @@ -1,4 +1,6 @@ -from fastapi import APIRouter, Depends, Request, status +from typing import Annotated, Any + +from fastapi import APIRouter, Depends, Query, Request, status from ...long_running_tasks._errors import TaskNotCompletedError, TaskNotFoundError from ...long_running_tasks._models import TaskGet, TaskId, TaskResult, TaskStatus @@ -12,7 +14,7 @@ @router.get("", response_model=list[TaskGet]) @cancel_on_disconnect async def list_tasks( - request: Request, tasks_manager: TasksManager = Depends(get_tasks_manager) + request: Request, tasks_manager: Annotated[TasksManager, Depends(get_tasks_manager)] ) -> list[TaskGet]: assert request # nosec return [ @@ -29,6 +31,7 @@ async def list_tasks( @router.get( "/{task_id}", + response_model=TaskStatus, responses={ status.HTTP_404_NOT_FOUND: {"description": "Task does not exist"}, }, @@ -37,7 +40,7 @@ async def list_tasks( async def get_task_status( request: Request, task_id: TaskId, - tasks_manager: TasksManager = Depends(get_tasks_manager), + tasks_manager: Annotated[TasksManager, Depends(get_tasks_manager)], ) -> TaskStatus: assert request # nosec return tasks_manager.get_task_status(task_id=task_id, with_task_context=None) @@ -56,12 +59,17 @@ async def get_task_status( async def get_task_result( request: Request, task_id: TaskId, - tasks_manager: TasksManager = Depends(get_tasks_manager), -) -> TaskResult: + tasks_manager: Annotated[TasksManager, Depends(get_tasks_manager)], + *, + return_exception: Annotated[bool, Query()] = False, +) -> TaskResult | Any: assert request # nosec # TODO: refactor this to use same as in https://github.com/ITISFoundation/osparc-simcore/issues/3265 try: - task_result = tasks_manager.get_task_result_old(task_id=task_id) + if return_exception: + task_result = tasks_manager.get_task_result(task_id, with_task_context=None) + else: + task_result = tasks_manager.get_task_result_old(task_id=task_id) await tasks_manager.remove_task( task_id, with_task_context=None, reraise_errors=False ) @@ -89,7 +97,7 @@ async def get_task_result( async def cancel_and_delete_task( request: Request, task_id: TaskId, - tasks_manager: TasksManager = Depends(get_tasks_manager), + tasks_manager: Annotated[TasksManager, Depends(get_tasks_manager)], ) -> None: assert request # nosec await tasks_manager.remove_task(task_id, with_task_context=None) diff --git a/packages/service-library/src/servicelib/fastapi/long_running_tasks/client.py b/packages/service-library/src/servicelib/fastapi/long_running_tasks/client.py index 53ad39da916..c82bde0fe4e 100644 --- a/packages/service-library/src/servicelib/fastapi/long_running_tasks/client.py +++ b/packages/service-library/src/servicelib/fastapi/long_running_tasks/client.py @@ -2,28 +2,163 @@ Provides a convenient way to return the result given a TaskId. 
""" +import asyncio +import logging +from collections.abc import AsyncGenerator +from typing import Any + +import httpx +from fastapi import status +from models_library.api_schemas_long_running_tasks.base import TaskProgress +from models_library.api_schemas_long_running_tasks.tasks import TaskGet, TaskStatus +from tenacity import ( + AsyncRetrying, + TryAgain, + before_sleep_log, + retry, + retry_if_exception_type, + stop_after_delay, + wait_random_exponential, +) +from yarl import URL + +from ...long_running_tasks._constants import DEFAULT_POLL_INTERVAL_S, HOUR from ...long_running_tasks._errors import TaskClientResultError from ...long_running_tasks._models import ( ClientConfiguration, + LRTask, ProgressCallback, ProgressMessage, ProgressPercent, + RequestBody, ) from ...long_running_tasks._task import TaskId, TaskResult +from ...rest_responses import unwrap_envelope_if_required from ._client import DEFAULT_HTTP_REQUESTS_TIMEOUT, Client, setup from ._context_manager import periodic_task_result +_logger = logging.getLogger(__name__) + + +_DEFAULT_FASTAPI_RETRY_POLICY: dict[str, Any] = { + "retry": retry_if_exception_type(httpx.RequestError), + "wait": wait_random_exponential(max=20), + "stop": stop_after_delay(60), + "reraise": True, + "before_sleep": before_sleep_log(_logger, logging.INFO), +} + + +@retry(**_DEFAULT_FASTAPI_RETRY_POLICY) +async def _start( + session: httpx.AsyncClient, url: URL, json: RequestBody | None +) -> TaskGet: + response = await session.post(f"{url}", json=json) + response.raise_for_status() + data = unwrap_envelope_if_required(response.json()) + return TaskGet.model_validate(data) + + +@retry(**_DEFAULT_FASTAPI_RETRY_POLICY) +async def _wait_for_completion( + session: httpx.AsyncClient, + task_id: TaskId, + status_url: URL, + client_timeout: int, +) -> AsyncGenerator[TaskProgress, None]: + try: + async for attempt in AsyncRetrying( + stop=stop_after_delay(client_timeout), + reraise=True, + retry=retry_if_exception_type(TryAgain), + before_sleep=before_sleep_log(_logger, logging.DEBUG), + ): + with attempt: + response = await session.get(f"{status_url}") + response.raise_for_status() + data = unwrap_envelope_if_required(response.json()) + task_status = TaskStatus.model_validate(data) + + yield task_status.task_progress + if not task_status.done: + await asyncio.sleep( + float( + response.headers.get("retry-after", DEFAULT_POLL_INTERVAL_S) + ) + ) + msg = f"{task_id=}, {task_status.started=} has status: '{task_status.task_progress.message}' {task_status.task_progress.percent}%" + raise TryAgain(msg) # noqa: TRY301 + + except TryAgain as exc: + # this is a timeout + msg = f"Long running task {task_id}, calling to {status_url} timed-out after {client_timeout} seconds" + raise TimeoutError(msg) from exc + + +@retry(**_DEFAULT_FASTAPI_RETRY_POLICY) +async def _task_result(session: httpx.AsyncClient, result_url: URL) -> Any: + response = await session.get(f"{result_url}", params={"return_exception": True}) + response.raise_for_status() + if response.status_code != status.HTTP_204_NO_CONTENT: + return unwrap_envelope_if_required(response.json()) + return None + + +@retry(**_DEFAULT_FASTAPI_RETRY_POLICY) +async def _abort_task(session: httpx.AsyncClient, abort_url: URL) -> None: + response = await session.delete(f"{abort_url}") + response.raise_for_status() + + +async def long_running_task_request( + session: httpx.AsyncClient, + url: URL, + json: RequestBody | None = None, + client_timeout: int = 1 * HOUR, +) -> AsyncGenerator[LRTask, None]: + """Will use the passed 
`httpx.AsyncClient` to call an oSparc long + running task `url` passing `json` as request body. + NOTE: this follows the usual aiohttp client syntax, and will raise the same errors + + Raises: + [https://docs.aiohttp.org/en/stable/client_reference.html#hierarchy-of-exceptions] + """ + task = None + try: + task = await _start(session, url, json) + last_progress = None + async for task_progress in _wait_for_completion( + session, + task.task_id, + URL(task.status_href), + client_timeout, + ): + last_progress = task_progress + yield LRTask(progress=task_progress) + assert last_progress # nosec + yield LRTask( + progress=last_progress, + _result=_task_result(session, URL(task.result_href)), + ) + + except (TimeoutError, asyncio.CancelledError): + if task: + await _abort_task(session, URL(task.abort_href)) + raise + + __all__: tuple[str, ...] = ( + "DEFAULT_HTTP_REQUESTS_TIMEOUT", "Client", "ClientConfiguration", - "DEFAULT_HTTP_REQUESTS_TIMEOUT", - "periodic_task_result", + "LRTask", "ProgressCallback", "ProgressMessage", "ProgressPercent", - "setup", "TaskClientResultError", "TaskId", "TaskResult", + "periodic_task_result", + "setup", ) # nopycln: file diff --git a/packages/service-library/src/servicelib/fastapi/profiler_middleware.py b/packages/service-library/src/servicelib/fastapi/profiler.py similarity index 91% rename from packages/service-library/src/servicelib/fastapi/profiler_middleware.py rename to packages/service-library/src/servicelib/fastapi/profiler.py index 43c46c7ba9d..cb3e7c5c084 100644 --- a/packages/service-library/src/servicelib/fastapi/profiler_middleware.py +++ b/packages/service-library/src/servicelib/fastapi/profiler.py @@ -1,5 +1,6 @@ from typing import Any, Final +from fastapi import FastAPI from servicelib.aiohttp import status from servicelib.mimetype_constants import MIMETYPE_APPLICATION_JSON from starlette.requests import Request @@ -13,7 +14,7 @@ ) -def is_last_response(response_headers: dict[bytes, bytes], message: dict[str, Any]): +def _is_last_response(response_headers: dict[bytes, bytes], message: dict[str, Any]): if ( content_type := response_headers.get(b"content-type") ) and content_type == MIMETYPE_APPLICATION_JSON.encode(): @@ -79,7 +80,7 @@ async def _send_wrapper(message): response_headers = dict(message.get("headers")) message["headers"] = check_response_headers(response_headers) elif message["type"] == "http.response.body": - if is_last_response(response_headers, message): + if _is_last_response(response_headers, message): _profiler.stop() profile_text = _profiler.output_text( unicode=True, color=True, show_all=True @@ -96,3 +97,8 @@ async def _send_wrapper(message): finally: _profiler.reset() + + +def initialize_profiler(app: FastAPI) -> None: + # NOTE: this cannot be ran once the application is started + app.add_middleware(ProfilerMiddleware) diff --git a/packages/service-library/src/servicelib/fastapi/prometheus_instrumentation.py b/packages/service-library/src/servicelib/fastapi/prometheus_instrumentation.py index 847585c52fc..c9b9fb58170 100644 --- a/packages/service-library/src/servicelib/fastapi/prometheus_instrumentation.py +++ b/packages/service-library/src/servicelib/fastapi/prometheus_instrumentation.py @@ -1,28 +1,62 @@ # pylint: disable=protected-access +from collections.abc import AsyncIterator + from fastapi import FastAPI +from fastapi_lifespan_manager import State from prometheus_client import CollectorRegistry from prometheus_fastapi_instrumentator import Instrumentator -def setup_prometheus_instrumentation(app: FastAPI) -> 
Instrumentator: +def initialize_prometheus_instrumentation(app: FastAPI) -> None: + # NOTE: this cannot be ran once the application is started + # NOTE: use that registry to prevent having a global one app.state.prometheus_registry = registry = CollectorRegistry(auto_describe=True) - instrumentator = Instrumentator( + app.state.prometheus_instrumentator = Instrumentator( should_instrument_requests_inprogress=False, # bug in https://github.com/trallnag/prometheus-fastapi-instrumentator/issues/317 inprogress_labels=False, registry=registry, - ).instrument(app) + ) + app.state.prometheus_instrumentator.instrument(app) + + +def _startup(app: FastAPI) -> None: + assert isinstance(app.state.prometheus_instrumentator, Instrumentator) # nosec + app.state.prometheus_instrumentator.expose(app, include_in_schema=False) + + +def _shutdown(app: FastAPI) -> None: + assert isinstance(app.state.prometheus_registry, CollectorRegistry) # nosec + registry = app.state.prometheus_registry + for collector in list(registry._collector_to_names.keys()): # noqa: SLF001 + registry.unregister(collector) + + +def get_prometheus_instrumentator(app: FastAPI) -> Instrumentator: + assert isinstance(app.state.prometheus_instrumentator, Instrumentator) # nosec + return app.state.prometheus_instrumentator + + +def setup_prometheus_instrumentation(app: FastAPI) -> Instrumentator: + initialize_prometheus_instrumentation(app) async def _on_startup() -> None: - instrumentator.expose(app, include_in_schema=False) + _startup(app) - def _unregister() -> None: - # NOTE: avoid registering collectors multiple times when running unittests consecutively (https://stackoverflow.com/a/62489287) - for collector in list(registry._collector_to_names.keys()): # noqa: SLF001 - registry.unregister(collector) + def _on_shutdown() -> None: + _shutdown(app) app.add_event_handler("startup", _on_startup) - app.add_event_handler("shutdown", _unregister) - return instrumentator + app.add_event_handler("shutdown", _on_shutdown) + + return get_prometheus_instrumentator(app) + + +async def lifespan_prometheus_instrumentation(app: FastAPI) -> AsyncIterator[State]: + # NOTE: requires ``initialize_prometheus_instrumentation`` to be called before the + # lifespan of the applicaiton runs, usually rigth after the ``FastAPI`` instance is created + _startup(app) + yield {} + _shutdown(app) diff --git a/packages/service-library/src/servicelib/fastapi/rest_pagination.py b/packages/service-library/src/servicelib/fastapi/rest_pagination.py new file mode 100644 index 00000000000..0a199152ace --- /dev/null +++ b/packages/service-library/src/servicelib/fastapi/rest_pagination.py @@ -0,0 +1,28 @@ +from typing import TypeAlias, TypeVar + +from fastapi import Query +from fastapi_pagination.cursor import CursorPage # type: ignore[import-not-found] +from fastapi_pagination.customization import ( # type: ignore[import-not-found] + CustomizedPage, + UseParamsFields, +) +from models_library.api_schemas_storage.storage_schemas import ( + DEFAULT_NUMBER_OF_PATHS_PER_PAGE, + MAX_NUMBER_OF_PATHS_PER_PAGE, +) + +_T = TypeVar("_T") + +CustomizedPathsCursorPage = CustomizedPage[ + CursorPage[_T], + # Customizes the maximum value to fit frontend needs + UseParamsFields( + size=Query( + DEFAULT_NUMBER_OF_PATHS_PER_PAGE, + ge=1, + le=MAX_NUMBER_OF_PATHS_PER_PAGE, + description="Page size", + ) + ), +] +CustomizedPathsCursorPageParams: TypeAlias = CustomizedPathsCursorPage.__params_type__ # type: ignore diff --git a/packages/service-library/src/servicelib/fastapi/tracing.py 
b/packages/service-library/src/servicelib/fastapi/tracing.py index e2cce12c319..e26cc367064 100644 --- a/packages/service-library/src/servicelib/fastapi/tracing.py +++ b/packages/service-library/src/servicelib/fastapi/tracing.py @@ -61,7 +61,7 @@ HAS_REQUESTS = False -def setup_tracing( +def initialize_tracing( app: FastAPI, tracing_settings: TracingSettings, service_name: str ) -> None: if ( diff --git a/packages/service-library/src/servicelib/long_running_tasks/_constants.py b/packages/service-library/src/servicelib/long_running_tasks/_constants.py new file mode 100644 index 00000000000..5cc87208a36 --- /dev/null +++ b/packages/service-library/src/servicelib/long_running_tasks/_constants.py @@ -0,0 +1,5 @@ +from typing import Final + +MINUTE: Final[int] = 60 # in secs +HOUR: Final[int] = 60 * MINUTE # in secs +DEFAULT_POLL_INTERVAL_S: Final[float] = 1 diff --git a/packages/service-library/src/servicelib/long_running_tasks/_models.py b/packages/service-library/src/servicelib/long_running_tasks/_models.py index fc240160b81..89fb8b1b399 100644 --- a/packages/service-library/src/servicelib/long_running_tasks/_models.py +++ b/packages/service-library/src/servicelib/long_running_tasks/_models.py @@ -1,6 +1,7 @@ # mypy: disable-error-code=truthy-function from asyncio import Task from collections.abc import Awaitable, Callable, Coroutine +from dataclasses import dataclass from datetime import datetime from typing import Any, TypeAlias @@ -25,6 +26,8 @@ [ProgressMessage, ProgressPercent | None, TaskId], Awaitable[None] ] +RequestBody: TypeAlias = Any + class TrackedTask(BaseModel): task_id: str @@ -56,6 +59,21 @@ class ClientConfiguration(BaseModel): default_timeout: PositiveFloat +@dataclass(frozen=True) +class LRTask: + progress: TaskProgress + _result: Coroutine[Any, Any, Any] | None = None + + def done(self) -> bool: + return self._result is not None + + async def result(self) -> Any: + if not self._result: + msg = "No result ready!" + raise ValueError(msg) + return await self._result + + # explicit export of models for api-schemas assert TaskResult # nosec @@ -63,11 +81,11 @@ class ClientConfiguration(BaseModel): assert TaskStatus # nosec __all__: tuple[str, ...] = ( + "ProgressMessage", + "ProgressPercent", "TaskGet", "TaskId", + "TaskProgress", "TaskResult", "TaskStatus", - "TaskProgress", - "ProgressPercent", - "ProgressMessage", ) diff --git a/packages/service-library/src/servicelib/progress_bar.py b/packages/service-library/src/servicelib/progress_bar.py index bf70c0c3e88..1f65e44790b 100644 --- a/packages/service-library/src/servicelib/progress_bar.py +++ b/packages/service-library/src/servicelib/progress_bar.py @@ -4,7 +4,6 @@ from inspect import isawaitable from typing import Final, Optional, Protocol, runtime_checkable -from models_library.basic_types import IDStr from models_library.progress_bar import ( ProgressReport, ProgressStructuredMessage, @@ -18,18 +17,17 @@ _MIN_PROGRESS_UPDATE_PERCENT: Final[float] = 0.01 _INITIAL_VALUE: Final[float] = -1.0 _FINAL_VALUE: Final[float] = 1.0 +_PROGRESS_ALREADY_REACGED_MAXIMUM: Final[str] = "Progress already reached maximum of" @runtime_checkable class AsyncReportCB(Protocol): - async def __call__(self, report: ProgressReport) -> None: - ... + async def __call__(self, report: ProgressReport) -> None: ... @runtime_checkable class ReportCB(Protocol): - def __call__(self, report: ProgressReport) -> None: - ... + def __call__(self, report: ProgressReport) -> None: ... 
def _normalize_weights(steps: int, weights: list[float]) -> list[float]: @@ -84,10 +82,11 @@ async def main_fct(): "description": "Optionally defines the step relative weight (defaults to steps of equal weights)" }, ) - description: IDStr = field(metadata={"description": "define the progress name"}) + description: str = field(metadata={"description": "define the progress name"}) progress_unit: ProgressUnit | None = None progress_report_cb: AsyncReportCB | ReportCB | None = None _current_steps: float = _INITIAL_VALUE + _current_attempt: int = 0 _children: list["ProgressBarData"] = field(default_factory=list) _parent: Optional["ProgressBarData"] = None _continuous_value_lock: asyncio.Lock = field(init=False) @@ -147,6 +146,7 @@ async def _report_external(self, value: float) -> None: # NOTE: here we convert back to actual value since this is possibly weighted actual_value=value * self.num_steps, total=self.num_steps, + attempt=self._current_attempt, unit=self.progress_unit, message=self.compute_report_message_stuct(), ), @@ -176,7 +176,7 @@ async def update(self, steps: float = 1) -> None: if new_steps_value > self.num_steps: _logger.warning( "%s", - f"Progress already reached maximum of {self.num_steps=}, " + f"{_PROGRESS_ALREADY_REACGED_MAXIMUM} {self.num_steps=}, " f"cause: {self._current_steps=} is updated by {steps=}" "TIP: sub progresses are not created correctly please check the stack trace", stack_info=True, @@ -197,6 +197,11 @@ async def update(self, steps: float = 1) -> None: await self._update_parent(parent_update_value) await self._report_external(new_progress_value) + def reset(self) -> None: + self._current_attempt += 1 + self._current_steps = _INITIAL_VALUE + self._last_report_value = _INITIAL_VALUE + async def set_(self, new_value: float) -> None: await self.update(new_value - self._current_steps) @@ -207,7 +212,7 @@ async def finish(self) -> None: def sub_progress( self, steps: int, - description: IDStr, + description: str, step_weights: list[float] | None = None, progress_unit: ProgressUnit | None = None, ) -> "ProgressBarData": diff --git a/packages/service-library/src/servicelib/rabbitmq/_client_rpc.py b/packages/service-library/src/servicelib/rabbitmq/_client_rpc.py index f7b4a81ce62..e34fc874a54 100644 --- a/packages/service-library/src/servicelib/rabbitmq/_client_rpc.py +++ b/packages/service-library/src/servicelib/rabbitmq/_client_rpc.py @@ -32,15 +32,18 @@ async def create( cls, *, client_name: str, settings: RabbitSettings, **kwargs ) -> "RabbitMQRPCClient": client = cls(client_name=client_name, settings=settings, **kwargs) - await client._rpc_initialize() # noqa: SLF001 + await client._rpc_initialize() return client async def _rpc_initialize(self) -> None: + # NOTE: to show the connection name in the rabbitMQ UI see there + # https://www.bountysource.com/issues/89342433-setting-custom-connection-name-via-client_properties-doesn-t-work-when-connecting-using-an-amqp-url + # + connection_name = f"{get_rabbitmq_client_unique_name(self.client_name)}.rpc" + url = f"{self.settings.dsn}?name={connection_name}" self._connection = await aio_pika.connect_robust( - self.settings.dsn, - client_properties={ - "connection_name": f"{get_rabbitmq_client_unique_name(self.client_name)}.rpc" - }, + url, + client_properties={"connection_name": connection_name}, ) self._channel = await self._connection.channel() diff --git a/services/datcore-adapter/src/simcore_service_datcore_adapter/models/domains/__init__.py 
b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/async_jobs/__init__.py similarity index 100% rename from services/datcore-adapter/src/simcore_service_datcore_adapter/models/domains/__init__.py rename to packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/async_jobs/__init__.py diff --git a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/async_jobs/async_jobs.py b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/async_jobs/async_jobs.py new file mode 100644 index 00000000000..c50799bda05 --- /dev/null +++ b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/async_jobs/async_jobs.py @@ -0,0 +1,237 @@ +import datetime +import logging +from asyncio import CancelledError +from collections.abc import AsyncGenerator, Awaitable +from typing import Any, Final + +from attr import dataclass +from models_library.api_schemas_rpc_async_jobs.async_jobs import ( + AsyncJobGet, + AsyncJobId, + AsyncJobNameData, + AsyncJobResult, + AsyncJobStatus, +) +from models_library.rabbitmq_basic_types import RPCMethodName, RPCNamespace +from pydantic import NonNegativeInt, TypeAdapter +from tenacity import ( + AsyncRetrying, + TryAgain, + before_sleep_log, + retry, + retry_if_exception_type, + stop_after_delay, + wait_fixed, + wait_random_exponential, +) + +from ....rabbitmq import RemoteMethodNotRegisteredError +from ... import RabbitMQRPCClient + +_DEFAULT_TIMEOUT_S: Final[NonNegativeInt] = 30 + +_RPC_METHOD_NAME_ADAPTER = TypeAdapter(RPCMethodName) +_DEFAULT_POLL_INTERVAL_S: Final[float] = 0.1 +_logger = logging.getLogger(__name__) + + +async def cancel( + rabbitmq_rpc_client: RabbitMQRPCClient, + *, + rpc_namespace: RPCNamespace, + job_id: AsyncJobId, + job_id_data: AsyncJobNameData, +) -> None: + await rabbitmq_rpc_client.request( + rpc_namespace, + _RPC_METHOD_NAME_ADAPTER.validate_python("cancel"), + job_id=job_id, + job_id_data=job_id_data, + timeout_s=_DEFAULT_TIMEOUT_S, + ) + + +async def status( + rabbitmq_rpc_client: RabbitMQRPCClient, + *, + rpc_namespace: RPCNamespace, + job_id: AsyncJobId, + job_id_data: AsyncJobNameData, +) -> AsyncJobStatus: + _result = await rabbitmq_rpc_client.request( + rpc_namespace, + _RPC_METHOD_NAME_ADAPTER.validate_python("status"), + job_id=job_id, + job_id_data=job_id_data, + timeout_s=_DEFAULT_TIMEOUT_S, + ) + assert isinstance(_result, AsyncJobStatus) + return _result + + +async def result( + rabbitmq_rpc_client: RabbitMQRPCClient, + *, + rpc_namespace: RPCNamespace, + job_id: AsyncJobId, + job_id_data: AsyncJobNameData, +) -> AsyncJobResult: + _result = await rabbitmq_rpc_client.request( + rpc_namespace, + _RPC_METHOD_NAME_ADAPTER.validate_python("result"), + job_id=job_id, + job_id_data=job_id_data, + timeout_s=_DEFAULT_TIMEOUT_S, + ) + assert isinstance(_result, AsyncJobResult) + return _result + + +async def list_jobs( + rabbitmq_rpc_client: RabbitMQRPCClient, + *, + rpc_namespace: RPCNamespace, + filter_: str, + job_id_data: AsyncJobNameData, +) -> list[AsyncJobGet]: + _result: list[AsyncJobGet] = await rabbitmq_rpc_client.request( + rpc_namespace, + _RPC_METHOD_NAME_ADAPTER.validate_python("list_jobs"), + filter_=filter_, + job_id_data=job_id_data, + timeout_s=_DEFAULT_TIMEOUT_S, + ) + return _result + + +async def submit( + rabbitmq_rpc_client: RabbitMQRPCClient, + *, + rpc_namespace: RPCNamespace, + method_name: str, + job_id_data: AsyncJobNameData, + **kwargs, +) -> AsyncJobGet: + _result = await rabbitmq_rpc_client.request( + rpc_namespace, + 
_RPC_METHOD_NAME_ADAPTER.validate_python(method_name), + job_id_data=job_id_data, + **kwargs, + timeout_s=_DEFAULT_TIMEOUT_S, + ) + assert isinstance(_result, AsyncJobGet) # nosec + return _result + + +_DEFAULT_RPC_RETRY_POLICY: dict[str, Any] = { + "retry": retry_if_exception_type(RemoteMethodNotRegisteredError), + "wait": wait_random_exponential(max=20), + "stop": stop_after_delay(60), + "reraise": True, + "before_sleep": before_sleep_log(_logger, logging.INFO), +} + + +@retry(**_DEFAULT_RPC_RETRY_POLICY) +async def _wait_for_completion( + rabbitmq_rpc_client: RabbitMQRPCClient, + *, + rpc_namespace: RPCNamespace, + method_name: RPCMethodName, + job_id: AsyncJobId, + job_id_data: AsyncJobNameData, + client_timeout: datetime.timedelta, +) -> AsyncGenerator[AsyncJobStatus, None]: + try: + async for attempt in AsyncRetrying( + stop=stop_after_delay(client_timeout.total_seconds()), + reraise=True, + retry=retry_if_exception_type(TryAgain), + before_sleep=before_sleep_log(_logger, logging.DEBUG), + wait=wait_fixed(_DEFAULT_POLL_INTERVAL_S), + ): + with attempt: + job_status = await status( + rabbitmq_rpc_client, + rpc_namespace=rpc_namespace, + job_id=job_id, + job_id_data=job_id_data, + ) + yield job_status + if not job_status.done: + msg = f"{job_status.job_id=}: '{job_status.progress=}'" + raise TryAgain(msg) # noqa: TRY301 + + except TryAgain as exc: + # this is a timeout + msg = f"Async job {job_id=}, calling to '{method_name}' timed-out after {client_timeout}" + raise TimeoutError(msg) from exc + + +@dataclass(frozen=True) +class AsyncJobComposedResult: + status: AsyncJobStatus + _result: Awaitable[Any] | None = None + + @property + def done(self) -> bool: + return self._result is not None + + async def result(self) -> Any: + if not self._result: + msg = "No result ready!" 
+ raise ValueError(msg) + return await self._result + + +async def submit_and_wait( + rabbitmq_rpc_client: RabbitMQRPCClient, + *, + rpc_namespace: RPCNamespace, + method_name: str, + job_id_data: AsyncJobNameData, + client_timeout: datetime.timedelta, + **kwargs, +) -> AsyncGenerator[AsyncJobComposedResult, None]: + async_job_rpc_get = None + try: + async_job_rpc_get = await submit( + rabbitmq_rpc_client, + rpc_namespace=rpc_namespace, + method_name=method_name, + job_id_data=job_id_data, + **kwargs, + ) + job_status: AsyncJobStatus | None = None + async for job_status in _wait_for_completion( + rabbitmq_rpc_client, + rpc_namespace=rpc_namespace, + method_name=method_name, + job_id=async_job_rpc_get.job_id, + job_id_data=job_id_data, + client_timeout=client_timeout, + ): + assert job_status is not None # nosec + yield AsyncJobComposedResult(job_status) + if job_status: + yield AsyncJobComposedResult( + job_status, + result( + rabbitmq_rpc_client, + rpc_namespace=rpc_namespace, + job_id=async_job_rpc_get.job_id, + job_id_data=job_id_data, + ), + ) + except (TimeoutError, CancelledError) as error: + if async_job_rpc_get is not None: + try: + await cancel( + rabbitmq_rpc_client, + rpc_namespace=rpc_namespace, + job_id=async_job_rpc_get.job_id, + job_id_data=job_id_data, + ) + except Exception as exc: + raise exc from error + raise diff --git a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/catalog/errors.py b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/catalog/errors.py index d278bb350ba..be5e7c6c4e4 100644 --- a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/catalog/errors.py +++ b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/catalog/errors.py @@ -11,3 +11,7 @@ class CatalogItemNotFoundError(CatalogApiBaseError): class CatalogForbiddenError(CatalogApiBaseError): msg_template = "Insufficient access rights for {name}" + + +class CatalogNotAvailableError(CatalogApiBaseError): + msg_template = "Catalog service failed unexpectedly" diff --git a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/catalog/services.py b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/catalog/services.py index 1c168a6d1b1..ca4f8876f59 100644 --- a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/catalog/services.py +++ b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/catalog/services.py @@ -1,12 +1,15 @@ -""" RPC client-side for the RPC server at the payments service - -""" +"""RPC client-side for the RPC server at the payments service""" import logging from typing import Any, cast from models_library.api_schemas_catalog import CATALOG_RPC_NAMESPACE -from models_library.api_schemas_catalog.services import ServiceGetV2, ServiceUpdateV2 +from models_library.api_schemas_catalog.services import ( + LatestServiceGet, + MyServiceGet, + ServiceGetV2, + ServiceUpdateV2, +) from models_library.products import ProductName from models_library.rabbitmq_basic_types import RPCMethodName from models_library.rpc_pagination import ( @@ -25,7 +28,6 @@ _logger = logging.getLogger(__name__) -@log_decorator(_logger, level=logging.DEBUG) async def list_services_paginated( # pylint: disable=too-many-arguments rpc_client: RabbitMQRPCClient, *, @@ -33,7 +35,7 @@ async def list_services_paginated( # pylint: disable=too-many-arguments user_id: UserID, limit: PageLimitInt = DEFAULT_NUMBER_OF_ITEMS_PER_PAGE, offset: NonNegativeInt = 0, -) -> PageRpc[ServiceGetV2]: +) -> PageRpc[LatestServiceGet]: """ Raises: 
ValidationError: on invalid arguments @@ -60,10 +62,10 @@ async def _call( result = await _call( product_name=product_name, user_id=user_id, limit=limit, offset=offset ) - assert ( - TypeAdapter(PageRpc[ServiceGetV2]).validate_python(result) is not None - ) # nosec - return cast(PageRpc[ServiceGetV2], result) + assert ( # nosec + TypeAdapter(PageRpc[LatestServiceGet]).validate_python(result) is not None + ) + return cast(PageRpc[LatestServiceGet], result) @log_decorator(_logger, level=logging.DEBUG) @@ -194,3 +196,42 @@ async def _call( service_key=service_key, service_version=service_version, ) + + +@log_decorator(_logger, level=logging.DEBUG) +async def batch_get_my_services( + rpc_client: RabbitMQRPCClient, + *, + product_name: ProductName, + user_id: UserID, + ids: list[ + tuple[ + ServiceKey, + ServiceVersion, + ] + ], +) -> list[MyServiceGet]: + """ + Raises: + ValidationError: on invalid arguments + CatalogForbiddenError: no access-rights to list services + """ + + @validate_call() + async def _call( + product_name: ProductName, + user_id: UserID, + ids: list[tuple[ServiceKey, ServiceVersion]], + ): + return await rpc_client.request( + CATALOG_RPC_NAMESPACE, + TypeAdapter(RPCMethodName).validate_python("batch_get_my_services"), + product_name=product_name, + user_id=user_id, + ids=ids, + timeout_s=40 * RPC_REQUEST_DEFAULT_TIMEOUT_S, + ) + + result = await _call(product_name=product_name, user_id=user_id, ids=ids) + assert TypeAdapter(list[MyServiceGet]).validate_python(result) is not None # nosec + return cast(list[MyServiceGet], result) diff --git a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/resource_usage_tracker/errors.py b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/resource_usage_tracker/errors.py index 4e24b323edc..9ee0c7a4324 100644 --- a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/resource_usage_tracker/errors.py +++ b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/resource_usage_tracker/errors.py @@ -34,3 +34,14 @@ class LicensedItemCheckoutNotFoundError(LicensesBaseError): class WalletTransactionError(OsparcErrorMixin, Exception): msg_template = "{msg}" + + +### Pricing Plans Error + + +class PricingPlanBaseError(OsparcErrorMixin, Exception): + ... + + +class PricingUnitDuplicationError(PricingPlanBaseError): + msg_template = "Pricing unit with that name already exists in given product." 
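NOTE: a minimal usage sketch of the `submit_and_wait` helper added above in `async_jobs.py` (assumptions: an already-connected `RabbitMQRPCClient` named `rpc_client`; the method name mirrors the storage `start_data_export` endpoint wired up further below):

    import datetime

    from models_library.api_schemas_rpc_async_jobs.async_jobs import AsyncJobNameData
    from models_library.api_schemas_storage import STORAGE_RPC_NAMESPACE
    from servicelib.rabbitmq.rpc_interfaces.async_jobs.async_jobs import submit_and_wait

    async def run_data_export(rpc_client, user_id, product_name) -> None:
        # yields intermediate AsyncJobComposedResult items; the last one carries the result
        async for job in submit_and_wait(
            rpc_client,
            rpc_namespace=STORAGE_RPC_NAMESPACE,
            method_name="start_data_export",
            job_id_data=AsyncJobNameData(user_id=user_id, product_name=product_name),
            client_timeout=datetime.timedelta(minutes=5),
        ):
            if job.done:
                print(await job.result())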
diff --git a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/resource_usage_tracker/licensed_items_checkouts.py b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/resource_usage_tracker/licensed_items_checkouts.py
index ed8c85dfd37..5203fb9d2d5 100644
--- a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/resource_usage_tracker/licensed_items_checkouts.py
+++ b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/resource_usage_tracker/licensed_items_checkouts.py
@@ -9,7 +9,7 @@
     LicensedItemsCheckoutsPage,
 )
 from models_library.basic_types import IDStr
-from models_library.licensed_items import LicensedItemID
+from models_library.licenses import LicensedItemID
 from models_library.products import ProductName
 from models_library.rabbitmq_basic_types import RPCMethodName
 from models_library.resource_tracker_licensed_items_checkouts import (
@@ -85,6 +85,8 @@ async def checkout_licensed_item(
     rabbitmq_rpc_client: RabbitMQRPCClient,
     *,
     licensed_item_id: LicensedItemID,
+    key: str,
+    version: str,
     wallet_id: WalletID,
     product_name: ProductName,
     num_of_seats: int,
@@ -96,6 +98,8 @@ async def checkout_licensed_item(
         RESOURCE_USAGE_TRACKER_RPC_NAMESPACE,
         _RPC_METHOD_NAME_ADAPTER.validate_python("checkout_licensed_item"),
         licensed_item_id=licensed_item_id,
+        key=key,
+        version=version,
         wallet_id=wallet_id,
         product_name=product_name,
         num_of_seats=num_of_seats,
diff --git a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/resource_usage_tracker/pricing_plans.py b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/resource_usage_tracker/pricing_plans.py
index 45107bfa077..4faa6fa3f0c 100644
--- a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/resource_usage_tracker/pricing_plans.py
+++ b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/resource_usage_tracker/pricing_plans.py
@@ -5,9 +5,9 @@
     RESOURCE_USAGE_TRACKER_RPC_NAMESPACE,
 )
 from models_library.api_schemas_resource_usage_tracker.pricing_plans import (
-    PricingPlanGet,
-    PricingPlanPage,
     PricingPlanToServiceGet,
+    RutPricingPlanGet,
+    RutPricingPlanPage,
 )
 from models_library.products import ProductName
 from models_library.rabbitmq_basic_types import RPCMethodName
@@ -36,20 +36,20 @@ async def get_pricing_plan(
     *,
     product_name: ProductName,
     pricing_plan_id: PricingPlanId,
-) -> PricingPlanGet:
-    result: PricingPlanGet = await rabbitmq_rpc_client.request(
+) -> RutPricingPlanGet:
+    result: RutPricingPlanGet = await rabbitmq_rpc_client.request(
         RESOURCE_USAGE_TRACKER_RPC_NAMESPACE,
         _RPC_METHOD_NAME_ADAPTER.validate_python("get_pricing_plan"),
         product_name=product_name,
         pricing_plan_id=pricing_plan_id,
         timeout_s=_DEFAULT_TIMEOUT_S,
     )
-    assert isinstance(result, PricingPlanGet)  # nosec
+    assert isinstance(result, RutPricingPlanGet)  # nosec
     return result
 
 
 @log_decorator(_logger, level=logging.DEBUG)
-async def list_pricing_plans(
+async def list_pricing_plans_without_pricing_units(
     rabbitmq_rpc_client: RabbitMQRPCClient,
     *,
     product_name: ProductName,
@@ -57,17 +57,19 @@
     # pagination
     offset: int = 0,
     limit: int = 20,
-) -> PricingPlanPage:
+) -> RutPricingPlanPage:
     result = await rabbitmq_rpc_client.request(
         RESOURCE_USAGE_TRACKER_RPC_NAMESPACE,
-        _RPC_METHOD_NAME_ADAPTER.validate_python("list_pricing_plans"),
+        _RPC_METHOD_NAME_ADAPTER.validate_python(
+            "list_pricing_plans_without_pricing_units"
+        ),
         product_name=product_name,
         exclude_inactive=exclude_inactive,
         offset=offset,
         limit=limit,
         timeout_s=_DEFAULT_TIMEOUT_S,
     )
-    assert isinstance(result, PricingPlanPage)  # nosec
+    assert isinstance(result, RutPricingPlanPage)  # nosec
     return result
 
 
@@ -76,14 +78,14 @@ async def create_pricing_plan(
     rabbitmq_rpc_client: RabbitMQRPCClient,
     *,
     data: PricingPlanCreate,
-) -> PricingPlanGet:
-    result: PricingPlanGet = await rabbitmq_rpc_client.request(
+) -> RutPricingPlanGet:
+    result: RutPricingPlanGet = await rabbitmq_rpc_client.request(
         RESOURCE_USAGE_TRACKER_RPC_NAMESPACE,
         _RPC_METHOD_NAME_ADAPTER.validate_python("create_pricing_plan"),
         data=data,
         timeout_s=_DEFAULT_TIMEOUT_S,
     )
-    assert isinstance(result, PricingPlanGet)  # nosec
+    assert isinstance(result, RutPricingPlanGet)  # nosec
     return result
 
 
@@ -93,15 +95,15 @@ async def update_pricing_plan(
     *,
     product_name: ProductName,
     data: PricingPlanUpdate,
-) -> PricingPlanGet:
-    result: PricingPlanGet = await rabbitmq_rpc_client.request(
+) -> RutPricingPlanGet:
+    result: RutPricingPlanGet = await rabbitmq_rpc_client.request(
         RESOURCE_USAGE_TRACKER_RPC_NAMESPACE,
         _RPC_METHOD_NAME_ADAPTER.validate_python("update_pricing_plan"),
         product_name=product_name,
         data=data,
         timeout_s=_DEFAULT_TIMEOUT_S,
     )
-    assert isinstance(result, PricingPlanGet)  # nosec
+    assert isinstance(result, RutPricingPlanGet)  # nosec
     return result
 
 
@@ -112,7 +114,7 @@ async def list_connected_services_to_pricing_plan_by_pricing_plan(
     product_name: ProductName,
     pricing_plan_id: PricingPlanId,
 ) -> list[PricingPlanToServiceGet]:
-    result: PricingPlanGet = await rabbitmq_rpc_client.request(
+    result: RutPricingPlanGet = await rabbitmq_rpc_client.request(
         RESOURCE_USAGE_TRACKER_RPC_NAMESPACE,
         _RPC_METHOD_NAME_ADAPTER.validate_python(
             "list_connected_services_to_pricing_plan_by_pricing_plan"
@@ -134,7 +136,7 @@ async def connect_service_to_pricing_plan(
     service_key: ServiceKey,
     service_version: ServiceVersion,
 ) -> PricingPlanToServiceGet:
-    result: PricingPlanGet = await rabbitmq_rpc_client.request(
+    result: RutPricingPlanGet = await rabbitmq_rpc_client.request(
         RESOURCE_USAGE_TRACKER_RPC_NAMESPACE,
         _RPC_METHOD_NAME_ADAPTER.validate_python("connect_service_to_pricing_plan"),
         product_name=product_name,
diff --git a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/resource_usage_tracker/pricing_units.py b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/resource_usage_tracker/pricing_units.py
index afa5611a92d..09a47be8281 100644
--- a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/resource_usage_tracker/pricing_units.py
+++ b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/resource_usage_tracker/pricing_units.py
@@ -5,7 +5,7 @@
     RESOURCE_USAGE_TRACKER_RPC_NAMESPACE,
 )
 from models_library.api_schemas_resource_usage_tracker.pricing_plans import (
-    PricingUnitGet,
+    RutPricingUnitGet,
 )
 from models_library.products import ProductName
 from models_library.rabbitmq_basic_types import RPCMethodName
@@ -35,8 +35,8 @@ async def get_pricing_unit(
     product_name: ProductName,
     pricing_plan_id: PricingPlanId,
     pricing_unit_id: PricingUnitId,
-) -> PricingUnitGet:
-    result: PricingUnitGet = await rabbitmq_rpc_client.request(
+) -> RutPricingUnitGet:
+    result: RutPricingUnitGet = await rabbitmq_rpc_client.request(
         RESOURCE_USAGE_TRACKER_RPC_NAMESPACE,
         _RPC_METHOD_NAME_ADAPTER.validate_python("get_pricing_unit"),
         product_name=product_name,
@@ -44,7 +44,7 @@ async def get_pricing_unit(
         pricing_unit_id=pricing_unit_id,
         timeout_s=_DEFAULT_TIMEOUT_S,
     )
-    assert isinstance(result, PricingUnitGet)  # nosec
+    assert isinstance(result, RutPricingUnitGet)  # nosec
     return result
 
 
@@ -54,15 +54,15 @@ async def create_pricing_unit(
     *,
     product_name: ProductName,
     data: PricingUnitWithCostCreate,
-) -> PricingUnitGet:
-    result: PricingUnitGet = await rabbitmq_rpc_client.request(
+) -> RutPricingUnitGet:
+    result: RutPricingUnitGet = await rabbitmq_rpc_client.request(
         RESOURCE_USAGE_TRACKER_RPC_NAMESPACE,
         _RPC_METHOD_NAME_ADAPTER.validate_python("create_pricing_unit"),
         product_name=product_name,
         data=data,
         timeout_s=_DEFAULT_TIMEOUT_S,
     )
-    assert isinstance(result, PricingUnitGet)  # nosec
+    assert isinstance(result, RutPricingUnitGet)  # nosec
     return result
 
 
@@ -72,13 +72,13 @@ async def update_pricing_unit(
     *,
     product_name: ProductName,
     data: PricingUnitWithCostUpdate,
-) -> PricingUnitGet:
-    result: PricingUnitGet = await rabbitmq_rpc_client.request(
+) -> RutPricingUnitGet:
+    result: RutPricingUnitGet = await rabbitmq_rpc_client.request(
         RESOURCE_USAGE_TRACKER_RPC_NAMESPACE,
         _RPC_METHOD_NAME_ADAPTER.validate_python("update_pricing_unit"),
         product_name=product_name,
         data=data,
         timeout_s=_DEFAULT_TIMEOUT_S,
     )
-    assert isinstance(result, PricingUnitGet)  # nosec
+    assert isinstance(result, RutPricingUnitGet)  # nosec
     return result
diff --git a/services/datcore-adapter/src/simcore_service_datcore_adapter/models/schemas/__init__.py b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/storage/__init__.py
similarity index 100%
rename from services/datcore-adapter/src/simcore_service_datcore_adapter/models/schemas/__init__.py
rename to packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/storage/__init__.py
diff --git a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/storage/data_export.py b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/storage/data_export.py
new file mode 100644
index 00000000000..cd9770cb688
--- /dev/null
+++ b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/storage/data_export.py
@@ -0,0 +1,22 @@
+from models_library.api_schemas_rpc_async_jobs.async_jobs import (
+    AsyncJobGet,
+    AsyncJobNameData,
+)
+from models_library.api_schemas_storage import STORAGE_RPC_NAMESPACE
+from models_library.rabbitmq_basic_types import RPCMethodName
+from pydantic import TypeAdapter
+
+from ... import RabbitMQRPCClient
+from ..async_jobs.async_jobs import submit
+
+
+async def start_data_export(
+    rabbitmq_rpc_client: RabbitMQRPCClient, *, job_id_data: AsyncJobNameData, **kwargs
+) -> AsyncJobGet:
+    return await submit(
+        rabbitmq_rpc_client,
+        rpc_namespace=STORAGE_RPC_NAMESPACE,
+        method_name=TypeAdapter(RPCMethodName).validate_python("start_data_export"),
+        job_id_data=job_id_data,
+        **kwargs,
+    )
diff --git a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/storage/paths.py b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/storage/paths.py
new file mode 100644
index 00000000000..a549b8fcffc
--- /dev/null
+++ b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/storage/paths.py
@@ -0,0 +1,33 @@
+from pathlib import Path
+
+from models_library.api_schemas_rpc_async_jobs.async_jobs import (
+    AsyncJobGet,
+    AsyncJobNameData,
+)
+from models_library.api_schemas_storage import STORAGE_RPC_NAMESPACE
+from models_library.projects_nodes_io import LocationID
+from models_library.rabbitmq_basic_types import RPCMethodName
+from models_library.users import UserID
+
+from ..._client_rpc import RabbitMQRPCClient
+from ..async_jobs.async_jobs import submit
+
+
+async def compute_path_size(
+    client: RabbitMQRPCClient,
+    *,
+    user_id: UserID,
+    product_name: str,
+    location_id: LocationID,
+    path: Path,
+) -> tuple[AsyncJobGet, AsyncJobNameData]:
+    job_id_data = AsyncJobNameData(user_id=user_id, product_name=product_name)
+    async_job_rpc_get = await submit(
+        rabbitmq_rpc_client=client,
+        rpc_namespace=STORAGE_RPC_NAMESPACE,
+        method_name=RPCMethodName("compute_path_size"),
+        job_id_data=job_id_data,
+        location_id=location_id,
+        path=path,
+    )
+    return async_job_rpc_get, job_id_data
diff --git a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/webserver/licenses/licensed_items.py b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/webserver/licenses/licensed_items.py
index 4bf0ef898d2..acb367de27b 100644
--- a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/webserver/licenses/licensed_items.py
+++ b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/webserver/licenses/licensed_items.py
@@ -5,7 +5,7 @@
 from models_library.api_schemas_webserver.licensed_items_checkouts import (
     LicensedItemCheckoutRpcGet,
 )
-from models_library.licensed_items import LicensedItemID
+from models_library.licenses import LicensedItemID
 from models_library.products import ProductName
 from models_library.rabbitmq_basic_types import RPCMethodName
 from models_library.resource_tracker_licensed_items_checkouts import (
@@ -79,10 +79,10 @@ async def checkout_licensed_item_for_wallet(
     result = await rabbitmq_rpc_client.request(
         WEBSERVER_RPC_NAMESPACE,
         TypeAdapter(RPCMethodName).validate_python("checkout_licensed_item_for_wallet"),
+        licensed_item_id=licensed_item_id,
         product_name=product_name,
         user_id=user_id,
         wallet_id=wallet_id,
-        licensed_item_id=licensed_item_id,
         num_of_seats=num_of_seats,
         service_run_id=service_run_id,
     )
diff --git a/packages/service-library/src/servicelib/redis/_project_lock.py b/packages/service-library/src/servicelib/redis/_project_lock.py
index 12f25e068d9..d618d88c58f 100644
--- a/packages/service-library/src/servicelib/redis/_project_lock.py
+++ b/packages/service-library/src/servicelib/redis/_project_lock.py
@@ -1,10 +1,12 @@
 import functools
+import logging
 from collections.abc import Awaitable, Callable, Coroutine
 from typing import Any, Final, ParamSpec, TypeVar
 
 from models_library.projects import ProjectID
 from models_library.projects_access import Owner
 from models_library.projects_state import ProjectLocked, ProjectStatus
+from servicelib.logging_utils import log_catch
 
 from ._client import RedisClientSDK
 from ._decorators import exclusive
@@ -12,6 +14,7 @@
 
 _PROJECT_REDIS_LOCK_KEY: Final[str] = "project_lock:{}"
 
+_logger = logging.getLogger(__name__)
 
 P = ParamSpec("P")
 R = TypeVar("R")
@@ -59,17 +62,20 @@ async def _wrapper(*args: P.args, **kwargs: P.kwargs) -> R:
         )
         async def _exclusive_func(*args, **kwargs) -> R:
             if notification_cb is not None:
-                await notification_cb()
+                with log_catch(_logger, reraise=False):
+                    await notification_cb()
             return await func(*args, **kwargs)
 
         try:
-            result = await _exclusive_func(*args, **kwargs)
-            # we are now unlocked
-            if notification_cb is not None:
-                await notification_cb()
-            return result
+            return await _exclusive_func(*args, **kwargs)
+
         except CouldNotAcquireLockError as e:
             raise ProjectLockError from e
+        finally:
+            # we are now unlocked
+            if notification_cb is not None:
+                with log_catch(_logger, reraise=False):
+                    await notification_cb()
 
     return _wrapper
diff --git a/packages/service-library/src/servicelib/rest_responses.py b/packages/service-library/src/servicelib/rest_responses.py
new file mode 100644
index 00000000000..daa37446278
--- /dev/null
+++ b/packages/service-library/src/servicelib/rest_responses.py
@@ -0,0 +1,40 @@
+import json
+from collections.abc import Mapping
+from typing import Any
+
+_ENVELOPE_KEYS = ("data", "error")
+
+
+def is_enveloped_from_map(payload: Mapping) -> bool:
+    return all(k in _ENVELOPE_KEYS for k in payload if not f"{k}".startswith("_"))
+
+
+def is_enveloped_from_text(text: str) -> bool:
+    try:
+        payload = json.loads(text)
+    except json.decoder.JSONDecodeError:
+        return False
+    return is_enveloped_from_map(payload)
+
+
+def is_enveloped(payload: Mapping | str) -> bool:
+    # pylint: disable=isinstance-second-argument-not-valid-type
+    if isinstance(payload, Mapping):
+        return is_enveloped_from_map(payload)
+    if isinstance(payload, str):
+        return is_enveloped_from_text(text=payload)
+    return False
+
+
+def unwrap_envelope(payload: Mapping[str, Any]) -> tuple:
+    """
+    Safe returns (data, error) tuple from a response payload
+    """
+    return tuple(payload.get(k) for k in _ENVELOPE_KEYS) if payload else (None, None)
+
+
+def unwrap_envelope_if_required(data: Mapping) -> Mapping:
+    if is_enveloped(data):
+        data, error = unwrap_envelope(data)
+        assert not error  # nosec
+    return data
diff --git a/packages/service-library/src/servicelib/s3_utils.py b/packages/service-library/src/servicelib/s3_utils.py
new file mode 100644
index 00000000000..f9492af2e32
--- /dev/null
+++ b/packages/service-library/src/servicelib/s3_utils.py
@@ -0,0 +1,32 @@
+from typing import Protocol
+
+from models_library.bytes_iters import BytesIter
+
+
+class FileLikeReader(Protocol):
+    """minimal interface for upload from file objects to S3"""
+
+    async def read(self, size: int) -> bytes:
+        ...
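+
+# NOTE: ``FileLikeBytesIterReader`` below adapts any ``BytesIter`` (an async
+# iterator of ``bytes``) to this protocol; minimal usage sketch, assuming
+# ``chunks`` is such an iterator:
+#
+#     reader = FileLikeBytesIterReader(chunks)
+#     head = await reader.read(8192)  # returns up to 8192 bytes, fewer at EOF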
+ + +class FileLikeBytesIterReader(FileLikeReader): + def __init__(self, bytes_iter: BytesIter): + self._bytes_iter = bytes_iter + self._buffer = bytearray() + self._async_iterator = self._get_iterator() + + async def _get_iterator(self): + async for chunk in self._bytes_iter: + yield chunk + + async def read(self, size: int) -> bytes: + while len(self._buffer) < size: + try: + chunk = await anext(self._async_iterator) + self._buffer.extend(chunk) + except StopAsyncIteration: + break # End of file + + result, self._buffer = self._buffer[:size], self._buffer[size:] + return bytes(result) diff --git a/packages/service-library/tests/aiohttp/long_running_tasks/test_long_running_tasks_client.py b/packages/service-library/tests/aiohttp/long_running_tasks/test_long_running_tasks_client.py index 4aaca5e8d84..d8fb9b288ae 100644 --- a/packages/service-library/tests/aiohttp/long_running_tasks/test_long_running_tasks_client.py +++ b/packages/service-library/tests/aiohttp/long_running_tasks/test_long_running_tasks_client.py @@ -2,7 +2,7 @@ # pylint: disable=unused-argument import asyncio -from typing import Callable +from collections.abc import Callable import pytest from aiohttp import ClientResponseError, web @@ -36,7 +36,6 @@ def client( unused_tcp_port_factory: Callable, app: web.Application, ) -> TestClient: - return event_loop.run_until_complete( aiohttp_client(app, server_kwargs={"port": unused_tcp_port_factory()}) ) @@ -65,7 +64,7 @@ async def test_long_running_task_request_raises_400( def short_poll_interval(monkeypatch: pytest.MonkeyPatch): monkeypatch.setattr( lr_client, - "_DEFAULT_POLL_INTERVAL_S", + "DEFAULT_POLL_INTERVAL_S", 0.01, ) diff --git a/packages/service-library/tests/aiohttp/test_docker_utils.py b/packages/service-library/tests/aiohttp/test_docker_utils.py index bcd2129abd2..edd63559b3e 100644 --- a/packages/service-library/tests/aiohttp/test_docker_utils.py +++ b/packages/service-library/tests/aiohttp/test_docker_utils.py @@ -95,15 +95,15 @@ def _assert_progress_report_values( # NOTE: we exclude the message part here as this is already tested in servicelib # check first progress assert mocked_progress_cb.call_args_list[0].args[0].dict( - exclude={"message"} + exclude={"message", "attempt"} ) == ProgressReport(actual_value=0, total=total, unit="Byte").model_dump( - exclude={"message"} + exclude={"message", "attempt"} ) # check last progress assert mocked_progress_cb.call_args_list[-1].args[0].dict( - exclude={"message"} + exclude={"message", "attempt"} ) == ProgressReport(actual_value=total, total=total, unit="Byte").model_dump( - exclude={"message"} + exclude={"message", "attempt"} ) diff --git a/packages/service-library/tests/aiohttp/test_rest_middlewares.py b/packages/service-library/tests/aiohttp/test_rest_middlewares.py index 00f371544b7..0bd501066d9 100644 --- a/packages/service-library/tests/aiohttp/test_rest_middlewares.py +++ b/packages/service-library/tests/aiohttp/test_rest_middlewares.py @@ -13,15 +13,13 @@ import pytest from aiohttp import web from aiohttp.test_utils import TestClient -from common_library.error_codes import parse_error_code from common_library.json_serialization import json_dumps from servicelib.aiohttp import status from servicelib.aiohttp.rest_middlewares import ( - _FMSG_INTERNAL_ERROR_USER_FRIENDLY_WITH_OEC, envelope_middleware_factory, error_middleware_factory, ) -from servicelib.aiohttp.rest_responses import is_enveloped, unwrap_envelope +from servicelib.rest_responses import is_enveloped, unwrap_envelope @dataclass @@ -30,8 +28,7 @@ class 
Data: y: str = "foo" -class SomeUnexpectedError(Exception): - ... +class SomeUnexpectedError(Exception): ... class Handlers: @@ -235,14 +232,6 @@ async def test_raised_unhandled_exception( assert not data assert error - # user friendly message with OEC reference - assert "OEC" in error["message"] - parsed_oec = parse_error_code(error["message"]).pop() - assert ( - _FMSG_INTERNAL_ERROR_USER_FRIENDLY_WITH_OEC.format(error_code=parsed_oec) - == error["message"] - ) - # avoids details assert not error.get("errors") assert not error.get("logs") diff --git a/packages/service-library/tests/aiohttp/test_rest_responses.py b/packages/service-library/tests/aiohttp/test_rest_responses.py index d1b28d1e9fe..9865c805bfd 100644 --- a/packages/service-library/tests/aiohttp/test_rest_responses.py +++ b/packages/service-library/tests/aiohttp/test_rest_responses.py @@ -3,6 +3,7 @@ # pylint: disable=unused-variable import itertools +import json import pytest from aiohttp import web @@ -15,6 +16,7 @@ HTTPNotModified, HTTPOk, ) +from common_library.error_codes import ErrorCodeStr, create_error_code from servicelib.aiohttp import status from servicelib.aiohttp.rest_responses import create_http_error, exception_to_response from servicelib.aiohttp.web_exceptions_extension import ( @@ -58,26 +60,40 @@ def test_collected_http_errors_map(status_code: int, http_error_cls: type[HTTPEr @pytest.mark.parametrize("skip_details", [True, False]) -def tests_exception_to_response(skip_details: bool): - exception = create_http_error( - errors=[RuntimeError("foo")], - reason="Something whent wrong", +@pytest.mark.parametrize("error_code", [None, create_error_code(Exception("fake"))]) +def tests_exception_to_response(skip_details: bool, error_code: ErrorCodeStr | None): + + expected_reason = "Something whent wrong !" 
+ expected_exceptions: list[Exception] = [RuntimeError("foo")] + + http_error = create_http_error( + errors=expected_exceptions, + reason=expected_reason, http_error_cls=web.HTTPInternalServerError, skip_internal_error_details=skip_details, + error_code=error_code, ) # For now until deprecated SEE https://github.com/aio-libs/aiohttp/issues/2415 - assert isinstance(exception, Exception) - assert isinstance(exception, web.Response) - assert hasattr(exception, "__http_exception__") + assert isinstance(http_error, Exception) + assert isinstance(http_error, web.Response) + assert hasattr(http_error, "__http_exception__") # until they have exception.make_response(), we user - response = exception_to_response(exception) + response = exception_to_response(http_error) assert isinstance(response, web.Response) assert not isinstance(response, Exception) assert not hasattr(response, "__http_exception__") + # checks response components assert response.content_type == MIMETYPE_APPLICATION_JSON assert response.status == status.HTTP_500_INTERNAL_SERVER_ERROR assert response.text assert response.body + + # checks response model + response_json = json.loads(response.text) + assert response_json["data"] is None + assert response_json["error"]["message"] == expected_reason + assert response_json["error"]["supportId"] == error_code + assert response_json["error"]["status"] == response.status diff --git a/packages/service-library/tests/aiohttp/with_postgres/docker-compose.yml b/packages/service-library/tests/aiohttp/with_postgres/docker-compose.yml index 22ebab6fa0c..fb1d76c8a0c 100644 --- a/packages/service-library/tests/aiohttp/with_postgres/docker-compose.yml +++ b/packages/service-library/tests/aiohttp/with_postgres/docker-compose.yml @@ -1,6 +1,6 @@ services: postgres: - image: "postgres:14.8-alpine@sha256:150dd39ccb7ae6c7ba6130c3582c39a30bb5d3d22cb08ad0ba37001e3f829abc" + image: "postgres:17.2-alpine3.21@sha256:17143ad87797f511036cf8f50ada164aeb371f0d8068a172510549fb5d2cd65f" restart: always environment: POSTGRES_DB: db diff --git a/packages/service-library/tests/archiving_utils/test_archiving_utils.py b/packages/service-library/tests/archiving_utils/test_archiving_utils.py index 3aab7383644..ba47ee00c0b 100644 --- a/packages/service-library/tests/archiving_utils/test_archiving_utils.py +++ b/packages/service-library/tests/archiving_utils/test_archiving_utils.py @@ -4,19 +4,18 @@ # pylint:disable=no-name-in-module import asyncio -import hashlib import os import secrets import string import tempfile from collections.abc import Callable, Iterable -from concurrent.futures import ProcessPoolExecutor from pathlib import Path import pytest from faker import Faker from pydantic import ByteSize, TypeAdapter from pytest_benchmark.plugin import BenchmarkFixture +from pytest_simcore.helpers.comparing import compute_hash, compute_hashes from servicelib.archiving_utils import archive_dir, unarchive_dir @@ -92,32 +91,6 @@ def get_all_files_in_dir(dir_path: Path) -> set[Path]: } -def _compute_hash(file_path: Path) -> tuple[Path, str]: - with Path.open(file_path, "rb") as file_to_hash: - file_hash = hashlib.md5() # noqa: S324 - chunk = file_to_hash.read(8192) - while chunk: - file_hash.update(chunk) - chunk = file_to_hash.read(8192) - - return file_path, file_hash.hexdigest() - - -async def compute_hashes(file_paths: list[Path]) -> dict[Path, str]: - """given a list of files computes hashes for the files on a process pool""" - - loop = asyncio.get_event_loop() - - with ProcessPoolExecutor() as prcess_pool_executor: - 
tasks = [ - loop.run_in_executor(prcess_pool_executor, _compute_hash, file_path) - for file_path in file_paths - ] - # pylint: disable=unnecessary-comprehension - # see return value of _compute_hash it is a tuple, mapping list[Tuple[Path,str]] to Dict[Path, str] here - return dict(await asyncio.gather(*tasks)) - - def full_file_path_from_dir_and_subdirs(dir_path: Path) -> list[Path]: return [x for x in dir_path.rglob("*") if x.is_file()] @@ -423,8 +396,8 @@ async def test_regression_archive_hash_does_not_change( await archive_dir(mixed_file_types, second_archive, compress=compress) assert second_archive.exists() - _, first_hash = _compute_hash(first_archive) - _, second_hash = _compute_hash(second_archive) + _, first_hash = compute_hash(first_archive) + _, second_hash = compute_hash(second_archive) assert first_hash == second_hash diff --git a/packages/service-library/tests/fastapi/test_cancellation_middleware.py b/packages/service-library/tests/fastapi/test_cancellation_middleware.py new file mode 100644 index 00000000000..5ca9ff92172 --- /dev/null +++ b/packages/service-library/tests/fastapi/test_cancellation_middleware.py @@ -0,0 +1,148 @@ +# pylint: disable=redefined-outer-name + +import asyncio +import logging +from collections.abc import Iterator +from threading import Thread +from unittest.mock import AsyncMock + +import httpx +import pytest +import uvicorn +from fastapi import APIRouter, BackgroundTasks, FastAPI +from pytest_simcore.helpers.logging_tools import log_context +from servicelib.fastapi.cancellation_middleware import RequestCancellationMiddleware +from servicelib.utils import unused_port +from yarl import URL + + +@pytest.fixture +def server_done_event() -> asyncio.Event: + return asyncio.Event() + + +@pytest.fixture +def server_cancelled_mock() -> AsyncMock: + return AsyncMock() + + +@pytest.fixture +def fastapi_router( + server_done_event: asyncio.Event, server_cancelled_mock: AsyncMock +) -> APIRouter: + router = APIRouter() + + @router.get("/sleep") + async def sleep(sleep_time: float) -> dict[str, str]: + with log_context(logging.INFO, msg="sleeper") as ctx: + try: + await asyncio.sleep(sleep_time) + return {"message": f"Slept for {sleep_time} seconds"} + except asyncio.CancelledError: + ctx.logger.info("sleeper cancelled!") + await server_cancelled_mock() + return {"message": "Cancelled"} + finally: + server_done_event.set() + + async def _sleep_in_the_back(sleep_time: float) -> None: + with log_context(logging.INFO, msg="sleeper in the back") as ctx: + try: + await asyncio.sleep(sleep_time) + except asyncio.CancelledError: + ctx.logger.info("sleeper in the back cancelled!") + await server_cancelled_mock() + finally: + server_done_event.set() + + @router.get("/sleep-with-background-task") + async def sleep_with_background_task( + sleep_time: float, background_tasks: BackgroundTasks + ) -> dict[str, str]: + with log_context(logging.INFO, msg="sleeper with background task"): + background_tasks.add_task(_sleep_in_the_back, sleep_time) + return {"message": "Sleeping in the back"} + + return router + + +@pytest.fixture +def fastapi_app(fastapi_router: APIRouter) -> FastAPI: + app = FastAPI() + app.include_router(fastapi_router) + app.add_middleware(RequestCancellationMiddleware) + return app + + +@pytest.fixture +def uvicorn_server(fastapi_app: FastAPI) -> Iterator[URL]: + random_port = unused_port() + with log_context( + logging.INFO, + msg=f"with uvicorn server on 127.0.0.1:{random_port}", + ) as ctx: + config = uvicorn.Config( + fastapi_app, + host="127.0.0.1", + 
port=random_port,
+            log_level="error",
+        )
+        server = uvicorn.Server(config)
+
+        thread = Thread(target=server.run)
+        thread.daemon = True
+        thread.start()
+
+        ctx.logger.info(
+            "server ready at: %s",
+            f"http://127.0.0.1:{random_port}",
+        )
+
+        yield URL(f"http://127.0.0.1:{random_port}")
+
+        server.should_exit = True
+        thread.join(timeout=10)
+
+
+async def test_server_cancels_when_client_disconnects(
+    uvicorn_server: URL,
+    server_done_event: asyncio.Event,
+    server_cancelled_mock: AsyncMock,
+):
+    async with httpx.AsyncClient(base_url=f"{uvicorn_server}") as client:
+        # check a standard call still completes as expected
+        with log_context(logging.INFO, msg="client calling endpoint"):
+            response = await client.get("/sleep", params={"sleep_time": 0.1})
+            assert response.status_code == 200
+            assert response.json() == {"message": "Slept for 0.1 seconds"}
+        async with asyncio.timeout(10):
+            await server_done_event.wait()
+        server_done_event.clear()
+
+        # check a slow call gets cancelled
+        with log_context(
+            logging.INFO, msg="client calling endpoint for cancellation"
+        ) as ctx:
+            with pytest.raises(httpx.ReadTimeout):
+                response = await client.get(
+                    "/sleep", params={"sleep_time": 10}, timeout=0.1
+                )
+            ctx.logger.info("client disconnected from server")
+
+        async with asyncio.timeout(5):
+            await server_done_event.wait()
+        server_cancelled_mock.assert_called_once()
+        server_cancelled_mock.reset_mock()
+        server_done_event.clear()
+
+        # NOTE: shows that FastAPI BackgroundTasks get cancelled too!
+        # check background tasks get cancelled as well, sadly
+        with log_context(logging.INFO, msg="client calling endpoint for cancellation"):
+            response = await client.get(
+                "/sleep-with-background-task",
+                params={"sleep_time": 2},
+            )
+            assert response.status_code == 200
+        async with asyncio.timeout(5):
+            await server_done_event.wait()
+        server_cancelled_mock.assert_called_once()
diff --git a/packages/service-library/tests/fastapi/test_docker_utils.py b/packages/service-library/tests/fastapi/test_docker_utils.py
index f6d78066c97..55898891a14 100644
--- a/packages/service-library/tests/fastapi/test_docker_utils.py
+++ b/packages/service-library/tests/fastapi/test_docker_utils.py
@@ -101,15 +101,15 @@ def _assert_progress_report_values(
     # NOTE: we exclude the message part here as this is already tested in servicelib
     # check first progress
     assert mocked_progress_cb.call_args_list[0].args[0].dict(
-        exclude={"message"}
+        exclude={"message", "attempt"}
     ) == ProgressReport(actual_value=0, total=total, unit="Byte").model_dump(
-        exclude={"message"}
+        exclude={"message", "attempt"}
     )
     # check last progress
     assert mocked_progress_cb.call_args_list[-1].args[0].dict(
-        exclude={"message"}
+        exclude={"message", "attempt"}
     ) == ProgressReport(actual_value=total, total=total, unit="Byte").model_dump(
-        exclude={"message"}
+        exclude={"message", "attempt"}
     )
diff --git a/packages/service-library/tests/fastapi/test_lifespan_utils.py b/packages/service-library/tests/fastapi/test_lifespan_utils.py
new file mode 100644
index 00000000000..b3619815db8
--- /dev/null
+++ b/packages/service-library/tests/fastapi/test_lifespan_utils.py
@@ -0,0 +1,40 @@
+from collections.abc import AsyncIterator
+
+import asgi_lifespan
+import pytest
+from fastapi import FastAPI
+from fastapi_lifespan_manager import State
+from servicelib.fastapi.lifespan_utils import combine_lifespans
+
+
+async def test_multiple_lifespan_managers(capsys: pytest.CaptureFixture):
+    async def database_lifespan(app: FastAPI) -> AsyncIterator[State]:
+        _ = app
+        
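# NOTE: the prints below are asserted via capsys: the setup lines must show up
+        # while the app is running, the shutdown lines only after the
+        # LifespanManager context exits
+        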
print("setup DB") + yield {} + print("shutdown DB") + + async def cache_lifespan(app: FastAPI) -> AsyncIterator[State]: + _ = app + print("setup CACHE") + yield {} + print("shutdown CACHE") + + app = FastAPI(lifespan=combine_lifespans(database_lifespan, cache_lifespan)) + + capsys.readouterr() + + async with asgi_lifespan.LifespanManager(app): + messages = capsys.readouterr().out + + assert "setup DB" in messages + assert "setup CACHE" in messages + assert "shutdown DB" not in messages + assert "shutdown CACHE" not in messages + + messages = capsys.readouterr().out + + assert "setup DB" not in messages + assert "setup CACHE" not in messages + assert "shutdown DB" in messages + assert "shutdown CACHE" in messages diff --git a/packages/service-library/tests/fastapi/test_tracing.py b/packages/service-library/tests/fastapi/test_tracing.py index 412b59b116d..a7a1afb4ba7 100644 --- a/packages/service-library/tests/fastapi/test_tracing.py +++ b/packages/service-library/tests/fastapi/test_tracing.py @@ -11,7 +11,7 @@ import pytest from fastapi import FastAPI from pydantic import ValidationError -from servicelib.fastapi.tracing import setup_tracing +from servicelib.fastapi.tracing import initialize_tracing from settings_library.tracing import TracingSettings @@ -54,13 +54,13 @@ async def test_valid_tracing_settings( uninstrument_opentelemetry: Iterator[None], ): tracing_settings = TracingSettings() - setup_tracing( + initialize_tracing( mocked_app, tracing_settings=tracing_settings, service_name="Mock-Openetlemetry-Pytest", ) # idempotency - setup_tracing( + initialize_tracing( mocked_app, tracing_settings=tracing_settings, service_name="Mock-Openetlemetry-Pytest", @@ -92,7 +92,7 @@ async def test_invalid_tracing_settings( app = mocked_app with pytest.raises((BaseException, ValidationError, TypeError)): # noqa: PT012 tracing_settings = TracingSettings() - setup_tracing( + initialize_tracing( app, tracing_settings=tracing_settings, service_name="Mock-Openetlemetry-Pytest", @@ -146,13 +146,13 @@ async def test_tracing_setup_package_detection( importlib.import_module(package_name) # tracing_settings = TracingSettings() - setup_tracing( + initialize_tracing( mocked_app, tracing_settings=tracing_settings, service_name="Mock-Openetlemetry-Pytest", ) # idempotency - setup_tracing( + initialize_tracing( mocked_app, tracing_settings=tracing_settings, service_name="Mock-Openetlemetry-Pytest", diff --git a/packages/service-library/tests/rabbitmq/conftest.py b/packages/service-library/tests/rabbitmq/conftest.py index 79f1c0cdb32..e107d848daa 100644 --- a/packages/service-library/tests/rabbitmq/conftest.py +++ b/packages/service-library/tests/rabbitmq/conftest.py @@ -1,10 +1,31 @@ -from collections.abc import AsyncIterator, Callable, Coroutine +from collections.abc import AsyncIterator, Awaitable, Callable, Coroutine from typing import cast import aiodocker import arrow import pytest from faker import Faker +from models_library.rabbitmq_basic_types import RPCNamespace +from servicelib.rabbitmq._client_rpc import RabbitMQRPCClient + + +@pytest.fixture +async def rpc_client( + rabbitmq_rpc_client: Callable[[str], Awaitable[RabbitMQRPCClient]], +) -> RabbitMQRPCClient: + return await rabbitmq_rpc_client("pytest_rpc_client") + + +@pytest.fixture +async def rpc_server( + rabbitmq_rpc_client: Callable[[str], Awaitable[RabbitMQRPCClient]], +) -> RabbitMQRPCClient: + return await rabbitmq_rpc_client("pytest_rpc_server") + + +@pytest.fixture +def namespace() -> RPCNamespace: + return 
RPCNamespace.from_entries({f"test{i}": f"test{i}" for i in range(8)}) @pytest.fixture(autouse=True) diff --git a/packages/service-library/tests/rabbitmq/test_rabbitmq_rpc.py b/packages/service-library/tests/rabbitmq/test_rabbitmq_rpc.py index 46588de6e87..40417c4d4c3 100644 --- a/packages/service-library/tests/rabbitmq/test_rabbitmq_rpc.py +++ b/packages/service-library/tests/rabbitmq/test_rabbitmq_rpc.py @@ -2,7 +2,7 @@ # pylint:disable=unused-argument import asyncio -from collections.abc import Awaitable, Callable +from collections.abc import Awaitable from typing import Any, Final import pytest @@ -23,11 +23,6 @@ MULTIPLE_REQUESTS_COUNT: Final[NonNegativeInt] = 100 -@pytest.fixture -def namespace() -> RPCNamespace: - return RPCNamespace.from_entries({f"test{i}": f"test{i}" for i in range(8)}) - - async def add_me(*, x: Any, y: Any) -> Any: return x + y # NOTE: types are not enforced @@ -49,20 +44,6 @@ def __add__(self, other: "CustomClass") -> "CustomClass": return CustomClass(x=self.x + other.x, y=self.y + other.y) -@pytest.fixture -async def rpc_client( - rabbitmq_rpc_client: Callable[[str], Awaitable[RabbitMQRPCClient]], -) -> RabbitMQRPCClient: - return await rabbitmq_rpc_client("pytest_rpc_client") - - -@pytest.fixture -async def rpc_server( - rabbitmq_rpc_client: Callable[[str], Awaitable[RabbitMQRPCClient]], -) -> RabbitMQRPCClient: - return await rabbitmq_rpc_client("pytest_rpc_server") - - @pytest.mark.parametrize( "x,y,expected_result,expected_type", [ diff --git a/packages/service-library/tests/rabbitmq/test_rabbitmq_rpc_interfaces_async_jobs.py b/packages/service-library/tests/rabbitmq/test_rabbitmq_rpc_interfaces_async_jobs.py new file mode 100644 index 00000000000..2522764a4ca --- /dev/null +++ b/packages/service-library/tests/rabbitmq/test_rabbitmq_rpc_interfaces_async_jobs.py @@ -0,0 +1,251 @@ +import asyncio +import datetime +from collections.abc import AsyncIterator +from dataclasses import dataclass, field + +import pytest +from faker import Faker +from models_library.api_schemas_rpc_async_jobs.async_jobs import ( + AsyncJobGet, + AsyncJobId, + AsyncJobNameData, + AsyncJobResult, + AsyncJobStatus, +) +from models_library.api_schemas_rpc_async_jobs.exceptions import JobMissingError +from models_library.progress_bar import ProgressReport +from models_library.rabbitmq_basic_types import RPCMethodName, RPCNamespace +from pydantic import TypeAdapter +from servicelib.async_utils import cancel_wait_task +from servicelib.rabbitmq import RabbitMQRPCClient, RemoteMethodNotRegisteredError +from servicelib.rabbitmq.rpc_interfaces.async_jobs.async_jobs import ( + list_jobs, + submit, + submit_and_wait, +) + +pytest_simcore_core_services_selection = [ + "rabbit", +] + + +@pytest.fixture +def method_name(faker: Faker) -> RPCMethodName: + return TypeAdapter(RPCMethodName).validate_python(faker.word()) + + +@pytest.fixture +def job_id_data(faker: Faker) -> AsyncJobNameData: + return AsyncJobNameData( + user_id=faker.pyint(min_value=1), + product_name=faker.word(), + ) + + +@pytest.fixture +def job_id(faker: Faker) -> AsyncJobId: + return faker.uuid4(cast_to=None) + + +@pytest.fixture +async def async_job_rpc_server( # noqa: C901 + rpc_server: RabbitMQRPCClient, + faker: Faker, + namespace: RPCNamespace, + method_name: RPCMethodName, +) -> AsyncIterator[None]: + async def _slow_task() -> None: + await asyncio.sleep(2) + + @dataclass + class FakeServer: + tasks: list[asyncio.Task] = field(default_factory=list) + + def _get_task(self, job_id: AsyncJobId) -> asyncio.Task: + for task in 
self.tasks: + if task.get_name() == f"{job_id}": + return task + raise JobMissingError(job_id=f"{job_id}") + + async def status( + self, job_id: AsyncJobId, job_id_data: AsyncJobNameData + ) -> AsyncJobStatus: + assert job_id_data + task = self._get_task(job_id) + return AsyncJobStatus( + job_id=job_id, + progress=ProgressReport(actual_value=1 if task.done() else 0.3), + done=task.done(), + ) + + async def cancel( + self, job_id: AsyncJobId, job_id_data: AsyncJobNameData + ) -> None: + assert job_id + assert job_id_data + task = self._get_task(job_id) + task.cancel() + + async def result( + self, job_id: AsyncJobId, job_id_data: AsyncJobNameData + ) -> AsyncJobResult: + assert job_id_data + task = self._get_task(job_id) + assert task.done() + return AsyncJobResult( + result={ + "data": task.result(), + "job_id": job_id, + "job_id_data": job_id_data, + } + ) + + async def list_jobs( + self, filter_: str, job_id_data: AsyncJobNameData + ) -> list[AsyncJobGet]: + assert job_id_data + assert filter_ is not None + + return [ + AsyncJobGet( + job_id=TypeAdapter(AsyncJobId).validate_python(t.get_name()) + ) + for t in self.tasks + ] + + async def submit(self, job_id_data: AsyncJobNameData) -> AsyncJobGet: + assert job_id_data + job_id = faker.uuid4(cast_to=None) + self.tasks.append(asyncio.create_task(_slow_task(), name=f"{job_id}")) + return AsyncJobGet(job_id=job_id) + + async def setup(self) -> None: + for m in (self.status, self.cancel, self.result): + await rpc_server.register_handler( + namespace, RPCMethodName(m.__name__), m + ) + await rpc_server.register_handler( + namespace, RPCMethodName(self.list_jobs.__name__), self.list_jobs + ) + + await rpc_server.register_handler(namespace, method_name, self.submit) + + fake_server = FakeServer() + await fake_server.setup() + + yield + + for task in fake_server.tasks: + await cancel_wait_task(task) + + +@pytest.mark.parametrize("method", ["result", "status", "cancel"]) +async def test_async_jobs_methods( + async_job_rpc_server: RabbitMQRPCClient, + rpc_client: RabbitMQRPCClient, + namespace: RPCNamespace, + job_id_data: AsyncJobNameData, + job_id: AsyncJobId, + method: str, +): + from servicelib.rabbitmq.rpc_interfaces.async_jobs import async_jobs + + async_jobs_method = getattr(async_jobs, method) + with pytest.raises(JobMissingError): + await async_jobs_method( + rpc_client, + rpc_namespace=namespace, + job_id=job_id, + job_id_data=job_id_data, + ) + + +async def test_list_jobs( + async_job_rpc_server: RabbitMQRPCClient, + rpc_client: RabbitMQRPCClient, + namespace: RPCNamespace, + method_name: RPCMethodName, + job_id_data: AsyncJobNameData, +): + await list_jobs( + rpc_client, + rpc_namespace=namespace, + filter_="", + job_id_data=job_id_data, + ) + + +async def test_submit( + async_job_rpc_server: RabbitMQRPCClient, + rpc_client: RabbitMQRPCClient, + namespace: RPCNamespace, + method_name: RPCMethodName, + job_id_data: AsyncJobNameData, +): + await submit( + rpc_client, + rpc_namespace=namespace, + method_name=method_name, + job_id_data=job_id_data, + ) + + +async def test_submit_with_invalid_method_name( + async_job_rpc_server: RabbitMQRPCClient, + rpc_client: RabbitMQRPCClient, + namespace: RPCNamespace, + job_id_data: AsyncJobNameData, +): + with pytest.raises(RemoteMethodNotRegisteredError): + await submit( + rpc_client, + rpc_namespace=namespace, + method_name=RPCMethodName("invalid_method_name"), + job_id_data=job_id_data, + ) + + +async def test_submit_and_wait_properly_timesout( + async_job_rpc_server: RabbitMQRPCClient, + rpc_client: 
RabbitMQRPCClient,
+    namespace: RPCNamespace,
+    method_name: RPCMethodName,
+    job_id_data: AsyncJobNameData,
+):
+    with pytest.raises(TimeoutError):  # noqa: PT012
+        async for _job_composed_result in submit_and_wait(
+            rpc_client,
+            rpc_namespace=namespace,
+            method_name=method_name,
+            job_id_data=job_id_data,
+            client_timeout=datetime.timedelta(seconds=0.1),
+        ):
+            pass
+
+
+async def test_submit_and_wait(
+    async_job_rpc_server: RabbitMQRPCClient,
+    rpc_client: RabbitMQRPCClient,
+    namespace: RPCNamespace,
+    method_name: RPCMethodName,
+    job_id_data: AsyncJobNameData,
+):
+    async for job_composed_result in submit_and_wait(
+        rpc_client,
+        rpc_namespace=namespace,
+        method_name=method_name,
+        job_id_data=job_id_data,
+        client_timeout=datetime.timedelta(seconds=10),
+    ):
+        if not job_composed_result.done:
+            with pytest.raises(ValueError, match="No result ready!"):
+                await job_composed_result.result()
+    assert job_composed_result.done
+    assert job_composed_result.status.progress.actual_value == 1
+    assert await job_composed_result.result() == AsyncJobResult(
+        result={
+            "data": None,
+            "job_id": job_composed_result.status.job_id,
+            "job_id_data": job_id_data,
+        }
+    )
diff --git a/packages/service-library/tests/test_bytes_iters.py b/packages/service-library/tests/test_bytes_iters.py
new file mode 100644
index 00000000000..32c3037a9f0
--- /dev/null
+++ b/packages/service-library/tests/test_bytes_iters.py
@@ -0,0 +1,137 @@
+# pylint: disable=redefined-outer-name
+# pylint: disable=unused-argument
+
+import secrets
+from collections.abc import AsyncIterable
+from pathlib import Path
+from unittest.mock import Mock
+
+import pytest
+from faker import Faker
+from pytest_mock import MockerFixture
+from pytest_simcore.helpers.comparing import (
+    assert_same_contents,
+    get_files_info_from_path,
+    get_relative_to,
+)
+from servicelib.archiving_utils import unarchive_dir
+from servicelib.bytes_iters import (
+    ArchiveEntries,
+    DiskStreamReader,
+    DiskStreamWriter,
+    get_zip_bytes_iter,
+)
+from servicelib.file_utils import remove_directory
+from servicelib.progress_bar import ProgressBarData
+from servicelib.s3_utils import FileLikeBytesIterReader
+
+
+def _ensure_dir(path: Path) -> Path:
+    path.mkdir(parents=True, exist_ok=True)
+    assert path.exists()
+    assert path.is_dir()
+    return path
+
+
+@pytest.fixture
+def local_files_dir(tmp_path: Path) -> Path:
+    # Content to add to the zip
+    return _ensure_dir(tmp_path / "local_files_dir")
+
+
+@pytest.fixture
+def local_archive_path(tmp_path: Path) -> Path:
+    # local destination of archive (either from S3 or archived locally)
+    return tmp_path / "archive.zip"
+
+
+@pytest.fixture
+def local_unpacked_archive(tmp_path: Path) -> Path:
+    # contents of unpacked archive
+    return _ensure_dir(tmp_path / "unpacked_archive")
+
+
+def _rand_range(lower: int, upper: int) -> int:
+    return secrets.randbelow(upper) + (upper - lower) + 1
+
+
+def _generate_files_in_path(faker: Faker, base_dir: Path, *, prefix: str = "") -> None:
+    # mixed small text files and binary files
+    (base_dir / "empty").mkdir()
+
+    (base_dir / "d1").mkdir()
+    for i in range(_rand_range(10, 40)):
+        (base_dir / "d1" / f"{prefix}f{i}.txt").write_text(faker.text())
+        (base_dir / "d1" / f"{prefix}b{i}.bin").write_bytes(faker.json_bytes())
+
+    (base_dir / "d1" / "sd1").mkdir()
+    for i in range(_rand_range(10, 40)):
+        (base_dir / "d1" / "sd1" / f"{prefix}f{i}.txt").write_text(faker.text())
+        (base_dir / "d1" / "sd1" / f"{prefix}b{i}.bin").write_bytes(faker.json_bytes())
+
+    (base_dir / 
"fancy-names").mkdir() + for fancy_name in ( + "i have some spaces in my name", + "(%$)&%$()", + " ", + ): + (base_dir / "fancy-names" / fancy_name).write_text(faker.text()) + + +@pytest.fixture +async def prepare_content(local_files_dir: Path, faker: Faker) -> AsyncIterable[None]: + _generate_files_in_path(faker, local_files_dir, prefix="local_") + yield + await remove_directory(local_files_dir, only_children=True) + + +@pytest.fixture +def mocked_progress_bar_cb(mocker: MockerFixture) -> Mock: + def _progress_cb(*args, **kwargs) -> None: + print(f"received progress: {args}, {kwargs}") + + return mocker.Mock(side_effect=_progress_cb) + + +@pytest.mark.parametrize("use_file_like", [True, False]) +async def test_get_zip_bytes_iter( + mocked_progress_bar_cb: Mock, + prepare_content: None, + local_files_dir: Path, + local_archive_path: Path, + local_unpacked_archive: Path, + use_file_like: bool, +): + # 1. generate archive form soruces + archive_files: ArchiveEntries = [] + for file in (x for x in local_files_dir.rglob("*") if x.is_file()): + archive_name = get_relative_to(local_files_dir, file) + + archive_files.append( + (archive_name, DiskStreamReader(file).get_bytes_streamer()) + ) + + writer = DiskStreamWriter(local_archive_path) + + async with ProgressBarData( + num_steps=1, + progress_report_cb=mocked_progress_bar_cb, + description="root_bar", + ) as root: + bytes_iter = get_zip_bytes_iter( + archive_files, progress_bar=root, chunk_size=1024 + ) + + if use_file_like: + await writer.write_from_file_like(FileLikeBytesIterReader(bytes_iter)) + else: + await writer.write_from_bytes_iter(bytes_iter) + + # 2. extract archive using exiting tools + await unarchive_dir(local_archive_path, local_unpacked_archive) + + # 3. compare files in directories (same paths & sizes) + await assert_same_contents( + get_files_info_from_path(local_files_dir), + get_files_info_from_path(local_unpacked_archive), + ) diff --git a/packages/service-library/tests/test_logging_errors.py b/packages/service-library/tests/test_logging_errors.py index 8bbbee60d40..ac99c2fd657 100644 --- a/packages/service-library/tests/test_logging_errors.py +++ b/packages/service-library/tests/test_logging_errors.py @@ -3,8 +3,7 @@ import logging import pytest - -from common_library.error_codes import create_error_code +from common_library.error_codes import create_error_code, parse_error_code_parts from common_library.errors_classes import OsparcErrorMixin from servicelib.logging_errors import ( create_troubleshotting_log_kwargs, @@ -22,7 +21,11 @@ class MyError(OsparcErrorMixin, RuntimeError): exc = exc_info.value error_code = create_error_code(exc) - assert exc.error_code() == error_code + eoc1_fingerprint, eoc1_snapshot = parse_error_code_parts(error_code) + eoc2_fingerprint, eoc2_snapshot = parse_error_code_parts(exc.error_code()) + + assert eoc1_fingerprint == eoc2_fingerprint + assert eoc1_snapshot <= eoc2_snapshot msg = f"Nice message to user [{error_code}]" @@ -45,7 +48,7 @@ class MyError(OsparcErrorMixin, RuntimeError): assert log_kwargs["extra"] is not None assert ( # pylint: disable=unsubscriptable-object - log_kwargs["extra"]["log_uid"] + log_kwargs["extra"].get("log_uid") == "123" ), "user_id is injected as extra from context" diff --git a/packages/service-library/tests/test_progress_bar.py b/packages/service-library/tests/test_progress_bar.py index 6c1b7a2756b..e99516cac98 100644 --- a/packages/service-library/tests/test_progress_bar.py +++ b/packages/service-library/tests/test_progress_bar.py @@ -15,6 +15,7 @@ from 
servicelib.progress_bar import ( _INITIAL_VALUE, _MIN_PROGRESS_UPDATE_PERCENT, + _PROGRESS_ALREADY_REACGED_MAXIMUM, ProgressBarData, ) @@ -258,10 +259,43 @@ async def test_set_progress(caplog: pytest.LogCaptureFixture, faker: Faker): assert root._current_steps == pytest.approx(34) # noqa: SLF001 await root.set_(58) assert root._current_steps == pytest.approx(50) # noqa: SLF001 - assert "already reached maximum" in caplog.messages[0] + assert "WARNING" in caplog.text + assert _PROGRESS_ALREADY_REACGED_MAXIMUM in caplog.messages[0] assert "TIP:" in caplog.messages[0] +async def test_reset_progress(caplog: pytest.LogCaptureFixture, faker: Faker): + async with ProgressBarData(num_steps=50, description=faker.pystr()) as root: + assert root._current_steps == pytest.approx(0) # noqa: SLF001 + assert root.num_steps == 50 + assert root.step_weights is None + await root.set_(50) + assert root._current_steps == pytest.approx(50) # noqa: SLF001 + assert "WARNING" not in caplog.text + assert _PROGRESS_ALREADY_REACGED_MAXIMUM not in caplog.text + await root.set_(51) + assert root._current_steps == pytest.approx(50) # noqa: SLF001 + assert "WARNING" in caplog.text + assert _PROGRESS_ALREADY_REACGED_MAXIMUM in caplog.text + + caplog.clear() + root.reset() + + assert root._current_steps == pytest.approx(-1) # noqa: SLF001 + assert "WARNING" not in caplog.text + assert _PROGRESS_ALREADY_REACGED_MAXIMUM not in caplog.text + + await root.set_(12) + assert root._current_steps == pytest.approx(12) # noqa: SLF001 + assert "WARNING" not in caplog.text + assert _PROGRESS_ALREADY_REACGED_MAXIMUM not in caplog.text + + await root.set_(51) + assert root._current_steps == pytest.approx(50) # noqa: SLF001 + assert "WARNING" in caplog.text + assert _PROGRESS_ALREADY_REACGED_MAXIMUM in caplog.text + + async def test_concurrent_progress_bar(faker: Faker): async def do_something(root: ProgressBarData): async with root.sub_progress(steps=50, description=faker.pystr()) as sub: @@ -304,7 +338,7 @@ async def test_too_many_updates_does_not_raise_but_show_warning_with_stack( await root.update() await root.update() await root.update() - assert "already reached maximum" in caplog.messages[0] + assert _PROGRESS_ALREADY_REACGED_MAXIMUM in caplog.messages[0] assert "TIP:" in caplog.messages[0] diff --git a/packages/settings-library/requirements/_base.txt b/packages/settings-library/requirements/_base.txt index 8381c904587..bc7e8331334 100644 --- a/packages/settings-library/requirements/_base.txt +++ b/packages/settings-library/requirements/_base.txt @@ -23,8 +23,11 @@ pydantic-core==2.27.2 # via pydantic pydantic-extra-types==2.10.2 # via -r requirements/../../../packages/common-library/requirements/_base.in -pydantic-settings==2.7.1 - # via -r requirements/_base.in +pydantic-settings==2.7.0 + # via + # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt + # -r requirements/_base.in pygments==2.19.1 # via rich python-dotenv==1.0.1 @@ -35,7 +38,7 @@ rich==13.9.4 # typer shellingham==1.5.4 # via typer -typer==0.15.1 +typer==0.15.2 # via -r requirements/_base.in typing-extensions==4.12.2 # via diff --git a/packages/settings-library/requirements/_test.txt b/packages/settings-library/requirements/_test.txt index d91e41f728f..fb8381375d5 100644 --- a/packages/settings-library/requirements/_test.txt +++ b/packages/settings-library/requirements/_test.txt @@ -1,8 +1,8 @@ -coverage==7.6.10 +coverage==7.6.12 # via # -r requirements/_test.in 
# pytest-cov
-faker==35.0.0
+faker==36.1.1
     # via -r requirements/_test.in
 iniconfig==2.0.0
     # via pytest
@@ -12,7 +12,7 @@ packaging==24.2
     #   pytest-sugar
 pluggy==1.5.0
     # via pytest
-pytest==8.3.4
+pytest==8.3.5
     # via
     #   -r requirements/_test.in
     #   pytest-cov
@@ -29,17 +29,11 @@ pytest-runner==6.0.1
     # via -r requirements/_test.in
 pytest-sugar==1.0.0
     # via -r requirements/_test.in
-python-dateutil==2.9.0.post0
-    # via faker
 python-dotenv==1.0.1
     # via
     #   -c requirements/_base.txt
     #   -r requirements/_test.in
-six==1.17.0
-    # via python-dateutil
 termcolor==2.5.0
     # via pytest-sugar
-typing-extensions==4.12.2
-    # via
-    #   -c requirements/_base.txt
-    #   faker
+tzdata==2025.1
+    # via faker
diff --git a/packages/settings-library/requirements/_tools.txt b/packages/settings-library/requirements/_tools.txt
index 43cc43ec350..13e0ee77ce6 100644
--- a/packages/settings-library/requirements/_tools.txt
+++ b/packages/settings-library/requirements/_tools.txt
@@ -1,6 +1,6 @@
 astroid==3.3.8
     # via pylint
-black==24.10.0
+black==25.1.0
     # via -r requirements/../../../requirements/devenv.txt
 build==1.2.2.post1
     # via pip-tools
@@ -19,15 +19,15 @@ distlib==0.3.9
     # via virtualenv
 filelock==3.17.0
     # via virtualenv
-identify==2.6.6
+identify==2.6.8
     # via pre-commit
-isort==5.13.2
+isort==6.0.1
     # via
     #   -r requirements/../../../requirements/devenv.txt
     #   pylint
 mccabe==0.7.0
     # via pylint
-mypy==1.14.1
+mypy==1.15.0
     # via -r requirements/../../../requirements/devenv.txt
 mypy-extensions==1.0.0
     # via
@@ -42,7 +42,7 @@ packaging==24.2
     #   build
 pathspec==0.12.1
     # via black
-pip==25.0
+pip==25.0.1
     # via pip-tools
 pip-tools==7.4.1
     # via -r requirements/../../../requirements/devenv.txt
@@ -53,7 +53,7 @@ platformdirs==4.3.6
     #   virtualenv
 pre-commit==4.1.0
     # via -r requirements/../../../requirements/devenv.txt
-pylint==3.3.3
+pylint==3.3.4
     # via -r requirements/../../../requirements/devenv.txt
 pyproject-hooks==1.2.0
     # via
@@ -63,18 +63,17 @@ pyyaml==6.0.2
     # via
     #   -c requirements/../../../requirements/constraints.txt
     #   pre-commit
-ruff==0.9.3
+ruff==0.9.9
     # via -r requirements/../../../requirements/devenv.txt
-setuptools==75.8.0
+setuptools==75.8.2
     # via pip-tools
 tomlkit==0.13.2
     # via pylint
 typing-extensions==4.12.2
     # via
     #   -c requirements/_base.txt
-    #   -c requirements/_test.txt
     #   mypy
-virtualenv==20.29.1
+virtualenv==20.29.2
     # via pre-commit
 wheel==0.45.1
     # via pip-tools
diff --git a/packages/settings-library/src/settings_library/celery.py b/packages/settings-library/src/settings_library/celery.py
new file mode 100644
index 00000000000..9259b574047
--- /dev/null
+++ b/packages/settings-library/src/settings_library/celery.py
@@ -0,0 +1,52 @@
+from datetime import timedelta
+from typing import Annotated
+
+from pydantic import Field
+from pydantic_settings import SettingsConfigDict
+from settings_library.rabbit import RabbitSettings
+from settings_library.redis import RedisSettings
+
+from .base import BaseCustomSettings
+
+
+class CelerySettings(BaseCustomSettings):
+    CELERY_RABBIT_BROKER: Annotated[
+        RabbitSettings, Field(json_schema_extra={"auto_default_from_env": True})
+    ]
+    CELERY_REDIS_RESULT_BACKEND: Annotated[
+        RedisSettings, Field(json_schema_extra={"auto_default_from_env": True})
+    ]
+    CELERY_RESULT_EXPIRES: Annotated[
+        timedelta,
+        Field(
+            description="Time after which task results will be deleted (defaults to seconds, or see https://pydantic-docs.helpmanual.io/usage/types/#datetime-types for string formatting)."
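+            # NOTE: e.g. "P7D" (ISO 8601 duration) and 604800 (plain seconds)
+            # both parse to timedelta(days=7) under pydantic v2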
+        ),
+    ] = timedelta(days=7)
+    CELERY_RESULT_PERSISTENT: Annotated[
+        bool,
+        Field(
+            description="If set to True, result messages will be persistent (after a broker restart)."
+        ),
+    ] = True
+
+    model_config = SettingsConfigDict(
+        json_schema_extra={
+            "examples": [
+                {
+                    "CELERY_RABBIT_BROKER": {
+                        "RABBIT_USER": "guest",
+                        "RABBIT_SECURE": False,
+                        "RABBIT_PASSWORD": "guest",
+                        "RABBIT_HOST": "localhost",
+                        "RABBIT_PORT": 5672,
+                    },
+                    "CELERY_REDIS_RESULT_BACKEND": {
+                        "REDIS_HOST": "localhost",
+                        "REDIS_PORT": 6379,
+                    },
+                    "CELERY_RESULT_EXPIRES": timedelta(days=1),  # type: ignore[dict-item]
+                    "CELERY_RESULT_PERSISTENT": True,
+                }
+            ],
+        }
+    )
diff --git a/packages/settings-library/src/settings_library/docker_api_proxy.py b/packages/settings-library/src/settings_library/docker_api_proxy.py
new file mode 100644
index 00000000000..01dfffbead6
--- /dev/null
+++ b/packages/settings-library/src/settings_library/docker_api_proxy.py
@@ -0,0 +1,24 @@
+from functools import cached_property
+
+from pydantic import Field, SecretStr
+
+from .base import BaseCustomSettings
+from .basic_types import PortInt
+
+
+class DockerApiProxysettings(BaseCustomSettings):
+    DOCKER_API_PROXY_HOST: str = Field(
+        description="hostname of the docker-api-proxy service"
+    )
+    DOCKER_API_PROXY_PORT: PortInt = Field(
+        8888, description="port of the docker-api-proxy service"
+    )
+    DOCKER_API_PROXY_SECURE: bool = False
+
+    DOCKER_API_PROXY_USER: str | None = None
+    DOCKER_API_PROXY_PASSWORD: SecretStr | None = None
+
+    @cached_property
+    def base_url(self) -> str:
+        protocol = "https" if self.DOCKER_API_PROXY_SECURE else "http"
+        return f"{protocol}://{self.DOCKER_API_PROXY_HOST}:{self.DOCKER_API_PROXY_PORT}"
diff --git a/packages/settings-library/src/settings_library/redis.py b/packages/settings-library/src/settings_library/redis.py
index 7e3b0e7b693..6e21968d043 100644
--- a/packages/settings-library/src/settings_library/redis.py
+++ b/packages/settings-library/src/settings_library/redis.py
@@ -1,6 +1,5 @@
 from enum import IntEnum
 
-from pydantic import TypeAdapter
 from pydantic.networks import RedisDsn
 from pydantic.types import SecretStr
 
@@ -18,13 +17,14 @@ class RedisDatabase(IntEnum):
     DISTRIBUTED_IDENTIFIERS = 6
     DEFERRED_TASKS = 7
     DYNAMIC_SERVICES = 8
+    CELERY_TASKS = 9
 
 
 class RedisSettings(BaseCustomSettings):
     # host
     REDIS_SECURE: bool = False
     REDIS_HOST: str = "redis"
-    REDIS_PORT: PortInt = TypeAdapter(PortInt).validate_python(6789)
+    REDIS_PORT: PortInt = 6789
 
     # auth
     REDIS_USER: str | None = None
@@ -42,6 +42,6 @@ def build_redis_dsn(self, db_index: RedisDatabase) -> str:
                 ),
                 host=self.REDIS_HOST,
                 port=self.REDIS_PORT,
-                path=f"/{db_index}",
+                path=f"{db_index}",
             )
         )
diff --git a/packages/settings-library/tests/test__models_examples.py b/packages/settings-library/tests/test__models_examples.py
index 96ffc7135b2..c93ed934cf1 100644
--- a/packages/settings-library/tests/test__models_examples.py
+++ b/packages/settings-library/tests/test__models_examples.py
@@ -1,10 +1,12 @@
-import json
 from typing import Any
 
 import pytest
 import settings_library
 from pydantic import BaseModel
-from pytest_simcore.pydantic_models import walk_model_examples_in_package
+from pytest_simcore.pydantic_models import (
+    assert_validation_model,
+    walk_model_examples_in_package,
+)
 
 
 @pytest.mark.parametrize(
@@ -12,8 +14,9 @@
     walk_model_examples_in_package(settings_library),
 )
 def test_all_settings_library_models_config_examples(
-    model_cls: type[BaseModel], example_name: int, example_data: Any
+    model_cls: type[BaseModel], 
example_name: str, example_data: Any ): - assert model_cls.model_validate( - example_data - ), f"Failed {example_name} : {json.dumps(example_data)}" + + assert_validation_model( + model_cls, example_name=example_name, example_data=example_data + ) diff --git a/packages/simcore-sdk/requirements/_base.txt b/packages/simcore-sdk/requirements/_base.txt index 8ed488aef03..44dd9b8ec12 100644 --- a/packages/simcore-sdk/requirements/_base.txt +++ b/packages/simcore-sdk/requirements/_base.txt @@ -1,4 +1,4 @@ -aio-pika==9.5.4 +aio-pika==9.5.5 # via -r requirements/../../../packages/service-library/requirements/_base.in aiocache==0.12.3 # via @@ -12,9 +12,9 @@ aiofiles==24.1.0 # via # -r requirements/../../../packages/service-library/requirements/_base.in # -r requirements/_base.in -aiohappyeyeballs==2.4.4 +aiohappyeyeballs==2.4.6 # via aiohttp -aiohttp==3.11.11 +aiohttp==3.11.13 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -60,7 +60,7 @@ attrs==25.1.0 # aiohttp # jsonschema # referencing -certifi==2024.12.14 +certifi==2025.1.31 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -81,7 +81,7 @@ charset-normalizer==3.4.1 # via requests click==8.1.8 # via typer -deprecated==1.2.17 +deprecated==1.2.18 # via # opentelemetry-api # opentelemetry-exporter-otlp-proto-grpc @@ -95,7 +95,7 @@ exceptiongroup==1.2.2 # via aio-pika fast-depends==2.4.12 # via faststream -faststream==0.5.34 +faststream==0.5.35 # via -r requirements/../../../packages/service-library/requirements/_base.in flexcache==0.3 # via pint @@ -105,7 +105,7 @@ frozenlist==1.5.0 # via # aiohttp # aiosignal -googleapis-common-protos==1.66.0 +googleapis-common-protos==1.68.0 # via # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http @@ -127,7 +127,7 @@ jsonschema==4.23.0 # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in jsonschema-specifications==2024.10.1 # via jsonschema -mako==1.3.8 +mako==1.3.9 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -154,7 +154,7 @@ multidict==6.1.0 # via # aiohttp # yarl -opentelemetry-api==1.29.0 +opentelemetry-api==1.30.0 # via # -r requirements/../../../packages/service-library/requirements/_base.in # opentelemetry-exporter-otlp-proto-grpc @@ -168,17 +168,17 @@ opentelemetry-api==1.29.0 # opentelemetry-instrumentation-requests # opentelemetry-sdk # opentelemetry-semantic-conventions -opentelemetry-exporter-otlp==1.29.0 +opentelemetry-exporter-otlp==1.30.0 # via -r requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-exporter-otlp-proto-common==1.29.0 +opentelemetry-exporter-otlp-proto-common==1.30.0 # via # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http -opentelemetry-exporter-otlp-proto-grpc==1.29.0 +opentelemetry-exporter-otlp-proto-grpc==1.30.0 # via opentelemetry-exporter-otlp -opentelemetry-exporter-otlp-proto-http==1.29.0 
+opentelemetry-exporter-otlp-proto-http==1.30.0 # via opentelemetry-exporter-otlp -opentelemetry-instrumentation==0.50b0 +opentelemetry-instrumentation==0.51b0 # via # opentelemetry-instrumentation-aiopg # opentelemetry-instrumentation-asyncpg @@ -186,29 +186,29 @@ opentelemetry-instrumentation==0.50b0 # opentelemetry-instrumentation-logging # opentelemetry-instrumentation-redis # opentelemetry-instrumentation-requests -opentelemetry-instrumentation-aiopg==0.50b0 +opentelemetry-instrumentation-aiopg==0.51b0 # via -r requirements/_base.in -opentelemetry-instrumentation-asyncpg==0.50b0 +opentelemetry-instrumentation-asyncpg==0.51b0 # via -r requirements/../../../packages/postgres-database/requirements/_base.in -opentelemetry-instrumentation-dbapi==0.50b0 +opentelemetry-instrumentation-dbapi==0.51b0 # via opentelemetry-instrumentation-aiopg -opentelemetry-instrumentation-logging==0.50b0 +opentelemetry-instrumentation-logging==0.51b0 # via -r requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-instrumentation-redis==0.50b0 +opentelemetry-instrumentation-redis==0.51b0 # via -r requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-instrumentation-requests==0.50b0 +opentelemetry-instrumentation-requests==0.51b0 # via -r requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-proto==1.29.0 +opentelemetry-proto==1.30.0 # via # opentelemetry-exporter-otlp-proto-common # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http -opentelemetry-sdk==1.29.0 +opentelemetry-sdk==1.30.0 # via # -r requirements/../../../packages/service-library/requirements/_base.in # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http -opentelemetry-semantic-conventions==0.50b0 +opentelemetry-semantic-conventions==0.51b0 # via # opentelemetry-instrumentation # opentelemetry-instrumentation-asyncpg @@ -216,7 +216,7 @@ opentelemetry-semantic-conventions==0.50b0 # opentelemetry-instrumentation-redis # opentelemetry-instrumentation-requests # opentelemetry-sdk -opentelemetry-util-http==0.50b0 +opentelemetry-util-http==0.51b0 # via opentelemetry-instrumentation-requests orjson==3.10.15 # via @@ -253,7 +253,7 @@ pint==0.24.4 # via -r requirements/_base.in platformdirs==4.3.6 # via pint -propcache==0.2.1 +propcache==0.3.0 # via # aiohttp # yarl @@ -261,12 +261,14 @@ protobuf==5.29.3 # via # googleapis-common-protos # opentelemetry-proto -psutil==6.1.1 +psutil==7.0.0 # via -r requirements/../../../packages/service-library/requirements/_base.in psycopg2-binary==2.9.10 # via # aiopg # sqlalchemy +pycryptodome==3.21.0 + # via stream-zip pydantic==2.10.6 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -313,8 +315,22 @@ pydantic-extra-types==2.10.2 # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in -pydantic-settings==2.7.1 +pydantic-settings==2.7.0 # via + # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt # -r requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in @@ -386,7 +402,7 @@ rich==13.9.4 # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/settings-library/requirements/_base.in # typer -rpds-py==0.22.3 +rpds-py==0.23.1 # via # jsonschema # referencing @@ -415,6 +431,8 @@ sqlalchemy==1.4.54 # -r requirements/../../../packages/postgres-database/requirements/_base.in # aiopg # alembic +stream-zip==0.0.83 + # via -r requirements/../../../packages/service-library/requirements/_base.in tenacity==9.0.0 # via # -r requirements/../../../packages/service-library/requirements/_base.in @@ -425,7 +443,7 @@ tqdm==4.67.1 # via # -r requirements/../../../packages/service-library/requirements/_base.in # -r requirements/_base.in -typer==0.15.1 +typer==0.15.2 # via # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/settings-library/requirements/_base.in diff --git a/packages/simcore-sdk/requirements/_test.txt b/packages/simcore-sdk/requirements/_test.txt index 438e972b239..f17742ef99a 100644 --- a/packages/simcore-sdk/requirements/_test.txt +++ b/packages/simcore-sdk/requirements/_test.txt @@ -6,11 +6,11 @@ aiofiles==24.1.0 # via # -c requirements/_base.txt # aioboto3 -aiohappyeyeballs==2.4.4 +aiohappyeyeballs==2.4.6 # via # -c requirements/_base.txt # aiohttp -aiohttp==3.11.11 +aiohttp==3.11.13 # 
via # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt @@ -40,7 +40,7 @@ attrs==25.1.0 # aiohttp # jsonschema # referencing -aws-sam-translator==1.94.0 +aws-sam-translator==1.95.0 # via cfn-lint aws-xray-sdk==2.14.0 # via moto @@ -59,16 +59,16 @@ botocore==1.35.81 # boto3 # moto # s3transfer -botocore-stubs==1.36.6 +botocore-stubs==1.37.4 # via types-aiobotocore -certifi==2024.12.14 +certifi==2025.1.31 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt # requests cffi==1.17.1 # via cryptography -cfn-lint==1.22.7 +cfn-lint==1.27.0 # via moto charset-normalizer==3.4.1 # via @@ -79,11 +79,11 @@ click==8.1.8 # -c requirements/_base.txt # -r requirements/_test.in # flask -coverage==7.6.10 +coverage==7.6.12 # via # -r requirements/_test.in # pytest-cov -cryptography==44.0.0 +cryptography==44.0.2 # via # -c requirements/../../../requirements/constraints.txt # joserfc @@ -94,7 +94,7 @@ docker==7.1.0 # moto execnet==2.1.1 # via pytest-xdist -faker==35.0.0 +faker==36.1.1 # via -r requirements/_test.in flaky==3.8.1 # via -r requirements/_test.in @@ -102,7 +102,7 @@ flask==3.1.0 # via # flask-cors # moto -flask-cors==5.0.0 +flask-cors==5.0.1 # via moto frozenlist==1.5.0 # via @@ -135,9 +135,7 @@ jmespath==1.0.1 # via # boto3 # botocore -joserfc==1.0.2 - # via moto -jsondiff==2.2.1 +joserfc==1.0.4 # via moto jsonpatch==1.33 # via cfn-lint @@ -160,7 +158,7 @@ jsonschema-specifications==2024.10.1 # openapi-schema-validator lazy-object-proxy==1.10.0 # via openapi-spec-validator -mako==1.3.8 +mako==1.3.9 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt @@ -171,10 +169,8 @@ markupsafe==3.0.2 # jinja2 # mako # werkzeug -moto==5.0.20 - # via - # -c requirements/../../../requirements/constraints.txt - # -r requirements/_test.in +moto==5.1.1 + # via -r requirements/_test.in mpmath==1.3.0 # via sympy multidict==6.1.0 @@ -182,7 +178,7 @@ multidict==6.1.0 # -c requirements/_base.txt # aiohttp # yarl -mypy==1.14.1 +mypy==1.15.0 # via sqlalchemy mypy-extensions==1.0.0 # via mypy @@ -206,12 +202,12 @@ ply==3.11 # via jsonpath-ng pprintpp==0.4.0 # via pytest-icdiff -propcache==0.2.1 +propcache==0.3.0 # via # -c requirements/_base.txt # aiohttp # yarl -py-partiql-parser==0.5.6 +py-partiql-parser==0.6.1 # via moto pycparser==2.22 # via cffi @@ -226,7 +222,7 @@ pydantic-core==2.27.2 # pydantic pyparsing==3.2.1 # via moto -pytest==8.3.4 +pytest==8.3.5 # via # -r requirements/_test.in # pytest-asyncio @@ -258,7 +254,6 @@ python-dateutil==2.9.0.post0 # via # -c requirements/_base.txt # botocore - # faker # moto python-dotenv==1.0.1 # via @@ -269,7 +264,6 @@ pyyaml==6.0.2 # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt # cfn-lint - # jsondiff # jsonschema-path # moto # responses @@ -294,14 +288,14 @@ responses==0.25.6 # via moto rfc3339-validator==0.1.4 # via openapi-schema-validator -rpds-py==0.22.3 +rpds-py==0.23.1 # via # -c requirements/_base.txt # jsonschema # referencing s3transfer==0.10.4 # via boto3 -setuptools==75.8.0 +setuptools==75.8.2 # via moto six==1.17.0 # via @@ -320,17 +314,17 @@ sympy==1.13.3 # via cfn-lint termcolor==2.5.0 # via pytest-sugar -types-aiobotocore==2.19.0 +types-aiobotocore==2.21.0 # via -r requirements/_test.in -types-aiobotocore-s3==2.19.0 +types-aiobotocore-s3==2.21.0 # via types-aiobotocore types-aiofiles==24.1.0.20241221 # via -r requirements/_test.in -types-awscrt==0.23.7 +types-awscrt==0.23.10 # via botocore-stubs 
-types-requests==2.32.0.20241016 +types-requests==2.32.0.20250301 # via types-tqdm -types-tqdm==4.67.0.20241221 +types-tqdm==4.67.0.20250301 # via -r requirements/_test.in typing-extensions==4.12.2 # via @@ -338,13 +332,14 @@ typing-extensions==4.12.2 # alembic # aws-sam-translator # cfn-lint - # faker # mypy # pydantic # pydantic-core # sqlalchemy2-stubs # types-aiobotocore # types-aiobotocore-s3 +tzdata==2025.1 + # via faker urllib3==2.3.0 # via # -c requirements/../../../requirements/constraints.txt @@ -357,6 +352,7 @@ urllib3==2.3.0 werkzeug==3.1.3 # via # flask + # flask-cors # moto wrapt==1.17.2 # via diff --git a/packages/simcore-sdk/requirements/_tools.txt b/packages/simcore-sdk/requirements/_tools.txt index 4909ae52b46..57c8ea03246 100644 --- a/packages/simcore-sdk/requirements/_tools.txt +++ b/packages/simcore-sdk/requirements/_tools.txt @@ -1,6 +1,6 @@ astroid==3.3.8 # via pylint -black==24.10.0 +black==25.1.0 # via -r requirements/../../../requirements/devenv.txt build==1.2.2.post1 # via pip-tools @@ -20,15 +20,15 @@ distlib==0.3.9 # via virtualenv filelock==3.17.0 # via virtualenv -identify==2.6.6 +identify==2.6.8 # via pre-commit -isort==5.13.2 +isort==6.0.1 # via # -r requirements/../../../requirements/devenv.txt # pylint mccabe==0.7.0 # via pylint -mypy==1.14.1 +mypy==1.15.0 # via # -c requirements/_test.txt # -r requirements/../../../requirements/devenv.txt @@ -47,7 +47,7 @@ packaging==24.2 # build pathspec==0.12.1 # via black -pip==25.0 +pip==25.0.1 # via pip-tools pip-tools==7.4.1 # via -r requirements/../../../requirements/devenv.txt @@ -59,7 +59,7 @@ platformdirs==4.3.6 # virtualenv pre-commit==4.1.0 # via -r requirements/../../../requirements/devenv.txt -pylint==3.3.3 +pylint==3.3.4 # via -r requirements/../../../requirements/devenv.txt pyproject-hooks==1.2.0 # via @@ -71,9 +71,9 @@ pyyaml==6.0.2 # -c requirements/_base.txt # -c requirements/_test.txt # pre-commit -ruff==0.9.3 +ruff==0.9.9 # via -r requirements/../../../requirements/devenv.txt -setuptools==75.8.0 +setuptools==75.8.2 # via # -c requirements/_test.txt # pip-tools @@ -84,7 +84,7 @@ typing-extensions==4.12.2 # -c requirements/_base.txt # -c requirements/_test.txt # mypy -virtualenv==20.29.1 +virtualenv==20.29.2 # via pre-commit wheel==0.45.1 # via pip-tools diff --git a/packages/simcore-sdk/src/simcore_sdk/node_data/data_manager.py b/packages/simcore-sdk/src/simcore_sdk/node_data/data_manager.py index b288a295db9..26873e9ec44 100644 --- a/packages/simcore-sdk/src/simcore_sdk/node_data/data_manager.py +++ b/packages/simcore-sdk/src/simcore_sdk/node_data/data_manager.py @@ -2,7 +2,6 @@ from pathlib import Path from tempfile import TemporaryDirectory -from models_library.basic_types import IDStr from models_library.projects import ProjectID from models_library.projects_nodes_io import NodeID, StorageFileID from models_library.users import UserID @@ -105,7 +104,7 @@ async def _pull_legacy_archive( ) -> None: # NOTE: the legacy way of storing states was as zip archives async with progress_bar.sub_progress( - steps=2, description=IDStr(f"pulling {destination_path.name}") + steps=2, description=f"pulling {destination_path.name}" ) as sub_prog: with TemporaryDirectory() as tmp_dir_name: archive_file = Path(tmp_dir_name) / __get_s3_name( diff --git a/packages/simcore-sdk/src/simcore_sdk/node_ports_common/_filemanager.py b/packages/simcore-sdk/src/simcore_sdk/node_ports_common/_filemanager_utils.py similarity index 95% rename from packages/simcore-sdk/src/simcore_sdk/node_ports_common/_filemanager.py rename to 
packages/simcore-sdk/src/simcore_sdk/node_ports_common/_filemanager_utils.py index db77b2269f8..08bd0a8cf97 100644 --- a/packages/simcore-sdk/src/simcore_sdk/node_ports_common/_filemanager.py +++ b/packages/simcore-sdk/src/simcore_sdk/node_ports_common/_filemanager_utils.py @@ -2,7 +2,7 @@ from typing import cast from aiohttp import ClientError, ClientSession -from models_library.api_schemas_storage import ( +from models_library.api_schemas_storage.storage_schemas import ( ETag, FileUploadCompleteFutureResponse, FileUploadCompleteResponse, @@ -34,7 +34,7 @@ async def _get_location_id_from_location_name( store: LocationName, session: ClientSession, ) -> LocationID: - resp = await storage_client.get_storage_locations(session=session, user_id=user_id) + resp = await storage_client.list_storage_locations(session=session, user_id=user_id) for location in resp: if location.name == store: return cast(LocationID, location.id) # mypy wants it @@ -54,7 +54,7 @@ def _get_https_link_if_storage_secure(url: str) -> str: return url -async def _complete_upload( +async def complete_upload( session: ClientSession, upload_completion_link: AnyUrl, parts: list[UploadedPart], @@ -118,7 +118,7 @@ async def _complete_upload( raise exceptions.S3TransferError(msg) -async def _resolve_location_id( +async def resolve_location_id( client_session: ClientSession, user_id: UserID, store_name: LocationName | None, @@ -136,7 +136,7 @@ async def _resolve_location_id( return store_id -async def _abort_upload( +async def abort_upload( session: ClientSession, abort_upload_link: AnyUrl, *, reraise_exceptions: bool ) -> None: # abort the upload correctly, so it can revert back to last version diff --git a/packages/simcore-sdk/src/simcore_sdk/node_ports_common/aws_s3_cli.py b/packages/simcore-sdk/src/simcore_sdk/node_ports_common/aws_s3_cli.py index 320cfd7e25f..d64f0d90355 100644 --- a/packages/simcore-sdk/src/simcore_sdk/node_ports_common/aws_s3_cli.py +++ b/packages/simcore-sdk/src/simcore_sdk/node_ports_common/aws_s3_cli.py @@ -6,10 +6,8 @@ from asyncio.streams import StreamReader from pathlib import Path -from common_library.errors_classes import OsparcErrorMixin - from aiocache import cached # type: ignore[import-untyped] -from models_library.basic_types import IDStr +from common_library.errors_classes import OsparcErrorMixin from pydantic import AnyUrl, ByteSize from servicelib.progress_bar import ProgressBarData from servicelib.utils import logged_gather @@ -242,7 +240,7 @@ async def _sync_sources( async with progress_bar.sub_progress( steps=folder_size, progress_unit="Byte", - description=IDStr(f"transferring {local_dir.name}"), + description=f"transferring {local_dir.name}", ) as sub_progress: aws_s3_cli_log_parsers: list[BaseLogParser] = ( [DebugLogParser()] if debug_logs else [] diff --git a/packages/simcore-sdk/src/simcore_sdk/node_ports_common/file_io_utils.py b/packages/simcore-sdk/src/simcore_sdk/node_ports_common/file_io_utils.py index 55e1545881d..7f6801043cd 100644 --- a/packages/simcore-sdk/src/simcore_sdk/node_ports_common/file_io_utils.py +++ b/packages/simcore-sdk/src/simcore_sdk/node_ports_common/file_io_utils.py @@ -17,8 +17,12 @@ ClientSession, RequestInfo, ) -from models_library.api_schemas_storage import ETag, FileUploadSchema, UploadedPart -from models_library.basic_types import IDStr, SHA256Str +from models_library.api_schemas_storage.storage_schemas import ( + ETag, + FileUploadSchema, + UploadedPart, +) +from models_library.basic_types import SHA256Str from multidict import MultiMapping from 
pydantic import AnyUrl, NonNegativeInt from servicelib.aiohttp import status @@ -216,7 +220,7 @@ async def download_link_to_file( sub_progress = await stack.enter_async_context( progress_bar.sub_progress( steps=file_size or 1, - description=IDStr(f"downloading {file_path.name}"), + description=f"downloading {file_path.name}", ) ) @@ -400,7 +404,7 @@ async def upload_file_to_presigned_links( ) sub_progress = await stack.enter_async_context( progress_bar.sub_progress( - steps=file_size, description=IDStr(f"uploading {file_name}") + steps=file_size, description=f"uploading {file_name}" ) ) diff --git a/packages/simcore-sdk/src/simcore_sdk/node_ports_common/filemanager.py b/packages/simcore-sdk/src/simcore_sdk/node_ports_common/filemanager.py index b7180877037..46b8444fde9 100644 --- a/packages/simcore-sdk/src/simcore_sdk/node_ports_common/filemanager.py +++ b/packages/simcore-sdk/src/simcore_sdk/node_ports_common/filemanager.py @@ -5,14 +5,14 @@ import aiofiles from aiohttp import ClientSession -from models_library.api_schemas_storage import ( +from models_library.api_schemas_storage.storage_schemas import ( ETag, FileMetaDataGet, FileUploadSchema, LinkType, UploadedPart, ) -from models_library.basic_types import IDStr, SHA256Str +from models_library.basic_types import SHA256Str from models_library.projects_nodes_io import LocationID, LocationName, StorageFileID from models_library.users import UserID from pydantic import AnyUrl, ByteSize, TypeAdapter @@ -30,8 +30,7 @@ from yarl import URL from ..node_ports_common.client_session_manager import ClientSessionContextManager -from . import aws_s3_cli, exceptions, r_clone, storage_client -from ._filemanager import _abort_upload, _complete_upload, _resolve_location_id +from . import _filemanager_utils, aws_s3_cli, exceptions, r_clone, storage_client from .file_io_utils import ( LogRedirectCB, UploadableFileObject, @@ -48,7 +47,7 @@ async def complete_file_upload( client_session: ClientSession | None = None, ) -> ETag: async with ClientSessionContextManager(client_session) as session: - e_tag: ETag | None = await _complete_upload( + e_tag: ETag | None = await _filemanager_utils.complete_upload( session=session, upload_completion_link=upload_completion_link, parts=uploaded_parts, @@ -75,7 +74,9 @@ async def get_download_link_from_s3( :raises exceptions.StorageServerIssue """ async with ClientSessionContextManager(client_session) as session: - store_id = await _resolve_location_id(session, user_id, store_name, store_id) + store_id = await _filemanager_utils.resolve_location_id( + session, user_id, store_name, store_id + ) file_link = await storage_client.get_download_file_link( session=session, file_id=s3_object, @@ -99,7 +100,9 @@ async def get_upload_links_from_s3( sha256_checksum: SHA256Str | None, ) -> tuple[LocationID, FileUploadSchema]: async with ClientSessionContextManager(client_session) as session: - store_id = await _resolve_location_id(session, user_id, store_name, store_id) + store_id = await _filemanager_utils.resolve_location_id( + session, user_id, store_name, store_id + ) file_links = await storage_client.get_upload_file_links( session=session, file_id=s3_object, @@ -144,7 +147,9 @@ async def download_path_from_s3( ) async with ClientSessionContextManager(client_session) as session: - store_id = await _resolve_location_id(session, user_id, store_name, store_id) + store_id = await _filemanager_utils.resolve_location_id( + session, user_id, store_name, store_id + ) file_meta_data: FileMetaDataGet = await _get_file_meta_data( 
user_id=user_id, s3_object=s3_object, @@ -259,7 +264,7 @@ async def abort_upload( """ async with ClientSessionContextManager(client_session) as session: - await _abort_upload( + await _filemanager_utils.abort_upload( session=session, abort_upload_link=abort_upload_link, reraise_exceptions=True, @@ -364,7 +369,7 @@ async def _upload_path( # pylint: disable=too-many-arguments ) if not progress_bar: - progress_bar = ProgressBarData(num_steps=1, description=IDStr("uploading")) + progress_bar = ProgressBarData(num_steps=1, description="uploading") is_directory: bool = isinstance(path_to_upload, Path) and path_to_upload.is_dir() if ( @@ -425,13 +430,13 @@ async def _upload_path( # pylint: disable=too-many-arguments ) as exc: _logger.exception("The upload failed with an unexpected error:") if upload_links: - await _abort_upload( + await _filemanager_utils.abort_upload( session, upload_links.links.abort_upload, reraise_exceptions=False ) raise exceptions.S3TransferError from exc except CancelledError: if upload_links: - await _abort_upload( + await _filemanager_utils.abort_upload( session, upload_links.links.abort_upload, reraise_exceptions=False ) raise @@ -485,7 +490,7 @@ async def _upload_to_s3( progress_bar=progress_bar, ) # complete the upload - e_tag = await _complete_upload( + e_tag = await _filemanager_utils.complete_upload( session, upload_links.links.complete_upload, uploaded_parts, diff --git a/packages/simcore-sdk/src/simcore_sdk/node_ports_common/r_clone.py b/packages/simcore-sdk/src/simcore_sdk/node_ports_common/r_clone.py index 0283a11fe26..db5e107b753 100644 --- a/packages/simcore-sdk/src/simcore_sdk/node_ports_common/r_clone.py +++ b/packages/simcore-sdk/src/simcore_sdk/node_ports_common/r_clone.py @@ -11,7 +11,6 @@ from aiocache import cached # type: ignore[import-untyped] from aiofiles import tempfile from common_library.errors_classes import OsparcErrorMixin -from models_library.basic_types import IDStr from pydantic import AnyUrl, BaseModel, ByteSize from servicelib.progress_bar import ProgressBarData from servicelib.utils import logged_gather @@ -224,7 +223,7 @@ async def _sync_sources( async with progress_bar.sub_progress( steps=folder_size, progress_unit="Byte", - description=IDStr(f"transferring {local_dir.name}"), + description=f"transferring {local_dir.name}", ) as sub_progress: r_clone_log_parsers: list[BaseLogParser] = ( [DebugLogParser()] if debug_logs else [] diff --git a/packages/simcore-sdk/src/simcore_sdk/node_ports_common/storage_client.py b/packages/simcore-sdk/src/simcore_sdk/node_ports_common/storage_client.py index b7a394a6dbd..71d80febbc0 100644 --- a/packages/simcore-sdk/src/simcore_sdk/node_ports_common/storage_client.py +++ b/packages/simcore-sdk/src/simcore_sdk/node_ports_common/storage_client.py @@ -10,7 +10,7 @@ from aiohttp import ClientResponse, ClientSession from aiohttp import client as aiohttp_client_module from aiohttp.client_exceptions import ClientConnectionError, ClientResponseError -from models_library.api_schemas_storage import ( +from models_library.api_schemas_storage.storage_schemas import ( FileLocationArray, FileMetaDataGet, FileUploadSchema, @@ -46,7 +46,7 @@ def handle_client_exception( - handler: Callable[P, Coroutine[Any, Any, R]] + handler: Callable[P, Coroutine[Any, Any, R]], ) -> Callable[P, Coroutine[Any, Any, R]]: @wraps(handler) async def wrapped(*args: P.args, **kwargs: P.kwargs) -> R: @@ -121,12 +121,13 @@ async def retry_request( async with _session_method(session, method, url, **kwargs) as response: if response.status != 
expected_status: # this is a more precise raise_for_status() + error_msg = await response.json() response.release() raise ClientResponseError( response.request_info, response.history, status=response.status, - message=f"Received {response.status} but was expecting {expected_status=}", + message=f"Received {response.status} but was expecting {expected_status=}: '{error_msg=}'", headers=response.headers, ) @@ -134,7 +135,7 @@ async def retry_request( @handle_client_exception -async def get_storage_locations( +async def list_storage_locations( *, session: ClientSession, user_id: UserID ) -> FileLocationArray: async with retry_request( @@ -211,7 +212,7 @@ async def get_upload_file_links( async with retry_request( session, "PUT", - f"{get_base_url()}/locations/{location_id}/files/{quote(file_id, safe='')}", + f"{get_base_url()}/locations/{location_id}/files/{file_id}", expected_status=status.HTTP_200_OK, params=query_params, ) as response: @@ -239,7 +240,6 @@ async def get_file_metadata( expected_status=status.HTTP_200_OK, params={"user_id": f"{user_id}"}, ) as response: - payload = await response.json() if not payload.get("data"): # NOTE: keeps backwards compatibility diff --git a/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/__init__.py b/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/__init__.py index 5da802ab9ea..8874f98efe7 100644 --- a/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/__init__.py +++ b/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/__init__.py @@ -1,6 +1,6 @@ import logging -from models_library.api_schemas_storage import LinkType as FileLinkType +from models_library.api_schemas_storage.storage_schemas import LinkType as FileLinkType from models_library.projects import ProjectIDStr from models_library.projects_nodes_io import NodeIDStr from models_library.users import UserID diff --git a/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/nodeports_v2.py b/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/nodeports_v2.py index bc44698a593..59c73716ca3 100644 --- a/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/nodeports_v2.py +++ b/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/nodeports_v2.py @@ -6,8 +6,7 @@ from pathlib import Path from typing import Any -from models_library.api_schemas_storage import LinkType -from models_library.basic_types import IDStr +from models_library.api_schemas_storage.storage_schemas import LinkType from models_library.projects import ProjectIDStr from models_library.projects_nodes_io import NodeIDStr from models_library.services_types import ServicePortKey @@ -229,7 +228,7 @@ async def _set_with_notifications( tasks = [] async with progress_bar.sub_progress( - steps=len(port_values.items()), description=IDStr("set multiple") + steps=len(port_values.items()), description="set multiple" ) as sub_progress: for port_key, (value, set_kwargs) in port_values.items(): tasks.append( diff --git a/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/port.py b/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/port.py index 3ddab6a29d3..014aff56529 100644 --- a/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/port.py +++ b/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/port.py @@ -6,8 +6,7 @@ from pprint import pformat from typing import Any -from models_library.api_schemas_storage import LinkType -from models_library.basic_types import IDStr +from models_library.api_schemas_storage.storage_schemas import LinkType from models_library.services_io import BaseServiceIOModel from models_library.services_types import ServicePortKey 
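The retry_request change in storage_client.py above now reads the response body before releasing the connection, so the ClientResponseError raised on an unexpected status carries the server's explanation instead of a bare status code. A minimal self-contained sketch of the same pattern, assuming aiohttp and a JSON-speaking endpoint; request_expecting is an illustrative name, not part of the codebase:

    import aiohttp


    async def request_expecting(
        session: aiohttp.ClientSession, method: str, url: str, expected_status: int
    ) -> dict:
        async with session.request(method, url) as response:
            if response.status != expected_status:
                # capture the body *before* releasing the connection
                error_msg = await response.json()
                response.release()
                raise aiohttp.ClientResponseError(
                    response.request_info,
                    response.history,
                    status=response.status,
                    message=f"Received {response.status} but was expecting {expected_status=}: '{error_msg=}'",
                    headers=response.headers,
                )
            return await response.json()
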
from pydantic import ( @@ -384,7 +383,7 @@ async def set( new_concrete_value=new_value, **set_kwargs, progress_bar=progress_bar - or ProgressBarData(num_steps=1, description=IDStr("set")), + or ProgressBarData(num_steps=1, description="set"), ) await self._node_ports.save_to_db_cb(self._node_ports) diff --git a/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/port_utils.py b/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/port_utils.py index 3c1462d6fab..41f317b6f44 100644 --- a/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/port_utils.py +++ b/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/port_utils.py @@ -4,8 +4,11 @@ from pathlib import Path from typing import Any -from models_library.api_schemas_storage import FileUploadSchema, LinkType -from models_library.basic_types import IDStr, SHA256Str +from models_library.api_schemas_storage.storage_schemas import ( + FileUploadSchema, + LinkType, +) +from models_library.basic_types import SHA256Str from models_library.services_types import FileName, ServicePortKey from models_library.users import UserID from pydantic import AnyUrl, ByteSize, TypeAdapter @@ -204,7 +207,7 @@ async def pull_file_from_store( io_log_redirect_cb=io_log_redirect_cb, r_clone_settings=r_clone_settings, progress_bar=progress_bar - or ProgressBarData(num_steps=1, description=IDStr("pulling file")), + or ProgressBarData(num_steps=1, description="pulling file"), aws_s3_cli_settings=aws_s3_cli_settings, ) # if a file alias is present use it to rename the file accordingly @@ -281,7 +284,7 @@ async def pull_file_from_download_link( local_path, io_log_redirect_cb=io_log_redirect_cb, progress_bar=progress_bar - or ProgressBarData(num_steps=1, description=IDStr("pulling file")), + or ProgressBarData(num_steps=1, description="pulling file"), ) # if a file alias is present use it to rename the file accordingly diff --git a/packages/simcore-sdk/tests/integration/conftest.py b/packages/simcore-sdk/tests/integration/conftest.py index 923a373f720..b3aba7d8d35 100644 --- a/packages/simcore-sdk/tests/integration/conftest.py +++ b/packages/simcore-sdk/tests/integration/conftest.py @@ -13,7 +13,7 @@ import pytest import sqlalchemy as sa from aiohttp import ClientSession -from models_library.api_schemas_storage import FileUploadSchema +from models_library.api_schemas_storage.storage_schemas import FileUploadSchema from models_library.generics import Envelope from models_library.projects_nodes_io import LocationID, NodeIDStr, SimcoreS3FileID from models_library.users import UserID diff --git a/packages/simcore-sdk/tests/integration/test_node_data_data_manager.py b/packages/simcore-sdk/tests/integration/test_node_data_data_manager.py index ed2033813e1..59806926025 100644 --- a/packages/simcore-sdk/tests/integration/test_node_data_data_manager.py +++ b/packages/simcore-sdk/tests/integration/test_node_data_data_manager.py @@ -14,7 +14,6 @@ import pytest from faker import Faker -from models_library.basic_types import IDStr from models_library.projects import ProjectID from models_library.projects_nodes_io import NodeID, SimcoreS3FileID from models_library.users import UserID @@ -30,8 +29,9 @@ pytest_simcore_core_services_selection = [ "migration", "postgres", - "storage", + "rabbit", "redis", + "storage", ] pytest_simcore_ops_services_selection = [ @@ -158,9 +158,7 @@ async def test_valid_upload_download( mock_io_log_redirect_cb: LogRedirectCB, faker: Faker, ): - async with ProgressBarData( - num_steps=2, description=IDStr(faker.pystr()) - ) as progress_bar: + async with 
ProgressBarData(num_steps=2, description=faker.pystr()) as progress_bar: await data_manager._push_directory( # noqa: SLF001 user_id=user_id, project_id=project_id, @@ -206,9 +204,7 @@ async def test_valid_upload_download_saved_to( mock_io_log_redirect_cb: LogRedirectCB, faker: Faker, ): - async with ProgressBarData( - num_steps=2, description=IDStr(faker.pystr()) - ) as progress_bar: + async with ProgressBarData(num_steps=2, description=faker.pystr()) as progress_bar: await data_manager._push_directory( # noqa: SLF001 user_id=user_id, project_id=project_id, @@ -256,9 +252,7 @@ async def test_delete_legacy_archive( temp_dir: Path, faker: Faker, ): - async with ProgressBarData( - num_steps=2, description=IDStr(faker.pystr()) - ) as progress_bar: + async with ProgressBarData(num_steps=2, description=faker.pystr()) as progress_bar: # NOTE: legacy archives can no longer be created # generating a "legacy style archive" archive_into_dir = temp_dir / f"legacy-archive-dir-{uuid4()}" diff --git a/packages/simcore-sdk/tests/integration/test_node_ports_common_aws_s3_cli.py b/packages/simcore-sdk/tests/integration/test_node_ports_common_aws_s3_cli.py index 717a428a1ed..018b047b3a8 100644 --- a/packages/simcore-sdk/tests/integration/test_node_ports_common_aws_s3_cli.py +++ b/packages/simcore-sdk/tests/integration/test_node_ports_common_aws_s3_cli.py @@ -14,7 +14,6 @@ import aiofiles import pytest from faker import Faker -from models_library.basic_types import IDStr from models_library.progress_bar import ProgressReport from pydantic import AnyUrl, ByteSize, TypeAdapter from servicelib.file_utils import remove_directory @@ -26,8 +25,9 @@ pytest_simcore_core_services_selection = [ "migration", "postgres", - "storage", + "rabbit", "redis", + "storage", ] pytest_simcore_ops_services_selection = [ @@ -150,7 +150,7 @@ async def _report_progress_upload(report: ProgressReport) -> None: async with ProgressBarData( num_steps=1, progress_report_cb=_report_progress_upload, - description=IDStr(faker.pystr()), + description=faker.pystr(), ) as progress_bar: await aws_s3_cli.sync_local_to_s3( aws_s3_cli_settings, @@ -177,7 +177,7 @@ async def _report_progress_download(report: ProgressReport) -> None: async with ProgressBarData( num_steps=1, progress_report_cb=_report_progress_download, - description=IDStr(faker.pystr()), + description=faker.pystr(), ) as progress_bar: await aws_s3_cli.sync_s3_to_local( aws_s3_cli_settings, diff --git a/packages/simcore-sdk/tests/integration/test_node_ports_common_filemanager.py b/packages/simcore-sdk/tests/integration/test_node_ports_common_filemanager.py index 2e435d68a18..b7368cffd65 100644 --- a/packages/simcore-sdk/tests/integration/test_node_ports_common_filemanager.py +++ b/packages/simcore-sdk/tests/integration/test_node_ports_common_filemanager.py @@ -13,7 +13,6 @@ import pytest from aiohttp import ClientError from faker import Faker -from models_library.basic_types import IDStr from models_library.projects_nodes_io import ( LocationID, SimcoreS3DirectoryID, @@ -35,8 +34,9 @@ pytest_simcore_core_services_selection = [ "migration", "postgres", - "storage", + "rabbit", "redis", + "storage", ] pytest_simcore_ops_services_selection = ["minio", "adminer"] @@ -102,9 +102,7 @@ async def test_valid_upload_download( file_path = create_file_of_size(file_size, "test.test") file_id = create_valid_file_uuid("", file_path) - async with ProgressBarData( - num_steps=2, description=IDStr(faker.pystr()) - ) as progress_bar: + async with ProgressBarData(num_steps=2, description=faker.pystr()) as
progress_bar: upload_result: UploadedFolder | UploadedFile = await filemanager.upload_path( user_id=user_id, store_id=s3_simcore_location, @@ -192,9 +190,7 @@ async def test_valid_upload_download_using_file_object( assert file_metadata.etag == e_tag download_folder = Path(tmpdir) / "downloads" - async with ProgressBarData( - num_steps=1, description=IDStr(faker.pystr()) - ) as progress_bar: + async with ProgressBarData(num_steps=1, description=faker.pystr()) as progress_bar: download_file_path = await filemanager.download_path_from_s3( user_id=user_id, store_id=s3_simcore_location, @@ -365,7 +361,7 @@ async def test_invalid_file_path( download_folder = Path(tmpdir) / "downloads" with pytest.raises(exceptions.S3InvalidPathError): async with ProgressBarData( - num_steps=1, description=IDStr(faker.pystr()) + num_steps=1, description=faker.pystr() ) as progress_bar: await filemanager.download_path_from_s3( user_id=user_id, @@ -394,8 +390,8 @@ async def test_errors_upon_invalid_file_identifiers( assert file_path.exists() store = s3_simcore_location - with pytest.raises(exceptions.S3InvalidPathError): # noqa: PT012 - invalid_s3_path = SimcoreS3FileID("") + with pytest.raises(exceptions.StorageInvalidCall): # noqa: PT012 + invalid_s3_path = "" await filemanager.upload_path( user_id=user_id, store_id=store, @@ -406,7 +402,7 @@ async def test_errors_upon_invalid_file_identifiers( ) with pytest.raises(exceptions.StorageInvalidCall): # noqa: PT012 - invalid_file_id = SimcoreS3FileID("file_id") + invalid_file_id = "file_id" await filemanager.upload_path( user_id=user_id, store_id=store, @@ -417,11 +413,11 @@ async def test_errors_upon_invalid_file_identifiers( ) download_folder = Path(tmpdir) / "downloads" - with pytest.raises(exceptions.S3InvalidPathError): # noqa: PT012 + with pytest.raises(exceptions.StorageInvalidCall): # noqa: PT012 async with ProgressBarData( - num_steps=1, description=IDStr(faker.pystr()) + num_steps=1, description=faker.pystr() ) as progress_bar: - invalid_s3_path = SimcoreS3FileID("") + invalid_s3_path = "" await filemanager.download_path_from_s3( user_id=user_id, store_id=store, @@ -436,7 +432,7 @@ async def test_errors_upon_invalid_file_identifiers( with pytest.raises(exceptions.S3InvalidPathError): async with ProgressBarData( - num_steps=1, description=IDStr(faker.pystr()) + num_steps=1, description=faker.pystr() ) as progress_bar: await filemanager.download_path_from_s3( user_id=user_id, @@ -480,7 +476,7 @@ async def test_invalid_store( download_folder = Path(tmpdir) / "downloads" with pytest.raises(exceptions.S3InvalidStore): async with ProgressBarData( - num_steps=1, description=IDStr(faker.pystr()) + num_steps=1, description=faker.pystr() ) as progress_bar: await filemanager.download_path_from_s3( user_id=user_id, @@ -594,13 +590,19 @@ async def test_invalid_call_raises_exception( with pytest.raises(exceptions.StorageInvalidCall): await fct( - user_id=None, store_id=s3_simcore_location, s3_object=file_id, **extra_kwargs # type: ignore + user_id=None, + store_id=s3_simcore_location, + s3_object=file_id, + **extra_kwargs, # type: ignore ) with pytest.raises(exceptions.StorageInvalidCall): await fct(user_id=user_id, store_id=None, s3_object=file_id, **extra_kwargs) # type: ignore with pytest.raises(exceptions.StorageInvalidCall): await fct( - user_id=user_id, store_id=s3_simcore_location, s3_object="bing", **extra_kwargs # type: ignore + user_id=user_id, + store_id=s3_simcore_location, + s3_object="bing", + **extra_kwargs, # type: ignore ) @@ -686,9 +688,7 @@ async def 
test_upload_path_source_is_a_folder( assert isinstance(upload_result, UploadedFolder) assert source_dir.exists() - async with ProgressBarData( - num_steps=1, description=IDStr(faker.pystr()) - ) as progress_bar: + async with ProgressBarData(num_steps=1, description=faker.pystr()) as progress_bar: await filemanager.download_path_from_s3( user_id=user_id, store_name=None, diff --git a/packages/simcore-sdk/tests/integration/test_node_ports_common_r_clone.py b/packages/simcore-sdk/tests/integration/test_node_ports_common_r_clone.py index c94fc524bec..3beb4c6e0f2 100644 --- a/packages/simcore-sdk/tests/integration/test_node_ports_common_r_clone.py +++ b/packages/simcore-sdk/tests/integration/test_node_ports_common_r_clone.py @@ -15,7 +15,6 @@ import aiofiles import pytest from faker import Faker -from models_library.basic_types import IDStr from models_library.progress_bar import ProgressReport from pydantic import AnyUrl, ByteSize, TypeAdapter from servicelib.file_utils import remove_directory @@ -27,8 +26,9 @@ pytest_simcore_core_services_selection = [ "migration", "postgres", - "storage", + "rabbit", "redis", + "storage", ] pytest_simcore_ops_services_selection = [ @@ -162,7 +162,7 @@ async def _report_progress_upload(report: ProgressReport) -> None: async with ProgressBarData( num_steps=1, progress_report_cb=_report_progress_upload, - description=IDStr(faker.pystr()), + description=faker.pystr(), ) as progress_bar: await r_clone.sync_local_to_s3( r_clone_settings, @@ -189,7 +189,7 @@ async def _report_progress_download(report: ProgressReport) -> None: async with ProgressBarData( num_steps=1, progress_report_cb=_report_progress_download, - description=IDStr(faker.pystr()), + description=faker.pystr(), ) as progress_bar: await r_clone.sync_s3_to_local( r_clone_settings, diff --git a/packages/simcore-sdk/tests/integration/test_node_ports_v2_nodeports2.py b/packages/simcore-sdk/tests/integration/test_node_ports_v2_nodeports2.py index 40cf5d56c27..2affe04e190 100644 --- a/packages/simcore-sdk/tests/integration/test_node_ports_v2_nodeports2.py +++ b/packages/simcore-sdk/tests/integration/test_node_ports_v2_nodeports2.py @@ -45,8 +45,9 @@ pytest_simcore_core_services_selection = [ "migration", "postgres", - "storage", + "rabbit", "redis", + "storage", ] pytest_simcore_ops_services_selection = [ diff --git a/packages/simcore-sdk/tests/unit/test_node_ports_common_file_io_utils.py b/packages/simcore-sdk/tests/unit/test_node_ports_common_file_io_utils.py index a3710dfe27b..70ad8adbbc7 100644 --- a/packages/simcore-sdk/tests/unit/test_node_ports_common_file_io_utils.py +++ b/packages/simcore-sdk/tests/unit/test_node_ports_common_file_io_utils.py @@ -13,12 +13,11 @@ from aiohttp import ClientResponse, ClientSession, TCPConnector from aioresponses import aioresponses from faker import Faker -from models_library.api_schemas_storage import ( +from models_library.api_schemas_storage.storage_schemas import ( FileUploadLinks, FileUploadSchema, UploadedPart, ) -from models_library.basic_types import IDStr from moto.server import ThreadedMotoServer from pydantic import AnyUrl, ByteSize, TypeAdapter from pytest_mock import MockerFixture @@ -279,7 +278,7 @@ async def test_upload_file_to_presigned_links( assert effective_chunk_size <= used_chunk_size upload_links = await create_upload_links(num_links, used_chunk_size) assert len(upload_links.urls) == num_links - async with ProgressBarData(num_steps=1, description=IDStr("")) as progress_bar: + async with ProgressBarData(num_steps=1, description="") as progress_bar: 
uploaded_parts: list[UploadedPart] = await upload_file_to_presigned_links( session=client_session, file_upload_links=upload_links, diff --git a/packages/simcore-sdk/tests/unit/test_node_ports_common_storage_client.py b/packages/simcore-sdk/tests/unit/test_node_ports_common_storage_client.py index 3fb97424daa..3f3d722fa07 100644 --- a/packages/simcore-sdk/tests/unit/test_node_ports_common_storage_client.py +++ b/packages/simcore-sdk/tests/unit/test_node_ports_common_storage_client.py @@ -3,7 +3,7 @@ import datetime from collections.abc import AsyncIterable -from typing import Final +from typing import Any, Final import pytest from aiohttp import ClientResponseError, ClientSession @@ -14,12 +14,14 @@ _ROUTE_ALWAYS_200_OK: Final[str] = "http://always-200-ok" -_MOCK_RESPONSE_BODY: Final[str] = "mock_body" +_MOCK_RESPONSE_BODY: Final[dict[str, Any]] = {"data": "mock_body"} @pytest.fixture def mock_responses(aioresponses_mocker: aioresponses) -> None: - aioresponses_mocker.get(_ROUTE_ALWAYS_200_OK, status=200, body=_MOCK_RESPONSE_BODY) + aioresponses_mocker.get( + _ROUTE_ALWAYS_200_OK, status=200, payload=_MOCK_RESPONSE_BODY + ) @pytest.fixture @@ -46,7 +48,7 @@ async def test_retry_request_ok(mock_responses: None, session: ClientSession): session, "GET", _ROUTE_ALWAYS_200_OK, expected_status=200 ) as response: assert response.status == 200 - assert await response.text() == _MOCK_RESPONSE_BODY + assert await response.json() == _MOCK_RESPONSE_BODY async def test_retry_request_unexpected_code( diff --git a/packages/simcore-sdk/tests/unit/test_node_ports_v2_port.py b/packages/simcore-sdk/tests/unit/test_node_ports_v2_port.py index 063c71f99f4..6817d788faa 100644 --- a/packages/simcore-sdk/tests/unit/test_node_ports_v2_port.py +++ b/packages/simcore-sdk/tests/unit/test_node_ports_v2_port.py @@ -21,7 +21,7 @@ from aiohttp.client import ClientSession from aioresponses import aioresponses as AioResponsesMock from faker import Faker -from models_library.api_schemas_storage import FileMetaDataGet +from models_library.api_schemas_storage.storage_schemas import FileMetaDataGet from models_library.projects_nodes_io import LocationID from pydantic import TypeAdapter, ValidationError from pytest_mock.plugin import MockerFixture @@ -219,7 +219,7 @@ async def mock_filemanager(mocker: MockerFixture, e_tag: str, faker: Faker) -> N mocker.patch( "simcore_sdk.node_ports_common.filemanager._get_file_meta_data", return_value=TypeAdapter(FileMetaDataGet).validate_python( - FileMetaDataGet.model_config["json_schema_extra"]["examples"][0], + FileMetaDataGet.model_json_schema()["examples"][0], ), ) mocker.patch( diff --git a/packages/simcore-sdk/tests/unit/test_storage_client.py b/packages/simcore-sdk/tests/unit/test_storage_client.py index 7786aafe494..feb61ed2042 100644 --- a/packages/simcore-sdk/tests/unit/test_storage_client.py +++ b/packages/simcore-sdk/tests/unit/test_storage_client.py @@ -12,7 +12,7 @@ import pytest from aioresponses import aioresponses as AioResponsesMock from faker import Faker -from models_library.api_schemas_storage import ( +from models_library.api_schemas_storage.storage_schemas import ( FileLocationArray, FileMetaDataGet, FileUploadSchema, @@ -24,15 +24,17 @@ from pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict, setenvs_from_dict from servicelib.aiohttp import status from simcore_sdk.node_ports_common import exceptions -from simcore_sdk.node_ports_common._filemanager import _get_https_link_if_storage_secure +from simcore_sdk.node_ports_common._filemanager_utils import ( + 
_get_https_link_if_storage_secure, +) from simcore_sdk.node_ports_common.storage_client import ( LinkType, delete_file, get_download_file_link, get_file_metadata, - get_storage_locations, get_upload_file_links, list_file_metadata, + list_storage_locations, ) from simcore_sdk.node_ports_common.storage_endpoint import ( get_base_url, @@ -92,14 +94,14 @@ async def session() -> AsyncIterator[aiohttp.ClientSession]: yield session -async def test_get_storage_locations( +async def test_list_storage_locations( clear_caches: None, storage_v0_service_mock: AioResponsesMock, mock_postgres: EnvVarsDict, session: aiohttp.ClientSession, user_id: UserID, ): - result = await get_storage_locations(session=session, user_id=user_id) + result = await list_storage_locations(session=session, user_id=user_id) assert isinstance(result, FileLocationArray) # type: ignore assert len(result) == 1 @@ -177,7 +179,7 @@ async def test_get_file_metada( ) assert file_metadata assert file_metadata == FileMetaDataGet.model_validate( - FileMetaDataGet.model_config["json_schema_extra"]["examples"][0] + FileMetaDataGet.model_json_schema()["examples"][0] ) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 999f7a1beab..d41bc183c30 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -72,11 +72,10 @@ pytest-lazy-fixture>=999999999 # avoid downgrades of openapi-spec-validator related libraries referencing<=0.35.1 -# moto==5.0.21 broke `get_ec2_instance_capabilities` -# SEE https://github.com/ITISFoundation/osparc-simcore/issues/7086 -moto<5.0.21 - # Pin boto3<1.36.0 till the following is addressed https://github.com/boto/botocore/issues/2308 # when removing this pin, also update the aws cli inside ci/github/helpers/install_aws_cli_v2.bash # SEE https://github.com/ITISFoundation/osparc-simcore/issues/7127 boto3<1.36.0 + +# See issue https://github.com/ITISFoundation/osparc-simcore/issues/7300 +pydantic-settings<2.7.1 diff --git a/scripts/maintenance/computational-clusters/autoscaled_monitor/cli.py b/scripts/maintenance/computational-clusters/autoscaled_monitor/cli.py index 20e1d380d12..1f7ac0b2c21 100644 --- a/scripts/maintenance/computational-clusters/autoscaled_monitor/cli.py +++ b/scripts/maintenance/computational-clusters/autoscaled_monitor/cli.py @@ -1,10 +1,13 @@ import asyncio +import os from pathlib import Path from typing import Annotated, Optional import parse import rich import typer +from ansible.inventory.manager import InventoryManager +from ansible.parsing.dataloader import DataLoader from dotenv import dotenv_values from . import core as api @@ -17,7 +20,7 @@ wallet_id_spec, ) from .ec2 import autoscaling_ec2_client, cluster_keeper_ec2_client -from .models import AppState +from .models import AppState, BastionHost state: AppState = AppState( dynamic_parser=parse.compile(DEFAULT_DYNAMIC_EC2_FORMAT), @@ -32,13 +35,13 @@ app = typer.Typer() -def _parse_environment(deploy_config: Path) -> dict[str, str | None]: +def _parse_repo_config(deploy_config: Path) -> dict[str, str | None]: repo_config = deploy_config / "repo.config" if not repo_config.exists(): rich.print( - f"[red]{repo_config} does not exist! Please run OPS code to generate it[/red]" + f"[red]{repo_config} does not exist! 
Please run `make repo.config` in {deploy_config} to generate it[/red]" ) - raise typer.Exit(1) + raise typer.Exit(os.EX_DATAERR) environment = dotenv_values(repo_config) @@ -46,11 +49,34 @@ def _parse_environment(deploy_config: Path) -> dict[str, str | None]: return environment +def _parse_inventory(deploy_config: Path) -> BastionHost: + inventory_path = deploy_config / "ansible" / "inventory.ini" + if not inventory_path.exists(): + rich.print( + f"[red]{inventory_path} does not exist! Please run `make inventory` in {deploy_config} to generate it[/red]" + ) + raise typer.Exit(os.EX_DATAERR) + + loader = DataLoader() + inventory = InventoryManager(loader=loader, sources=[f"{inventory_path}"]) + + try: + return BastionHost( + ip=inventory.groups["CAULDRON_UNIX"].get_vars()["bastion_ip"], + user_name=inventory.groups["CAULDRON_UNIX"].get_vars()["bastion_user"], + ) + except KeyError as err: + rich.print( + f"[red]{inventory_path} invalid! Unable to find bastion_ip in the inventory file. TIP: Please run `make inventory` in {deploy_config} to generate it[/red]" + ) + raise typer.Exit(os.EX_DATAERR) from err + + @app.callback() def main( deploy_config: Annotated[ Path, typer.Option(help="path to the deploy configuration") - ] + ], ): """Manages external clusters""" @@ -58,7 +84,8 @@ def main( assert ( deploy_config.is_dir() ), "deploy-config argument is not pointing to a directory!" - state.environment = _parse_environment(deploy_config) + state.environment = _parse_repo_config(deploy_config) + state.main_bastion_host = _parse_inventory(deploy_config) # connect to ec2s state.ec2_resource_autoscaling = autoscaling_ec2_client(state) @@ -113,7 +140,8 @@ def summary( """ - asyncio.run(api.summary(state, user_id or None, wallet_id or None)) + if not asyncio.run(api.summary(state, user_id or None, wallet_id or None)): + raise typer.Exit(1) @app.command() @@ -157,5 +185,11 @@ def trigger_cluster_termination( asyncio.run(api.trigger_cluster_termination(state, user_id, wallet_id)) +@app.command() +def check_database_connection() -> None: + """Checks that the connection to the simcore database is ready.""" + asyncio.run(api.check_database_connection(state)) + + if __name__ == "__main__": app() diff --git a/scripts/maintenance/computational-clusters/autoscaled_monitor/core.py b/scripts/maintenance/computational-clusters/autoscaled_monitor/core.py index 540b4581ab6..9801b399869 100755 --- a/scripts/maintenance/computational-clusters/autoscaled_monitor/core.py +++ b/scripts/maintenance/computational-clusters/autoscaled_monitor/core.py @@ -142,7 +142,7 @@ def _print_dynamic_instances( f"Up: {utils.timedelta_formatting(time_now - instance.ec2_instance.launch_time, color_code=True)}", f"ExtIP: {instance.ec2_instance.public_ip_address}", f"IntIP: {instance.ec2_instance.private_ip_address}", - f"/mnt/docker(free): {utils.color_encode_with_threshold(instance.disk_space.human_readable(), instance.disk_space, TypeAdapter(ByteSize).validate_python('15Gib'))}", + f"/mnt/docker(free): {utils.color_encode_with_threshold(instance.disk_space.human_readable(), instance.disk_space, TypeAdapter(ByteSize).validate_python('15Gib'))}", ] ), service_table, @@ -190,7 +190,7 @@ def _print_computational_clusters( f"UserID: {cluster.primary.user_id}", f"WalletID: {cluster.primary.wallet_id}", f"Heartbeat: {utils.timedelta_formatting(time_now - cluster.primary.last_heartbeat) if cluster.primary.last_heartbeat else 'n/a'}", - f"/mnt/docker(free): {utils.color_encode_with_threshold(cluster.primary.disk_space.human_readable(),
cluster.primary.disk_space, TypeAdapter(ByteSize).validate_python('15Gib'))}", + f"/mnt/docker(free): {utils.color_encode_with_threshold(cluster.primary.disk_space.human_readable(), cluster.primary.disk_space, TypeAdapter(ByteSize).validate_python('15Gib'))}", ] ), "\n".join( @@ -223,7 +223,7 @@ def _print_computational_clusters( table.add_row( "\n".join( [ - f"[italic]{utils.color_encode_with_state(f'Worker {index+1}', worker.ec2_instance)}[/italic]", + f"[italic]{utils.color_encode_with_state(f'Worker {index + 1}', worker.ec2_instance)}[/italic]", f"Name: {worker.name}", f"ID: {worker.ec2_instance.id}", f"AMI: {worker.ec2_instance.image_id}", @@ -232,7 +232,7 @@ def _print_computational_clusters( f"ExtIP: {worker.ec2_instance.public_ip_address}", f"IntIP: {worker.ec2_instance.private_ip_address}", f"DaskWorkerIP: {worker.dask_ip}", - f"/mnt/docker(free): {utils.color_encode_with_threshold(worker.disk_space.human_readable(), worker.disk_space, TypeAdapter(ByteSize).validate_python('15Gib'))}", + f"/mnt/docker(free): {utils.color_encode_with_threshold(worker.disk_space.human_readable(), worker.disk_space, TypeAdapter(ByteSize).validate_python('15Gib'))}", "", ] ), @@ -301,7 +301,6 @@ async def _analyze_computational_instances( computational_instances: list[ComputationalInstance], ssh_key_path: Path | None, ) -> list[ComputationalCluster]: - all_disk_spaces = [UNDEFINED_BYTESIZE] * len(computational_instances) if ssh_key_path is not None: all_disk_spaces = await asyncio.gather( @@ -414,7 +413,7 @@ async def _parse_dynamic_instances( return dynamic_instances -async def summary(state: AppState, user_id: int | None, wallet_id: int | None) -> None: +async def summary(state: AppState, user_id: int | None, wallet_id: int | None) -> bool: # get all the running instances assert state.ec2_resource_autoscaling dynamic_instances = await ec2.list_dynamic_instances_from_ec2( @@ -429,6 +428,14 @@ async def summary(state: AppState, user_id: int | None, wallet_id: int | None) - state.ec2_resource_autoscaling.meta.client.meta.region_name, ) + time_threshold = arrow.utcnow().shift(minutes=-30).datetime + + dynamic_services_in_error = any( + service.needs_manual_intervention and service.created_at < time_threshold + for instance in dynamic_autoscaled_instances + for service in instance.running_services + ) + assert state.ec2_resource_clusters_keeper computational_instances = await ec2.list_computational_instances_from_ec2( state, user_id, wallet_id @@ -442,6 +449,8 @@ async def summary(state: AppState, user_id: int | None, wallet_id: int | None) - state.ec2_resource_clusters_keeper.meta.client.meta.region_name, ) + return not dynamic_services_in_error + def _print_computational_tasks( user_id: int, @@ -638,3 +647,7 @@ async def trigger_cluster_termination( ) else: rich.print("not deleting anything") + + +async def check_database_connection(state: AppState) -> None: + await db.check_db_connection(state) diff --git a/scripts/maintenance/computational-clusters/autoscaled_monitor/dask.py b/scripts/maintenance/computational-clusters/autoscaled_monitor/dask.py index 750ef816bc8..d6ca45ba7a3 100644 --- a/scripts/maintenance/computational-clusters/autoscaled_monitor/dask.py +++ b/scripts/maintenance/computational-clusters/autoscaled_monitor/dask.py @@ -36,7 +36,6 @@ async def dask_client( ) try: - async with contextlib.AsyncExitStack() as stack: if instance.public_ip_address is not None: url = AnyUrl(f"tls://{instance.public_ip_address}:{_SCHEDULER_PORT}") @@ -85,7 +84,7 @@ async def 
trigger_job_cancellation_in_scheduler( task_id: TaskId, ) -> None: async with dask_client(state, cluster.primary.ec2_instance) as client: - task_future = distributed.Future(task_id) + task_future = distributed.Future(task_id, client=client) cancel_event = distributed.Event( name=TASK_CANCEL_EVENT_NAME_TEMPLATE.format(task_future.key), client=client, @@ -112,13 +111,13 @@ def _list_tasks( return dict(task_state_to_tasks) - list_of_tasks: dict[TaskState, list[TaskId]] = [] + list_of_tasks: dict[TaskState, list[TaskId]] = {} try: - list_of_tasks = await client.run_on_scheduler( - _list_tasks - ) # type: ignore + list_of_tasks = await client.run_on_scheduler(_list_tasks) # type: ignore except TypeError: - rich.print(f"ERROR while recoverring unrunnable tasks using {dask_client=}. Defaulting to empty list of tasks!!") + rich.print( + f"ERROR while recovering unrunnable tasks using {dask_client=}. Defaulting to empty list of tasks!!" + ) return list_of_tasks diff --git a/scripts/maintenance/computational-clusters/autoscaled_monitor/db.py b/scripts/maintenance/computational-clusters/autoscaled_monitor/db.py index 615fe6b82a6..14190934aa1 100644 --- a/scripts/maintenance/computational-clusters/autoscaled_monitor/db.py +++ b/scripts/maintenance/computational-clusters/autoscaled_monitor/db.py @@ -1,3 +1,4 @@ +import asyncio import contextlib import uuid from collections.abc import AsyncGenerator @@ -9,37 +10,59 @@ from sqlalchemy.ext.asyncio import AsyncEngine, create_async_engine from .models import AppState, ComputationalTask, PostgresDB +from .ssh import ssh_tunnel @contextlib.asynccontextmanager -async def db_engine(state: AppState) -> AsyncGenerator[AsyncEngine, Any]: - engine = None - try: - for env in [ - "POSTGRES_USER", - "POSTGRES_PASSWORD", - "POSTGRES_ENDPOINT", - "POSTGRES_DB", - ]: - assert state.environment[env] - postgres_db = PostgresDB( - dsn=TypeAdapter(PostgresDsn).validate_python( - f"postgresql+asyncpg://{state.environment['POSTGRES_USER']}:{state.environment['POSTGRES_PASSWORD']}@{state.environment['POSTGRES_ENDPOINT']}/{state.environment['POSTGRES_DB']}" +async def db_engine( + state: AppState, +) -> AsyncGenerator[AsyncEngine, Any]: + async with contextlib.AsyncExitStack() as stack: + assert state.environment["POSTGRES_ENDPOINT"] # nosec + db_endpoint = state.environment["POSTGRES_ENDPOINT"] + if state.main_bastion_host: + assert state.ssh_key_path # nosec + db_host, db_port = db_endpoint.split(":") + tunnel = stack.enter_context( + ssh_tunnel( + ssh_host=state.main_bastion_host.ip, + username=state.main_bastion_host.user_name, + private_key_path=state.ssh_key_path, + remote_bind_host=db_host, + remote_bind_port=int(db_port), + ) + ) + assert tunnel + db_endpoint = ( + f"{tunnel.local_bind_address[0]}:{tunnel.local_bind_address[1]}" ) - ) - engine = create_async_engine( - f"{postgres_db.dsn}", - connect_args={ - "server_settings": { - "application_name": "osparc-clusters-monitoring-script" - } - }, - ) - yield engine - finally: - if engine: - await engine.dispose() + engine = None + try: + for env in [ + "POSTGRES_USER", + "POSTGRES_PASSWORD", + "POSTGRES_DB", + ]: + assert state.environment[env] + postgres_db = PostgresDB( + dsn=TypeAdapter(PostgresDsn).validate_python( + f"postgresql+asyncpg://{state.environment['POSTGRES_USER']}:{state.environment['POSTGRES_PASSWORD']}@{db_endpoint}/{state.environment['POSTGRES_DB']}" + ) + ) + + engine = create_async_engine( + f"{postgres_db.dsn}", + connect_args={ + "server_settings": { + "application_name":
"osparc-clusters-monitoring-script" + } + }, + ) + yield engine + finally: + if engine: + await engine.dispose() async def abort_job_in_db( @@ -57,6 +80,23 @@ async def abort_job_in_db( rich.print(f"set comp_tasks for {project_id=}/{node_id=} set to ABORTED") +async def check_db_connection(state: AppState) -> bool: + try: + async with contextlib.AsyncExitStack() as stack: + engine = await stack.enter_async_context(db_engine(state)) + async with asyncio.timeout(5): + db_connection = await stack.enter_async_context(engine.connect()) + result = await db_connection.execute(sa.text("SELECT 1")) + result.one() + rich.print( + "[green]Database connection test completed successfully![/green]" + ) + return True + except Exception as e: # pylint: disable=broad-exception-caught + rich.print(f"[red]Database connection test failed: {e}[/red]") + return False + + async def list_computational_tasks_from_db( state: AppState, user_id: int ) -> list[ComputationalTask]: diff --git a/scripts/maintenance/computational-clusters/autoscaled_monitor/models.py b/scripts/maintenance/computational-clusters/autoscaled_monitor/models.py index 247fe858e97..84e992294d5 100644 --- a/scripts/maintenance/computational-clusters/autoscaled_monitor/models.py +++ b/scripts/maintenance/computational-clusters/autoscaled_monitor/models.py @@ -12,6 +12,12 @@ from pydantic import BaseModel, ByteSize, PostgresDsn +@dataclass(kw_only=True, frozen=True, slots=True) +class BastionHost: + ip: str + user_name: str + + @dataclass(kw_only=True) class AppState: environment: dict[str, str | None] = field(default_factory=dict) @@ -22,6 +28,7 @@ class AppState: computational_parser_workers: parse.Parser deploy_config: Path | None = None ssh_key_path: Path | None = None + main_bastion_host: BastionHost | None = None computational_bastion: Instance | None = None dynamic_bastion: Instance | None = None diff --git a/scripts/maintenance/computational-clusters/pyproject.toml b/scripts/maintenance/computational-clusters/pyproject.toml index b3db20eb921..acd2a509975 100644 --- a/scripts/maintenance/computational-clusters/pyproject.toml +++ b/scripts/maintenance/computational-clusters/pyproject.toml @@ -6,8 +6,8 @@ dependencies = [ "black", "boto3", # NOTE: these must be in sync with ospar - "cloudpickle", - "dask[distributed]", + "cloudpickle==3.1.0", + "dask[distributed]==2024.12.0", "mypy_boto3_ec2", "types-boto3", "parse", @@ -19,6 +19,7 @@ dependencies = [ "rich", "sqlalchemy[asyncio]", "sshtunnel", + "ansible>=10.7.0", ] name = "autoscaled-monitor" version = "1.0.0" diff --git a/scripts/maintenance/computational-clusters/uv.lock b/scripts/maintenance/computational-clusters/uv.lock new file mode 100644 index 00000000000..d2ac6d7f173 --- /dev/null +++ b/scripts/maintenance/computational-clusters/uv.lock @@ -0,0 +1,1443 @@ +version = 1 +revision = 1 +requires-python = ">=3.10" +resolution-markers = [ + "python_full_version >= '3.12'", + "python_full_version == '3.11.*'", + "python_full_version < '3.11'", +] + +[[package]] +name = "aiocache" +version = "0.12.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/7a/64/b945b8025a9d1e6e2138845f4022165d3b337f55f50984fbc6a4c0a1e355/aiocache-0.12.3.tar.gz", hash = "sha256:f528b27bf4d436b497a1d0d1a8f59a542c153ab1e37c3621713cb376d44c4713", size = 132196 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/37/d7/15d67e05b235d1ed8c3ce61688fe4d84130e72af1657acadfaac3479f4cf/aiocache-0.12.3-py2.py3-none-any.whl", hash = 
"sha256:889086fc24710f431937b87ad3720a289f7fc31c4fd8b68e9f918b9bacd8270d", size = 28199 }, +] + +[[package]] +name = "annotated-types" +version = "0.7.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ee/67/531ea369ba64dcff5ec9c3402f9f51bf748cec26dde048a2f973a4eea7f5/annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89", size = 16081 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53", size = 13643 }, +] + +[[package]] +name = "ansible" +version = "10.7.0" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version < '3.11'", +] +dependencies = [ + { name = "ansible-core", version = "2.17.9", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/d4/64/29fdff6fe7682342adb54802c1cd90b2272d382e1743089af88f90a1d986/ansible-10.7.0.tar.gz", hash = "sha256:59d29e3de1080e740dfa974517d455217601b16d16880314d9be26145c68dc22", size = 41256974 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f3/95/cb8944902a2cdd94b1e19ff73695548679a388b9c473dc63c8dc64ffea3a/ansible-10.7.0-py3-none-any.whl", hash = "sha256:0089f08e047ceb70edd011be009f5c6273add613fbe491e9697c0556c989d8ea", size = 51576038 }, +] + +[[package]] +name = "ansible" +version = "11.3.0" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.12'", + "python_full_version == '3.11.*'", +] +dependencies = [ + { name = "ansible-core", version = "2.18.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/f3/0d/33a32c7e119c594d87d599afdf787ba50262042b8daaeaa32036b3fc446d/ansible-11.3.0.tar.gz", hash = "sha256:90b409f630dc6d558224409a3948314ede1bcda6db2d03c17708cef6117a6103", size = 42493561 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3e/bc/d91554f4490314ebea31b375ffdbe5ef130211ac0486d41b940dae95bcc7/ansible-11.3.0-py3-none-any.whl", hash = "sha256:699c76dd00e841e7307d4829b0454c3ee746df8e4488fa10825014a01d42efcc", size = 54163606 }, +] + +[[package]] +name = "ansible-core" +version = "2.17.9" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version < '3.11'", +] +dependencies = [ + { name = "cryptography", marker = "python_full_version < '3.11'" }, + { name = "jinja2", marker = "python_full_version < '3.11'" }, + { name = "packaging", marker = "python_full_version < '3.11'" }, + { name = "pyyaml", marker = "python_full_version < '3.11'" }, + { name = "resolvelib", marker = "python_full_version < '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/14/f2/0453c53a66145b10edf3aa64ee9fb18896a9ee93f7f741ef8ab6ddfa42b0/ansible_core-2.17.9.tar.gz", hash = "sha256:c24cdc2bab19b910bbdb4a1074af5745e16c78c618f15829e7ddcf699f69a510", size = 3107948 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b5/99/0fa75683fbcc97138f149b6e6811270ba96bd3be5a1be667e829393e100b/ansible_core-2.17.9-py3-none-any.whl", hash = "sha256:d2fde719fa8bcaa303ae9b289099c4d49d6566d06e233a47b01de0d4e5438f7b", size = 2197369 }, +] + +[[package]] +name = "ansible-core" +version = 
"2.18.3" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.12'", + "python_full_version == '3.11.*'", +] +dependencies = [ + { name = "cryptography", marker = "python_full_version >= '3.11'" }, + { name = "jinja2", marker = "python_full_version >= '3.11'" }, + { name = "packaging", marker = "python_full_version >= '3.11'" }, + { name = "pyyaml", marker = "python_full_version >= '3.11'" }, + { name = "resolvelib", marker = "python_full_version >= '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/39/26/409259cf95b0ef3471f45837cfda01ae87bcced66dfef7691715184550cf/ansible_core-2.18.3.tar.gz", hash = "sha256:8c4eaca40845238e2601b9bc9dbfbd4f6ed3502cb8b2632789f75ce478abfdee", size = 3077314 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/69/df/8f1d7ec589ceba8c34ebdf7abc083b99ab8c9112bda5f3bfb88b223f75bb/ansible_core-2.18.3-py3-none-any.whl", hash = "sha256:4d5120916b6d36881185c0c7231cdb7b1675f7dddd1a7a833a7d67d56bcdfcc8", size = 2216727 }, +] + +[[package]] +name = "arrow" +version = "1.3.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "python-dateutil" }, + { name = "types-python-dateutil" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/2e/00/0f6e8fcdb23ea632c866620cc872729ff43ed91d284c866b515c6342b173/arrow-1.3.0.tar.gz", hash = "sha256:d4540617648cb5f895730f1ad8c82a65f2dad0166f57b75f3ca54759c4d67a85", size = 131960 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f8/ed/e97229a566617f2ae958a6b13e7cc0f585470eac730a73e9e82c32a3cdd2/arrow-1.3.0-py3-none-any.whl", hash = "sha256:c728b120ebc00eb84e01882a6f5e7927a53960aa990ce7dd2b10f39005a67f80", size = 66419 }, +] + +[[package]] +name = "astroid" +version = "3.3.9" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions", marker = "python_full_version < '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/39/33/536530122a22a7504b159bccaf30a1f76aa19d23028bd8b5009eb9b2efea/astroid-3.3.9.tar.gz", hash = "sha256:622cc8e3048684aa42c820d9d218978021c3c3d174fb03a9f0d615921744f550", size = 398731 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/de/80/c749efbd8eef5ea77c7d6f1956e8fbfb51963b7f93ef79647afd4d9886e3/astroid-3.3.9-py3-none-any.whl", hash = "sha256:d05bfd0acba96a7bd43e222828b7d9bc1e138aaeb0649707908d3702a9831248", size = 275339 }, +] + +[[package]] +name = "async-timeout" +version = "5.0.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a5/ae/136395dfbfe00dfc94da3f3e136d0b13f394cba8f4841120e34226265780/async_timeout-5.0.1.tar.gz", hash = "sha256:d9321a7a3d5a6a5e187e824d2fa0793ce379a202935782d555d6e9d2735677d3", size = 9274 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fe/ba/e2081de779ca30d473f21f5b30e0e737c438205440784c7dfc81efc2b029/async_timeout-5.0.1-py3-none-any.whl", hash = "sha256:39e3809566ff85354557ec2398b55e096c8364bacac9405a7a1fa429e77fe76c", size = 6233 }, +] + +[[package]] +name = "asyncpg" +version = "0.30.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "async-timeout", marker = "python_full_version < '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/2f/4c/7c991e080e106d854809030d8584e15b2e996e26f16aee6d757e387bc17d/asyncpg-0.30.0.tar.gz", hash = "sha256:c551e9928ab6707602f44811817f82ba3c446e018bfe1d3abecc8ba5f3eac851", size = 957746 } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/bb/07/1650a8c30e3a5c625478fa8aafd89a8dd7d85999bf7169b16f54973ebf2c/asyncpg-0.30.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bfb4dd5ae0699bad2b233672c8fc5ccbd9ad24b89afded02341786887e37927e", size = 673143 }, + { url = "https://files.pythonhosted.org/packages/a0/9a/568ff9b590d0954553c56806766914c149609b828c426c5118d4869111d3/asyncpg-0.30.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:dc1f62c792752a49f88b7e6f774c26077091b44caceb1983509edc18a2222ec0", size = 645035 }, + { url = "https://files.pythonhosted.org/packages/de/11/6f2fa6c902f341ca10403743701ea952bca896fc5b07cc1f4705d2bb0593/asyncpg-0.30.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3152fef2e265c9c24eec4ee3d22b4f4d2703d30614b0b6753e9ed4115c8a146f", size = 2912384 }, + { url = "https://files.pythonhosted.org/packages/83/83/44bd393919c504ffe4a82d0aed8ea0e55eb1571a1dea6a4922b723f0a03b/asyncpg-0.30.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c7255812ac85099a0e1ffb81b10dc477b9973345793776b128a23e60148dd1af", size = 2947526 }, + { url = "https://files.pythonhosted.org/packages/08/85/e23dd3a2b55536eb0ded80c457b0693352262dc70426ef4d4a6fc994fa51/asyncpg-0.30.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:578445f09f45d1ad7abddbff2a3c7f7c291738fdae0abffbeb737d3fc3ab8b75", size = 2895390 }, + { url = "https://files.pythonhosted.org/packages/9b/26/fa96c8f4877d47dc6c1864fef5500b446522365da3d3d0ee89a5cce71a3f/asyncpg-0.30.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:c42f6bb65a277ce4d93f3fba46b91a265631c8df7250592dd4f11f8b0152150f", size = 3015630 }, + { url = "https://files.pythonhosted.org/packages/34/00/814514eb9287614188a5179a8b6e588a3611ca47d41937af0f3a844b1b4b/asyncpg-0.30.0-cp310-cp310-win32.whl", hash = "sha256:aa403147d3e07a267ada2ae34dfc9324e67ccc4cdca35261c8c22792ba2b10cf", size = 568760 }, + { url = "https://files.pythonhosted.org/packages/f0/28/869a7a279400f8b06dd237266fdd7220bc5f7c975348fea5d1e6909588e9/asyncpg-0.30.0-cp310-cp310-win_amd64.whl", hash = "sha256:fb622c94db4e13137c4c7f98834185049cc50ee01d8f657ef898b6407c7b9c50", size = 625764 }, + { url = "https://files.pythonhosted.org/packages/4c/0e/f5d708add0d0b97446c402db7e8dd4c4183c13edaabe8a8500b411e7b495/asyncpg-0.30.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5e0511ad3dec5f6b4f7a9e063591d407eee66b88c14e2ea636f187da1dcfff6a", size = 674506 }, + { url = "https://files.pythonhosted.org/packages/6a/a0/67ec9a75cb24a1d99f97b8437c8d56da40e6f6bd23b04e2f4ea5d5ad82ac/asyncpg-0.30.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:915aeb9f79316b43c3207363af12d0e6fd10776641a7de8a01212afd95bdf0ed", size = 645922 }, + { url = "https://files.pythonhosted.org/packages/5c/d9/a7584f24174bd86ff1053b14bb841f9e714380c672f61c906eb01d8ec433/asyncpg-0.30.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c198a00cce9506fcd0bf219a799f38ac7a237745e1d27f0e1f66d3707c84a5a", size = 3079565 }, + { url = "https://files.pythonhosted.org/packages/a0/d7/a4c0f9660e333114bdb04d1a9ac70db690dd4ae003f34f691139a5cbdae3/asyncpg-0.30.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3326e6d7381799e9735ca2ec9fd7be4d5fef5dcbc3cb555d8a463d8460607956", size = 3109962 }, + { url = "https://files.pythonhosted.org/packages/3c/21/199fd16b5a981b1575923cbb5d9cf916fdc936b377e0423099f209e7e73d/asyncpg-0.30.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = 
"sha256:51da377487e249e35bd0859661f6ee2b81db11ad1f4fc036194bc9cb2ead5056", size = 3064791 }, + { url = "https://files.pythonhosted.org/packages/77/52/0004809b3427534a0c9139c08c87b515f1c77a8376a50ae29f001e53962f/asyncpg-0.30.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:bc6d84136f9c4d24d358f3b02be4b6ba358abd09f80737d1ac7c444f36108454", size = 3188696 }, + { url = "https://files.pythonhosted.org/packages/52/cb/fbad941cd466117be58b774a3f1cc9ecc659af625f028b163b1e646a55fe/asyncpg-0.30.0-cp311-cp311-win32.whl", hash = "sha256:574156480df14f64c2d76450a3f3aaaf26105869cad3865041156b38459e935d", size = 567358 }, + { url = "https://files.pythonhosted.org/packages/3c/0a/0a32307cf166d50e1ad120d9b81a33a948a1a5463ebfa5a96cc5606c0863/asyncpg-0.30.0-cp311-cp311-win_amd64.whl", hash = "sha256:3356637f0bd830407b5597317b3cb3571387ae52ddc3bca6233682be88bbbc1f", size = 629375 }, + { url = "https://files.pythonhosted.org/packages/4b/64/9d3e887bb7b01535fdbc45fbd5f0a8447539833b97ee69ecdbb7a79d0cb4/asyncpg-0.30.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:c902a60b52e506d38d7e80e0dd5399f657220f24635fee368117b8b5fce1142e", size = 673162 }, + { url = "https://files.pythonhosted.org/packages/6e/eb/8b236663f06984f212a087b3e849731f917ab80f84450e943900e8ca4052/asyncpg-0.30.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:aca1548e43bbb9f0f627a04666fedaca23db0a31a84136ad1f868cb15deb6e3a", size = 637025 }, + { url = "https://files.pythonhosted.org/packages/cc/57/2dc240bb263d58786cfaa60920779af6e8d32da63ab9ffc09f8312bd7a14/asyncpg-0.30.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c2a2ef565400234a633da0eafdce27e843836256d40705d83ab7ec42074efb3", size = 3496243 }, + { url = "https://files.pythonhosted.org/packages/f4/40/0ae9d061d278b10713ea9021ef6b703ec44698fe32178715a501ac696c6b/asyncpg-0.30.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1292b84ee06ac8a2ad8e51c7475aa309245874b61333d97411aab835c4a2f737", size = 3575059 }, + { url = "https://files.pythonhosted.org/packages/c3/75/d6b895a35a2c6506952247640178e5f768eeb28b2e20299b6a6f1d743ba0/asyncpg-0.30.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:0f5712350388d0cd0615caec629ad53c81e506b1abaaf8d14c93f54b35e3595a", size = 3473596 }, + { url = "https://files.pythonhosted.org/packages/c8/e7/3693392d3e168ab0aebb2d361431375bd22ffc7b4a586a0fc060d519fae7/asyncpg-0.30.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:db9891e2d76e6f425746c5d2da01921e9a16b5a71a1c905b13f30e12a257c4af", size = 3641632 }, + { url = "https://files.pythonhosted.org/packages/32/ea/15670cea95745bba3f0352341db55f506a820b21c619ee66b7d12ea7867d/asyncpg-0.30.0-cp312-cp312-win32.whl", hash = "sha256:68d71a1be3d83d0570049cd1654a9bdfe506e794ecc98ad0873304a9f35e411e", size = 560186 }, + { url = "https://files.pythonhosted.org/packages/7e/6b/fe1fad5cee79ca5f5c27aed7bd95baee529c1bf8a387435c8ba4fe53d5c1/asyncpg-0.30.0-cp312-cp312-win_amd64.whl", hash = "sha256:9a0292c6af5c500523949155ec17b7fe01a00ace33b68a476d6b5059f9630305", size = 621064 }, + { url = "https://files.pythonhosted.org/packages/3a/22/e20602e1218dc07692acf70d5b902be820168d6282e69ef0d3cb920dc36f/asyncpg-0.30.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:05b185ebb8083c8568ea8a40e896d5f7af4b8554b64d7719c0eaa1eb5a5c3a70", size = 670373 }, + { url = "https://files.pythonhosted.org/packages/3d/b3/0cf269a9d647852a95c06eb00b815d0b95a4eb4b55aa2d6ba680971733b9/asyncpg-0.30.0-cp313-cp313-macosx_11_0_arm64.whl", hash = 
"sha256:c47806b1a8cbb0a0db896f4cd34d89942effe353a5035c62734ab13b9f938da3", size = 634745 }, + { url = "https://files.pythonhosted.org/packages/8e/6d/a4f31bf358ce8491d2a31bfe0d7bcf25269e80481e49de4d8616c4295a34/asyncpg-0.30.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9b6fde867a74e8c76c71e2f64f80c64c0f3163e687f1763cfaf21633ec24ec33", size = 3512103 }, + { url = "https://files.pythonhosted.org/packages/96/19/139227a6e67f407b9c386cb594d9628c6c78c9024f26df87c912fabd4368/asyncpg-0.30.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:46973045b567972128a27d40001124fbc821c87a6cade040cfcd4fa8a30bcdc4", size = 3592471 }, + { url = "https://files.pythonhosted.org/packages/67/e4/ab3ca38f628f53f0fd28d3ff20edff1c975dd1cb22482e0061916b4b9a74/asyncpg-0.30.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:9110df111cabc2ed81aad2f35394a00cadf4f2e0635603db6ebbd0fc896f46a4", size = 3496253 }, + { url = "https://files.pythonhosted.org/packages/ef/5f/0bf65511d4eeac3a1f41c54034a492515a707c6edbc642174ae79034d3ba/asyncpg-0.30.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:04ff0785ae7eed6cc138e73fc67b8e51d54ee7a3ce9b63666ce55a0bf095f7ba", size = 3662720 }, + { url = "https://files.pythonhosted.org/packages/e7/31/1513d5a6412b98052c3ed9158d783b1e09d0910f51fbe0e05f56cc370bc4/asyncpg-0.30.0-cp313-cp313-win32.whl", hash = "sha256:ae374585f51c2b444510cdf3595b97ece4f233fde739aa14b50e0d64e8a7a590", size = 560404 }, + { url = "https://files.pythonhosted.org/packages/c8/a4/cec76b3389c4c5ff66301cd100fe88c318563ec8a520e0b2e792b5b84972/asyncpg-0.30.0-cp313-cp313-win_amd64.whl", hash = "sha256:f59b430b8e27557c3fb9869222559f7417ced18688375825f8f12302c34e915e", size = 621623 }, +] + +[[package]] +name = "autoscaled-monitor" +version = "1.0.0" +source = { virtual = "." 
} +dependencies = [ + { name = "aiocache" }, + { name = "ansible", version = "10.7.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, + { name = "ansible", version = "11.3.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, + { name = "arrow" }, + { name = "asyncpg" }, + { name = "black" }, + { name = "boto3" }, + { name = "cloudpickle" }, + { name = "dask", extra = ["distributed"] }, + { name = "mypy-boto3-ec2" }, + { name = "paramiko" }, + { name = "parse" }, + { name = "pydantic", extra = ["email"] }, + { name = "pylint" }, + { name = "python-dotenv" }, + { name = "rich" }, + { name = "sqlalchemy", extra = ["asyncio"] }, + { name = "sshtunnel" }, + { name = "typer" }, + { name = "types-boto3" }, +] + +[package.metadata] +requires-dist = [ + { name = "aiocache" }, + { name = "ansible", specifier = ">=10.7.0" }, + { name = "arrow" }, + { name = "asyncpg" }, + { name = "black" }, + { name = "boto3" }, + { name = "cloudpickle", specifier = "==3.1.0" }, + { name = "dask", extras = ["distributed"], specifier = "==2024.12.0" }, + { name = "mypy-boto3-ec2" }, + { name = "paramiko" }, + { name = "parse" }, + { name = "pydantic", extras = ["email"] }, + { name = "pylint" }, + { name = "python-dotenv" }, + { name = "rich" }, + { name = "sqlalchemy", extras = ["asyncio"] }, + { name = "sshtunnel" }, + { name = "typer" }, + { name = "types-boto3" }, +] + +[[package]] +name = "bcrypt" +version = "4.3.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/bb/5d/6d7433e0f3cd46ce0b43cd65e1db465ea024dbb8216fb2404e919c2ad77b/bcrypt-4.3.0.tar.gz", hash = "sha256:3a3fd2204178b6d2adcf09cb4f6426ffef54762577a7c9b54c159008cb288c18", size = 25697 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/bf/2c/3d44e853d1fe969d229bd58d39ae6902b3d924af0e2b5a60d17d4b809ded/bcrypt-4.3.0-cp313-cp313t-macosx_10_12_universal2.whl", hash = "sha256:f01e060f14b6b57bbb72fc5b4a83ac21c443c9a2ee708e04a10e9192f90a6281", size = 483719 }, + { url = "https://files.pythonhosted.org/packages/a1/e2/58ff6e2a22eca2e2cff5370ae56dba29d70b1ea6fc08ee9115c3ae367795/bcrypt-4.3.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5eeac541cefd0bb887a371ef73c62c3cd78535e4887b310626036a7c0a817bb", size = 272001 }, + { url = "https://files.pythonhosted.org/packages/37/1f/c55ed8dbe994b1d088309e366749633c9eb90d139af3c0a50c102ba68a1a/bcrypt-4.3.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:59e1aa0e2cd871b08ca146ed08445038f42ff75968c7ae50d2fdd7860ade2180", size = 277451 }, + { url = "https://files.pythonhosted.org/packages/d7/1c/794feb2ecf22fe73dcfb697ea7057f632061faceb7dcf0f155f3443b4d79/bcrypt-4.3.0-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:0042b2e342e9ae3d2ed22727c1262f76cc4f345683b5c1715f0250cf4277294f", size = 272792 }, + { url = "https://files.pythonhosted.org/packages/13/b7/0b289506a3f3598c2ae2bdfa0ea66969812ed200264e3f61df77753eee6d/bcrypt-4.3.0-cp313-cp313t-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:74a8d21a09f5e025a9a23e7c0fd2c7fe8e7503e4d356c0a2c1486ba010619f09", size = 289752 }, + { url = "https://files.pythonhosted.org/packages/dc/24/d0fb023788afe9e83cc118895a9f6c57e1044e7e1672f045e46733421fe6/bcrypt-4.3.0-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:0142b2cb84a009f8452c8c5a33ace5e3dfec4159e7735f5afe9a4d50a8ea722d", size = 277762 }, + { url = 
"https://files.pythonhosted.org/packages/e4/38/cde58089492e55ac4ef6c49fea7027600c84fd23f7520c62118c03b4625e/bcrypt-4.3.0-cp313-cp313t-manylinux_2_34_aarch64.whl", hash = "sha256:12fa6ce40cde3f0b899729dbd7d5e8811cb892d31b6f7d0334a1f37748b789fd", size = 272384 }, + { url = "https://files.pythonhosted.org/packages/de/6a/d5026520843490cfc8135d03012a413e4532a400e471e6188b01b2de853f/bcrypt-4.3.0-cp313-cp313t-manylinux_2_34_x86_64.whl", hash = "sha256:5bd3cca1f2aa5dbcf39e2aa13dd094ea181f48959e1071265de49cc2b82525af", size = 277329 }, + { url = "https://files.pythonhosted.org/packages/b3/a3/4fc5255e60486466c389e28c12579d2829b28a527360e9430b4041df4cf9/bcrypt-4.3.0-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:335a420cfd63fc5bc27308e929bee231c15c85cc4c496610ffb17923abf7f231", size = 305241 }, + { url = "https://files.pythonhosted.org/packages/c7/15/2b37bc07d6ce27cc94e5b10fd5058900eb8fb11642300e932c8c82e25c4a/bcrypt-4.3.0-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:0e30e5e67aed0187a1764911af023043b4542e70a7461ad20e837e94d23e1d6c", size = 309617 }, + { url = "https://files.pythonhosted.org/packages/5f/1f/99f65edb09e6c935232ba0430c8c13bb98cb3194b6d636e61d93fe60ac59/bcrypt-4.3.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:3b8d62290ebefd49ee0b3ce7500f5dbdcf13b81402c05f6dafab9a1e1b27212f", size = 335751 }, + { url = "https://files.pythonhosted.org/packages/00/1b/b324030c706711c99769988fcb694b3cb23f247ad39a7823a78e361bdbb8/bcrypt-4.3.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:2ef6630e0ec01376f59a006dc72918b1bf436c3b571b80fa1968d775fa02fe7d", size = 355965 }, + { url = "https://files.pythonhosted.org/packages/aa/dd/20372a0579dd915dfc3b1cd4943b3bca431866fcb1dfdfd7518c3caddea6/bcrypt-4.3.0-cp313-cp313t-win32.whl", hash = "sha256:7a4be4cbf241afee43f1c3969b9103a41b40bcb3a3f467ab19f891d9bc4642e4", size = 155316 }, + { url = "https://files.pythonhosted.org/packages/6d/52/45d969fcff6b5577c2bf17098dc36269b4c02197d551371c023130c0f890/bcrypt-4.3.0-cp313-cp313t-win_amd64.whl", hash = "sha256:5c1949bf259a388863ced887c7861da1df681cb2388645766c89fdfd9004c669", size = 147752 }, + { url = "https://files.pythonhosted.org/packages/11/22/5ada0b9af72b60cbc4c9a399fdde4af0feaa609d27eb0adc61607997a3fa/bcrypt-4.3.0-cp38-abi3-macosx_10_12_universal2.whl", hash = "sha256:f81b0ed2639568bf14749112298f9e4e2b28853dab50a8b357e31798686a036d", size = 498019 }, + { url = "https://files.pythonhosted.org/packages/b8/8c/252a1edc598dc1ce57905be173328eda073083826955ee3c97c7ff5ba584/bcrypt-4.3.0-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:864f8f19adbe13b7de11ba15d85d4a428c7e2f344bac110f667676a0ff84924b", size = 279174 }, + { url = "https://files.pythonhosted.org/packages/29/5b/4547d5c49b85f0337c13929f2ccbe08b7283069eea3550a457914fc078aa/bcrypt-4.3.0-cp38-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3e36506d001e93bffe59754397572f21bb5dc7c83f54454c990c74a468cd589e", size = 283870 }, + { url = "https://files.pythonhosted.org/packages/be/21/7dbaf3fa1745cb63f776bb046e481fbababd7d344c5324eab47f5ca92dd2/bcrypt-4.3.0-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:842d08d75d9fe9fb94b18b071090220697f9f184d4547179b60734846461ed59", size = 279601 }, + { url = "https://files.pythonhosted.org/packages/6d/64/e042fc8262e971347d9230d9abbe70d68b0a549acd8611c83cebd3eaec67/bcrypt-4.3.0-cp38-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:7c03296b85cb87db865d91da79bf63d5609284fc0cab9472fdd8367bbd830753", size = 297660 }, + { url = 
"https://files.pythonhosted.org/packages/50/b8/6294eb84a3fef3b67c69b4470fcdd5326676806bf2519cda79331ab3c3a9/bcrypt-4.3.0-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:62f26585e8b219cdc909b6a0069efc5e4267e25d4a3770a364ac58024f62a761", size = 284083 }, + { url = "https://files.pythonhosted.org/packages/62/e6/baff635a4f2c42e8788fe1b1633911c38551ecca9a749d1052d296329da6/bcrypt-4.3.0-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:beeefe437218a65322fbd0069eb437e7c98137e08f22c4660ac2dc795c31f8bb", size = 279237 }, + { url = "https://files.pythonhosted.org/packages/39/48/46f623f1b0c7dc2e5de0b8af5e6f5ac4cc26408ac33f3d424e5ad8da4a90/bcrypt-4.3.0-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:97eea7408db3a5bcce4a55d13245ab3fa566e23b4c67cd227062bb49e26c585d", size = 283737 }, + { url = "https://files.pythonhosted.org/packages/49/8b/70671c3ce9c0fca4a6cc3cc6ccbaa7e948875a2e62cbd146e04a4011899c/bcrypt-4.3.0-cp38-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:191354ebfe305e84f344c5964c7cd5f924a3bfc5d405c75ad07f232b6dffb49f", size = 312741 }, + { url = "https://files.pythonhosted.org/packages/27/fb/910d3a1caa2d249b6040a5caf9f9866c52114d51523ac2fb47578a27faee/bcrypt-4.3.0-cp38-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:41261d64150858eeb5ff43c753c4b216991e0ae16614a308a15d909503617732", size = 316472 }, + { url = "https://files.pythonhosted.org/packages/dc/cf/7cf3a05b66ce466cfb575dbbda39718d45a609daa78500f57fa9f36fa3c0/bcrypt-4.3.0-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:33752b1ba962ee793fa2b6321404bf20011fe45b9afd2a842139de3011898fef", size = 343606 }, + { url = "https://files.pythonhosted.org/packages/e3/b8/e970ecc6d7e355c0d892b7f733480f4aa8509f99b33e71550242cf0b7e63/bcrypt-4.3.0-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:50e6e80a4bfd23a25f5c05b90167c19030cf9f87930f7cb2eacb99f45d1c3304", size = 362867 }, + { url = "https://files.pythonhosted.org/packages/a9/97/8d3118efd8354c555a3422d544163f40d9f236be5b96c714086463f11699/bcrypt-4.3.0-cp38-abi3-win32.whl", hash = "sha256:67a561c4d9fb9465ec866177e7aebcad08fe23aaf6fbd692a6fab69088abfc51", size = 160589 }, + { url = "https://files.pythonhosted.org/packages/29/07/416f0b99f7f3997c69815365babbc2e8754181a4b1899d921b3c7d5b6f12/bcrypt-4.3.0-cp38-abi3-win_amd64.whl", hash = "sha256:584027857bc2843772114717a7490a37f68da563b3620f78a849bcb54dc11e62", size = 152794 }, + { url = "https://files.pythonhosted.org/packages/6e/c1/3fa0e9e4e0bfd3fd77eb8b52ec198fd6e1fd7e9402052e43f23483f956dd/bcrypt-4.3.0-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:0d3efb1157edebfd9128e4e46e2ac1a64e0c1fe46fb023158a407c7892b0f8c3", size = 498969 }, + { url = "https://files.pythonhosted.org/packages/ce/d4/755ce19b6743394787fbd7dff6bf271b27ee9b5912a97242e3caf125885b/bcrypt-4.3.0-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:08bacc884fd302b611226c01014eca277d48f0a05187666bca23aac0dad6fe24", size = 279158 }, + { url = "https://files.pythonhosted.org/packages/9b/5d/805ef1a749c965c46b28285dfb5cd272a7ed9fa971f970435a5133250182/bcrypt-4.3.0-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6746e6fec103fcd509b96bacdfdaa2fbde9a553245dbada284435173a6f1aef", size = 284285 }, + { url = "https://files.pythonhosted.org/packages/ab/2b/698580547a4a4988e415721b71eb45e80c879f0fb04a62da131f45987b96/bcrypt-4.3.0-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:afe327968aaf13fc143a56a3360cb27d4ad0345e34da12c7290f1b00b8fe9a8b", size = 279583 }, + { url = 
"https://files.pythonhosted.org/packages/f2/87/62e1e426418204db520f955ffd06f1efd389feca893dad7095bf35612eec/bcrypt-4.3.0-cp39-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:d9af79d322e735b1fc33404b5765108ae0ff232d4b54666d46730f8ac1a43676", size = 297896 }, + { url = "https://files.pythonhosted.org/packages/cb/c6/8fedca4c2ada1b6e889c52d2943b2f968d3427e5d65f595620ec4c06fa2f/bcrypt-4.3.0-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:f1e3ffa1365e8702dc48c8b360fef8d7afeca482809c5e45e653af82ccd088c1", size = 284492 }, + { url = "https://files.pythonhosted.org/packages/4d/4d/c43332dcaaddb7710a8ff5269fcccba97ed3c85987ddaa808db084267b9a/bcrypt-4.3.0-cp39-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:3004df1b323d10021fda07a813fd33e0fd57bef0e9a480bb143877f6cba996fe", size = 279213 }, + { url = "https://files.pythonhosted.org/packages/dc/7f/1e36379e169a7df3a14a1c160a49b7b918600a6008de43ff20d479e6f4b5/bcrypt-4.3.0-cp39-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:531457e5c839d8caea9b589a1bcfe3756b0547d7814e9ce3d437f17da75c32b0", size = 284162 }, + { url = "https://files.pythonhosted.org/packages/1c/0a/644b2731194b0d7646f3210dc4d80c7fee3ecb3a1f791a6e0ae6bb8684e3/bcrypt-4.3.0-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:17a854d9a7a476a89dcef6c8bd119ad23e0f82557afbd2c442777a16408e614f", size = 312856 }, + { url = "https://files.pythonhosted.org/packages/dc/62/2a871837c0bb6ab0c9a88bf54de0fc021a6a08832d4ea313ed92a669d437/bcrypt-4.3.0-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:6fb1fd3ab08c0cbc6826a2e0447610c6f09e983a281b919ed721ad32236b8b23", size = 316726 }, + { url = "https://files.pythonhosted.org/packages/0c/a1/9898ea3faac0b156d457fd73a3cb9c2855c6fd063e44b8522925cdd8ce46/bcrypt-4.3.0-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:e965a9c1e9a393b8005031ff52583cedc15b7884fce7deb8b0346388837d6cfe", size = 343664 }, + { url = "https://files.pythonhosted.org/packages/40/f2/71b4ed65ce38982ecdda0ff20c3ad1b15e71949c78b2c053df53629ce940/bcrypt-4.3.0-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:79e70b8342a33b52b55d93b3a59223a844962bef479f6a0ea318ebbcadf71505", size = 363128 }, + { url = "https://files.pythonhosted.org/packages/11/99/12f6a58eca6dea4be992d6c681b7ec9410a1d9f5cf368c61437e31daa879/bcrypt-4.3.0-cp39-abi3-win32.whl", hash = "sha256:b4d4e57f0a63fd0b358eb765063ff661328f69a04494427265950c71b992a39a", size = 160598 }, + { url = "https://files.pythonhosted.org/packages/a9/cf/45fb5261ece3e6b9817d3d82b2f343a505fd58674a92577923bc500bd1aa/bcrypt-4.3.0-cp39-abi3-win_amd64.whl", hash = "sha256:e53e074b120f2877a35cc6c736b8eb161377caae8925c17688bd46ba56daaa5b", size = 152799 }, + { url = "https://files.pythonhosted.org/packages/55/2d/0c7e5ab0524bf1a443e34cdd3926ec6f5879889b2f3c32b2f5074e99ed53/bcrypt-4.3.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c950d682f0952bafcceaf709761da0a32a942272fad381081b51096ffa46cea1", size = 275367 }, + { url = "https://files.pythonhosted.org/packages/10/4f/f77509f08bdff8806ecc4dc472b6e187c946c730565a7470db772d25df70/bcrypt-4.3.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:107d53b5c67e0bbc3f03ebf5b030e0403d24dda980f8e244795335ba7b4a027d", size = 280644 }, + { url = "https://files.pythonhosted.org/packages/35/18/7d9dc16a3a4d530d0a9b845160e9e5d8eb4f00483e05d44bb4116a1861da/bcrypt-4.3.0-pp310-pypy310_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:b693dbb82b3c27a1604a3dff5bfc5418a7e6a781bb795288141e5f80cf3a3492", size = 274881 }, + { url = 
"https://files.pythonhosted.org/packages/df/c4/ae6921088adf1e37f2a3a6a688e72e7d9e45fdd3ae5e0bc931870c1ebbda/bcrypt-4.3.0-pp310-pypy310_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:b6354d3760fcd31994a14c89659dee887f1351a06e5dac3c1142307172a79f90", size = 280203 }, + { url = "https://files.pythonhosted.org/packages/4c/b1/1289e21d710496b88340369137cc4c5f6ee036401190ea116a7b4ae6d32a/bcrypt-4.3.0-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:a839320bf27d474e52ef8cb16449bb2ce0ba03ca9f44daba6d93fa1d8828e48a", size = 275103 }, + { url = "https://files.pythonhosted.org/packages/94/41/19be9fe17e4ffc5d10b7b67f10e459fc4eee6ffe9056a88de511920cfd8d/bcrypt-4.3.0-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:bdc6a24e754a555d7316fa4774e64c6c3997d27ed2d1964d55920c7c227bc4ce", size = 280513 }, + { url = "https://files.pythonhosted.org/packages/aa/73/05687a9ef89edebdd8ad7474c16d8af685eb4591c3c38300bb6aad4f0076/bcrypt-4.3.0-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:55a935b8e9a1d2def0626c4269db3fcd26728cbff1e84f0341465c31c4ee56d8", size = 274685 }, + { url = "https://files.pythonhosted.org/packages/63/13/47bba97924ebe86a62ef83dc75b7c8a881d53c535f83e2c54c4bd701e05c/bcrypt-4.3.0-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:57967b7a28d855313a963aaea51bf6df89f833db4320da458e5b3c5ab6d4c938", size = 280110 }, +] + +[[package]] +name = "black" +version = "25.1.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "click" }, + { name = "mypy-extensions" }, + { name = "packaging" }, + { name = "pathspec" }, + { name = "platformdirs" }, + { name = "tomli", marker = "python_full_version < '3.11'" }, + { name = "typing-extensions", marker = "python_full_version < '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/94/49/26a7b0f3f35da4b5a65f081943b7bcd22d7002f5f0fb8098ec1ff21cb6ef/black-25.1.0.tar.gz", hash = "sha256:33496d5cd1222ad73391352b4ae8da15253c5de89b93a80b3e2c8d9a19ec2666", size = 649449 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/4d/3b/4ba3f93ac8d90410423fdd31d7541ada9bcee1df32fb90d26de41ed40e1d/black-25.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:759e7ec1e050a15f89b770cefbf91ebee8917aac5c20483bc2d80a6c3a04df32", size = 1629419 }, + { url = "https://files.pythonhosted.org/packages/b4/02/0bde0485146a8a5e694daed47561785e8b77a0466ccc1f3e485d5ef2925e/black-25.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0e519ecf93120f34243e6b0054db49c00a35f84f195d5bce7e9f5cfc578fc2da", size = 1461080 }, + { url = "https://files.pythonhosted.org/packages/52/0e/abdf75183c830eaca7589144ff96d49bce73d7ec6ad12ef62185cc0f79a2/black-25.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:055e59b198df7ac0b7efca5ad7ff2516bca343276c466be72eb04a3bcc1f82d7", size = 1766886 }, + { url = "https://files.pythonhosted.org/packages/dc/a6/97d8bb65b1d8a41f8a6736222ba0a334db7b7b77b8023ab4568288f23973/black-25.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:db8ea9917d6f8fc62abd90d944920d95e73c83a5ee3383493e35d271aca872e9", size = 1419404 }, + { url = "https://files.pythonhosted.org/packages/7e/4f/87f596aca05c3ce5b94b8663dbfe242a12843caaa82dd3f85f1ffdc3f177/black-25.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a39337598244de4bae26475f77dda852ea00a93bd4c728e09eacd827ec929df0", size = 1614372 }, + { url = 
"https://files.pythonhosted.org/packages/e7/d0/2c34c36190b741c59c901e56ab7f6e54dad8df05a6272a9747ecef7c6036/black-25.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:96c1c7cd856bba8e20094e36e0f948718dc688dba4a9d78c3adde52b9e6c2299", size = 1442865 }, + { url = "https://files.pythonhosted.org/packages/21/d4/7518c72262468430ead45cf22bd86c883a6448b9eb43672765d69a8f1248/black-25.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bce2e264d59c91e52d8000d507eb20a9aca4a778731a08cfff7e5ac4a4bb7096", size = 1749699 }, + { url = "https://files.pythonhosted.org/packages/58/db/4f5beb989b547f79096e035c4981ceb36ac2b552d0ac5f2620e941501c99/black-25.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:172b1dbff09f86ce6f4eb8edf9dede08b1fce58ba194c87d7a4f1a5aa2f5b3c2", size = 1428028 }, + { url = "https://files.pythonhosted.org/packages/83/71/3fe4741df7adf015ad8dfa082dd36c94ca86bb21f25608eb247b4afb15b2/black-25.1.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4b60580e829091e6f9238c848ea6750efed72140b91b048770b64e74fe04908b", size = 1650988 }, + { url = "https://files.pythonhosted.org/packages/13/f3/89aac8a83d73937ccd39bbe8fc6ac8860c11cfa0af5b1c96d081facac844/black-25.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1e2978f6df243b155ef5fa7e558a43037c3079093ed5d10fd84c43900f2d8ecc", size = 1453985 }, + { url = "https://files.pythonhosted.org/packages/6f/22/b99efca33f1f3a1d2552c714b1e1b5ae92efac6c43e790ad539a163d1754/black-25.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3b48735872ec535027d979e8dcb20bf4f70b5ac75a8ea99f127c106a7d7aba9f", size = 1783816 }, + { url = "https://files.pythonhosted.org/packages/18/7e/a27c3ad3822b6f2e0e00d63d58ff6299a99a5b3aee69fa77cd4b0076b261/black-25.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:ea0213189960bda9cf99be5b8c8ce66bb054af5e9e861249cd23471bd7b0b3ba", size = 1440860 }, + { url = "https://files.pythonhosted.org/packages/98/87/0edf98916640efa5d0696e1abb0a8357b52e69e82322628f25bf14d263d1/black-25.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8f0b18a02996a836cc9c9c78e5babec10930862827b1b724ddfe98ccf2f2fe4f", size = 1650673 }, + { url = "https://files.pythonhosted.org/packages/52/e5/f7bf17207cf87fa6e9b676576749c6b6ed0d70f179a3d812c997870291c3/black-25.1.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:afebb7098bfbc70037a053b91ae8437c3857482d3a690fefc03e9ff7aa9a5fd3", size = 1453190 }, + { url = "https://files.pythonhosted.org/packages/e3/ee/adda3d46d4a9120772fae6de454c8495603c37c4c3b9c60f25b1ab6401fe/black-25.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:030b9759066a4ee5e5aca28c3c77f9c64789cdd4de8ac1df642c40b708be6171", size = 1782926 }, + { url = "https://files.pythonhosted.org/packages/cc/64/94eb5f45dcb997d2082f097a3944cfc7fe87e071907f677e80788a2d7b7a/black-25.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:a22f402b410566e2d1c950708c77ebf5ebd5d0d88a6a2e87c86d9fb48afa0d18", size = 1442613 }, + { url = "https://files.pythonhosted.org/packages/09/71/54e999902aed72baf26bca0d50781b01838251a462612966e9fc4891eadd/black-25.1.0-py3-none-any.whl", hash = "sha256:95e8176dae143ba9097f351d174fdaf0ccd29efb414b362ae3fd72bf0f710717", size = 207646 }, +] + +[[package]] +name = "boto3" +version = "1.37.9" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "botocore" }, + { name = "jmespath" }, + { name = "s3transfer" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/32/f7/b870fb8d2ca96a996db97c9d30d1eb087b341cec1004722e99672a79800d/boto3-1.37.9.tar.gz", hash = "sha256:51b76da93d7c2a3dff6155ee4aa25455940e7ade08292d22aeeed08b9e0dbf0b", size = 111409 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d7/75/625b7e1cc2a8b470760273082e0c4d36c8ac8250ad2ff9df54528d1379b4/boto3-1.37.9-py3-none-any.whl", hash = "sha256:cfff6e9cf4b987480fbbe85873f3e609aeea1906631c7148e5b7e3c778aebd40", size = 139544 }, +] + +[[package]] +name = "botocore" +version = "1.37.9" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "jmespath" }, + { name = "python-dateutil" }, + { name = "urllib3" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/3c/9e/97ee17ec39395c67edfa694ae9e87177fdb7433e65be91bc84fef983dd86/botocore-1.37.9.tar.gz", hash = "sha256:2fdafbb9c44196cd371f4890aedf9f54352348fbae624a3880862d35724f0956", size = 13635008 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d1/7f/5463336d2bf8eb5131c063e37599fa2f504abaa26b197aea2d2faea78a4c/botocore-1.37.9-py3-none-any.whl", hash = "sha256:bf0ab085ae85a4a2fa1733321069c1039745fa65ca9f335a91b8712fd6745d5f", size = 13403070 }, +] + +[[package]] +name = "botocore-stubs" +version = "1.37.9" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "types-awscrt" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/f8/de/f819c3aad257f499cf8848ee492048285c5d22cac69c89a58a79065ade7a/botocore_stubs-1.37.9.tar.gz", hash = "sha256:cba0caaf9d5d15007c8a85cf1978d64ef8d69b62c33f4b3345e841dc489c9f96", size = 42141 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e3/ba/0c01baf5246d03ad5e46bc8dc294fdc2692fa8fa17459277d8ecd80ad461/botocore_stubs-1.37.9-py3-none-any.whl", hash = "sha256:429974afd8ecb881636da99ec3d6c779d9282ffb3174f021c71694c0afc1130c", size = 65371 }, +] + +[[package]] +name = "cffi" +version = "1.17.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pycparser" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/fc/97/c783634659c2920c3fc70419e3af40972dbaf758daa229a7d6ea6135c90d/cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824", size = 516621 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/90/07/f44ca684db4e4f08a3fdc6eeb9a0d15dc6883efc7b8c90357fdbf74e186c/cffi-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14", size = 182191 }, + { url = "https://files.pythonhosted.org/packages/08/fd/cc2fedbd887223f9f5d170c96e57cbf655df9831a6546c1727ae13fa977a/cffi-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67", size = 178592 }, + { url = "https://files.pythonhosted.org/packages/de/cc/4635c320081c78d6ffc2cab0a76025b691a91204f4aa317d568ff9280a2d/cffi-1.17.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:edae79245293e15384b51f88b00613ba9f7198016a5948b5dddf4917d4d26382", size = 426024 }, + { url = "https://files.pythonhosted.org/packages/b6/7b/3b2b250f3aab91abe5f8a51ada1b717935fdaec53f790ad4100fe2ec64d1/cffi-1.17.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45398b671ac6d70e67da8e4224a065cec6a93541bb7aebe1b198a61b58c7b702", size = 448188 }, + { url = 
"https://files.pythonhosted.org/packages/d3/48/1b9283ebbf0ec065148d8de05d647a986c5f22586b18120020452fff8f5d/cffi-1.17.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad9413ccdeda48c5afdae7e4fa2192157e991ff761e7ab8fdd8926f40b160cc3", size = 455571 }, + { url = "https://files.pythonhosted.org/packages/40/87/3b8452525437b40f39ca7ff70276679772ee7e8b394934ff60e63b7b090c/cffi-1.17.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5da5719280082ac6bd9aa7becb3938dc9f9cbd57fac7d2871717b1feb0902ab6", size = 436687 }, + { url = "https://files.pythonhosted.org/packages/8d/fb/4da72871d177d63649ac449aec2e8a29efe0274035880c7af59101ca2232/cffi-1.17.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bb1a08b8008b281856e5971307cc386a8e9c5b625ac297e853d36da6efe9c17", size = 446211 }, + { url = "https://files.pythonhosted.org/packages/ab/a0/62f00bcb411332106c02b663b26f3545a9ef136f80d5df746c05878f8c4b/cffi-1.17.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:045d61c734659cc045141be4bae381a41d89b741f795af1dd018bfb532fd0df8", size = 461325 }, + { url = "https://files.pythonhosted.org/packages/36/83/76127035ed2e7e27b0787604d99da630ac3123bfb02d8e80c633f218a11d/cffi-1.17.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6883e737d7d9e4899a8a695e00ec36bd4e5e4f18fabe0aca0efe0a4b44cdb13e", size = 438784 }, + { url = "https://files.pythonhosted.org/packages/21/81/a6cd025db2f08ac88b901b745c163d884641909641f9b826e8cb87645942/cffi-1.17.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6b8b4a92e1c65048ff98cfe1f735ef8f1ceb72e3d5f0c25fdb12087a23da22be", size = 461564 }, + { url = "https://files.pythonhosted.org/packages/f8/fe/4d41c2f200c4a457933dbd98d3cf4e911870877bd94d9656cc0fcb390681/cffi-1.17.1-cp310-cp310-win32.whl", hash = "sha256:c9c3d058ebabb74db66e431095118094d06abf53284d9c81f27300d0e0d8bc7c", size = 171804 }, + { url = "https://files.pythonhosted.org/packages/d1/b6/0b0f5ab93b0df4acc49cae758c81fe4e5ef26c3ae2e10cc69249dfd8b3ab/cffi-1.17.1-cp310-cp310-win_amd64.whl", hash = "sha256:0f048dcf80db46f0098ccac01132761580d28e28bc0f78ae0d58048063317e15", size = 181299 }, + { url = "https://files.pythonhosted.org/packages/6b/f4/927e3a8899e52a27fa57a48607ff7dc91a9ebe97399b357b85a0c7892e00/cffi-1.17.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a45e3c6913c5b87b3ff120dcdc03f6131fa0065027d0ed7ee6190736a74cd401", size = 182264 }, + { url = "https://files.pythonhosted.org/packages/6c/f5/6c3a8efe5f503175aaddcbea6ad0d2c96dad6f5abb205750d1b3df44ef29/cffi-1.17.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:30c5e0cb5ae493c04c8b42916e52ca38079f1b235c2f8ae5f4527b963c401caf", size = 178651 }, + { url = "https://files.pythonhosted.org/packages/94/dd/a3f0118e688d1b1a57553da23b16bdade96d2f9bcda4d32e7d2838047ff7/cffi-1.17.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f75c7ab1f9e4aca5414ed4d8e5c0e303a34f4421f8a0d47a4d019ceff0ab6af4", size = 445259 }, + { url = "https://files.pythonhosted.org/packages/2e/ea/70ce63780f096e16ce8588efe039d3c4f91deb1dc01e9c73a287939c79a6/cffi-1.17.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1ed2dd2972641495a3ec98445e09766f077aee98a1c896dcb4ad0d303628e41", size = 469200 }, + { url = "https://files.pythonhosted.org/packages/1c/a0/a4fa9f4f781bda074c3ddd57a572b060fa0df7655d2a4247bbe277200146/cffi-1.17.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:46bf43160c1a35f7ec506d254e5c890f3c03648a4dbac12d624e4490a7046cd1", size = 477235 }, + { url = "https://files.pythonhosted.org/packages/62/12/ce8710b5b8affbcdd5c6e367217c242524ad17a02fe5beec3ee339f69f85/cffi-1.17.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a24ed04c8ffd54b0729c07cee15a81d964e6fee0e3d4d342a27b020d22959dc6", size = 459721 }, + { url = "https://files.pythonhosted.org/packages/ff/6b/d45873c5e0242196f042d555526f92aa9e0c32355a1be1ff8c27f077fd37/cffi-1.17.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:610faea79c43e44c71e1ec53a554553fa22321b65fae24889706c0a84d4ad86d", size = 467242 }, + { url = "https://files.pythonhosted.org/packages/1a/52/d9a0e523a572fbccf2955f5abe883cfa8bcc570d7faeee06336fbd50c9fc/cffi-1.17.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a9b15d491f3ad5d692e11f6b71f7857e7835eb677955c00cc0aefcd0669adaf6", size = 477999 }, + { url = "https://files.pythonhosted.org/packages/44/74/f2a2460684a1a2d00ca799ad880d54652841a780c4c97b87754f660c7603/cffi-1.17.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:de2ea4b5833625383e464549fec1bc395c1bdeeb5f25c4a3a82b5a8c756ec22f", size = 454242 }, + { url = "https://files.pythonhosted.org/packages/f8/4a/34599cac7dfcd888ff54e801afe06a19c17787dfd94495ab0c8d35fe99fb/cffi-1.17.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fc48c783f9c87e60831201f2cce7f3b2e4846bf4d8728eabe54d60700b318a0b", size = 478604 }, + { url = "https://files.pythonhosted.org/packages/34/33/e1b8a1ba29025adbdcda5fb3a36f94c03d771c1b7b12f726ff7fef2ebe36/cffi-1.17.1-cp311-cp311-win32.whl", hash = "sha256:85a950a4ac9c359340d5963966e3e0a94a676bd6245a4b55bc43949eee26a655", size = 171727 }, + { url = "https://files.pythonhosted.org/packages/3d/97/50228be003bb2802627d28ec0627837ac0bf35c90cf769812056f235b2d1/cffi-1.17.1-cp311-cp311-win_amd64.whl", hash = "sha256:caaf0640ef5f5517f49bc275eca1406b0ffa6aa184892812030f04c2abf589a0", size = 181400 }, + { url = "https://files.pythonhosted.org/packages/5a/84/e94227139ee5fb4d600a7a4927f322e1d4aea6fdc50bd3fca8493caba23f/cffi-1.17.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:805b4371bf7197c329fcb3ead37e710d1bca9da5d583f5073b799d5c5bd1eee4", size = 183178 }, + { url = "https://files.pythonhosted.org/packages/da/ee/fb72c2b48656111c4ef27f0f91da355e130a923473bf5ee75c5643d00cca/cffi-1.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:733e99bc2df47476e3848417c5a4540522f234dfd4ef3ab7fafdf555b082ec0c", size = 178840 }, + { url = "https://files.pythonhosted.org/packages/cc/b6/db007700f67d151abadf508cbfd6a1884f57eab90b1bb985c4c8c02b0f28/cffi-1.17.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36", size = 454803 }, + { url = "https://files.pythonhosted.org/packages/1a/df/f8d151540d8c200eb1c6fba8cd0dfd40904f1b0682ea705c36e6c2e97ab3/cffi-1.17.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5", size = 478850 }, + { url = "https://files.pythonhosted.org/packages/28/c0/b31116332a547fd2677ae5b78a2ef662dfc8023d67f41b2a83f7c2aa78b1/cffi-1.17.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d63afe322132c194cf832bfec0dc69a99fb9bb6bbd550f161a49e9e855cc78ff", size = 485729 }, + { url = 
"https://files.pythonhosted.org/packages/91/2b/9a1ddfa5c7f13cab007a2c9cc295b70fbbda7cb10a286aa6810338e60ea1/cffi-1.17.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f79fc4fc25f1c8698ff97788206bb3c2598949bfe0fef03d299eb1b5356ada99", size = 471256 }, + { url = "https://files.pythonhosted.org/packages/b2/d5/da47df7004cb17e4955df6a43d14b3b4ae77737dff8bf7f8f333196717bf/cffi-1.17.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b62ce867176a75d03a665bad002af8e6d54644fad99a3c70905c543130e39d93", size = 479424 }, + { url = "https://files.pythonhosted.org/packages/0b/ac/2a28bcf513e93a219c8a4e8e125534f4f6db03e3179ba1c45e949b76212c/cffi-1.17.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:386c8bf53c502fff58903061338ce4f4950cbdcb23e2902d86c0f722b786bbe3", size = 484568 }, + { url = "https://files.pythonhosted.org/packages/d4/38/ca8a4f639065f14ae0f1d9751e70447a261f1a30fa7547a828ae08142465/cffi-1.17.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8", size = 488736 }, + { url = "https://files.pythonhosted.org/packages/86/c5/28b2d6f799ec0bdecf44dced2ec5ed43e0eb63097b0f58c293583b406582/cffi-1.17.1-cp312-cp312-win32.whl", hash = "sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65", size = 172448 }, + { url = "https://files.pythonhosted.org/packages/50/b9/db34c4755a7bd1cb2d1603ac3863f22bcecbd1ba29e5ee841a4bc510b294/cffi-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903", size = 181976 }, + { url = "https://files.pythonhosted.org/packages/8d/f8/dd6c246b148639254dad4d6803eb6a54e8c85c6e11ec9df2cffa87571dbe/cffi-1.17.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e", size = 182989 }, + { url = "https://files.pythonhosted.org/packages/8b/f1/672d303ddf17c24fc83afd712316fda78dc6fce1cd53011b839483e1ecc8/cffi-1.17.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2", size = 178802 }, + { url = "https://files.pythonhosted.org/packages/0e/2d/eab2e858a91fdff70533cab61dcff4a1f55ec60425832ddfdc9cd36bc8af/cffi-1.17.1-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3", size = 454792 }, + { url = "https://files.pythonhosted.org/packages/75/b2/fbaec7c4455c604e29388d55599b99ebcc250a60050610fadde58932b7ee/cffi-1.17.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683", size = 478893 }, + { url = "https://files.pythonhosted.org/packages/4f/b7/6e4a2162178bf1935c336d4da8a9352cccab4d3a5d7914065490f08c0690/cffi-1.17.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5", size = 485810 }, + { url = "https://files.pythonhosted.org/packages/c7/8a/1d0e4a9c26e54746dc08c2c6c037889124d4f59dffd853a659fa545f1b40/cffi-1.17.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4", size = 471200 }, + { url = "https://files.pythonhosted.org/packages/26/9f/1aab65a6c0db35f43c4d1b4f580e8df53914310afc10ae0397d29d697af4/cffi-1.17.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd", size = 479447 }, + { url = "https://files.pythonhosted.org/packages/5f/e4/fb8b3dd8dc0e98edf1135ff067ae070bb32ef9d509d6cb0f538cd6f7483f/cffi-1.17.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed", size = 484358 }, + { url = "https://files.pythonhosted.org/packages/f1/47/d7145bf2dc04684935d57d67dff9d6d795b2ba2796806bb109864be3a151/cffi-1.17.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9", size = 488469 }, + { url = "https://files.pythonhosted.org/packages/bf/ee/f94057fa6426481d663b88637a9a10e859e492c73d0384514a17d78ee205/cffi-1.17.1-cp313-cp313-win32.whl", hash = "sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d", size = 172475 }, + { url = "https://files.pythonhosted.org/packages/7c/fc/6a8cb64e5f0324877d503c854da15d76c1e50eb722e320b15345c4d0c6de/cffi-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a", size = 182009 }, +] + +[[package]] +name = "click" +version = "8.1.8" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b9/2e/0090cbf739cee7d23781ad4b89a9894a41538e4fcf4c31dcdd705b78eb8b/click-8.1.8.tar.gz", hash = "sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a", size = 226593 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7e/d4/7ebdbd03970677812aac39c869717059dbb71a4cfc033ca6e5221787892c/click-8.1.8-py3-none-any.whl", hash = "sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2", size = 98188 }, +] + +[[package]] +name = "cloudpickle" +version = "3.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/97/c7/f746cadd08c4c08129215cf1b984b632f9e579fc781301e63da9e85c76c1/cloudpickle-3.1.0.tar.gz", hash = "sha256:81a929b6e3c7335c863c771d673d105f02efdb89dfaba0c90495d1c64796601b", size = 66155 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/48/41/e1d85ca3cab0b674e277c8c4f678cf66a91cd2cecf93df94353a606fe0db/cloudpickle-3.1.0-py3-none-any.whl", hash = "sha256:fe11acda67f61aaaec473e3afe030feb131d78a43461b718185363384f1ba12e", size = 22021 }, +] + +[[package]] +name = "colorama" +version = "0.4.6" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335 }, +] + +[[package]] +name = "cryptography" +version = "44.0.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cffi", marker = "platform_python_implementation != 'PyPy'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/cd/25/4ce80c78963834b8a9fd1cc1266be5ed8d1840785c0f2e1b73b8d128d505/cryptography-44.0.2.tar.gz", hash = "sha256:c63454aa261a0cf0c5b4718349629793e9e634993538db841165b3df74f37ec0", size = 710807 } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/92/ef/83e632cfa801b221570c5f58c0369db6fa6cef7d9ff859feab1aae1a8a0f/cryptography-44.0.2-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:efcfe97d1b3c79e486554efddeb8f6f53a4cdd4cf6086642784fa31fc384e1d7", size = 6676361 }, + { url = "https://files.pythonhosted.org/packages/30/ec/7ea7c1e4c8fc8329506b46c6c4a52e2f20318425d48e0fe597977c71dbce/cryptography-44.0.2-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:29ecec49f3ba3f3849362854b7253a9f59799e3763b0c9d0826259a88efa02f1", size = 3952350 }, + { url = "https://files.pythonhosted.org/packages/27/61/72e3afdb3c5ac510330feba4fc1faa0fe62e070592d6ad00c40bb69165e5/cryptography-44.0.2-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc821e161ae88bfe8088d11bb39caf2916562e0a2dc7b6d56714a48b784ef0bb", size = 4166572 }, + { url = "https://files.pythonhosted.org/packages/26/e4/ba680f0b35ed4a07d87f9e98f3ebccb05091f3bf6b5a478b943253b3bbd5/cryptography-44.0.2-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:3c00b6b757b32ce0f62c574b78b939afab9eecaf597c4d624caca4f9e71e7843", size = 3958124 }, + { url = "https://files.pythonhosted.org/packages/9c/e8/44ae3e68c8b6d1cbc59040288056df2ad7f7f03bbcaca6b503c737ab8e73/cryptography-44.0.2-cp37-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:7bdcd82189759aba3816d1f729ce42ffded1ac304c151d0a8e89b9996ab863d5", size = 3678122 }, + { url = "https://files.pythonhosted.org/packages/27/7b/664ea5e0d1eab511a10e480baf1c5d3e681c7d91718f60e149cec09edf01/cryptography-44.0.2-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:4973da6ca3db4405c54cd0b26d328be54c7747e89e284fcff166132eb7bccc9c", size = 4191831 }, + { url = "https://files.pythonhosted.org/packages/2a/07/79554a9c40eb11345e1861f46f845fa71c9e25bf66d132e123d9feb8e7f9/cryptography-44.0.2-cp37-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:4e389622b6927d8133f314949a9812972711a111d577a5d1f4bee5e58736b80a", size = 3960583 }, + { url = "https://files.pythonhosted.org/packages/bb/6d/858e356a49a4f0b591bd6789d821427de18432212e137290b6d8a817e9bf/cryptography-44.0.2-cp37-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:f514ef4cd14bb6fb484b4a60203e912cfcb64f2ab139e88c2274511514bf7308", size = 4191753 }, + { url = "https://files.pythonhosted.org/packages/b2/80/62df41ba4916067fa6b125aa8c14d7e9181773f0d5d0bd4dcef580d8b7c6/cryptography-44.0.2-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:1bc312dfb7a6e5d66082c87c34c8a62176e684b6fe3d90fcfe1568de675e6688", size = 4079550 }, + { url = "https://files.pythonhosted.org/packages/f3/cd/2558cc08f7b1bb40683f99ff4327f8dcfc7de3affc669e9065e14824511b/cryptography-44.0.2-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:3b721b8b4d948b218c88cb8c45a01793483821e709afe5f622861fc6182b20a7", size = 4298367 }, + { url = "https://files.pythonhosted.org/packages/71/59/94ccc74788945bc3bd4cf355d19867e8057ff5fdbcac781b1ff95b700fb1/cryptography-44.0.2-cp37-abi3-win32.whl", hash = "sha256:51e4de3af4ec3899d6d178a8c005226491c27c4ba84101bfb59c901e10ca9f79", size = 2772843 }, + { url = "https://files.pythonhosted.org/packages/ca/2c/0d0bbaf61ba05acb32f0841853cfa33ebb7a9ab3d9ed8bb004bd39f2da6a/cryptography-44.0.2-cp37-abi3-win_amd64.whl", hash = "sha256:c505d61b6176aaf982c5717ce04e87da5abc9a36a5b39ac03905c4aafe8de7aa", size = 3209057 }, + { url = "https://files.pythonhosted.org/packages/9e/be/7a26142e6d0f7683d8a382dd963745e65db895a79a280a30525ec92be890/cryptography-44.0.2-cp39-abi3-macosx_10_9_universal2.whl", hash = 
"sha256:8e0ddd63e6bf1161800592c71ac794d3fb8001f2caebe0966e77c5234fa9efc3", size = 6677789 }, + { url = "https://files.pythonhosted.org/packages/06/88/638865be7198a84a7713950b1db7343391c6066a20e614f8fa286eb178ed/cryptography-44.0.2-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:81276f0ea79a208d961c433a947029e1a15948966658cf6710bbabb60fcc2639", size = 3951919 }, + { url = "https://files.pythonhosted.org/packages/d7/fc/99fe639bcdf58561dfad1faa8a7369d1dc13f20acd78371bb97a01613585/cryptography-44.0.2-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9a1e657c0f4ea2a23304ee3f964db058c9e9e635cc7019c4aa21c330755ef6fd", size = 4167812 }, + { url = "https://files.pythonhosted.org/packages/53/7b/aafe60210ec93d5d7f552592a28192e51d3c6b6be449e7fd0a91399b5d07/cryptography-44.0.2-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:6210c05941994290f3f7f175a4a57dbbb2afd9273657614c506d5976db061181", size = 3958571 }, + { url = "https://files.pythonhosted.org/packages/16/32/051f7ce79ad5a6ef5e26a92b37f172ee2d6e1cce09931646eef8de1e9827/cryptography-44.0.2-cp39-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:d1c3572526997b36f245a96a2b1713bf79ce99b271bbcf084beb6b9b075f29ea", size = 3679832 }, + { url = "https://files.pythonhosted.org/packages/78/2b/999b2a1e1ba2206f2d3bca267d68f350beb2b048a41ea827e08ce7260098/cryptography-44.0.2-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:b042d2a275c8cee83a4b7ae30c45a15e6a4baa65a179a0ec2d78ebb90e4f6699", size = 4193719 }, + { url = "https://files.pythonhosted.org/packages/72/97/430e56e39a1356e8e8f10f723211a0e256e11895ef1a135f30d7d40f2540/cryptography-44.0.2-cp39-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:d03806036b4f89e3b13b6218fefea8d5312e450935b1a2d55f0524e2ed7c59d9", size = 3960852 }, + { url = "https://files.pythonhosted.org/packages/89/33/c1cf182c152e1d262cac56850939530c05ca6c8d149aa0dcee490b417e99/cryptography-44.0.2-cp39-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:c7362add18b416b69d58c910caa217f980c5ef39b23a38a0880dfd87bdf8cd23", size = 4193906 }, + { url = "https://files.pythonhosted.org/packages/e1/99/87cf26d4f125380dc674233971069bc28d19b07f7755b29861570e513650/cryptography-44.0.2-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:8cadc6e3b5a1f144a039ea08a0bdb03a2a92e19c46be3285123d32029f40a922", size = 4081572 }, + { url = "https://files.pythonhosted.org/packages/b3/9f/6a3e0391957cc0c5f84aef9fbdd763035f2b52e998a53f99345e3ac69312/cryptography-44.0.2-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:6f101b1f780f7fc613d040ca4bdf835c6ef3b00e9bd7125a4255ec574c7916e4", size = 4298631 }, + { url = "https://files.pythonhosted.org/packages/e2/a5/5bc097adb4b6d22a24dea53c51f37e480aaec3465285c253098642696423/cryptography-44.0.2-cp39-abi3-win32.whl", hash = "sha256:3dc62975e31617badc19a906481deacdeb80b4bb454394b4098e3f2525a488c5", size = 2773792 }, + { url = "https://files.pythonhosted.org/packages/33/cf/1f7649b8b9a3543e042d3f348e398a061923ac05b507f3f4d95f11938aa9/cryptography-44.0.2-cp39-abi3-win_amd64.whl", hash = "sha256:5f6f90b72d8ccadb9c6e311c775c8305381db88374c65fa1a68250aa8a9cb3a6", size = 3210957 }, + { url = "https://files.pythonhosted.org/packages/99/10/173be140714d2ebaea8b641ff801cbcb3ef23101a2981cbf08057876f89e/cryptography-44.0.2-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:af4ff3e388f2fa7bff9f7f2b31b87d5651c45731d3e8cfa0944be43dff5cfbdb", size = 3396886 }, + { url = 
"https://files.pythonhosted.org/packages/2f/b4/424ea2d0fce08c24ede307cead3409ecbfc2f566725d4701b9754c0a1174/cryptography-44.0.2-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:0529b1d5a0105dd3731fa65680b45ce49da4d8115ea76e9da77a875396727b41", size = 3892387 }, + { url = "https://files.pythonhosted.org/packages/28/20/8eaa1a4f7c68a1cb15019dbaad59c812d4df4fac6fd5f7b0b9c5177f1edd/cryptography-44.0.2-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:7ca25849404be2f8e4b3c59483d9d3c51298a22c1c61a0e84415104dacaf5562", size = 4109922 }, + { url = "https://files.pythonhosted.org/packages/11/25/5ed9a17d532c32b3bc81cc294d21a36c772d053981c22bd678396bc4ae30/cryptography-44.0.2-pp310-pypy310_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:268e4e9b177c76d569e8a145a6939eca9a5fec658c932348598818acf31ae9a5", size = 3895715 }, + { url = "https://files.pythonhosted.org/packages/63/31/2aac03b19c6329b62c45ba4e091f9de0b8f687e1b0cd84f101401bece343/cryptography-44.0.2-pp310-pypy310_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:9eb9d22b0a5d8fd9925a7764a054dca914000607dff201a24c791ff5c799e1fa", size = 4109876 }, + { url = "https://files.pythonhosted.org/packages/99/ec/6e560908349843718db1a782673f36852952d52a55ab14e46c42c8a7690a/cryptography-44.0.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:2bf7bf75f7df9715f810d1b038870309342bff3069c5bd8c6b96128cb158668d", size = 3131719 }, + { url = "https://files.pythonhosted.org/packages/d6/d7/f30e75a6aa7d0f65031886fa4a1485c2fbfe25a1896953920f6a9cfe2d3b/cryptography-44.0.2-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:909c97ab43a9c0c0b0ada7a1281430e4e5ec0458e6d9244c0e821bbf152f061d", size = 3887513 }, + { url = "https://files.pythonhosted.org/packages/9c/b4/7a494ce1032323ca9db9a3661894c66e0d7142ad2079a4249303402d8c71/cryptography-44.0.2-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:96e7a5e9d6e71f9f4fca8eebfd603f8e86c5225bb18eb621b2c1e50b290a9471", size = 4107432 }, + { url = "https://files.pythonhosted.org/packages/45/f8/6b3ec0bc56123b344a8d2b3264a325646d2dcdbdd9848b5e6f3d37db90b3/cryptography-44.0.2-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:d1b3031093a366ac767b3feb8bcddb596671b3aaff82d4050f984da0c248b615", size = 3891421 }, + { url = "https://files.pythonhosted.org/packages/57/ff/f3b4b2d007c2a646b0f69440ab06224f9cf37a977a72cdb7b50632174e8a/cryptography-44.0.2-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:04abd71114848aa25edb28e225ab5f268096f44cf0127f3d36975bdf1bdf3390", size = 4107081 }, +] + +[[package]] +name = "dask" +version = "2024.12.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "click" }, + { name = "cloudpickle" }, + { name = "fsspec" }, + { name = "importlib-metadata", marker = "python_full_version < '3.12'" }, + { name = "packaging" }, + { name = "partd" }, + { name = "pyyaml" }, + { name = "toolz" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b8/0a/1011ce75bc1e2da627d481f4bfcaf0eaf3367eb2e432ed908620e0a0fcfa/dask-2024.12.0.tar.gz", hash = "sha256:ffd02b06ac06b993df0b48e0ba4fe02abceb5c8b34b40bd91d63f33ec7a272a4", size = 10691490 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3a/46/9ea782eb1accabad0931d80818fcf8b286bdd1b457ae9c718f039430c489/dask-2024.12.0-py3-none-any.whl", hash = "sha256:e038e87b9f06e7927b81ecde6cf2b49aa699bb902fec11abba5697cb48baeb8d", size = 1268483 }, +] + +[package.optional-dependencies] +distributed = [ + { name = "distributed" }, +] + +[[package]] +name = "dill" +version 
= "0.3.9" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/70/43/86fe3f9e130c4137b0f1b50784dd70a5087b911fe07fa81e53e0c4c47fea/dill-0.3.9.tar.gz", hash = "sha256:81aa267dddf68cbfe8029c42ca9ec6a4ab3b22371d1c450abc54422577b4512c", size = 187000 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/46/d1/e73b6ad76f0b1fb7f23c35c6d95dbc506a9c8804f43dda8cb5b0fa6331fd/dill-0.3.9-py3-none-any.whl", hash = "sha256:468dff3b89520b474c0397703366b7b95eebe6303f108adf9b19da1f702be87a", size = 119418 }, +] + +[[package]] +name = "distributed" +version = "2024.12.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "click" }, + { name = "cloudpickle" }, + { name = "dask" }, + { name = "jinja2" }, + { name = "locket" }, + { name = "msgpack" }, + { name = "packaging" }, + { name = "psutil" }, + { name = "pyyaml" }, + { name = "sortedcontainers" }, + { name = "tblib" }, + { name = "toolz" }, + { name = "tornado" }, + { name = "urllib3" }, + { name = "zict" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/02/6e/463536dba2893634537f62ef00abf9421bf55f867892fc7c47d865c27d85/distributed-2024.12.0.tar.gz", hash = "sha256:6a2c04e63d31973ee3c1f2160d66521ed8f08e637d6a25a450e7561582920f38", size = 1115740 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/93/ef/f0a96745cabeda5735d456403a007c2f9b6ac64a0f4c5fa4faf9d41f70dc/distributed-2024.12.0-py3-none-any.whl", hash = "sha256:ed05aa13b6c62b69b33d1ba7d1ca95e78406c8f37163fafd07f7ca94ae036b66", size = 1022908 }, +] + +[[package]] +name = "dnspython" +version = "2.7.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b5/4a/263763cb2ba3816dd94b08ad3a33d5fdae34ecb856678773cc40a3605829/dnspython-2.7.0.tar.gz", hash = "sha256:ce9c432eda0dc91cf618a5cedf1a4e142651196bbcd2c80e89ed5a907e5cfaf1", size = 345197 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/68/1b/e0a87d256e40e8c888847551b20a017a6b98139178505dc7ffb96f04e954/dnspython-2.7.0-py3-none-any.whl", hash = "sha256:b4c34b7d10b51bcc3a5071e7b8dee77939f1e878477eeecc965e9835f63c6c86", size = 313632 }, +] + +[[package]] +name = "email-validator" +version = "2.2.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "dnspython" }, + { name = "idna" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/48/ce/13508a1ec3f8bb981ae4ca79ea40384becc868bfae97fd1c942bb3a001b1/email_validator-2.2.0.tar.gz", hash = "sha256:cb690f344c617a714f22e66ae771445a1ceb46821152df8e165c5f9a364582b7", size = 48967 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d7/ee/bf0adb559ad3c786f12bcbc9296b3f5675f529199bef03e2df281fa1fadb/email_validator-2.2.0-py3-none-any.whl", hash = "sha256:561977c2d73ce3611850a06fa56b414621e0c8faa9d66f2611407d87465da631", size = 33521 }, +] + +[[package]] +name = "fsspec" +version = "2025.3.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/34/f4/5721faf47b8c499e776bc34c6a8fc17efdf7fdef0b00f398128bc5dcb4ac/fsspec-2025.3.0.tar.gz", hash = "sha256:a935fd1ea872591f2b5148907d103488fc523295e6c64b835cfad8c3eca44972", size = 298491 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/56/53/eb690efa8513166adef3e0669afd31e95ffde69fb3c52ec2ac7223ed6018/fsspec-2025.3.0-py3-none-any.whl", hash = "sha256:efb87af3efa9103f94ca91a7f8cb7a4df91af9f74fc106c9c7ea0efd7277c1b3", size = 193615 }, +] + +[[package]] +name = 
"greenlet" +version = "3.1.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/2f/ff/df5fede753cc10f6a5be0931204ea30c35fa2f2ea7a35b25bdaf4fe40e46/greenlet-3.1.1.tar.gz", hash = "sha256:4ce3ac6cdb6adf7946475d7ef31777c26d94bccc377e070a7986bd2d5c515467", size = 186022 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/25/90/5234a78dc0ef6496a6eb97b67a42a8e96742a56f7dc808cb954a85390448/greenlet-3.1.1-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:0bbae94a29c9e5c7e4a2b7f0aae5c17e8e90acbfd3bf6270eeba60c39fce3563", size = 271235 }, + { url = "https://files.pythonhosted.org/packages/7c/16/cd631fa0ab7d06ef06387135b7549fdcc77d8d859ed770a0d28e47b20972/greenlet-3.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0fde093fb93f35ca72a556cf72c92ea3ebfda3d79fc35bb19fbe685853869a83", size = 637168 }, + { url = "https://files.pythonhosted.org/packages/2f/b1/aed39043a6fec33c284a2c9abd63ce191f4f1a07319340ffc04d2ed3256f/greenlet-3.1.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:36b89d13c49216cadb828db8dfa6ce86bbbc476a82d3a6c397f0efae0525bdd0", size = 648826 }, + { url = "https://files.pythonhosted.org/packages/76/25/40e0112f7f3ebe54e8e8ed91b2b9f970805143efef16d043dfc15e70f44b/greenlet-3.1.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:94b6150a85e1b33b40b1464a3f9988dcc5251d6ed06842abff82e42632fac120", size = 644443 }, + { url = "https://files.pythonhosted.org/packages/fb/2f/3850b867a9af519794784a7eeed1dd5bc68ffbcc5b28cef703711025fd0a/greenlet-3.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:93147c513fac16385d1036b7e5b102c7fbbdb163d556b791f0f11eada7ba65dc", size = 643295 }, + { url = "https://files.pythonhosted.org/packages/cf/69/79e4d63b9387b48939096e25115b8af7cd8a90397a304f92436bcb21f5b2/greenlet-3.1.1-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:da7a9bff22ce038e19bf62c4dd1ec8391062878710ded0a845bcf47cc0200617", size = 599544 }, + { url = "https://files.pythonhosted.org/packages/46/1d/44dbcb0e6c323bd6f71b8c2f4233766a5faf4b8948873225d34a0b7efa71/greenlet-3.1.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b2795058c23988728eec1f36a4e5e4ebad22f8320c85f3587b539b9ac84128d7", size = 1125456 }, + { url = "https://files.pythonhosted.org/packages/e0/1d/a305dce121838d0278cee39d5bb268c657f10a5363ae4b726848f833f1bb/greenlet-3.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ed10eac5830befbdd0c32f83e8aa6288361597550ba669b04c48f0f9a2c843c6", size = 1149111 }, + { url = "https://files.pythonhosted.org/packages/96/28/d62835fb33fb5652f2e98d34c44ad1a0feacc8b1d3f1aecab035f51f267d/greenlet-3.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:77c386de38a60d1dfb8e55b8c1101d68c79dfdd25c7095d51fec2dd800892b80", size = 298392 }, + { url = "https://files.pythonhosted.org/packages/28/62/1c2665558618553c42922ed47a4e6d6527e2fa3516a8256c2f431c5d0441/greenlet-3.1.1-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:e4d333e558953648ca09d64f13e6d8f0523fa705f51cae3f03b5983489958c70", size = 272479 }, + { url = "https://files.pythonhosted.org/packages/76/9d/421e2d5f07285b6e4e3a676b016ca781f63cfe4a0cd8eaecf3fd6f7a71ae/greenlet-3.1.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09fc016b73c94e98e29af67ab7b9a879c307c6731a2c9da0db5a7d9b7edd1159", size = 640404 }, + { url = 
"https://files.pythonhosted.org/packages/e5/de/6e05f5c59262a584e502dd3d261bbdd2c97ab5416cc9c0b91ea38932a901/greenlet-3.1.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d5e975ca70269d66d17dd995dafc06f1b06e8cb1ec1e9ed54c1d1e4a7c4cf26e", size = 652813 }, + { url = "https://files.pythonhosted.org/packages/49/93/d5f93c84241acdea15a8fd329362c2c71c79e1a507c3f142a5d67ea435ae/greenlet-3.1.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3b2813dc3de8c1ee3f924e4d4227999285fd335d1bcc0d2be6dc3f1f6a318ec1", size = 648517 }, + { url = "https://files.pythonhosted.org/packages/15/85/72f77fc02d00470c86a5c982b8daafdf65d38aefbbe441cebff3bf7037fc/greenlet-3.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e347b3bfcf985a05e8c0b7d462ba6f15b1ee1c909e2dcad795e49e91b152c383", size = 647831 }, + { url = "https://files.pythonhosted.org/packages/f7/4b/1c9695aa24f808e156c8f4813f685d975ca73c000c2a5056c514c64980f6/greenlet-3.1.1-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9e8f8c9cb53cdac7ba9793c276acd90168f416b9ce36799b9b885790f8ad6c0a", size = 602413 }, + { url = "https://files.pythonhosted.org/packages/76/70/ad6e5b31ef330f03b12559d19fda2606a522d3849cde46b24f223d6d1619/greenlet-3.1.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:62ee94988d6b4722ce0028644418d93a52429e977d742ca2ccbe1c4f4a792511", size = 1129619 }, + { url = "https://files.pythonhosted.org/packages/f4/fb/201e1b932e584066e0f0658b538e73c459b34d44b4bd4034f682423bc801/greenlet-3.1.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1776fd7f989fc6b8d8c8cb8da1f6b82c5814957264d1f6cf818d475ec2bf6395", size = 1155198 }, + { url = "https://files.pythonhosted.org/packages/12/da/b9ed5e310bb8b89661b80cbcd4db5a067903bbcd7fc854923f5ebb4144f0/greenlet-3.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:48ca08c771c268a768087b408658e216133aecd835c0ded47ce955381105ba39", size = 298930 }, + { url = "https://files.pythonhosted.org/packages/7d/ec/bad1ac26764d26aa1353216fcbfa4670050f66d445448aafa227f8b16e80/greenlet-3.1.1-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:4afe7ea89de619adc868e087b4d2359282058479d7cfb94970adf4b55284574d", size = 274260 }, + { url = "https://files.pythonhosted.org/packages/66/d4/c8c04958870f482459ab5956c2942c4ec35cac7fe245527f1039837c17a9/greenlet-3.1.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f406b22b7c9a9b4f8aa9d2ab13d6ae0ac3e85c9a809bd590ad53fed2bf70dc79", size = 649064 }, + { url = "https://files.pythonhosted.org/packages/51/41/467b12a8c7c1303d20abcca145db2be4e6cd50a951fa30af48b6ec607581/greenlet-3.1.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c3a701fe5a9695b238503ce5bbe8218e03c3bcccf7e204e455e7462d770268aa", size = 663420 }, + { url = "https://files.pythonhosted.org/packages/27/8f/2a93cd9b1e7107d5c7b3b7816eeadcac2ebcaf6d6513df9abaf0334777f6/greenlet-3.1.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2846930c65b47d70b9d178e89c7e1a69c95c1f68ea5aa0a58646b7a96df12441", size = 658035 }, + { url = "https://files.pythonhosted.org/packages/57/5c/7c6f50cb12be092e1dccb2599be5a942c3416dbcfb76efcf54b3f8be4d8d/greenlet-3.1.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99cfaa2110534e2cf3ba31a7abcac9d328d1d9f1b95beede58294a60348fba36", size = 660105 }, + { url = 
"https://files.pythonhosted.org/packages/f1/66/033e58a50fd9ec9df00a8671c74f1f3a320564c6415a4ed82a1c651654ba/greenlet-3.1.1-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1443279c19fca463fc33e65ef2a935a5b09bb90f978beab37729e1c3c6c25fe9", size = 613077 }, + { url = "https://files.pythonhosted.org/packages/19/c5/36384a06f748044d06bdd8776e231fadf92fc896bd12cb1c9f5a1bda9578/greenlet-3.1.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b7cede291382a78f7bb5f04a529cb18e068dd29e0fb27376074b6d0317bf4dd0", size = 1135975 }, + { url = "https://files.pythonhosted.org/packages/38/f9/c0a0eb61bdf808d23266ecf1d63309f0e1471f284300ce6dac0ae1231881/greenlet-3.1.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:23f20bb60ae298d7d8656c6ec6db134bca379ecefadb0b19ce6f19d1f232a942", size = 1163955 }, + { url = "https://files.pythonhosted.org/packages/43/21/a5d9df1d21514883333fc86584c07c2b49ba7c602e670b174bd73cfc9c7f/greenlet-3.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:7124e16b4c55d417577c2077be379514321916d5790fa287c9ed6f23bd2ffd01", size = 299655 }, + { url = "https://files.pythonhosted.org/packages/f3/57/0db4940cd7bb461365ca8d6fd53e68254c9dbbcc2b452e69d0d41f10a85e/greenlet-3.1.1-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:05175c27cb459dcfc05d026c4232f9de8913ed006d42713cb8a5137bd49375f1", size = 272990 }, + { url = "https://files.pythonhosted.org/packages/1c/ec/423d113c9f74e5e402e175b157203e9102feeb7088cee844d735b28ef963/greenlet-3.1.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:935e943ec47c4afab8965954bf49bfa639c05d4ccf9ef6e924188f762145c0ff", size = 649175 }, + { url = "https://files.pythonhosted.org/packages/a9/46/ddbd2db9ff209186b7b7c621d1432e2f21714adc988703dbdd0e65155c77/greenlet-3.1.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:667a9706c970cb552ede35aee17339a18e8f2a87a51fba2ed39ceeeb1004798a", size = 663425 }, + { url = "https://files.pythonhosted.org/packages/bc/f9/9c82d6b2b04aa37e38e74f0c429aece5eeb02bab6e3b98e7db89b23d94c6/greenlet-3.1.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b8a678974d1f3aa55f6cc34dc480169d58f2e6d8958895d68845fa4ab566509e", size = 657736 }, + { url = "https://files.pythonhosted.org/packages/d9/42/b87bc2a81e3a62c3de2b0d550bf91a86939442b7ff85abb94eec3fc0e6aa/greenlet-3.1.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:efc0f674aa41b92da8c49e0346318c6075d734994c3c4e4430b1c3f853e498e4", size = 660347 }, + { url = "https://files.pythonhosted.org/packages/37/fa/71599c3fd06336cdc3eac52e6871cfebab4d9d70674a9a9e7a482c318e99/greenlet-3.1.1-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0153404a4bb921f0ff1abeb5ce8a5131da56b953eda6e14b88dc6bbc04d2049e", size = 615583 }, + { url = "https://files.pythonhosted.org/packages/4e/96/e9ef85de031703ee7a4483489b40cf307f93c1824a02e903106f2ea315fe/greenlet-3.1.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:275f72decf9932639c1c6dd1013a1bc266438eb32710016a1c742df5da6e60a1", size = 1133039 }, + { url = "https://files.pythonhosted.org/packages/87/76/b2b6362accd69f2d1889db61a18c94bc743e961e3cab344c2effaa4b4a25/greenlet-3.1.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:c4aab7f6381f38a4b42f269057aee279ab0fc7bf2e929e3d4abfae97b682a12c", size = 1160716 }, + { url = "https://files.pythonhosted.org/packages/1f/1b/54336d876186920e185066d8c3024ad55f21d7cc3683c856127ddb7b13ce/greenlet-3.1.1-cp313-cp313-win_amd64.whl", hash = 
"sha256:b42703b1cf69f2aa1df7d1030b9d77d3e584a70755674d60e710f0af570f3761", size = 299490 }, + { url = "https://files.pythonhosted.org/packages/5f/17/bea55bf36990e1638a2af5ba10c1640273ef20f627962cf97107f1e5d637/greenlet-3.1.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f1695e76146579f8c06c1509c7ce4dfe0706f49c6831a817ac04eebb2fd02011", size = 643731 }, + { url = "https://files.pythonhosted.org/packages/78/d2/aa3d2157f9ab742a08e0fd8f77d4699f37c22adfbfeb0c610a186b5f75e0/greenlet-3.1.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7876452af029456b3f3549b696bb36a06db7c90747740c5302f74a9e9fa14b13", size = 649304 }, + { url = "https://files.pythonhosted.org/packages/f1/8e/d0aeffe69e53ccff5a28fa86f07ad1d2d2d6537a9506229431a2a02e2f15/greenlet-3.1.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4ead44c85f8ab905852d3de8d86f6f8baf77109f9da589cb4fa142bd3b57b475", size = 646537 }, + { url = "https://files.pythonhosted.org/packages/05/79/e15408220bbb989469c8871062c97c6c9136770657ba779711b90870d867/greenlet-3.1.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8320f64b777d00dd7ccdade271eaf0cad6636343293a25074cc5566160e4de7b", size = 642506 }, + { url = "https://files.pythonhosted.org/packages/18/87/470e01a940307796f1d25f8167b551a968540fbe0551c0ebb853cb527dd6/greenlet-3.1.1-cp313-cp313t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6510bf84a6b643dabba74d3049ead221257603a253d0a9873f55f6a59a65f822", size = 602753 }, + { url = "https://files.pythonhosted.org/packages/e2/72/576815ba674eddc3c25028238f74d7b8068902b3968cbe456771b166455e/greenlet-3.1.1-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:04b013dc07c96f83134b1e99888e7a79979f1a247e2a9f59697fa14b5862ed01", size = 1122731 }, + { url = "https://files.pythonhosted.org/packages/ac/38/08cc303ddddc4b3d7c628c3039a61a3aae36c241ed01393d00c2fd663473/greenlet-3.1.1-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:411f015496fec93c1c8cd4e5238da364e1da7a124bcb293f085bf2860c32c6f6", size = 1142112 }, +] + +[[package]] +name = "idna" +version = "3.10" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f1/70/7703c29685631f5a7590aa73f1f1d3fa9a380e654b86af429e0934a32f7d/idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9", size = 190490 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/76/c6/c88e154df9c4e1a2a66ccf0005a88dfb2650c1dffb6f5ce603dfbd452ce3/idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3", size = 70442 }, +] + +[[package]] +name = "importlib-metadata" +version = "8.6.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "zipp", marker = "python_full_version < '3.12'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/33/08/c1395a292bb23fd03bdf572a1357c5a733d3eecbab877641ceacab23db6e/importlib_metadata-8.6.1.tar.gz", hash = "sha256:310b41d755445d74569f993ccfc22838295d9fe005425094fad953d7f15c8580", size = 55767 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/79/9d/0fb148dc4d6fa4a7dd1d8378168d9b4cd8d4560a6fbf6f0121c5fc34eb68/importlib_metadata-8.6.1-py3-none-any.whl", hash = "sha256:02a89390c1e15fdfdc0d7c6b25cb3e62650d0494005c97d6f148bf5b9787525e", size = 26971 }, +] + +[[package]] +name = "isort" +version = "6.0.1" +source = { registry = "https://pypi.org/simple" } +sdist = 
{ url = "https://files.pythonhosted.org/packages/b8/21/1e2a441f74a653a144224d7d21afe8f4169e6c7c20bb13aec3a2dc3815e0/isort-6.0.1.tar.gz", hash = "sha256:1cb5df28dfbc742e490c5e41bad6da41b805b0a8be7bc93cd0fb2a8a890ac450", size = 821955 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c1/11/114d0a5f4dabbdcedc1125dee0888514c3c3b16d3e9facad87ed96fad97c/isort-6.0.1-py3-none-any.whl", hash = "sha256:2dc5d7f65c9678d94c88dfc29161a320eec67328bc97aad576874cb4be1e9615", size = 94186 }, +] + +[[package]] +name = "jinja2" +version = "3.1.6" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "markupsafe" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/df/bf/f7da0350254c0ed7c72f3e33cef02e048281fec7ecec5f032d4aac52226b/jinja2-3.1.6.tar.gz", hash = "sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d", size = 245115 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/62/a1/3d680cbfd5f4b8f15abc1d571870c5fc3e594bb582bc3b64ea099db13e56/jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67", size = 134899 }, +] + +[[package]] +name = "jmespath" +version = "1.0.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/00/2a/e867e8531cf3e36b41201936b7fa7ba7b5702dbef42922193f05c8976cd6/jmespath-1.0.1.tar.gz", hash = "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe", size = 25843 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/31/b4/b9b800c45527aadd64d5b442f9b932b00648617eb5d63d2c7a6587b7cafc/jmespath-1.0.1-py3-none-any.whl", hash = "sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980", size = 20256 }, +] + +[[package]] +name = "locket" +version = "1.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/2f/83/97b29fe05cb6ae28d2dbd30b81e2e402a3eed5f460c26e9eaa5895ceacf5/locket-1.0.0.tar.gz", hash = "sha256:5c0d4c052a8bbbf750e056a8e65ccd309086f4f0f18a2eac306a8dfa4112a632", size = 4350 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/db/bc/83e112abc66cd466c6b83f99118035867cecd41802f8d044638aa78a106e/locket-1.0.0-py2.py3-none-any.whl", hash = "sha256:b6c819a722f7b6bd955b80781788e4a66a55628b858d347536b7e81325a3a5e3", size = 4398 }, +] + +[[package]] +name = "markdown-it-py" +version = "3.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "mdurl" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/38/71/3b932df36c1a044d397a1f92d1cf91ee0a503d91e470cbd670aa66b07ed0/markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb", size = 74596 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/42/d7/1ec15b46af6af88f19b8e5ffea08fa375d433c998b8a7639e76935c14f1f/markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1", size = 87528 }, +] + +[[package]] +name = "markupsafe" +version = "3.0.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b2/97/5d42485e71dfc078108a86d6de8fa46db44a1a9295e89c5d6d4a06e23a62/markupsafe-3.0.2.tar.gz", hash = "sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0", size = 20537 } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/04/90/d08277ce111dd22f77149fd1a5d4653eeb3b3eaacbdfcbae5afb2600eebd/MarkupSafe-3.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7e94c425039cde14257288fd61dcfb01963e658efbc0ff54f5306b06054700f8", size = 14357 }, + { url = "https://files.pythonhosted.org/packages/04/e1/6e2194baeae0bca1fae6629dc0cbbb968d4d941469cbab11a3872edff374/MarkupSafe-3.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9e2d922824181480953426608b81967de705c3cef4d1af983af849d7bd619158", size = 12393 }, + { url = "https://files.pythonhosted.org/packages/1d/69/35fa85a8ece0a437493dc61ce0bb6d459dcba482c34197e3efc829aa357f/MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:38a9ef736c01fccdd6600705b09dc574584b89bea478200c5fbf112a6b0d5579", size = 21732 }, + { url = "https://files.pythonhosted.org/packages/22/35/137da042dfb4720b638d2937c38a9c2df83fe32d20e8c8f3185dbfef05f7/MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bbcb445fa71794da8f178f0f6d66789a28d7319071af7a496d4d507ed566270d", size = 20866 }, + { url = "https://files.pythonhosted.org/packages/29/28/6d029a903727a1b62edb51863232152fd335d602def598dade38996887f0/MarkupSafe-3.0.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:57cb5a3cf367aeb1d316576250f65edec5bb3be939e9247ae594b4bcbc317dfb", size = 20964 }, + { url = "https://files.pythonhosted.org/packages/cc/cd/07438f95f83e8bc028279909d9c9bd39e24149b0d60053a97b2bc4f8aa51/MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:3809ede931876f5b2ec92eef964286840ed3540dadf803dd570c3b7e13141a3b", size = 21977 }, + { url = "https://files.pythonhosted.org/packages/29/01/84b57395b4cc062f9c4c55ce0df7d3108ca32397299d9df00fedd9117d3d/MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e07c3764494e3776c602c1e78e298937c3315ccc9043ead7e685b7f2b8d47b3c", size = 21366 }, + { url = "https://files.pythonhosted.org/packages/bd/6e/61ebf08d8940553afff20d1fb1ba7294b6f8d279df9fd0c0db911b4bbcfd/MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:b424c77b206d63d500bcb69fa55ed8d0e6a3774056bdc4839fc9298a7edca171", size = 21091 }, + { url = "https://files.pythonhosted.org/packages/11/23/ffbf53694e8c94ebd1e7e491de185124277964344733c45481f32ede2499/MarkupSafe-3.0.2-cp310-cp310-win32.whl", hash = "sha256:fcabf5ff6eea076f859677f5f0b6b5c1a51e70a376b0579e0eadef8db48c6b50", size = 15065 }, + { url = "https://files.pythonhosted.org/packages/44/06/e7175d06dd6e9172d4a69a72592cb3f7a996a9c396eee29082826449bbc3/MarkupSafe-3.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:6af100e168aa82a50e186c82875a5893c5597a0c1ccdb0d8b40240b1f28b969a", size = 15514 }, + { url = "https://files.pythonhosted.org/packages/6b/28/bbf83e3f76936960b850435576dd5e67034e200469571be53f69174a2dfd/MarkupSafe-3.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9025b4018f3a1314059769c7bf15441064b2207cb3f065e6ea1e7359cb46db9d", size = 14353 }, + { url = "https://files.pythonhosted.org/packages/6c/30/316d194b093cde57d448a4c3209f22e3046c5bb2fb0820b118292b334be7/MarkupSafe-3.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:93335ca3812df2f366e80509ae119189886b0f3c2b81325d39efdb84a1e2ae93", size = 12392 }, + { url = "https://files.pythonhosted.org/packages/f2/96/9cdafba8445d3a53cae530aaf83c38ec64c4d5427d975c974084af5bc5d2/MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:2cb8438c3cbb25e220c2ab33bb226559e7afb3baec11c4f218ffa7308603c832", size = 23984 }, + { url = "https://files.pythonhosted.org/packages/f1/a4/aefb044a2cd8d7334c8a47d3fb2c9f328ac48cb349468cc31c20b539305f/MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a123e330ef0853c6e822384873bef7507557d8e4a082961e1defa947aa59ba84", size = 23120 }, + { url = "https://files.pythonhosted.org/packages/8d/21/5e4851379f88f3fad1de30361db501300d4f07bcad047d3cb0449fc51f8c/MarkupSafe-3.0.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e084f686b92e5b83186b07e8a17fc09e38fff551f3602b249881fec658d3eca", size = 23032 }, + { url = "https://files.pythonhosted.org/packages/00/7b/e92c64e079b2d0d7ddf69899c98842f3f9a60a1ae72657c89ce2655c999d/MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d8213e09c917a951de9d09ecee036d5c7d36cb6cb7dbaece4c71a60d79fb9798", size = 24057 }, + { url = "https://files.pythonhosted.org/packages/f9/ac/46f960ca323037caa0a10662ef97d0a4728e890334fc156b9f9e52bcc4ca/MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5b02fb34468b6aaa40dfc198d813a641e3a63b98c2b05a16b9f80b7ec314185e", size = 23359 }, + { url = "https://files.pythonhosted.org/packages/69/84/83439e16197337b8b14b6a5b9c2105fff81d42c2a7c5b58ac7b62ee2c3b1/MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0bff5e0ae4ef2e1ae4fdf2dfd5b76c75e5c2fa4132d05fc1b0dabcd20c7e28c4", size = 23306 }, + { url = "https://files.pythonhosted.org/packages/9a/34/a15aa69f01e2181ed8d2b685c0d2f6655d5cca2c4db0ddea775e631918cd/MarkupSafe-3.0.2-cp311-cp311-win32.whl", hash = "sha256:6c89876f41da747c8d3677a2b540fb32ef5715f97b66eeb0c6b66f5e3ef6f59d", size = 15094 }, + { url = "https://files.pythonhosted.org/packages/da/b8/3a3bd761922d416f3dc5d00bfbed11f66b1ab89a0c2b6e887240a30b0f6b/MarkupSafe-3.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:70a87b411535ccad5ef2f1df5136506a10775d267e197e4cf531ced10537bd6b", size = 15521 }, + { url = "https://files.pythonhosted.org/packages/22/09/d1f21434c97fc42f09d290cbb6350d44eb12f09cc62c9476effdb33a18aa/MarkupSafe-3.0.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:9778bd8ab0a994ebf6f84c2b949e65736d5575320a17ae8984a77fab08db94cf", size = 14274 }, + { url = "https://files.pythonhosted.org/packages/6b/b0/18f76bba336fa5aecf79d45dcd6c806c280ec44538b3c13671d49099fdd0/MarkupSafe-3.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846ade7b71e3536c4e56b386c2a47adf5741d2d8b94ec9dc3e92e5e1ee1e2225", size = 12348 }, + { url = "https://files.pythonhosted.org/packages/e0/25/dd5c0f6ac1311e9b40f4af06c78efde0f3b5cbf02502f8ef9501294c425b/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c99d261bd2d5f6b59325c92c73df481e05e57f19837bdca8413b9eac4bd8028", size = 24149 }, + { url = "https://files.pythonhosted.org/packages/f3/f0/89e7aadfb3749d0f52234a0c8c7867877876e0a20b60e2188e9850794c17/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e17c96c14e19278594aa4841ec148115f9c7615a47382ecb6b82bd8fea3ab0c8", size = 23118 }, + { url = "https://files.pythonhosted.org/packages/d5/da/f2eeb64c723f5e3777bc081da884b414671982008c47dcc1873d81f625b6/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88416bd1e65dcea10bc7569faacb2c20ce071dd1f87539ca2ab364bf6231393c", size = 22993 }, + { url = 
"https://files.pythonhosted.org/packages/da/0e/1f32af846df486dce7c227fe0f2398dc7e2e51d4a370508281f3c1c5cddc/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2181e67807fc2fa785d0592dc2d6206c019b9502410671cc905d132a92866557", size = 24178 }, + { url = "https://files.pythonhosted.org/packages/c4/f6/bb3ca0532de8086cbff5f06d137064c8410d10779c4c127e0e47d17c0b71/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:52305740fe773d09cffb16f8ed0427942901f00adedac82ec8b67752f58a1b22", size = 23319 }, + { url = "https://files.pythonhosted.org/packages/a2/82/8be4c96ffee03c5b4a034e60a31294daf481e12c7c43ab8e34a1453ee48b/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ad10d3ded218f1039f11a75f8091880239651b52e9bb592ca27de44eed242a48", size = 23352 }, + { url = "https://files.pythonhosted.org/packages/51/ae/97827349d3fcffee7e184bdf7f41cd6b88d9919c80f0263ba7acd1bbcb18/MarkupSafe-3.0.2-cp312-cp312-win32.whl", hash = "sha256:0f4ca02bea9a23221c0182836703cbf8930c5e9454bacce27e767509fa286a30", size = 15097 }, + { url = "https://files.pythonhosted.org/packages/c1/80/a61f99dc3a936413c3ee4e1eecac96c0da5ed07ad56fd975f1a9da5bc630/MarkupSafe-3.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:8e06879fc22a25ca47312fbe7c8264eb0b662f6db27cb2d3bbbc74b1df4b9b87", size = 15601 }, + { url = "https://files.pythonhosted.org/packages/83/0e/67eb10a7ecc77a0c2bbe2b0235765b98d164d81600746914bebada795e97/MarkupSafe-3.0.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ba9527cdd4c926ed0760bc301f6728ef34d841f405abf9d4f959c478421e4efd", size = 14274 }, + { url = "https://files.pythonhosted.org/packages/2b/6d/9409f3684d3335375d04e5f05744dfe7e9f120062c9857df4ab490a1031a/MarkupSafe-3.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f8b3d067f2e40fe93e1ccdd6b2e1d16c43140e76f02fb1319a05cf2b79d99430", size = 12352 }, + { url = "https://files.pythonhosted.org/packages/d2/f5/6eadfcd3885ea85fe2a7c128315cc1bb7241e1987443d78c8fe712d03091/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:569511d3b58c8791ab4c2e1285575265991e6d8f8700c7be0e88f86cb0672094", size = 24122 }, + { url = "https://files.pythonhosted.org/packages/0c/91/96cf928db8236f1bfab6ce15ad070dfdd02ed88261c2afafd4b43575e9e9/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15ab75ef81add55874e7ab7055e9c397312385bd9ced94920f2802310c930396", size = 23085 }, + { url = "https://files.pythonhosted.org/packages/c2/cf/c9d56af24d56ea04daae7ac0940232d31d5a8354f2b457c6d856b2057d69/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3818cb119498c0678015754eba762e0d61e5b52d34c8b13d770f0719f7b1d79", size = 22978 }, + { url = "https://files.pythonhosted.org/packages/2a/9f/8619835cd6a711d6272d62abb78c033bda638fdc54c4e7f4272cf1c0962b/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:cdb82a876c47801bb54a690c5ae105a46b392ac6099881cdfb9f6e95e4014c6a", size = 24208 }, + { url = "https://files.pythonhosted.org/packages/f9/bf/176950a1792b2cd2102b8ffeb5133e1ed984547b75db47c25a67d3359f77/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:cabc348d87e913db6ab4aa100f01b08f481097838bdddf7c7a84b7575b7309ca", size = 23357 }, + { url = "https://files.pythonhosted.org/packages/ce/4f/9a02c1d335caabe5c4efb90e1b6e8ee944aa245c1aaaab8e8a618987d816/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = 
"sha256:444dcda765c8a838eaae23112db52f1efaf750daddb2d9ca300bcae1039adc5c", size = 23344 }, + { url = "https://files.pythonhosted.org/packages/ee/55/c271b57db36f748f0e04a759ace9f8f759ccf22b4960c270c78a394f58be/MarkupSafe-3.0.2-cp313-cp313-win32.whl", hash = "sha256:bcf3e58998965654fdaff38e58584d8937aa3096ab5354d493c77d1fdd66d7a1", size = 15101 }, + { url = "https://files.pythonhosted.org/packages/29/88/07df22d2dd4df40aba9f3e402e6dc1b8ee86297dddbad4872bd5e7b0094f/MarkupSafe-3.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:e6a2a455bd412959b57a172ce6328d2dd1f01cb2135efda2e4576e8a23fa3b0f", size = 15603 }, + { url = "https://files.pythonhosted.org/packages/62/6a/8b89d24db2d32d433dffcd6a8779159da109842434f1dd2f6e71f32f738c/MarkupSafe-3.0.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:b5a6b3ada725cea8a5e634536b1b01c30bcdcd7f9c6fff4151548d5bf6b3a36c", size = 14510 }, + { url = "https://files.pythonhosted.org/packages/7a/06/a10f955f70a2e5a9bf78d11a161029d278eeacbd35ef806c3fd17b13060d/MarkupSafe-3.0.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:a904af0a6162c73e3edcb969eeeb53a63ceeb5d8cf642fade7d39e7963a22ddb", size = 12486 }, + { url = "https://files.pythonhosted.org/packages/34/cf/65d4a571869a1a9078198ca28f39fba5fbb910f952f9dbc5220afff9f5e6/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa4e5faecf353ed117801a068ebab7b7e09ffb6e1d5e412dc852e0da018126c", size = 25480 }, + { url = "https://files.pythonhosted.org/packages/0c/e3/90e9651924c430b885468b56b3d597cabf6d72be4b24a0acd1fa0e12af67/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0ef13eaeee5b615fb07c9a7dadb38eac06a0608b41570d8ade51c56539e509d", size = 23914 }, + { url = "https://files.pythonhosted.org/packages/66/8c/6c7cf61f95d63bb866db39085150df1f2a5bd3335298f14a66b48e92659c/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d16a81a06776313e817c951135cf7340a3e91e8c1ff2fac444cfd75fffa04afe", size = 23796 }, + { url = "https://files.pythonhosted.org/packages/bb/35/cbe9238ec3f47ac9a7c8b3df7a808e7cb50fe149dc7039f5f454b3fba218/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:6381026f158fdb7c72a168278597a5e3a5222e83ea18f543112b2662a9b699c5", size = 25473 }, + { url = "https://files.pythonhosted.org/packages/e6/32/7621a4382488aa283cc05e8984a9c219abad3bca087be9ec77e89939ded9/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:3d79d162e7be8f996986c064d1c7c817f6df3a77fe3d6859f6f9e7be4b8c213a", size = 24114 }, + { url = "https://files.pythonhosted.org/packages/0d/80/0985960e4b89922cb5a0bac0ed39c5b96cbc1a536a99f30e8c220a996ed9/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:131a3c7689c85f5ad20f9f6fb1b866f402c445b220c19fe4308c0b147ccd2ad9", size = 24098 }, + { url = "https://files.pythonhosted.org/packages/82/78/fedb03c7d5380df2427038ec8d973587e90561b2d90cd472ce9254cf348b/MarkupSafe-3.0.2-cp313-cp313t-win32.whl", hash = "sha256:ba8062ed2cf21c07a9e295d5b8a2a5ce678b913b45fdf68c32d95d6c1291e0b6", size = 15208 }, + { url = "https://files.pythonhosted.org/packages/4f/65/6079a46068dfceaeabb5dcad6d674f5f5c61a6fa5673746f42a9f4c233b3/MarkupSafe-3.0.2-cp313-cp313t-win_amd64.whl", hash = "sha256:e444a31f8db13eb18ada366ab3cf45fd4b31e4db1236a4448f68778c1d1a5a2f", size = 15739 }, +] + +[[package]] +name = "mccabe" +version = "0.7.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/e7/ff/0ffefdcac38932a54d2b5eed4e0ba8a408f215002cd178ad1df0f2806ff8/mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325", size = 9658 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/27/1a/1f68f9ba0c207934b35b86a8ca3aad8395a3d6dd7921c0686e23853ff5a9/mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e", size = 7350 }, +] + +[[package]] +name = "mdurl" +version = "0.1.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d6/54/cfe61301667036ec958cb99bd3efefba235e65cdeb9c84d24a8293ba1d90/mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba", size = 8729 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b3/38/89ba8ad64ae25be8de66a6d463314cf1eb366222074cfda9ee839c56a4b4/mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8", size = 9979 }, +] + +[[package]] +name = "msgpack" +version = "1.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/cb/d0/7555686ae7ff5731205df1012ede15dd9d927f6227ea151e901c7406af4f/msgpack-1.1.0.tar.gz", hash = "sha256:dd432ccc2c72b914e4cb77afce64aab761c1137cc698be3984eee260bcb2896e", size = 167260 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/4b/f9/a892a6038c861fa849b11a2bb0502c07bc698ab6ea53359e5771397d883b/msgpack-1.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7ad442d527a7e358a469faf43fda45aaf4ac3249c8310a82f0ccff9164e5dccd", size = 150428 }, + { url = "https://files.pythonhosted.org/packages/df/7a/d174cc6a3b6bb85556e6a046d3193294a92f9a8e583cdbd46dc8a1d7e7f4/msgpack-1.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:74bed8f63f8f14d75eec75cf3d04ad581da6b914001b474a5d3cd3372c8cc27d", size = 84131 }, + { url = "https://files.pythonhosted.org/packages/08/52/bf4fbf72f897a23a56b822997a72c16de07d8d56d7bf273242f884055682/msgpack-1.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:914571a2a5b4e7606997e169f64ce53a8b1e06f2cf2c3a7273aa106236d43dd5", size = 81215 }, + { url = "https://files.pythonhosted.org/packages/02/95/dc0044b439b518236aaf012da4677c1b8183ce388411ad1b1e63c32d8979/msgpack-1.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c921af52214dcbb75e6bdf6a661b23c3e6417f00c603dd2070bccb5c3ef499f5", size = 371229 }, + { url = "https://files.pythonhosted.org/packages/ff/75/09081792db60470bef19d9c2be89f024d366b1e1973c197bb59e6aabc647/msgpack-1.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d8ce0b22b890be5d252de90d0e0d119f363012027cf256185fc3d474c44b1b9e", size = 378034 }, + { url = "https://files.pythonhosted.org/packages/32/d3/c152e0c55fead87dd948d4b29879b0f14feeeec92ef1fd2ec21b107c3f49/msgpack-1.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:73322a6cc57fcee3c0c57c4463d828e9428275fb85a27aa2aa1a92fdc42afd7b", size = 363070 }, + { url = "https://files.pythonhosted.org/packages/d9/2c/82e73506dd55f9e43ac8aa007c9dd088c6f0de2aa19e8f7330e6a65879fc/msgpack-1.1.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:e1f3c3d21f7cf67bcf2da8e494d30a75e4cf60041d98b3f79875afb5b96f3a3f", size = 359863 }, + { url = 
"https://files.pythonhosted.org/packages/cb/a0/3d093b248837094220e1edc9ec4337de3443b1cfeeb6e0896af8ccc4cc7a/msgpack-1.1.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:64fc9068d701233effd61b19efb1485587560b66fe57b3e50d29c5d78e7fef68", size = 368166 }, + { url = "https://files.pythonhosted.org/packages/e4/13/7646f14f06838b406cf5a6ddbb7e8dc78b4996d891ab3b93c33d1ccc8678/msgpack-1.1.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:42f754515e0f683f9c79210a5d1cad631ec3d06cea5172214d2176a42e67e19b", size = 370105 }, + { url = "https://files.pythonhosted.org/packages/67/fa/dbbd2443e4578e165192dabbc6a22c0812cda2649261b1264ff515f19f15/msgpack-1.1.0-cp310-cp310-win32.whl", hash = "sha256:3df7e6b05571b3814361e8464f9304c42d2196808e0119f55d0d3e62cd5ea044", size = 68513 }, + { url = "https://files.pythonhosted.org/packages/24/ce/c2c8fbf0ded750cb63cbcbb61bc1f2dfd69e16dca30a8af8ba80ec182dcd/msgpack-1.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:685ec345eefc757a7c8af44a3032734a739f8c45d1b0ac45efc5d8977aa4720f", size = 74687 }, + { url = "https://files.pythonhosted.org/packages/b7/5e/a4c7154ba65d93be91f2f1e55f90e76c5f91ccadc7efc4341e6f04c8647f/msgpack-1.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:3d364a55082fb2a7416f6c63ae383fbd903adb5a6cf78c5b96cc6316dc1cedc7", size = 150803 }, + { url = "https://files.pythonhosted.org/packages/60/c2/687684164698f1d51c41778c838d854965dd284a4b9d3a44beba9265c931/msgpack-1.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:79ec007767b9b56860e0372085f8504db5d06bd6a327a335449508bbee9648fa", size = 84343 }, + { url = "https://files.pythonhosted.org/packages/42/ae/d3adea9bb4a1342763556078b5765e666f8fdf242e00f3f6657380920972/msgpack-1.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6ad622bf7756d5a497d5b6836e7fc3752e2dd6f4c648e24b1803f6048596f701", size = 81408 }, + { url = "https://files.pythonhosted.org/packages/dc/17/6313325a6ff40ce9c3207293aee3ba50104aed6c2c1559d20d09e5c1ff54/msgpack-1.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e59bca908d9ca0de3dc8684f21ebf9a690fe47b6be93236eb40b99af28b6ea6", size = 396096 }, + { url = "https://files.pythonhosted.org/packages/a8/a1/ad7b84b91ab5a324e707f4c9761633e357820b011a01e34ce658c1dda7cc/msgpack-1.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5e1da8f11a3dd397f0a32c76165cf0c4eb95b31013a94f6ecc0b280c05c91b59", size = 403671 }, + { url = "https://files.pythonhosted.org/packages/bb/0b/fd5b7c0b308bbf1831df0ca04ec76fe2f5bf6319833646b0a4bd5e9dc76d/msgpack-1.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:452aff037287acb1d70a804ffd022b21fa2bb7c46bee884dbc864cc9024128a0", size = 387414 }, + { url = "https://files.pythonhosted.org/packages/f0/03/ff8233b7c6e9929a1f5da3c7860eccd847e2523ca2de0d8ef4878d354cfa/msgpack-1.1.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8da4bf6d54ceed70e8861f833f83ce0814a2b72102e890cbdfe4b34764cdd66e", size = 383759 }, + { url = "https://files.pythonhosted.org/packages/1f/1b/eb82e1fed5a16dddd9bc75f0854b6e2fe86c0259c4353666d7fab37d39f4/msgpack-1.1.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:41c991beebf175faf352fb940bf2af9ad1fb77fd25f38d9142053914947cdbf6", size = 394405 }, + { url = "https://files.pythonhosted.org/packages/90/2e/962c6004e373d54ecf33d695fb1402f99b51832631e37c49273cc564ffc5/msgpack-1.1.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a52a1f3a5af7ba1c9ace055b659189f6c669cf3657095b50f9602af3a3ba0fe5", size = 
396041 }, + { url = "https://files.pythonhosted.org/packages/f8/20/6e03342f629474414860c48aeffcc2f7f50ddaf351d95f20c3f1c67399a8/msgpack-1.1.0-cp311-cp311-win32.whl", hash = "sha256:58638690ebd0a06427c5fe1a227bb6b8b9fdc2bd07701bec13c2335c82131a88", size = 68538 }, + { url = "https://files.pythonhosted.org/packages/aa/c4/5a582fc9a87991a3e6f6800e9bb2f3c82972912235eb9539954f3e9997c7/msgpack-1.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:fd2906780f25c8ed5d7b323379f6138524ba793428db5d0e9d226d3fa6aa1788", size = 74871 }, + { url = "https://files.pythonhosted.org/packages/e1/d6/716b7ca1dbde63290d2973d22bbef1b5032ca634c3ff4384a958ec3f093a/msgpack-1.1.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:d46cf9e3705ea9485687aa4001a76e44748b609d260af21c4ceea7f2212a501d", size = 152421 }, + { url = "https://files.pythonhosted.org/packages/70/da/5312b067f6773429cec2f8f08b021c06af416bba340c912c2ec778539ed6/msgpack-1.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5dbad74103df937e1325cc4bfeaf57713be0b4f15e1c2da43ccdd836393e2ea2", size = 85277 }, + { url = "https://files.pythonhosted.org/packages/28/51/da7f3ae4462e8bb98af0d5bdf2707f1b8c65a0d4f496e46b6afb06cbc286/msgpack-1.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:58dfc47f8b102da61e8949708b3eafc3504509a5728f8b4ddef84bd9e16ad420", size = 82222 }, + { url = "https://files.pythonhosted.org/packages/33/af/dc95c4b2a49cff17ce47611ca9ba218198806cad7796c0b01d1e332c86bb/msgpack-1.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4676e5be1b472909b2ee6356ff425ebedf5142427842aa06b4dfd5117d1ca8a2", size = 392971 }, + { url = "https://files.pythonhosted.org/packages/f1/54/65af8de681fa8255402c80eda2a501ba467921d5a7a028c9c22a2c2eedb5/msgpack-1.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17fb65dd0bec285907f68b15734a993ad3fc94332b5bb21b0435846228de1f39", size = 401403 }, + { url = "https://files.pythonhosted.org/packages/97/8c/e333690777bd33919ab7024269dc3c41c76ef5137b211d776fbb404bfead/msgpack-1.1.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a51abd48c6d8ac89e0cfd4fe177c61481aca2d5e7ba42044fd218cfd8ea9899f", size = 385356 }, + { url = "https://files.pythonhosted.org/packages/57/52/406795ba478dc1c890559dd4e89280fa86506608a28ccf3a72fbf45df9f5/msgpack-1.1.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2137773500afa5494a61b1208619e3871f75f27b03bcfca7b3a7023284140247", size = 383028 }, + { url = "https://files.pythonhosted.org/packages/e7/69/053b6549bf90a3acadcd8232eae03e2fefc87f066a5b9fbb37e2e608859f/msgpack-1.1.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:398b713459fea610861c8a7b62a6fec1882759f308ae0795b5413ff6a160cf3c", size = 391100 }, + { url = "https://files.pythonhosted.org/packages/23/f0/d4101d4da054f04274995ddc4086c2715d9b93111eb9ed49686c0f7ccc8a/msgpack-1.1.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:06f5fd2f6bb2a7914922d935d3b8bb4a7fff3a9a91cfce6d06c13bc42bec975b", size = 394254 }, + { url = "https://files.pythonhosted.org/packages/1c/12/cf07458f35d0d775ff3a2dc5559fa2e1fcd06c46f1ef510e594ebefdca01/msgpack-1.1.0-cp312-cp312-win32.whl", hash = "sha256:ad33e8400e4ec17ba782f7b9cf868977d867ed784a1f5f2ab46e7ba53b6e1e1b", size = 69085 }, + { url = "https://files.pythonhosted.org/packages/73/80/2708a4641f7d553a63bc934a3eb7214806b5b39d200133ca7f7afb0a53e8/msgpack-1.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:115a7af8ee9e8cddc10f87636767857e7e3717b7a2e97379dc2054712693e90f", size = 75347 
}, + { url = "https://files.pythonhosted.org/packages/c8/b0/380f5f639543a4ac413e969109978feb1f3c66e931068f91ab6ab0f8be00/msgpack-1.1.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:071603e2f0771c45ad9bc65719291c568d4edf120b44eb36324dcb02a13bfddf", size = 151142 }, + { url = "https://files.pythonhosted.org/packages/c8/ee/be57e9702400a6cb2606883d55b05784fada898dfc7fd12608ab1fdb054e/msgpack-1.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:0f92a83b84e7c0749e3f12821949d79485971f087604178026085f60ce109330", size = 84523 }, + { url = "https://files.pythonhosted.org/packages/7e/3a/2919f63acca3c119565449681ad08a2f84b2171ddfcff1dba6959db2cceb/msgpack-1.1.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:4a1964df7b81285d00a84da4e70cb1383f2e665e0f1f2a7027e683956d04b734", size = 81556 }, + { url = "https://files.pythonhosted.org/packages/7c/43/a11113d9e5c1498c145a8925768ea2d5fce7cbab15c99cda655aa09947ed/msgpack-1.1.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:59caf6a4ed0d164055ccff8fe31eddc0ebc07cf7326a2aaa0dbf7a4001cd823e", size = 392105 }, + { url = "https://files.pythonhosted.org/packages/2d/7b/2c1d74ca6c94f70a1add74a8393a0138172207dc5de6fc6269483519d048/msgpack-1.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0907e1a7119b337971a689153665764adc34e89175f9a34793307d9def08e6ca", size = 399979 }, + { url = "https://files.pythonhosted.org/packages/82/8c/cf64ae518c7b8efc763ca1f1348a96f0e37150061e777a8ea5430b413a74/msgpack-1.1.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:65553c9b6da8166e819a6aa90ad15288599b340f91d18f60b2061f402b9a4915", size = 383816 }, + { url = "https://files.pythonhosted.org/packages/69/86/a847ef7a0f5ef3fa94ae20f52a4cacf596a4e4a010197fbcc27744eb9a83/msgpack-1.1.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:7a946a8992941fea80ed4beae6bff74ffd7ee129a90b4dd5cf9c476a30e9708d", size = 380973 }, + { url = "https://files.pythonhosted.org/packages/aa/90/c74cf6e1126faa93185d3b830ee97246ecc4fe12cf9d2d31318ee4246994/msgpack-1.1.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:4b51405e36e075193bc051315dbf29168d6141ae2500ba8cd80a522964e31434", size = 387435 }, + { url = "https://files.pythonhosted.org/packages/7a/40/631c238f1f338eb09f4acb0f34ab5862c4e9d7eda11c1b685471a4c5ea37/msgpack-1.1.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:b4c01941fd2ff87c2a934ee6055bda4ed353a7846b8d4f341c428109e9fcde8c", size = 399082 }, + { url = "https://files.pythonhosted.org/packages/e9/1b/fa8a952be252a1555ed39f97c06778e3aeb9123aa4cccc0fd2acd0b4e315/msgpack-1.1.0-cp313-cp313-win32.whl", hash = "sha256:7c9a35ce2c2573bada929e0b7b3576de647b0defbd25f5139dcdaba0ae35a4cc", size = 69037 }, + { url = "https://files.pythonhosted.org/packages/b6/bc/8bd826dd03e022153bfa1766dcdec4976d6c818865ed54223d71f07862b3/msgpack-1.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:bce7d9e614a04d0883af0b3d4d501171fbfca038f12c77fa838d9f198147a23f", size = 75140 }, +] + +[[package]] +name = "mypy-boto3-ec2" +version = "1.37.9" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions", marker = "python_full_version < '3.12'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/e5/04/02de9b2742d89cf270fbe3671be07e203bb37d1ca4d831aa88f5d918f368/mypy_boto3_ec2-1.37.9.tar.gz", hash = "sha256:2d48e47017640b9e32485fdad648d067112fb737eede70b9d02c79a4204fcc2f", size = 387786 } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/1f/e8/842fe65faad1462f4344a58bc604ee2b630461d5f70d5e5ba8473bf554a3/mypy_boto3_ec2-1.37.9-py3-none-any.whl", hash = "sha256:cb9a1977e455cbcb2f34966ff3ecc14ec2f3ffebd4cb3cdf53075cd8242f9291", size = 377501 }, +] + +[[package]] +name = "mypy-extensions" +version = "1.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/98/a4/1ab47638b92648243faf97a5aeb6ea83059cc3624972ab6b8d2316078d3f/mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782", size = 4433 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2a/e2/5d3f6ada4297caebe1a2add3b126fe800c96f56dbe5d1988a2cbe0b267aa/mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d", size = 4695 }, +] + +[[package]] +name = "packaging" +version = "24.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d0/63/68dbb6eb2de9cb10ee4c9c14a0148804425e13c4fb20d61cce69f53106da/packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f", size = 163950 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/88/ef/eb23f262cca3c0c4eb7ab1933c3b1f03d021f2c48f54763065b6f0e321be/packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759", size = 65451 }, +] + +[[package]] +name = "paramiko" +version = "3.5.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "bcrypt" }, + { name = "cryptography" }, + { name = "pynacl" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/7d/15/ad6ce226e8138315f2451c2aeea985bf35ee910afb477bae7477dc3a8f3b/paramiko-3.5.1.tar.gz", hash = "sha256:b2c665bc45b2b215bd7d7f039901b14b067da00f3a11e6640995fd58f2664822", size = 1566110 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/15/f8/c7bd0ef12954a81a1d3cea60a13946bd9a49a0036a5927770c461eade7ae/paramiko-3.5.1-py3-none-any.whl", hash = "sha256:43b9a0501fc2b5e70680388d9346cf252cfb7d00b0667c39e80eb43a408b8f61", size = 227298 }, +] + +[[package]] +name = "parse" +version = "1.20.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/4f/78/d9b09ba24bb36ef8b83b71be547e118d46214735b6dfb39e4bfde0e9b9dd/parse-1.20.2.tar.gz", hash = "sha256:b41d604d16503c79d81af5165155c0b20f6c8d6c559efa66b4b695c3e5a0a0ce", size = 29391 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d0/31/ba45bf0b2aa7898d81cbbfac0e88c267befb59ad91a19e36e1bc5578ddb1/parse-1.20.2-py2.py3-none-any.whl", hash = "sha256:967095588cb802add9177d0c0b6133b5ba33b1ea9007ca800e526f42a85af558", size = 20126 }, +] + +[[package]] +name = "partd" +version = "1.4.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "locket" }, + { name = "toolz" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b2/3a/3f06f34820a31257ddcabdfafc2672c5816be79c7e353b02c1f318daa7d4/partd-1.4.2.tar.gz", hash = "sha256:d022c33afbdc8405c226621b015e8067888173d85f7f5ecebb3cafed9a20f02c", size = 21029 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/71/e7/40fb618334dcdf7c5a316c0e7343c5cd82d3d866edc100d98e29bc945ecd/partd-1.4.2-py3-none-any.whl", hash = "sha256:978e4ac767ec4ba5b86c6eaa52e5a2a3bc748a2ca839e8cc798f1cc6ce6efb0f", size = 18905 }, +] + +[[package]] +name = "pathspec" +version = "0.12.1" 
+source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ca/bc/f35b8446f4531a7cb215605d100cd88b7ac6f44ab3fc94870c120ab3adbf/pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712", size = 51043 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/cc/20/ff623b09d963f88bfde16306a54e12ee5ea43e9b597108672ff3a408aad6/pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08", size = 31191 }, +] + +[[package]] +name = "platformdirs" +version = "4.3.6" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/13/fc/128cc9cb8f03208bdbf93d3aa862e16d376844a14f9a0ce5cf4507372de4/platformdirs-4.3.6.tar.gz", hash = "sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907", size = 21302 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3c/a6/bc1012356d8ece4d66dd75c4b9fc6c1f6650ddd5991e421177d9f8f671be/platformdirs-4.3.6-py3-none-any.whl", hash = "sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb", size = 18439 }, +] + +[[package]] +name = "psutil" +version = "7.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/2a/80/336820c1ad9286a4ded7e845b2eccfcb27851ab8ac6abece774a6ff4d3de/psutil-7.0.0.tar.gz", hash = "sha256:7be9c3eba38beccb6495ea33afd982a44074b78f28c434a1f51cc07fd315c456", size = 497003 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ed/e6/2d26234410f8b8abdbf891c9da62bee396583f713fb9f3325a4760875d22/psutil-7.0.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:101d71dc322e3cffd7cea0650b09b3d08b8e7c4109dd6809fe452dfd00e58b25", size = 238051 }, + { url = "https://files.pythonhosted.org/packages/04/8b/30f930733afe425e3cbfc0e1468a30a18942350c1a8816acfade80c005c4/psutil-7.0.0-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:39db632f6bb862eeccf56660871433e111b6ea58f2caea825571951d4b6aa3da", size = 239535 }, + { url = "https://files.pythonhosted.org/packages/2a/ed/d362e84620dd22876b55389248e522338ed1bf134a5edd3b8231d7207f6d/psutil-7.0.0-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1fcee592b4c6f146991ca55919ea3d1f8926497a713ed7faaf8225e174581e91", size = 275004 }, + { url = "https://files.pythonhosted.org/packages/bf/b9/b0eb3f3cbcb734d930fdf839431606844a825b23eaf9a6ab371edac8162c/psutil-7.0.0-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b1388a4f6875d7e2aff5c4ca1cc16c545ed41dd8bb596cefea80111db353a34", size = 277986 }, + { url = "https://files.pythonhosted.org/packages/eb/a2/709e0fe2f093556c17fbafda93ac032257242cabcc7ff3369e2cb76a97aa/psutil-7.0.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5f098451abc2828f7dc6b58d44b532b22f2088f4999a937557b603ce72b1993", size = 279544 }, + { url = "https://files.pythonhosted.org/packages/50/e6/eecf58810b9d12e6427369784efe814a1eec0f492084ce8eb8f4d89d6d61/psutil-7.0.0-cp37-abi3-win32.whl", hash = "sha256:ba3fcef7523064a6c9da440fc4d6bd07da93ac726b5733c29027d7dc95b39d99", size = 241053 }, + { url = "https://files.pythonhosted.org/packages/50/1b/6921afe68c74868b4c9fa424dad3be35b095e16687989ebbb50ce4fceb7c/psutil-7.0.0-cp37-abi3-win_amd64.whl", hash = "sha256:4cf3d4eb1aa9b348dec30105c55cd9b7d4629285735a102beb4441e38db90553", size = 244885 }, +] + +[[package]] 
+name = "pycparser" +version = "2.22" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/1d/b2/31537cf4b1ca988837256c910a668b553fceb8f069bedc4b1c826024b52c/pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6", size = 172736 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/13/a3/a812df4e2dd5696d1f351d58b8fe16a405b234ad2886a0dab9183fb78109/pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc", size = 117552 }, +] + +[[package]] +name = "pydantic" +version = "2.10.6" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "annotated-types" }, + { name = "pydantic-core" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b7/ae/d5220c5c52b158b1de7ca89fc5edb72f304a70a4c540c84c8844bf4008de/pydantic-2.10.6.tar.gz", hash = "sha256:ca5daa827cce33de7a42be142548b0096bf05a7e7b365aebfa5f8eeec7128236", size = 761681 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f4/3c/8cc1cc84deffa6e25d2d0c688ebb80635dfdbf1dbea3e30c541c8cf4d860/pydantic-2.10.6-py3-none-any.whl", hash = "sha256:427d664bf0b8a2b34ff5dd0f5a18df00591adcee7198fbd71981054cef37b584", size = 431696 }, +] + +[package.optional-dependencies] +email = [ + { name = "email-validator" }, +] + +[[package]] +name = "pydantic-core" +version = "2.27.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/fc/01/f3e5ac5e7c25833db5eb555f7b7ab24cd6f8c322d3a3ad2d67a952dc0abc/pydantic_core-2.27.2.tar.gz", hash = "sha256:eb026e5a4c1fee05726072337ff51d1efb6f59090b7da90d30ea58625b1ffb39", size = 413443 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3a/bc/fed5f74b5d802cf9a03e83f60f18864e90e3aed7223adaca5ffb7a8d8d64/pydantic_core-2.27.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2d367ca20b2f14095a8f4fa1210f5a7b78b8a20009ecced6b12818f455b1e9fa", size = 1895938 }, + { url = "https://files.pythonhosted.org/packages/71/2a/185aff24ce844e39abb8dd680f4e959f0006944f4a8a0ea372d9f9ae2e53/pydantic_core-2.27.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:491a2b73db93fab69731eaee494f320faa4e093dbed776be1a829c2eb222c34c", size = 1815684 }, + { url = "https://files.pythonhosted.org/packages/c3/43/fafabd3d94d159d4f1ed62e383e264f146a17dd4d48453319fd782e7979e/pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7969e133a6f183be60e9f6f56bfae753585680f3b7307a8e555a948d443cc05a", size = 1829169 }, + { url = "https://files.pythonhosted.org/packages/a2/d1/f2dfe1a2a637ce6800b799aa086d079998959f6f1215eb4497966efd2274/pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3de9961f2a346257caf0aa508a4da705467f53778e9ef6fe744c038119737ef5", size = 1867227 }, + { url = "https://files.pythonhosted.org/packages/7d/39/e06fcbcc1c785daa3160ccf6c1c38fea31f5754b756e34b65f74e99780b5/pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e2bb4d3e5873c37bb3dd58714d4cd0b0e6238cebc4177ac8fe878f8b3aa8e74c", size = 2037695 }, + { url = "https://files.pythonhosted.org/packages/7a/67/61291ee98e07f0650eb756d44998214231f50751ba7e13f4f325d95249ab/pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:280d219beebb0752699480fe8f1dc61ab6615c2046d76b7ab7ee38858de0a4e7", size = 2741662 }, + { url = "https://files.pythonhosted.org/packages/32/90/3b15e31b88ca39e9e626630b4c4a1f5a0dfd09076366f4219429e6786076/pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47956ae78b6422cbd46f772f1746799cbb862de838fd8d1fbd34a82e05b0983a", size = 1993370 }, + { url = "https://files.pythonhosted.org/packages/ff/83/c06d333ee3a67e2e13e07794995c1535565132940715931c1c43bfc85b11/pydantic_core-2.27.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:14d4a5c49d2f009d62a2a7140d3064f686d17a5d1a268bc641954ba181880236", size = 1996813 }, + { url = "https://files.pythonhosted.org/packages/7c/f7/89be1c8deb6e22618a74f0ca0d933fdcb8baa254753b26b25ad3acff8f74/pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:337b443af21d488716f8d0b6164de833e788aa6bd7e3a39c005febc1284f4962", size = 2005287 }, + { url = "https://files.pythonhosted.org/packages/b7/7d/8eb3e23206c00ef7feee17b83a4ffa0a623eb1a9d382e56e4aa46fd15ff2/pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:03d0f86ea3184a12f41a2d23f7ccb79cdb5a18e06993f8a45baa8dfec746f0e9", size = 2128414 }, + { url = "https://files.pythonhosted.org/packages/4e/99/fe80f3ff8dd71a3ea15763878d464476e6cb0a2db95ff1c5c554133b6b83/pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7041c36f5680c6e0f08d922aed302e98b3745d97fe1589db0a3eebf6624523af", size = 2155301 }, + { url = "https://files.pythonhosted.org/packages/2b/a3/e50460b9a5789ca1451b70d4f52546fa9e2b420ba3bfa6100105c0559238/pydantic_core-2.27.2-cp310-cp310-win32.whl", hash = "sha256:50a68f3e3819077be2c98110c1f9dcb3817e93f267ba80a2c05bb4f8799e2ff4", size = 1816685 }, + { url = "https://files.pythonhosted.org/packages/57/4c/a8838731cb0f2c2a39d3535376466de6049034d7b239c0202a64aaa05533/pydantic_core-2.27.2-cp310-cp310-win_amd64.whl", hash = "sha256:e0fd26b16394ead34a424eecf8a31a1f5137094cabe84a1bcb10fa6ba39d3d31", size = 1982876 }, + { url = "https://files.pythonhosted.org/packages/c2/89/f3450af9d09d44eea1f2c369f49e8f181d742f28220f88cc4dfaae91ea6e/pydantic_core-2.27.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:8e10c99ef58cfdf2a66fc15d66b16c4a04f62bca39db589ae8cba08bc55331bc", size = 1893421 }, + { url = "https://files.pythonhosted.org/packages/9e/e3/71fe85af2021f3f386da42d291412e5baf6ce7716bd7101ea49c810eda90/pydantic_core-2.27.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:26f32e0adf166a84d0cb63be85c562ca8a6fa8de28e5f0d92250c6b7e9e2aff7", size = 1814998 }, + { url = "https://files.pythonhosted.org/packages/a6/3c/724039e0d848fd69dbf5806894e26479577316c6f0f112bacaf67aa889ac/pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c19d1ea0673cd13cc2f872f6c9ab42acc4e4f492a7ca9d3795ce2b112dd7e15", size = 1826167 }, + { url = "https://files.pythonhosted.org/packages/2b/5b/1b29e8c1fb5f3199a9a57c1452004ff39f494bbe9bdbe9a81e18172e40d3/pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5e68c4446fe0810e959cdff46ab0a41ce2f2c86d227d96dc3847af0ba7def306", size = 1865071 }, + { url = "https://files.pythonhosted.org/packages/89/6c/3985203863d76bb7d7266e36970d7e3b6385148c18a68cc8915fd8c84d57/pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d9640b0059ff4f14d1f37321b94061c6db164fbe49b334b31643e0528d100d99", size = 2036244 }, + { url = 
"https://files.pythonhosted.org/packages/0e/41/f15316858a246b5d723f7d7f599f79e37493b2e84bfc789e58d88c209f8a/pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:40d02e7d45c9f8af700f3452f329ead92da4c5f4317ca9b896de7ce7199ea459", size = 2737470 }, + { url = "https://files.pythonhosted.org/packages/a8/7c/b860618c25678bbd6d1d99dbdfdf0510ccb50790099b963ff78a124b754f/pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1c1fd185014191700554795c99b347d64f2bb637966c4cfc16998a0ca700d048", size = 1992291 }, + { url = "https://files.pythonhosted.org/packages/bf/73/42c3742a391eccbeab39f15213ecda3104ae8682ba3c0c28069fbcb8c10d/pydantic_core-2.27.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d81d2068e1c1228a565af076598f9e7451712700b673de8f502f0334f281387d", size = 1994613 }, + { url = "https://files.pythonhosted.org/packages/94/7a/941e89096d1175d56f59340f3a8ebaf20762fef222c298ea96d36a6328c5/pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:1a4207639fb02ec2dbb76227d7c751a20b1a6b4bc52850568e52260cae64ca3b", size = 2002355 }, + { url = "https://files.pythonhosted.org/packages/6e/95/2359937a73d49e336a5a19848713555605d4d8d6940c3ec6c6c0ca4dcf25/pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:3de3ce3c9ddc8bbd88f6e0e304dea0e66d843ec9de1b0042b0911c1663ffd474", size = 2126661 }, + { url = "https://files.pythonhosted.org/packages/2b/4c/ca02b7bdb6012a1adef21a50625b14f43ed4d11f1fc237f9d7490aa5078c/pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:30c5f68ded0c36466acede341551106821043e9afaad516adfb6e8fa80a4e6a6", size = 2153261 }, + { url = "https://files.pythonhosted.org/packages/72/9d/a241db83f973049a1092a079272ffe2e3e82e98561ef6214ab53fe53b1c7/pydantic_core-2.27.2-cp311-cp311-win32.whl", hash = "sha256:c70c26d2c99f78b125a3459f8afe1aed4d9687c24fd677c6a4436bc042e50d6c", size = 1812361 }, + { url = "https://files.pythonhosted.org/packages/e8/ef/013f07248041b74abd48a385e2110aa3a9bbfef0fbd97d4e6d07d2f5b89a/pydantic_core-2.27.2-cp311-cp311-win_amd64.whl", hash = "sha256:08e125dbdc505fa69ca7d9c499639ab6407cfa909214d500897d02afb816e7cc", size = 1982484 }, + { url = "https://files.pythonhosted.org/packages/10/1c/16b3a3e3398fd29dca77cea0a1d998d6bde3902fa2706985191e2313cc76/pydantic_core-2.27.2-cp311-cp311-win_arm64.whl", hash = "sha256:26f0d68d4b235a2bae0c3fc585c585b4ecc51382db0e3ba402a22cbc440915e4", size = 1867102 }, + { url = "https://files.pythonhosted.org/packages/d6/74/51c8a5482ca447871c93e142d9d4a92ead74de6c8dc5e66733e22c9bba89/pydantic_core-2.27.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:9e0c8cfefa0ef83b4da9588448b6d8d2a2bf1a53c3f1ae5fca39eb3061e2f0b0", size = 1893127 }, + { url = "https://files.pythonhosted.org/packages/d3/f3/c97e80721735868313c58b89d2de85fa80fe8dfeeed84dc51598b92a135e/pydantic_core-2.27.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:83097677b8e3bd7eaa6775720ec8e0405f1575015a463285a92bfdfe254529ef", size = 1811340 }, + { url = "https://files.pythonhosted.org/packages/9e/91/840ec1375e686dbae1bd80a9e46c26a1e0083e1186abc610efa3d9a36180/pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:172fce187655fece0c90d90a678424b013f8fbb0ca8b036ac266749c09438cb7", size = 1822900 }, + { url = 
"https://files.pythonhosted.org/packages/f6/31/4240bc96025035500c18adc149aa6ffdf1a0062a4b525c932065ceb4d868/pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:519f29f5213271eeeeb3093f662ba2fd512b91c5f188f3bb7b27bc5973816934", size = 1869177 }, + { url = "https://files.pythonhosted.org/packages/fa/20/02fbaadb7808be578317015c462655c317a77a7c8f0ef274bc016a784c54/pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:05e3a55d124407fffba0dd6b0c0cd056d10e983ceb4e5dbd10dda135c31071d6", size = 2038046 }, + { url = "https://files.pythonhosted.org/packages/06/86/7f306b904e6c9eccf0668248b3f272090e49c275bc488a7b88b0823444a4/pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9c3ed807c7b91de05e63930188f19e921d1fe90de6b4f5cd43ee7fcc3525cb8c", size = 2685386 }, + { url = "https://files.pythonhosted.org/packages/8d/f0/49129b27c43396581a635d8710dae54a791b17dfc50c70164866bbf865e3/pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6fb4aadc0b9a0c063206846d603b92030eb6f03069151a625667f982887153e2", size = 1997060 }, + { url = "https://files.pythonhosted.org/packages/0d/0f/943b4af7cd416c477fd40b187036c4f89b416a33d3cc0ab7b82708a667aa/pydantic_core-2.27.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:28ccb213807e037460326424ceb8b5245acb88f32f3d2777427476e1b32c48c4", size = 2004870 }, + { url = "https://files.pythonhosted.org/packages/35/40/aea70b5b1a63911c53a4c8117c0a828d6790483f858041f47bab0b779f44/pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:de3cd1899e2c279b140adde9357c4495ed9d47131b4a4eaff9052f23398076b3", size = 1999822 }, + { url = "https://files.pythonhosted.org/packages/f2/b3/807b94fd337d58effc5498fd1a7a4d9d59af4133e83e32ae39a96fddec9d/pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:220f892729375e2d736b97d0e51466252ad84c51857d4d15f5e9692f9ef12be4", size = 2130364 }, + { url = "https://files.pythonhosted.org/packages/fc/df/791c827cd4ee6efd59248dca9369fb35e80a9484462c33c6649a8d02b565/pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a0fcd29cd6b4e74fe8ddd2c90330fd8edf2e30cb52acda47f06dd615ae72da57", size = 2158303 }, + { url = "https://files.pythonhosted.org/packages/9b/67/4e197c300976af185b7cef4c02203e175fb127e414125916bf1128b639a9/pydantic_core-2.27.2-cp312-cp312-win32.whl", hash = "sha256:1e2cb691ed9834cd6a8be61228471d0a503731abfb42f82458ff27be7b2186fc", size = 1834064 }, + { url = "https://files.pythonhosted.org/packages/1f/ea/cd7209a889163b8dcca139fe32b9687dd05249161a3edda62860430457a5/pydantic_core-2.27.2-cp312-cp312-win_amd64.whl", hash = "sha256:cc3f1a99a4f4f9dd1de4fe0312c114e740b5ddead65bb4102884b384c15d8bc9", size = 1989046 }, + { url = "https://files.pythonhosted.org/packages/bc/49/c54baab2f4658c26ac633d798dab66b4c3a9bbf47cff5284e9c182f4137a/pydantic_core-2.27.2-cp312-cp312-win_arm64.whl", hash = "sha256:3911ac9284cd8a1792d3cb26a2da18f3ca26c6908cc434a18f730dc0db7bfa3b", size = 1885092 }, + { url = "https://files.pythonhosted.org/packages/41/b1/9bc383f48f8002f99104e3acff6cba1231b29ef76cfa45d1506a5cad1f84/pydantic_core-2.27.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:7d14bd329640e63852364c306f4d23eb744e0f8193148d4044dd3dacdaacbd8b", size = 1892709 }, + { url = 
"https://files.pythonhosted.org/packages/10/6c/e62b8657b834f3eb2961b49ec8e301eb99946245e70bf42c8817350cbefc/pydantic_core-2.27.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:82f91663004eb8ed30ff478d77c4d1179b3563df6cdb15c0817cd1cdaf34d154", size = 1811273 }, + { url = "https://files.pythonhosted.org/packages/ba/15/52cfe49c8c986e081b863b102d6b859d9defc63446b642ccbbb3742bf371/pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:71b24c7d61131bb83df10cc7e687433609963a944ccf45190cfc21e0887b08c9", size = 1823027 }, + { url = "https://files.pythonhosted.org/packages/b1/1c/b6f402cfc18ec0024120602bdbcebc7bdd5b856528c013bd4d13865ca473/pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fa8e459d4954f608fa26116118bb67f56b93b209c39b008277ace29937453dc9", size = 1868888 }, + { url = "https://files.pythonhosted.org/packages/bd/7b/8cb75b66ac37bc2975a3b7de99f3c6f355fcc4d89820b61dffa8f1e81677/pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce8918cbebc8da707ba805b7fd0b382816858728ae7fe19a942080c24e5b7cd1", size = 2037738 }, + { url = "https://files.pythonhosted.org/packages/c8/f1/786d8fe78970a06f61df22cba58e365ce304bf9b9f46cc71c8c424e0c334/pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eda3f5c2a021bbc5d976107bb302e0131351c2ba54343f8a496dc8783d3d3a6a", size = 2685138 }, + { url = "https://files.pythonhosted.org/packages/a6/74/d12b2cd841d8724dc8ffb13fc5cef86566a53ed358103150209ecd5d1999/pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bd8086fa684c4775c27f03f062cbb9eaa6e17f064307e86b21b9e0abc9c0f02e", size = 1997025 }, + { url = "https://files.pythonhosted.org/packages/a0/6e/940bcd631bc4d9a06c9539b51f070b66e8f370ed0933f392db6ff350d873/pydantic_core-2.27.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8d9b3388db186ba0c099a6d20f0604a44eabdeef1777ddd94786cdae158729e4", size = 2004633 }, + { url = "https://files.pythonhosted.org/packages/50/cc/a46b34f1708d82498c227d5d80ce615b2dd502ddcfd8376fc14a36655af1/pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:7a66efda2387de898c8f38c0cf7f14fca0b51a8ef0b24bfea5849f1b3c95af27", size = 1999404 }, + { url = "https://files.pythonhosted.org/packages/ca/2d/c365cfa930ed23bc58c41463bae347d1005537dc8db79e998af8ba28d35e/pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:18a101c168e4e092ab40dbc2503bdc0f62010e95d292b27827871dc85450d7ee", size = 2130130 }, + { url = "https://files.pythonhosted.org/packages/f4/d7/eb64d015c350b7cdb371145b54d96c919d4db516817f31cd1c650cae3b21/pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ba5dd002f88b78a4215ed2f8ddbdf85e8513382820ba15ad5ad8955ce0ca19a1", size = 2157946 }, + { url = "https://files.pythonhosted.org/packages/a4/99/bddde3ddde76c03b65dfd5a66ab436c4e58ffc42927d4ff1198ffbf96f5f/pydantic_core-2.27.2-cp313-cp313-win32.whl", hash = "sha256:1ebaf1d0481914d004a573394f4be3a7616334be70261007e47c2a6fe7e50130", size = 1834387 }, + { url = "https://files.pythonhosted.org/packages/71/47/82b5e846e01b26ac6f1893d3c5f9f3a2eb6ba79be26eef0b759b4fe72946/pydantic_core-2.27.2-cp313-cp313-win_amd64.whl", hash = "sha256:953101387ecf2f5652883208769a79e48db18c6df442568a0b5ccd8c2723abee", size = 1990453 }, + { url = 
"https://files.pythonhosted.org/packages/51/b2/b2b50d5ecf21acf870190ae5d093602d95f66c9c31f9d5de6062eb329ad1/pydantic_core-2.27.2-cp313-cp313-win_arm64.whl", hash = "sha256:ac4dbfd1691affb8f48c2c13241a2e3b60ff23247cbcf981759c768b6633cf8b", size = 1885186 }, + { url = "https://files.pythonhosted.org/packages/46/72/af70981a341500419e67d5cb45abe552a7c74b66326ac8877588488da1ac/pydantic_core-2.27.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:2bf14caea37e91198329b828eae1618c068dfb8ef17bb33287a7ad4b61ac314e", size = 1891159 }, + { url = "https://files.pythonhosted.org/packages/ad/3d/c5913cccdef93e0a6a95c2d057d2c2cba347815c845cda79ddd3c0f5e17d/pydantic_core-2.27.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:b0cb791f5b45307caae8810c2023a184c74605ec3bcbb67d13846c28ff731ff8", size = 1768331 }, + { url = "https://files.pythonhosted.org/packages/f6/f0/a3ae8fbee269e4934f14e2e0e00928f9346c5943174f2811193113e58252/pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:688d3fd9fcb71f41c4c015c023d12a79d1c4c0732ec9eb35d96e3388a120dcf3", size = 1822467 }, + { url = "https://files.pythonhosted.org/packages/d7/7a/7bbf241a04e9f9ea24cd5874354a83526d639b02674648af3f350554276c/pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d591580c34f4d731592f0e9fe40f9cc1b430d297eecc70b962e93c5c668f15f", size = 1979797 }, + { url = "https://files.pythonhosted.org/packages/4f/5f/4784c6107731f89e0005a92ecb8a2efeafdb55eb992b8e9d0a2be5199335/pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:82f986faf4e644ffc189a7f1aafc86e46ef70372bb153e7001e8afccc6e54133", size = 1987839 }, + { url = "https://files.pythonhosted.org/packages/6d/a7/61246562b651dff00de86a5f01b6e4befb518df314c54dec187a78d81c84/pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:bec317a27290e2537f922639cafd54990551725fc844249e64c523301d0822fc", size = 1998861 }, + { url = "https://files.pythonhosted.org/packages/86/aa/837821ecf0c022bbb74ca132e117c358321e72e7f9702d1b6a03758545e2/pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:0296abcb83a797db256b773f45773da397da75a08f5fcaef41f2044adec05f50", size = 2116582 }, + { url = "https://files.pythonhosted.org/packages/81/b0/5e74656e95623cbaa0a6278d16cf15e10a51f6002e3ec126541e95c29ea3/pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:0d75070718e369e452075a6017fbf187f788e17ed67a3abd47fa934d001863d9", size = 2151985 }, + { url = "https://files.pythonhosted.org/packages/63/37/3e32eeb2a451fddaa3898e2163746b0cffbbdbb4740d38372db0490d67f3/pydantic_core-2.27.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:7e17b560be3c98a8e3aa66ce828bdebb9e9ac6ad5466fba92eb74c4c95cb1151", size = 2004715 }, +] + +[[package]] +name = "pygments" +version = "2.19.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/7c/2d/c3338d48ea6cc0feb8446d8e6937e1408088a72a39937982cc6111d17f84/pygments-2.19.1.tar.gz", hash = "sha256:61c16d2a8576dc0649d9f39e089b5f02bcd27fba10d8fb4dcc28173f7a45151f", size = 4968581 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8a/0b/9fcc47d19c48b59121088dd6da2488a49d5f72dacf8262e2790a1d2c7d15/pygments-2.19.1-py3-none-any.whl", hash = "sha256:9ea1544ad55cecf4b8242fab6dd35a93bbce657034b0611ee383099054ab6d8c", size = 1225293 }, +] + +[[package]] +name = "pylint" +version = "3.3.5" +source = 
{ registry = "https://pypi.org/simple" } +dependencies = [ + { name = "astroid" }, + { name = "colorama", marker = "sys_platform == 'win32'" }, + { name = "dill" }, + { name = "isort" }, + { name = "mccabe" }, + { name = "platformdirs" }, + { name = "tomli", marker = "python_full_version < '3.11'" }, + { name = "tomlkit" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/d5/e7/3616e8caa61f918c4864db075800a6bd7422621618045c188fd45c3f7a2b/pylint-3.3.5.tar.gz", hash = "sha256:38d0f784644ed493d91f76b5333a0e370a1c1bc97c22068a77523b4bf1e82c31", size = 1519168 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/99/62/42199570fc199cc0f6825d746ddb0183b30739b334dc6d85edeaa8a2073c/pylint-3.3.5-py3-none-any.whl", hash = "sha256:7cb170929a371238530b2eeea09f5f28236d106b70308c3d46a9c0cf11634633", size = 522215 }, +] + +[[package]] +name = "pynacl" +version = "1.5.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cffi" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/a7/22/27582568be639dfe22ddb3902225f91f2f17ceff88ce80e4db396c8986da/PyNaCl-1.5.0.tar.gz", hash = "sha256:8ac7448f09ab85811607bdd21ec2464495ac8b7c66d146bf545b0f08fb9220ba", size = 3392854 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ce/75/0b8ede18506041c0bf23ac4d8e2971b4161cd6ce630b177d0a08eb0d8857/PyNaCl-1.5.0-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:401002a4aaa07c9414132aaed7f6836ff98f59277a234704ff66878c2ee4a0d1", size = 349920 }, + { url = "https://files.pythonhosted.org/packages/59/bb/fddf10acd09637327a97ef89d2a9d621328850a72f1fdc8c08bdf72e385f/PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:52cb72a79269189d4e0dc537556f4740f7f0a9ec41c1322598799b0bdad4ef92", size = 601722 }, + { url = "https://files.pythonhosted.org/packages/5d/70/87a065c37cca41a75f2ce113a5a2c2aa7533be648b184ade58971b5f7ccc/PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a36d4a9dda1f19ce6e03c9a784a2921a4b726b02e1c736600ca9c22029474394", size = 680087 }, + { url = "https://files.pythonhosted.org/packages/ee/87/f1bb6a595f14a327e8285b9eb54d41fef76c585a0edef0a45f6fc95de125/PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:0c84947a22519e013607c9be43706dd42513f9e6ae5d39d3613ca1e142fba44d", size = 856678 }, + { url = "https://files.pythonhosted.org/packages/66/28/ca86676b69bf9f90e710571b67450508484388bfce09acf8a46f0b8c785f/PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:06b8f6fa7f5de8d5d2f7573fe8c863c051225a27b61e6860fd047b1775807858", size = 1133660 }, + { url = "https://files.pythonhosted.org/packages/3d/85/c262db650e86812585e2bc59e497a8f59948a005325a11bbbc9ecd3fe26b/PyNaCl-1.5.0-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:a422368fc821589c228f4c49438a368831cb5bbc0eab5ebe1d7fac9dded6567b", size = 663824 }, + { url = "https://files.pythonhosted.org/packages/fd/1a/cc308a884bd299b651f1633acb978e8596c71c33ca85e9dc9fa33a5399b9/PyNaCl-1.5.0-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:61f642bf2378713e2c2e1de73444a3778e5f0a38be6fee0fe532fe30060282ff", size = 1117912 }, + { url = "https://files.pythonhosted.org/packages/25/2d/b7df6ddb0c2a33afdb358f8af6ea3b8c4d1196ca45497dd37a56f0c122be/PyNaCl-1.5.0-cp36-abi3-win32.whl", hash = "sha256:e46dae94e34b085175f8abb3b0aaa7da40767865ac82c928eeb9e57e1ea8a543", size = 204624 }, + { url = 
"https://files.pythonhosted.org/packages/5e/22/d3db169895faaf3e2eda892f005f433a62db2decbcfbc2f61e6517adfa87/PyNaCl-1.5.0-cp36-abi3-win_amd64.whl", hash = "sha256:20f42270d27e1b6a29f54032090b972d97f0a1b0948cc52392041ef7831fee93", size = 212141 }, +] + +[[package]] +name = "python-dateutil" +version = "2.9.0.post0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "six" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/66/c0/0c8b6ad9f17a802ee498c46e004a0eb49bc148f2fd230864601a86dcf6db/python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 342432 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427", size = 229892 }, +] + +[[package]] +name = "python-dotenv" +version = "1.0.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/bc/57/e84d88dfe0aec03b7a2d4327012c1627ab5f03652216c63d49846d7a6c58/python-dotenv-1.0.1.tar.gz", hash = "sha256:e324ee90a023d808f1959c46bcbc04446a10ced277783dc6ee09987c37ec10ca", size = 39115 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/6a/3e/b68c118422ec867fa7ab88444e1274aa40681c606d59ac27de5a5588f082/python_dotenv-1.0.1-py3-none-any.whl", hash = "sha256:f7b63ef50f1b690dddf550d03497b66d609393b40b564ed0d674909a68ebf16a", size = 19863 }, +] + +[[package]] +name = "pyyaml" +version = "6.0.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/54/ed/79a089b6be93607fa5cdaedf301d7dfb23af5f25c398d5ead2525b063e17/pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e", size = 130631 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9b/95/a3fac87cb7158e231b5a6012e438c647e1a87f09f8e0d123acec8ab8bf71/PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086", size = 184199 }, + { url = "https://files.pythonhosted.org/packages/c7/7a/68bd47624dab8fd4afbfd3c48e3b79efe09098ae941de5b58abcbadff5cb/PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf", size = 171758 }, + { url = "https://files.pythonhosted.org/packages/49/ee/14c54df452143b9ee9f0f29074d7ca5516a36edb0b4cc40c3f280131656f/PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237", size = 718463 }, + { url = "https://files.pythonhosted.org/packages/4d/61/de363a97476e766574650d742205be468921a7b532aa2499fcd886b62530/PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b", size = 719280 }, + { url = "https://files.pythonhosted.org/packages/6b/4e/1523cb902fd98355e2e9ea5e5eb237cbc5f3ad5f3075fa65087aa0ecb669/PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed", size = 751239 }, + { url = "https://files.pythonhosted.org/packages/b7/33/5504b3a9a4464893c32f118a9cc045190a91637b119a9c881da1cf6b7a72/PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = 
"sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180", size = 695802 }, + { url = "https://files.pythonhosted.org/packages/5c/20/8347dcabd41ef3a3cdc4f7b7a2aff3d06598c8779faa189cdbf878b626a4/PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68", size = 720527 }, + { url = "https://files.pythonhosted.org/packages/be/aa/5afe99233fb360d0ff37377145a949ae258aaab831bde4792b32650a4378/PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99", size = 144052 }, + { url = "https://files.pythonhosted.org/packages/b5/84/0fa4b06f6d6c958d207620fc60005e241ecedceee58931bb20138e1e5776/PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e", size = 161774 }, + { url = "https://files.pythonhosted.org/packages/f8/aa/7af4e81f7acba21a4c6be026da38fd2b872ca46226673c89a758ebdc4fd2/PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774", size = 184612 }, + { url = "https://files.pythonhosted.org/packages/8b/62/b9faa998fd185f65c1371643678e4d58254add437edb764a08c5a98fb986/PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee", size = 172040 }, + { url = "https://files.pythonhosted.org/packages/ad/0c/c804f5f922a9a6563bab712d8dcc70251e8af811fce4524d57c2c0fd49a4/PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c", size = 736829 }, + { url = "https://files.pythonhosted.org/packages/51/16/6af8d6a6b210c8e54f1406a6b9481febf9c64a3109c541567e35a49aa2e7/PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317", size = 764167 }, + { url = "https://files.pythonhosted.org/packages/75/e4/2c27590dfc9992f73aabbeb9241ae20220bd9452df27483b6e56d3975cc5/PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85", size = 762952 }, + { url = "https://files.pythonhosted.org/packages/9b/97/ecc1abf4a823f5ac61941a9c00fe501b02ac3ab0e373c3857f7d4b83e2b6/PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4", size = 735301 }, + { url = "https://files.pythonhosted.org/packages/45/73/0f49dacd6e82c9430e46f4a027baa4ca205e8b0a9dce1397f44edc23559d/PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e", size = 756638 }, + { url = "https://files.pythonhosted.org/packages/22/5f/956f0f9fc65223a58fbc14459bf34b4cc48dec52e00535c79b8db361aabd/PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5", size = 143850 }, + { url = "https://files.pythonhosted.org/packages/ed/23/8da0bbe2ab9dcdd11f4f4557ccaf95c10b9811b13ecced089d43ce59c3c8/PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44", size = 161980 }, + { url = "https://files.pythonhosted.org/packages/86/0c/c581167fc46d6d6d7ddcfb8c843a4de25bdd27e4466938109ca68492292c/PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = 
"sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab", size = 183873 }, + { url = "https://files.pythonhosted.org/packages/a8/0c/38374f5bb272c051e2a69281d71cba6fdb983413e6758b84482905e29a5d/PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725", size = 173302 }, + { url = "https://files.pythonhosted.org/packages/c3/93/9916574aa8c00aa06bbac729972eb1071d002b8e158bd0e83a3b9a20a1f7/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5", size = 739154 }, + { url = "https://files.pythonhosted.org/packages/95/0f/b8938f1cbd09739c6da569d172531567dbcc9789e0029aa070856f123984/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425", size = 766223 }, + { url = "https://files.pythonhosted.org/packages/b9/2b/614b4752f2e127db5cc206abc23a8c19678e92b23c3db30fc86ab731d3bd/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476", size = 767542 }, + { url = "https://files.pythonhosted.org/packages/d4/00/dd137d5bcc7efea1836d6264f049359861cf548469d18da90cd8216cf05f/PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48", size = 731164 }, + { url = "https://files.pythonhosted.org/packages/c9/1f/4f998c900485e5c0ef43838363ba4a9723ac0ad73a9dc42068b12aaba4e4/PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b", size = 756611 }, + { url = "https://files.pythonhosted.org/packages/df/d1/f5a275fdb252768b7a11ec63585bc38d0e87c9e05668a139fea92b80634c/PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4", size = 140591 }, + { url = "https://files.pythonhosted.org/packages/0c/e8/4f648c598b17c3d06e8753d7d13d57542b30d56e6c2dedf9c331ae56312e/PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8", size = 156338 }, + { url = "https://files.pythonhosted.org/packages/ef/e3/3af305b830494fa85d95f6d95ef7fa73f2ee1cc8ef5b495c7c3269fb835f/PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba", size = 181309 }, + { url = "https://files.pythonhosted.org/packages/45/9f/3b1c20a0b7a3200524eb0076cc027a970d320bd3a6592873c85c92a08731/PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1", size = 171679 }, + { url = "https://files.pythonhosted.org/packages/7c/9a/337322f27005c33bcb656c655fa78325b730324c78620e8328ae28b64d0c/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133", size = 733428 }, + { url = "https://files.pythonhosted.org/packages/a3/69/864fbe19e6c18ea3cc196cbe5d392175b4cf3d5d0ac1403ec3f2d237ebb5/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484", size = 763361 }, + { url = 
"https://files.pythonhosted.org/packages/04/24/b7721e4845c2f162d26f50521b825fb061bc0a5afcf9a386840f23ea19fa/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5", size = 759523 }, + { url = "https://files.pythonhosted.org/packages/2b/b2/e3234f59ba06559c6ff63c4e10baea10e5e7df868092bf9ab40e5b9c56b6/PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc", size = 726660 }, + { url = "https://files.pythonhosted.org/packages/fe/0f/25911a9f080464c59fab9027482f822b86bf0608957a5fcc6eaac85aa515/PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652", size = 751597 }, + { url = "https://files.pythonhosted.org/packages/14/0d/e2c3b43bbce3cf6bd97c840b46088a3031085179e596d4929729d8d68270/PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183", size = 140527 }, + { url = "https://files.pythonhosted.org/packages/fa/de/02b54f42487e3d3c6efb3f89428677074ca7bf43aae402517bc7cca949f3/PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563", size = 156446 }, +] + +[[package]] +name = "resolvelib" +version = "1.0.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ce/10/f699366ce577423cbc3df3280063099054c23df70856465080798c6ebad6/resolvelib-1.0.1.tar.gz", hash = "sha256:04ce76cbd63fded2078ce224785da6ecd42b9564b1390793f64ddecbe997b309", size = 21065 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d2/fc/e9ccf0521607bcd244aa0b3fbd574f71b65e9ce6a112c83af988bbbe2e23/resolvelib-1.0.1-py2.py3-none-any.whl", hash = "sha256:d2da45d1a8dfee81bdd591647783e340ef3bcb104b54c383f70d422ef5cc7dbf", size = 17194 }, +] + +[[package]] +name = "rich" +version = "13.9.4" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "markdown-it-py" }, + { name = "pygments" }, + { name = "typing-extensions", marker = "python_full_version < '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ab/3a/0316b28d0761c6734d6bc14e770d85506c986c85ffb239e688eeaab2c2bc/rich-13.9.4.tar.gz", hash = "sha256:439594978a49a09530cff7ebc4b5c7103ef57baf48d5ea3184f21d9a2befa098", size = 223149 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/19/71/39c7c0d87f8d4e6c020a393182060eaefeeae6c01dab6a84ec346f2567df/rich-13.9.4-py3-none-any.whl", hash = "sha256:6049d5e6ec054bf2779ab3358186963bac2ea89175919d699e378b99738c2a90", size = 242424 }, +] + +[[package]] +name = "s3transfer" +version = "0.11.4" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "botocore" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/0f/ec/aa1a215e5c126fe5decbee2e107468f51d9ce190b9763cb649f76bb45938/s3transfer-0.11.4.tar.gz", hash = "sha256:559f161658e1cf0a911f45940552c696735f5c74e64362e515f333ebed87d679", size = 148419 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/86/62/8d3fc3ec6640161a5649b2cddbbf2b9fa39c92541225b33f117c37c5a2eb/s3transfer-0.11.4-py3-none-any.whl", hash = "sha256:ac265fa68318763a03bf2dc4f39d5cbd6a9e178d81cc9483ad27da33637e320d", size = 84412 }, +] + +[[package]] +name = "shellingham" +version = "1.5.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/58/15/8b3609fd3830ef7b27b655beb4b4e9c62313a4e8da8c676e142cc210d58e/shellingham-1.5.4.tar.gz", hash = "sha256:8dbca0739d487e5bd35ab3ca4b36e11c4078f3a234bfce294b0a0291363404de", size = 10310 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e0/f9/0595336914c5619e5f28a1fb793285925a8cd4b432c9da0a987836c7f822/shellingham-1.5.4-py2.py3-none-any.whl", hash = "sha256:7ecfff8f2fd72616f7481040475a65b2bf8af90a56c89140852d1120324e8686", size = 9755 }, +] + +[[package]] +name = "six" +version = "1.17.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/94/e7/b2c673351809dca68a0e064b6af791aa332cf192da575fd474ed7d6f16a2/six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81", size = 34031 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b7/ce/149a00dd41f10bc29e5921b496af8b574d8413afcd5e30dfa0ed46c2cc5e/six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274", size = 11050 }, +] + +[[package]] +name = "sortedcontainers" +version = "2.4.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e8/c4/ba2f8066cceb6f23394729afe52f3bf7adec04bf9ed2c820b39e19299111/sortedcontainers-2.4.0.tar.gz", hash = "sha256:25caa5a06cc30b6b83d11423433f65d1f9d76c4c6a0c90e3379eaa43b9bfdb88", size = 30594 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/32/46/9cb0e58b2deb7f82b84065f37f3bffeb12413f947f9388e4cac22c4621ce/sortedcontainers-2.4.0-py2.py3-none-any.whl", hash = "sha256:a163dcaede0f1c021485e957a39245190e74249897e2ae4b2aa38595db237ee0", size = 29575 }, +] + +[[package]] +name = "sqlalchemy" +version = "2.0.38" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "greenlet", marker = "(python_full_version < '3.14' and platform_machine == 'AMD64') or (python_full_version < '3.14' and platform_machine == 'WIN32') or (python_full_version < '3.14' and platform_machine == 'aarch64') or (python_full_version < '3.14' and platform_machine == 'amd64') or (python_full_version < '3.14' and platform_machine == 'ppc64le') or (python_full_version < '3.14' and platform_machine == 'win32') or (python_full_version < '3.14' and platform_machine == 'x86_64')" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/e4/08/9a90962ea72acd532bda71249a626344d855c4032603924b1b547694b837/sqlalchemy-2.0.38.tar.gz", hash = "sha256:e5a4d82bdb4bf1ac1285a68eab02d253ab73355d9f0fe725a97e1e0fa689decb", size = 9634782 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/78/10/16ed1503e18c0ec4e17a1819ff44604368607eed3db1e1d89d33269fe5b9/SQLAlchemy-2.0.38-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5e1d9e429028ce04f187a9f522818386c8b076723cdbe9345708384f49ebcec6", size = 2105151 }, + { url = "https://files.pythonhosted.org/packages/79/e5/2e9a0807cba2e625204d04bc39a18a47478e4bacae353ae8a7f2e784c341/SQLAlchemy-2.0.38-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b87a90f14c68c925817423b0424381f0e16d80fc9a1a1046ef202ab25b19a444", size = 2096335 }, + { url = "https://files.pythonhosted.org/packages/c1/97/8fa5cc6ed994eab611dcf0bc431161308f297c6f896f02a3ebb8d8aa06ea/SQLAlchemy-2.0.38-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:402c2316d95ed90d3d3c25ad0390afa52f4d2c56b348f212aa9c8d072a40eee5", size = 3078705 }, + { url = 
"https://files.pythonhosted.org/packages/a9/99/505feb8a9bc7027addaa2b312b8b306319cacbbd8a5231c4123ca1fa082a/SQLAlchemy-2.0.38-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6493bc0eacdbb2c0f0d260d8988e943fee06089cd239bd7f3d0c45d1657a70e2", size = 3086958 }, + { url = "https://files.pythonhosted.org/packages/39/26/fb7cef8198bb2627ac527b2cf6c576588db09856d634d4f1017280f8ab64/SQLAlchemy-2.0.38-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:0561832b04c6071bac3aad45b0d3bb6d2c4f46a8409f0a7a9c9fa6673b41bc03", size = 3042798 }, + { url = "https://files.pythonhosted.org/packages/cc/7c/b6f9e0ee4e8e993fdce42477f9290b2b8373e672fb1dc0272179f0aeafb4/SQLAlchemy-2.0.38-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:49aa2cdd1e88adb1617c672a09bf4ebf2f05c9448c6dbeba096a3aeeb9d4d443", size = 3068318 }, + { url = "https://files.pythonhosted.org/packages/e6/22/903497e8202960c4249ffc340ec8de63f7fbdd4856bdfe854f617e124e90/SQLAlchemy-2.0.38-cp310-cp310-win32.whl", hash = "sha256:64aa8934200e222f72fcfd82ee71c0130a9c07d5725af6fe6e919017d095b297", size = 2077434 }, + { url = "https://files.pythonhosted.org/packages/20/a8/08f6ceccff5e0abb4a22e2e91c44b0e39911fda06b5d0c905dfc642de57a/SQLAlchemy-2.0.38-cp310-cp310-win_amd64.whl", hash = "sha256:c57b8e0841f3fce7b703530ed70c7c36269c6d180ea2e02e36b34cb7288c50c7", size = 2101608 }, + { url = "https://files.pythonhosted.org/packages/00/6c/9d3a638f297fce288ba12a4e5dbd08ef1841d119abee9300c100eba00217/SQLAlchemy-2.0.38-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:bf89e0e4a30714b357f5d46b6f20e0099d38b30d45fa68ea48589faf5f12f62d", size = 2106330 }, + { url = "https://files.pythonhosted.org/packages/0e/57/d5fdee56f418491267701965795805662b1744de40915d4764451390536d/SQLAlchemy-2.0.38-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8455aa60da49cb112df62b4721bd8ad3654a3a02b9452c783e651637a1f21fa2", size = 2096730 }, + { url = "https://files.pythonhosted.org/packages/42/84/205f423f8b28329c47237b7e130a7f93c234a49fab20b4534bd1ff26a06a/SQLAlchemy-2.0.38-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f53c0d6a859b2db58332e0e6a921582a02c1677cc93d4cbb36fdf49709b327b2", size = 3215023 }, + { url = "https://files.pythonhosted.org/packages/77/41/94a558d47bffae5a361b0cfb3721324ea4154829dd5432f80bd4cfeecbc9/SQLAlchemy-2.0.38-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b3c4817dff8cef5697f5afe5fec6bc1783994d55a68391be24cb7d80d2dbc3a6", size = 3214991 }, + { url = "https://files.pythonhosted.org/packages/74/a0/cc3c030e7440bd17ce67c1875f50edb41d0ef17b9c76fbc290ef27bbe37f/SQLAlchemy-2.0.38-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c9cea5b756173bb86e2235f2f871b406a9b9d722417ae31e5391ccaef5348f2c", size = 3151854 }, + { url = "https://files.pythonhosted.org/packages/24/ab/8ba2588c2eb1d092944551354d775ef4fc0250badede324d786a4395d10e/SQLAlchemy-2.0.38-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:40e9cdbd18c1f84631312b64993f7d755d85a3930252f6276a77432a2b25a2f3", size = 3172158 }, + { url = "https://files.pythonhosted.org/packages/e0/73/2a3d6217e8e6abb553ed410ce5adc0bdec7effd684716f0fbaee5831d677/SQLAlchemy-2.0.38-cp311-cp311-win32.whl", hash = "sha256:cb39ed598aaf102251483f3e4675c5dd6b289c8142210ef76ba24aae0a8f8aba", size = 2076965 }, + { url = "https://files.pythonhosted.org/packages/a4/17/364a99c8c5698492c7fa40fc463bf388f05b0b03b74028828b71a79dc89d/SQLAlchemy-2.0.38-cp311-cp311-win_amd64.whl", hash = 
"sha256:f9d57f1b3061b3e21476b0ad5f0397b112b94ace21d1f439f2db472e568178ae", size = 2102169 }, + { url = "https://files.pythonhosted.org/packages/5a/f8/6d0424af1442c989b655a7b5f608bc2ae5e4f94cdf6df9f6054f629dc587/SQLAlchemy-2.0.38-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:12d5b06a1f3aeccf295a5843c86835033797fea292c60e72b07bcb5d820e6dd3", size = 2104927 }, + { url = "https://files.pythonhosted.org/packages/25/80/fc06e65fca0a19533e2bfab633a5633ed8b6ee0b9c8d580acf84609ce4da/SQLAlchemy-2.0.38-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:e036549ad14f2b414c725349cce0772ea34a7ab008e9cd67f9084e4f371d1f32", size = 2095317 }, + { url = "https://files.pythonhosted.org/packages/98/2d/5d66605f76b8e344813237dc160a01f03b987201e974b46056a7fb94a874/SQLAlchemy-2.0.38-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ee3bee874cb1fadee2ff2b79fc9fc808aa638670f28b2145074538d4a6a5028e", size = 3244735 }, + { url = "https://files.pythonhosted.org/packages/73/8d/b0539e8dce90861efc38fea3eefb15a5d0cfeacf818614762e77a9f192f9/SQLAlchemy-2.0.38-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e185ea07a99ce8b8edfc788c586c538c4b1351007e614ceb708fd01b095ef33e", size = 3255581 }, + { url = "https://files.pythonhosted.org/packages/ac/a5/94e1e44bf5bdffd1782807fcc072542b110b950f0be53f49e68b5f5eca1b/SQLAlchemy-2.0.38-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:b79ee64d01d05a5476d5cceb3c27b5535e6bb84ee0f872ba60d9a8cd4d0e6579", size = 3190877 }, + { url = "https://files.pythonhosted.org/packages/91/13/f08b09996dce945aec029c64f61c13b4788541ac588d9288e31e0d3d8850/SQLAlchemy-2.0.38-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:afd776cf1ebfc7f9aa42a09cf19feadb40a26366802d86c1fba080d8e5e74bdd", size = 3217485 }, + { url = "https://files.pythonhosted.org/packages/13/8f/8cfe2ba5ba6d8090f4de0e658330c53be6b7bf430a8df1b141c2b180dcdf/SQLAlchemy-2.0.38-cp312-cp312-win32.whl", hash = "sha256:a5645cd45f56895cfe3ca3459aed9ff2d3f9aaa29ff7edf557fa7a23515a3725", size = 2075254 }, + { url = "https://files.pythonhosted.org/packages/c2/5c/e3c77fae41862be1da966ca98eec7fbc07cdd0b00f8b3e1ef2a13eaa6cca/SQLAlchemy-2.0.38-cp312-cp312-win_amd64.whl", hash = "sha256:1052723e6cd95312f6a6eff9a279fd41bbae67633415373fdac3c430eca3425d", size = 2100865 }, + { url = "https://files.pythonhosted.org/packages/21/77/caa875a1f5a8a8980b564cc0e6fee1bc992d62d29101252561d0a5e9719c/SQLAlchemy-2.0.38-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ecef029b69843b82048c5b347d8e6049356aa24ed644006c9a9d7098c3bd3bfd", size = 2100201 }, + { url = "https://files.pythonhosted.org/packages/f4/ec/94bb036ec78bf9a20f8010c807105da9152dd84f72e8c51681ad2f30b3fd/SQLAlchemy-2.0.38-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:9c8bcad7fc12f0cc5896d8e10fdf703c45bd487294a986903fe032c72201596b", size = 2090678 }, + { url = "https://files.pythonhosted.org/packages/7b/61/63ff1893f146e34d3934c0860209fdd3925c25ee064330e6c2152bacc335/SQLAlchemy-2.0.38-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2a0ef3f98175d77180ffdc623d38e9f1736e8d86b6ba70bff182a7e68bed7727", size = 3177107 }, + { url = "https://files.pythonhosted.org/packages/a9/4f/b933bea41a602b5f274065cc824fae25780ed38664d735575192490a021b/SQLAlchemy-2.0.38-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b0ac78898c50e2574e9f938d2e5caa8fe187d7a5b69b65faa1ea4648925b096", size = 3190435 }, + { url = 
"https://files.pythonhosted.org/packages/f5/23/9e654b4059e385988de08c5d3b38a369ea042f4c4d7c8902376fd737096a/SQLAlchemy-2.0.38-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:9eb4fa13c8c7a2404b6a8e3772c17a55b1ba18bc711e25e4d6c0c9f5f541b02a", size = 3123648 }, + { url = "https://files.pythonhosted.org/packages/83/59/94c6d804e76ebc6412a08d2b086a8cb3e5a056cd61508e18ddaf3ec70100/SQLAlchemy-2.0.38-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5dba1cdb8f319084f5b00d41207b2079822aa8d6a4667c0f369fce85e34b0c86", size = 3151789 }, + { url = "https://files.pythonhosted.org/packages/b2/27/17f143013aabbe1256dce19061eafdce0b0142465ce32168cdb9a18c04b1/SQLAlchemy-2.0.38-cp313-cp313-win32.whl", hash = "sha256:eae27ad7580529a427cfdd52c87abb2dfb15ce2b7a3e0fc29fbb63e2ed6f8120", size = 2073023 }, + { url = "https://files.pythonhosted.org/packages/e2/3e/259404b03c3ed2e7eee4c179e001a07d9b61070334be91124cf4ad32eec7/SQLAlchemy-2.0.38-cp313-cp313-win_amd64.whl", hash = "sha256:b335a7c958bc945e10c522c069cd6e5804f4ff20f9a744dd38e748eb602cbbda", size = 2096908 }, + { url = "https://files.pythonhosted.org/packages/aa/e4/592120713a314621c692211eba034d09becaf6bc8848fabc1dc2a54d8c16/SQLAlchemy-2.0.38-py3-none-any.whl", hash = "sha256:63178c675d4c80def39f1febd625a6333f44c0ba269edd8a468b156394b27753", size = 1896347 }, +] + +[package.optional-dependencies] +asyncio = [ + { name = "greenlet" }, +] + +[[package]] +name = "sshtunnel" +version = "0.4.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "paramiko" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/8d/ad/4c587adf79865be268ee0b6bd52cfaa7a75d827a23ced072dc5ab554b4af/sshtunnel-0.4.0.tar.gz", hash = "sha256:e7cb0ea774db81bf91844db22de72a40aae8f7b0f9bb9ba0f666d474ef6bf9fc", size = 62716 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/58/13/8476c4328dcadfe26f8bd7f3a1a03bf9ddb890a7e7b692f54a179bc525bf/sshtunnel-0.4.0-py2.py3-none-any.whl", hash = "sha256:98e54c26f726ab8bd42b47a3a21fca5c3e60f58956f0f70de2fb8ab0046d0606", size = 24729 }, +] + +[[package]] +name = "tblib" +version = "3.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/1a/df/4f2cd7eaa6d41a7994d46527349569d46e34d9cdd07590b5c5b0dcf53de3/tblib-3.0.0.tar.gz", hash = "sha256:93622790a0a29e04f0346458face1e144dc4d32f493714c6c3dff82a4adb77e6", size = 30616 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9b/87/ce70db7cae60e67851eb94e1a2127d4abb573d3866d2efd302ceb0d4d2a5/tblib-3.0.0-py3-none-any.whl", hash = "sha256:80a6c77e59b55e83911e1e607c649836a69c103963c5f28a46cbeef44acf8129", size = 12478 }, +] + +[[package]] +name = "tomli" +version = "2.2.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/18/87/302344fed471e44a87289cf4967697d07e532f2421fdaf868a303cbae4ff/tomli-2.2.1.tar.gz", hash = "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff", size = 17175 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/43/ca/75707e6efa2b37c77dadb324ae7d9571cb424e61ea73fad7c56c2d14527f/tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249", size = 131077 }, + { url = "https://files.pythonhosted.org/packages/c7/16/51ae563a8615d472fdbffc43a3f3d46588c264ac4f024f63f01283becfbb/tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6", size = 
123429 }, + { url = "https://files.pythonhosted.org/packages/f1/dd/4f6cd1e7b160041db83c694abc78e100473c15d54620083dbd5aae7b990e/tomli-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a", size = 226067 }, + { url = "https://files.pythonhosted.org/packages/a9/6b/c54ede5dc70d648cc6361eaf429304b02f2871a345bbdd51e993d6cdf550/tomli-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee", size = 236030 }, + { url = "https://files.pythonhosted.org/packages/1f/47/999514fa49cfaf7a92c805a86c3c43f4215621855d151b61c602abb38091/tomli-2.2.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e", size = 240898 }, + { url = "https://files.pythonhosted.org/packages/73/41/0a01279a7ae09ee1573b423318e7934674ce06eb33f50936655071d81a24/tomli-2.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4", size = 229894 }, + { url = "https://files.pythonhosted.org/packages/55/18/5d8bc5b0a0362311ce4d18830a5d28943667599a60d20118074ea1b01bb7/tomli-2.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106", size = 245319 }, + { url = "https://files.pythonhosted.org/packages/92/a3/7ade0576d17f3cdf5ff44d61390d4b3febb8a9fc2b480c75c47ea048c646/tomli-2.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8", size = 238273 }, + { url = "https://files.pythonhosted.org/packages/72/6f/fa64ef058ac1446a1e51110c375339b3ec6be245af9d14c87c4a6412dd32/tomli-2.2.1-cp311-cp311-win32.whl", hash = "sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff", size = 98310 }, + { url = "https://files.pythonhosted.org/packages/6a/1c/4a2dcde4a51b81be3530565e92eda625d94dafb46dbeb15069df4caffc34/tomli-2.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b", size = 108309 }, + { url = "https://files.pythonhosted.org/packages/52/e1/f8af4c2fcde17500422858155aeb0d7e93477a0d59a98e56cbfe75070fd0/tomli-2.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea", size = 132762 }, + { url = "https://files.pythonhosted.org/packages/03/b8/152c68bb84fc00396b83e7bbddd5ec0bd3dd409db4195e2a9b3e398ad2e3/tomli-2.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8", size = 123453 }, + { url = "https://files.pythonhosted.org/packages/c8/d6/fc9267af9166f79ac528ff7e8c55c8181ded34eb4b0e93daa767b8841573/tomli-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192", size = 233486 }, + { url = "https://files.pythonhosted.org/packages/5c/51/51c3f2884d7bab89af25f678447ea7d297b53b5a3b5730a7cb2ef6069f07/tomli-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222", size = 242349 }, + { url = 
"https://files.pythonhosted.org/packages/ab/df/bfa89627d13a5cc22402e441e8a931ef2108403db390ff3345c05253935e/tomli-2.2.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77", size = 252159 }, + { url = "https://files.pythonhosted.org/packages/9e/6e/fa2b916dced65763a5168c6ccb91066f7639bdc88b48adda990db10c8c0b/tomli-2.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6", size = 237243 }, + { url = "https://files.pythonhosted.org/packages/b4/04/885d3b1f650e1153cbb93a6a9782c58a972b94ea4483ae4ac5cedd5e4a09/tomli-2.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd", size = 259645 }, + { url = "https://files.pythonhosted.org/packages/9c/de/6b432d66e986e501586da298e28ebeefd3edc2c780f3ad73d22566034239/tomli-2.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e", size = 244584 }, + { url = "https://files.pythonhosted.org/packages/1c/9a/47c0449b98e6e7d1be6cbac02f93dd79003234ddc4aaab6ba07a9a7482e2/tomli-2.2.1-cp312-cp312-win32.whl", hash = "sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98", size = 98875 }, + { url = "https://files.pythonhosted.org/packages/ef/60/9b9638f081c6f1261e2688bd487625cd1e660d0a85bd469e91d8db969734/tomli-2.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4", size = 109418 }, + { url = "https://files.pythonhosted.org/packages/04/90/2ee5f2e0362cb8a0b6499dc44f4d7d48f8fff06d28ba46e6f1eaa61a1388/tomli-2.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7", size = 132708 }, + { url = "https://files.pythonhosted.org/packages/c0/ec/46b4108816de6b385141f082ba99e315501ccd0a2ea23db4a100dd3990ea/tomli-2.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c", size = 123582 }, + { url = "https://files.pythonhosted.org/packages/a0/bd/b470466d0137b37b68d24556c38a0cc819e8febe392d5b199dcd7f578365/tomli-2.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13", size = 232543 }, + { url = "https://files.pythonhosted.org/packages/d9/e5/82e80ff3b751373f7cead2815bcbe2d51c895b3c990686741a8e56ec42ab/tomli-2.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281", size = 241691 }, + { url = "https://files.pythonhosted.org/packages/05/7e/2a110bc2713557d6a1bfb06af23dd01e7dde52b6ee7dadc589868f9abfac/tomli-2.2.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272", size = 251170 }, + { url = "https://files.pythonhosted.org/packages/64/7b/22d713946efe00e0adbcdfd6d1aa119ae03fd0b60ebed51ebb3fa9f5a2e5/tomli-2.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140", size = 236530 }, + { url = "https://files.pythonhosted.org/packages/38/31/3a76f67da4b0cf37b742ca76beaf819dca0ebef26d78fc794a576e08accf/tomli-2.2.1-cp313-cp313-musllinux_1_2_i686.whl", hash = 
"sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2", size = 258666 }, + { url = "https://files.pythonhosted.org/packages/07/10/5af1293da642aded87e8a988753945d0cf7e00a9452d3911dd3bb354c9e2/tomli-2.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744", size = 243954 }, + { url = "https://files.pythonhosted.org/packages/5b/b9/1ed31d167be802da0fc95020d04cd27b7d7065cc6fbefdd2f9186f60d7bd/tomli-2.2.1-cp313-cp313-win32.whl", hash = "sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec", size = 98724 }, + { url = "https://files.pythonhosted.org/packages/c7/32/b0963458706accd9afcfeb867c0f9175a741bf7b19cd424230714d722198/tomli-2.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69", size = 109383 }, + { url = "https://files.pythonhosted.org/packages/6e/c2/61d3e0f47e2b74ef40a68b9e6ad5984f6241a942f7cd3bbfbdbd03861ea9/tomli-2.2.1-py3-none-any.whl", hash = "sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc", size = 14257 }, +] + +[[package]] +name = "tomlkit" +version = "0.13.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b1/09/a439bec5888f00a54b8b9f05fa94d7f901d6735ef4e55dcec9bc37b5d8fa/tomlkit-0.13.2.tar.gz", hash = "sha256:fff5fe59a87295b278abd31bec92c15d9bc4a06885ab12bcea52c71119392e79", size = 192885 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f9/b6/a447b5e4ec71e13871be01ba81f5dfc9d0af7e473da256ff46bc0e24026f/tomlkit-0.13.2-py3-none-any.whl", hash = "sha256:7a974427f6e119197f670fbbbeae7bef749a6c14e793db934baefc1b5f03efde", size = 37955 }, +] + +[[package]] +name = "toolz" +version = "1.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/8a/0b/d80dfa675bf592f636d1ea0b835eab4ec8df6e9415d8cfd766df54456123/toolz-1.0.0.tar.gz", hash = "sha256:2c86e3d9a04798ac556793bced838816296a2f085017664e4995cb40a1047a02", size = 66790 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/03/98/eb27cc78ad3af8e302c9d8ff4977f5026676e130d28dd7578132a457170c/toolz-1.0.0-py3-none-any.whl", hash = "sha256:292c8f1c4e7516bf9086f8850935c799a874039c8bcf959d47b600e4c44a6236", size = 56383 }, +] + +[[package]] +name = "tornado" +version = "6.4.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/59/45/a0daf161f7d6f36c3ea5fc0c2de619746cc3dd4c76402e9db545bd920f63/tornado-6.4.2.tar.gz", hash = "sha256:92bad5b4746e9879fd7bf1eb21dce4e3fc5128d71601f80005afa39237ad620b", size = 501135 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/26/7e/71f604d8cea1b58f82ba3590290b66da1e72d840aeb37e0d5f7291bd30db/tornado-6.4.2-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:e828cce1123e9e44ae2a50a9de3055497ab1d0aeb440c5ac23064d9e44880da1", size = 436299 }, + { url = "https://files.pythonhosted.org/packages/96/44/87543a3b99016d0bf54fdaab30d24bf0af2e848f1d13d34a3a5380aabe16/tornado-6.4.2-cp38-abi3-macosx_10_9_x86_64.whl", hash = "sha256:072ce12ada169c5b00b7d92a99ba089447ccc993ea2143c9ede887e0937aa803", size = 434253 }, + { url = "https://files.pythonhosted.org/packages/cb/fb/fdf679b4ce51bcb7210801ef4f11fdac96e9885daa402861751353beea6e/tornado-6.4.2-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a017d239bd1bb0919f72af256a970624241f070496635784d9bf0db640d3fec", size = 437602 }, + { url = 
"https://files.pythonhosted.org/packages/4f/3b/e31aeffffc22b475a64dbeb273026a21b5b566f74dee48742817626c47dc/tornado-6.4.2-cp38-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c36e62ce8f63409301537222faffcef7dfc5284f27eec227389f2ad11b09d946", size = 436972 }, + { url = "https://files.pythonhosted.org/packages/22/55/b78a464de78051a30599ceb6983b01d8f732e6f69bf37b4ed07f642ac0fc/tornado-6.4.2-cp38-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bca9eb02196e789c9cb5c3c7c0f04fb447dc2adffd95265b2c7223a8a615ccbf", size = 437173 }, + { url = "https://files.pythonhosted.org/packages/79/5e/be4fb0d1684eb822c9a62fb18a3e44a06188f78aa466b2ad991d2ee31104/tornado-6.4.2-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:304463bd0772442ff4d0f5149c6f1c2135a1fae045adf070821c6cdc76980634", size = 437892 }, + { url = "https://files.pythonhosted.org/packages/f5/33/4f91fdd94ea36e1d796147003b490fe60a0215ac5737b6f9c65e160d4fe0/tornado-6.4.2-cp38-abi3-musllinux_1_2_i686.whl", hash = "sha256:c82c46813ba483a385ab2a99caeaedf92585a1f90defb5693351fa7e4ea0bf73", size = 437334 }, + { url = "https://files.pythonhosted.org/packages/2b/ae/c1b22d4524b0e10da2f29a176fb2890386f7bd1f63aacf186444873a88a0/tornado-6.4.2-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:932d195ca9015956fa502c6b56af9eb06106140d844a335590c1ec7f5277d10c", size = 437261 }, + { url = "https://files.pythonhosted.org/packages/b5/25/36dbd49ab6d179bcfc4c6c093a51795a4f3bed380543a8242ac3517a1751/tornado-6.4.2-cp38-abi3-win32.whl", hash = "sha256:2876cef82e6c5978fde1e0d5b1f919d756968d5b4282418f3146b79b58556482", size = 438463 }, + { url = "https://files.pythonhosted.org/packages/61/cc/58b1adeb1bb46228442081e746fcdbc4540905c87e8add7c277540934edb/tornado-6.4.2-cp38-abi3-win_amd64.whl", hash = "sha256:908b71bf3ff37d81073356a5fadcc660eb10c1476ee6e2725588626ce7e5ca38", size = 438907 }, +] + +[[package]] +name = "typer" +version = "0.15.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "click" }, + { name = "rich" }, + { name = "shellingham" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/8b/6f/3991f0f1c7fcb2df31aef28e0594d8d54b05393a0e4e34c65e475c2a5d41/typer-0.15.2.tar.gz", hash = "sha256:ab2fab47533a813c49fe1f16b1a370fd5819099c00b119e0633df65f22144ba5", size = 100711 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7f/fc/5b29fea8cee020515ca82cc68e3b8e1e34bb19a3535ad854cac9257b414c/typer-0.15.2-py3-none-any.whl", hash = "sha256:46a499c6107d645a9c13f7ee46c5d5096cae6f5fc57dd11eccbbb9ae3e44ddfc", size = 45061 }, +] + +[[package]] +name = "types-awscrt" +version = "0.23.10" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a3/53/7c69677327794fe91cc89a1362400b78f00b1a20364384da1e004c259d42/types_awscrt-0.23.10.tar.gz", hash = "sha256:965659260599b421564204b895467684104a2c0311bbacfd3c2423b8b0d3f3e9", size = 15455 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/89/ad/3d7c9a8b972048f3987355e3e48da56eb9f3ed8e151113c3c973b43ad91e/types_awscrt-0.23.10-py3-none-any.whl", hash = "sha256:7391bf502f6093221e68da8fb6a2af7ec67a98d376c58d5b76cc3938f449d121", size = 19426 }, +] + +[[package]] +name = "types-boto3" +version = "1.37.9" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "botocore-stubs" }, + { name = "types-s3transfer" }, + { name = "typing-extensions", marker 
= "python_full_version < '3.12'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/68/aa/e922a5b13acad44d04391478224b1d46caf37565b8400d84c5365f28e277/types_boto3-1.37.9.tar.gz", hash = "sha256:2f56a0e6a1f99c6a430ff7a3487aa3bf861ec8277a71bafc23f84244bfe1ea66", size = 99316 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/06/80/acc3a4bc2407d9a9f9d41acfca674aeac6e55e9ee9a4db91c434cc0ca518/types_boto3-1.37.9-py3-none-any.whl", hash = "sha256:3e861832d9410362aea943456fd94c917c4927f9f56b360059d225c05985fe49", size = 68305 }, +] + +[[package]] +name = "types-python-dateutil" +version = "2.9.0.20241206" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a9/60/47d92293d9bc521cd2301e423a358abfac0ad409b3a1606d8fbae1321961/types_python_dateutil-2.9.0.20241206.tar.gz", hash = "sha256:18f493414c26ffba692a72369fea7a154c502646301ebfe3d56a04b3767284cb", size = 13802 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0f/b3/ca41df24db5eb99b00d97f89d7674a90cb6b3134c52fb8121b6d8d30f15c/types_python_dateutil-2.9.0.20241206-py3-none-any.whl", hash = "sha256:e248a4bc70a486d3e3ec84d0dc30eec3a5f979d6e7ee4123ae043eedbb987f53", size = 14384 }, +] + +[[package]] +name = "types-s3transfer" +version = "0.11.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/93/a9/440d8ba72a81bcf2cc5a56ef63f23b58ce93e7b9b62409697553bdcdd181/types_s3transfer-0.11.4.tar.gz", hash = "sha256:05fde593c84270f19fd053f0b1e08f5a057d7c5f036b9884e68fb8cd3041ac30", size = 14074 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d0/69/0b5ae42c3c33d31a32f7dcb9f35a3e327365360a6e4a2a7b491904bd38aa/types_s3transfer-0.11.4-py3-none-any.whl", hash = "sha256:2a76d92c07d4a3cb469e5343b2e7560e0b8078b2e03696a65407b8c44c861b61", size = 19516 }, +] + +[[package]] +name = "typing-extensions" +version = "4.12.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/df/db/f35a00659bc03fec321ba8bce9420de607a1d37f8342eee1863174c69557/typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8", size = 85321 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/26/9f/ad63fc0248c5379346306f8668cda6e2e2e9c95e01216d2b8ffd9ff037d0/typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d", size = 37438 }, +] + +[[package]] +name = "urllib3" +version = "2.3.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/aa/63/e53da845320b757bf29ef6a9062f5c669fe997973f966045cb019c3f4b66/urllib3-2.3.0.tar.gz", hash = "sha256:f8c5449b3cf0861679ce7e0503c7b44b5ec981bec0d1d3795a07f1ba96f0204d", size = 307268 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c8/19/4ec628951a74043532ca2cf5d97b7b14863931476d117c471e8e2b1eb39f/urllib3-2.3.0-py3-none-any.whl", hash = "sha256:1cee9ad369867bfdbbb48b7dd50374c0967a0bb7710050facf0dd6911440e3df", size = 128369 }, +] + +[[package]] +name = "zict" +version = "3.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d1/ac/3c494dd7ec5122cff8252c1a209b282c0867af029f805ae9befd73ae37eb/zict-3.0.0.tar.gz", hash = "sha256:e321e263b6a97aafc0790c3cfb3c04656b7066e6738c37fffcca95d803c9fba5", size = 33238 } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/80/ab/11a76c1e2126084fde2639514f24e6111b789b0bfa4fc6264a8975c7e1f1/zict-3.0.0-py2.py3-none-any.whl", hash = "sha256:5796e36bd0e0cc8cf0fbc1ace6a68912611c1dbd74750a3f3026b9b9d6a327ae", size = 43332 }, +] + +[[package]] +name = "zipp" +version = "3.21.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/3f/50/bad581df71744867e9468ebd0bcd6505de3b275e06f202c2cb016e3ff56f/zipp-3.21.0.tar.gz", hash = "sha256:2c9958f6430a2040341a52eb608ed6dd93ef4392e02ffe219417c1b28b5dd1f4", size = 24545 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b7/1a/7e4798e9339adc931158c9d69ecc34f5e6791489d469f5e50ec15e35f458/zipp-3.21.0-py3-none-any.whl", hash = "sha256:ac1bbe05fd2991f160ebce24ffbac5f6d11d83dc90891255885223d42b3cd931", size = 9630 }, +] diff --git a/services/agent/docker/boot.sh b/services/agent/docker/boot.sh index 259e1f54e71..74c058e55ec 100755 --- a/services/agent/docker/boot.sh +++ b/services/agent/docker/boot.sh @@ -24,7 +24,7 @@ if [ "${SC_BUILD_TARGET}" = "development" ]; then command -v python | sed 's/^/ /' cd services/agent - uv pip --quiet --no-cache-dir sync requirements/dev.txt + uv pip --quiet sync requirements/dev.txt cd - echo "$INFO" "PIP :" uv pip list @@ -32,7 +32,7 @@ fi if [ "${SC_BOOT_MODE}" = "debug" ]; then # NOTE: production does NOT pre-installs debugpy - uv pip install --no-cache-dir debugpy + uv pip install debugpy fi # diff --git a/services/agent/requirements/_base.txt b/services/agent/requirements/_base.txt index 30d644e3a42..be74471a876 100644 --- a/services/agent/requirements/_base.txt +++ b/services/agent/requirements/_base.txt @@ -1,4 +1,4 @@ -aio-pika==9.5.0 +aio-pika==9.5.5 # via -r requirements/../../../packages/service-library/requirements/_base.in aiocache==0.12.3 # via -r requirements/../../../packages/service-library/requirements/_base.in @@ -10,9 +10,9 @@ aiodocker==0.24.0 # -r requirements/_base.in aiofiles==24.1.0 # via -r requirements/../../../packages/service-library/requirements/_base.in -aiohappyeyeballs==2.4.3 +aiohappyeyeballs==2.5.0 # via aiohttp -aiohttp==3.11.7 +aiohttp==3.11.13 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -29,11 +29,11 @@ aiohttp==3.11.7 # aiodocker aiormq==6.8.1 # via aio-pika -aiosignal==1.3.1 +aiosignal==1.3.2 # via aiohttp annotated-types==0.7.0 # via pydantic -anyio==4.6.2.post1 +anyio==4.8.0 # via # fast-depends # faststream @@ -46,12 +46,12 @@ arrow==1.3.0 # -r requirements/../../../packages/service-library/requirements/_base.in asgiref==3.8.1 # via opentelemetry-instrumentation-asgi -attrs==24.2.0 +attrs==25.1.0 # via # aiohttp # jsonschema # referencing -certifi==2024.8.30 +certifi==2025.1.31 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -68,13 +68,13 @@ certifi==2024.8.30 # httpcore # httpx # requests -charset-normalizer==3.4.0 +charset-normalizer==3.4.1 # via requests -click==8.1.7 +click==8.1.8 # via # typer # uvicorn -deprecated==1.2.15 +deprecated==1.2.18 # via # opentelemetry-api # opentelemetry-exporter-otlp-proto-grpc @@ -88,21 +88,24 @@ exceptiongroup==1.2.2 # via aio-pika 
fast-depends==2.4.12 # via faststream -fastapi==0.115.5 +fastapi==0.115.11 # via # -r requirements/../../../packages/service-library/requirements/_fastapi.in # -r requirements/_base.in -faststream==0.5.31 + # fastapi-lifespan-manager +fastapi-lifespan-manager==0.1.4 + # via -r requirements/../../../packages/service-library/requirements/_fastapi.in +faststream==0.5.35 # via -r requirements/../../../packages/service-library/requirements/_base.in frozenlist==1.5.0 # via # aiohttp # aiosignal -googleapis-common-protos==1.66.0 +googleapis-common-protos==1.69.1 # via # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http -grpcio==1.68.0 +grpcio==1.70.0 # via opentelemetry-exporter-otlp-proto-grpc h11==0.14.0 # via @@ -110,7 +113,7 @@ h11==0.14.0 # uvicorn httpcore==1.0.7 # via httpx -httpx==0.27.2 +httpx==0.28.1 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -148,7 +151,7 @@ multidict==6.1.0 # via # aiohttp # yarl -opentelemetry-api==1.28.2 +opentelemetry-api==1.30.0 # via # -r requirements/../../../packages/service-library/requirements/_base.in # opentelemetry-exporter-otlp-proto-grpc @@ -162,17 +165,17 @@ opentelemetry-api==1.28.2 # opentelemetry-instrumentation-requests # opentelemetry-sdk # opentelemetry-semantic-conventions -opentelemetry-exporter-otlp==1.28.2 +opentelemetry-exporter-otlp==1.30.0 # via -r requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-exporter-otlp-proto-common==1.28.2 +opentelemetry-exporter-otlp-proto-common==1.30.0 # via # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http -opentelemetry-exporter-otlp-proto-grpc==1.28.2 +opentelemetry-exporter-otlp-proto-grpc==1.30.0 # via opentelemetry-exporter-otlp -opentelemetry-exporter-otlp-proto-http==1.28.2 +opentelemetry-exporter-otlp-proto-http==1.30.0 # via opentelemetry-exporter-otlp -opentelemetry-instrumentation==0.49b2 +opentelemetry-instrumentation==0.51b0 # via # opentelemetry-instrumentation-asgi # opentelemetry-instrumentation-fastapi @@ -180,29 +183,29 @@ opentelemetry-instrumentation==0.49b2 # opentelemetry-instrumentation-logging # opentelemetry-instrumentation-redis # opentelemetry-instrumentation-requests -opentelemetry-instrumentation-asgi==0.49b2 +opentelemetry-instrumentation-asgi==0.51b0 # via opentelemetry-instrumentation-fastapi -opentelemetry-instrumentation-fastapi==0.49b2 +opentelemetry-instrumentation-fastapi==0.51b0 # via -r requirements/../../../packages/service-library/requirements/_fastapi.in -opentelemetry-instrumentation-httpx==0.49b2 +opentelemetry-instrumentation-httpx==0.51b0 # via -r requirements/../../../packages/service-library/requirements/_fastapi.in -opentelemetry-instrumentation-logging==0.49b2 +opentelemetry-instrumentation-logging==0.51b0 # via -r requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-instrumentation-redis==0.49b2 +opentelemetry-instrumentation-redis==0.51b0 # via -r requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-instrumentation-requests==0.49b2 +opentelemetry-instrumentation-requests==0.51b0 # via -r requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-proto==1.28.2 +opentelemetry-proto==1.30.0 # via # opentelemetry-exporter-otlp-proto-common # 
opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http -opentelemetry-sdk==1.28.2 +opentelemetry-sdk==1.30.0 # via # -r requirements/../../../packages/service-library/requirements/_base.in # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http -opentelemetry-semantic-conventions==0.49b2 +opentelemetry-semantic-conventions==0.51b0 # via # opentelemetry-instrumentation # opentelemetry-instrumentation-asgi @@ -211,13 +214,13 @@ opentelemetry-semantic-conventions==0.49b2 # opentelemetry-instrumentation-redis # opentelemetry-instrumentation-requests # opentelemetry-sdk -opentelemetry-util-http==0.49b2 +opentelemetry-util-http==0.51b0 # via # opentelemetry-instrumentation-asgi # opentelemetry-instrumentation-fastapi # opentelemetry-instrumentation-httpx # opentelemetry-instrumentation-requests -orjson==3.10.12 +orjson==3.10.15 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -245,23 +248,25 @@ packaging==24.2 # opentelemetry-instrumentation pamqp==3.3.0 # via aiormq -prometheus-client==0.21.0 +prometheus-client==0.21.1 # via # -r requirements/../../../packages/service-library/requirements/_fastapi.in # prometheus-fastapi-instrumentator -prometheus-fastapi-instrumentator==7.0.0 +prometheus-fastapi-instrumentator==7.0.2 # via -r requirements/../../../packages/service-library/requirements/_fastapi.in -propcache==0.2.0 +propcache==0.3.0 # via # aiohttp # yarl -protobuf==5.28.3 +protobuf==5.29.3 # via # googleapis-common-protos # opentelemetry-proto -psutil==6.1.0 +psutil==7.0.0 # via -r requirements/../../../packages/service-library/requirements/_base.in -pydantic==2.10.2 +pycryptodome==3.21.0 + # via stream-zip +pydantic==2.10.6 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -291,9 +296,9 @@ pydantic==2.10.2 # fastapi # pydantic-extra-types # pydantic-settings -pydantic-core==2.27.1 +pydantic-core==2.27.2 # via pydantic -pydantic-extra-types==2.10.0 +pydantic-extra-types==2.10.2 # via # -r requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in @@ -303,15 +308,27 @@ pydantic-extra-types==2.10.0 # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in -pydantic-settings==2.6.1 +pydantic-settings==2.7.0 # via + # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt # -r requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/settings-library/requirements/_base.in -pygments==2.18.0 +pygments==2.19.1 # via rich -pyinstrument==5.0.0 +pyinstrument==5.0.1 # via -r requirements/../../../packages/service-library/requirements/_base.in python-dateutil==2.9.0.post0 # via arrow @@ -349,6 +366,18 @@ redis==5.2.1 # -r requirements/../../../packages/service-library/requirements/_base.in referencing==0.35.1 # via + # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt # jsonschema # jsonschema-specifications requests==2.32.3 @@ -358,19 +387,17 @@ rich==13.9.4 # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/settings-library/requirements/_base.in # typer -rpds-py==0.21.0 +rpds-py==0.23.1 # via # jsonschema # referencing shellingham==1.5.4 # via typer -six==1.16.0 +six==1.17.0 # via python-dateutil sniffio==1.3.1 - # via - # anyio - # httpx -starlette==0.41.3 + # via anyio +starlette==0.46.0 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -386,21 +413,24 @@ starlette==0.41.3 # -c requirements/../../../requirements/constraints.txt # fastapi # prometheus-fastapi-instrumentator +stream-zip==0.0.83 + # via -r requirements/../../../packages/service-library/requirements/_base.in tenacity==9.0.0 # via -r requirements/../../../packages/service-library/requirements/_base.in toolz==1.0.0 # via -r requirements/../../../packages/service-library/requirements/_base.in tqdm==4.67.1 # via -r requirements/../../../packages/service-library/requirements/_base.in -typer==0.13.1 +typer==0.15.2 # via # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/settings-library/requirements/_base.in -types-python-dateutil==2.9.0.20241003 +types-python-dateutil==2.9.0.20241206 # via arrow typing-extensions==4.12.2 # via # aiodebug + # anyio # fastapi # faststream # opentelemetry-sdk @@ -408,7 +438,7 @@ typing-extensions==4.12.2 # pydantic-core # pydantic-extra-types # typer -urllib3==2.2.3 +urllib3==2.3.0 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -423,17 +453,17 @@ urllib3==2.2.3 # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt # requests -uvicorn==0.32.1 +uvicorn==0.34.0 # via # -r requirements/../../../packages/service-library/requirements/_fastapi.in # -r requirements/_base.in -wrapt==1.17.0 +wrapt==1.17.2 # via # deprecated # opentelemetry-instrumentation # opentelemetry-instrumentation-httpx # opentelemetry-instrumentation-redis -yarl==1.18.0 +yarl==1.18.3 # via # -r requirements/../../../packages/service-library/requirements/_base.in # aio-pika diff --git a/services/agent/requirements/_test.txt b/services/agent/requirements/_test.txt index 7b5622e26b5..f31d57f0582 100644 --- a/services/agent/requirements/_test.txt +++ b/services/agent/requirements/_test.txt @@ -6,18 +6,18 @@ aiofiles==24.1.0 # via # -c requirements/_base.txt # aioboto3 -aiohappyeyeballs==2.4.3 +aiohappyeyeballs==2.5.0 # via # -c requirements/_base.txt # aiohttp -aiohttp==3.11.7 +aiohttp==3.11.13 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt # aiobotocore aioitertools==0.12.0 # via aiobotocore -aiosignal==1.3.1 +aiosignal==1.3.2 # via # -c requirements/_base.txt # aiohttp @@ 
-27,19 +27,19 @@ annotated-types==0.7.0 # pydantic antlr4-python3-runtime==4.13.2 # via moto -anyio==4.6.2.post1 +anyio==4.8.0 # via # -c requirements/_base.txt # httpx asgi-lifespan==2.1.0 # via -r requirements/_test.in -attrs==24.2.0 +attrs==25.1.0 # via # -c requirements/_base.txt # aiohttp # jsonschema # referencing -aws-sam-translator==1.94.0 +aws-sam-translator==1.95.0 # via cfn-lint aws-xray-sdk==2.14.0 # via moto @@ -58,7 +58,7 @@ botocore==1.35.81 # boto3 # moto # s3transfer -certifi==2024.8.30 +certifi==2025.1.31 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt @@ -67,34 +67,34 @@ certifi==2024.8.30 # requests cffi==1.17.1 # via cryptography -cfn-lint==1.22.7 +cfn-lint==1.28.0 # via moto -charset-normalizer==3.4.0 +charset-normalizer==3.4.1 # via # -c requirements/_base.txt # requests -click==8.1.7 +click==8.1.8 # via # -c requirements/_base.txt # flask -coverage==7.6.10 +coverage==7.6.12 # via # -r requirements/_test.in # pytest-cov -cryptography==44.0.0 +cryptography==44.0.2 # via # -c requirements/../../../requirements/constraints.txt # joserfc # moto docker==7.1.0 # via moto -faker==35.0.0 +faker==36.2.2 # via -r requirements/_test.in flask==3.1.0 # via # flask-cors # moto -flask-cors==5.0.0 +flask-cors==5.0.1 # via moto frozenlist==1.5.0 # via @@ -111,7 +111,7 @@ httpcore==1.0.7 # via # -c requirements/_base.txt # httpx -httpx==0.27.2 +httpx==0.28.1 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt @@ -127,7 +127,7 @@ iniconfig==2.0.0 # via pytest itsdangerous==2.2.0 # via flask -jinja2==3.1.5 +jinja2==3.1.6 # via # -c requirements/../../../requirements/constraints.txt # flask @@ -136,9 +136,7 @@ jmespath==1.0.1 # via # boto3 # botocore -joserfc==1.0.2 - # via moto -jsondiff==2.2.1 +joserfc==1.0.4 # via moto jsonpatch==1.33 # via cfn-lint @@ -165,10 +163,8 @@ markupsafe==3.0.2 # via # jinja2 # werkzeug -moto==5.0.20 - # via - # -c requirements/../../../requirements/constraints.txt - # -r requirements/_test.in +moto==5.1.1 + # via -r requirements/_test.in mpmath==1.3.0 # via sympy multidict==6.1.0 @@ -192,27 +188,27 @@ pluggy==1.5.0 # via pytest ply==3.11 # via jsonpath-ng -propcache==0.2.0 +propcache==0.3.0 # via # -c requirements/_base.txt # aiohttp # yarl -py-partiql-parser==0.5.6 +py-partiql-parser==0.6.1 # via moto pycparser==2.22 # via cffi -pydantic==2.10.2 +pydantic==2.10.6 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt # aws-sam-translator -pydantic-core==2.27.1 +pydantic-core==2.27.2 # via # -c requirements/_base.txt # pydantic pyparsing==3.2.1 # via moto -pytest==8.3.4 +pytest==8.3.5 # via # -r requirements/_test.in # pytest-asyncio @@ -232,7 +228,6 @@ python-dateutil==2.9.0.post0 # via # -c requirements/_base.txt # botocore - # faker # moto python-dotenv==1.0.1 # via @@ -243,7 +238,6 @@ pyyaml==6.0.2 # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt # cfn-lint - # jsondiff # jsonschema-path # moto # responses @@ -267,16 +261,16 @@ responses==0.25.6 # via moto rfc3339-validator==0.1.4 # via openapi-schema-validator -rpds-py==0.21.0 +rpds-py==0.23.1 # via # -c requirements/_base.txt # jsonschema # referencing s3transfer==0.10.4 # via boto3 -setuptools==75.8.0 +setuptools==75.8.2 # via moto -six==1.16.0 +six==1.17.0 # via # -c requirements/_base.txt # python-dateutil @@ -286,18 +280,19 @@ sniffio==1.3.1 # -c requirements/_base.txt # anyio # asgi-lifespan - # httpx sympy==1.13.3 # via cfn-lint 
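The test pins above (moto 5.x next to boto3/botocore) support in-process AWS mocking without real credentials. A minimal sketch of the moto 5 style, assuming an S3-backed test; bucket and key names are illustrative:

    import boto3
    from moto import mock_aws  # moto >= 5 consolidates the per-service mocks into mock_aws


    @mock_aws
    def test_s3_roundtrip() -> None:
        # Every boto3 call inside the decorated test hits moto's in-memory backend.
        s3 = boto3.client("s3", region_name="us-east-1")
        s3.create_bucket(Bucket="test-bucket")
        s3.put_object(Bucket="test-bucket", Key="data.txt", Body=b"payload")
        assert s3.get_object(Bucket="test-bucket", Key="data.txt")["Body"].read() == b"payload"
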
typing-extensions==4.12.2 # via # -c requirements/_base.txt + # anyio # aws-sam-translator # cfn-lint - # faker # pydantic # pydantic-core -urllib3==2.2.3 +tzdata==2025.1 + # via faker +urllib3==2.3.0 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt @@ -308,15 +303,16 @@ urllib3==2.2.3 werkzeug==3.1.3 # via # flask + # flask-cors # moto -wrapt==1.17.0 +wrapt==1.17.2 # via # -c requirements/_base.txt # aiobotocore # aws-xray-sdk xmltodict==0.14.2 # via moto -yarl==1.18.0 +yarl==1.18.3 # via # -c requirements/_base.txt # aiohttp diff --git a/services/agent/requirements/_tools.txt b/services/agent/requirements/_tools.txt index f5005ce7a13..70694d84d7b 100644 --- a/services/agent/requirements/_tools.txt +++ b/services/agent/requirements/_tools.txt @@ -1,6 +1,6 @@ astroid==3.3.8 # via pylint -black==24.10.0 +black==25.1.0 # via -r requirements/../../../requirements/devenv.txt build==1.2.2.post1 # via pip-tools @@ -8,7 +8,7 @@ bump2version==1.0.1 # via -r requirements/../../../requirements/devenv.txt cfgv==3.4.0 # via pre-commit -click==8.1.7 +click==8.1.8 # via # -c requirements/_base.txt # -c requirements/_test.txt @@ -20,15 +20,15 @@ distlib==0.3.9 # via virtualenv filelock==3.17.0 # via virtualenv -identify==2.6.6 +identify==2.6.8 # via pre-commit -isort==5.13.2 +isort==6.0.1 # via # -r requirements/../../../requirements/devenv.txt # pylint mccabe==0.7.0 # via pylint -mypy==1.14.1 +mypy==1.15.0 # via -r requirements/../../../requirements/devenv.txt mypy-extensions==1.0.0 # via @@ -44,7 +44,7 @@ packaging==24.2 # build pathspec==0.12.1 # via black -pip==25.0 +pip==25.0.1 # via pip-tools pip-tools==7.4.1 # via -r requirements/../../../requirements/devenv.txt @@ -55,7 +55,7 @@ platformdirs==4.3.6 # virtualenv pre-commit==4.1.0 # via -r requirements/../../../requirements/devenv.txt -pylint==3.3.3 +pylint==3.3.4 # via -r requirements/../../../requirements/devenv.txt pyproject-hooks==1.2.0 # via @@ -67,9 +67,9 @@ pyyaml==6.0.2 # -c requirements/_base.txt # -c requirements/_test.txt # pre-commit -ruff==0.9.3 +ruff==0.9.9 # via -r requirements/../../../requirements/devenv.txt -setuptools==75.8.0 +setuptools==75.8.2 # via # -c requirements/_test.txt # pip-tools @@ -80,7 +80,7 @@ typing-extensions==4.12.2 # -c requirements/_base.txt # -c requirements/_test.txt # mypy -virtualenv==20.29.1 +virtualenv==20.29.3 # via pre-commit wheel==0.45.1 # via pip-tools diff --git a/services/agent/src/simcore_service_agent/core/application.py b/services/agent/src/simcore_service_agent/core/application.py index b0cfa8720e4..b4a5ce8cd48 100644 --- a/services/agent/src/simcore_service_agent/core/application.py +++ b/services/agent/src/simcore_service_agent/core/application.py @@ -5,7 +5,7 @@ get_common_oas_options, override_fastapi_openapi_method, ) -from servicelib.fastapi.tracing import setup_tracing +from servicelib.fastapi.tracing import initialize_tracing from servicelib.logging_utils import config_all_loggers from .._meta import ( @@ -64,7 +64,7 @@ def create_app() -> FastAPI: setup_rpc_api_routes(app) if settings.AGENT_TRACING: - setup_tracing(app, settings.AGENT_TRACING, APP_NAME) + initialize_tracing(app, settings.AGENT_TRACING, APP_NAME) async def _on_startup() -> None: print(APP_STARTED_BANNER_MSG, flush=True) # noqa: T201 diff --git a/services/agent/src/simcore_service_agent/services/volumes_manager.py b/services/agent/src/simcore_service_agent/services/volumes_manager.py index d26062a936e..860ab86d0e2 100644 --- 
a/services/agent/src/simcore_service_agent/services/volumes_manager.py +++ b/services/agent/src/simcore_service_agent/services/volumes_manager.py @@ -52,7 +52,7 @@ async def setup(self) -> None: task_name="volumes bookkeeping", ) self._task_periodic_volume_cleanup = create_periodic_task( - self._bookkeeping_task, + self._periodic_volume_cleanup_task, interval=self.volume_cleanup_interval, task_name="volume cleanup", ) @@ -96,7 +96,7 @@ async def _remove_volume_safe( requires_backup=requires_backup, ) - async def _periodic_volmue_cleanup_task(self) -> None: + async def _periodic_volume_cleanup_task(self) -> None: with log_context(_logger, logging.DEBUG, "volume cleanup"): volumes_to_remove: set[str] = set() for volume_name, inactive_since in self._unused_volumes.items(): diff --git a/services/agent/tests/unit/test_services_volumes_manager.py b/services/agent/tests/unit/test_services_volumes_manager.py index 4ac429aeca9..5fae32710df 100644 --- a/services/agent/tests/unit/test_services_volumes_manager.py +++ b/services/agent/tests/unit/test_services_volumes_manager.py @@ -165,7 +165,7 @@ async def test_volumes_manager_periodic_task_cleanup( ): async def _run_volumes_clennup() -> None: await volumes_manager._bookkeeping_task() # noqa: SLF001 - await volumes_manager._periodic_volmue_cleanup_task() # noqa: SLF001 + await volumes_manager._periodic_volume_cleanup_task() # noqa: SLF001 await _run_volumes_clennup() assert spy_remove_volume.call_count == 0 diff --git a/services/api-server/Makefile b/services/api-server/Makefile index 82263c83658..e923de11db8 100644 --- a/services/api-server/Makefile +++ b/services/api-server/Makefile @@ -91,3 +91,13 @@ APP_URL:=http://$(get_my_ip).nip.io:8006 test-api: ## Runs schemathesis against development server (NOTE: make up-devel first) @docker run schemathesis/schemathesis:stable run \ "$(APP_URL)/api/v0/openapi.json" + + +test-pacts: guard-PACT_BROKER_USERNAME guard-PACT_BROKER_PASSWORD guard-PACT_BROKER_URL _check_venv_active ## Test pacts + pytest tests/unit/pact_broker/test* + +# Usage: +# PACT_BROKER_USERNAME=your_username \ +# PACT_BROKER_PASSWORD=your_password \ +# PACT_BROKER_URL=your_broker_url \ +# make test-pacts diff --git a/services/api-server/docker/boot.sh b/services/api-server/docker/boot.sh index 38322883120..2f545af3a59 100755 --- a/services/api-server/docker/boot.sh +++ b/services/api-server/docker/boot.sh @@ -19,7 +19,7 @@ if [ "${SC_BUILD_TARGET}" = "development" ]; then command -v python | sed 's/^/ /' cd services/api-server - uv pip --quiet --no-cache-dir sync requirements/dev.txt + uv pip --quiet sync requirements/dev.txt cd - echo "$INFO" "PIP :" uv pip list @@ -27,7 +27,7 @@ fi if [ "${SC_BOOT_MODE}" = "debug" ]; then # NOTE: production does NOT pre-installs debugpy - uv pip install --no-cache-dir debugpy + uv pip install debugpy fi # RUNNING application ---------------------------------------- diff --git a/services/api-server/openapi.json b/services/api-server/openapi.json index d70ef50e6bc..b7c0befbb47 100644 --- a/services/api-server/openapi.json +++ b/services/api-server/openapi.json @@ -805,9 +805,9 @@ "required": false, "schema": { "type": "integer", - "maximum": 100, + "maximum": 50, "minimum": 1, - "default": 50, + "default": 20, "title": "Limit" } }, @@ -3352,9 +3352,9 @@ "required": false, "schema": { "type": "integer", - "maximum": 100, + "maximum": 50, "minimum": 1, - "default": 50, + "default": 20, "title": "Limit" } }, @@ -4164,9 +4164,9 @@ "required": false, "schema": { "type": "integer", - "maximum": 100, + "maximum": 
50, "minimum": 1, - "default": 50, + "default": 20, "title": "Limit" } }, @@ -5322,31 +5322,531 @@ } } }, + "/v0/wallets/{wallet_id}/licensed-items": { + "get": { + "tags": [ + "wallets" + ], + "summary": "Get Available Licensed Items For Wallet", + "description": "Get all available licensed items for a given wallet", + "operationId": "get_available_licensed_items_for_wallet", + "security": [ + { + "HTTPBasic": [] + } + ], + "parameters": [ + { + "name": "wallet_id", + "in": "path", + "required": true, + "schema": { + "type": "integer", + "title": "Wallet Id" + } + }, + { + "name": "limit", + "in": "query", + "required": false, + "schema": { + "type": "integer", + "maximum": 50, + "minimum": 1, + "default": 20, + "title": "Limit" + } + }, + { + "name": "offset", + "in": "query", + "required": false, + "schema": { + "type": "integer", + "minimum": 0, + "default": 0, + "title": "Offset" + } + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/Page_LicensedItemGet_" + } + } + } + }, + "404": { + "description": "Wallet not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ErrorGet" + } + } + } + }, + "403": { + "description": "Access to wallet is not allowed", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ErrorGet" + } + } + } + }, + "429": { + "description": "Too many requests", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ErrorGet" + } + } + } + }, + "500": { + "description": "Internal server error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ErrorGet" + } + } + } + }, + "502": { + "description": "Unexpected error when communicating with backend service", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ErrorGet" + } + } + } + }, + "503": { + "description": "Service unavailable", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ErrorGet" + } + } + } + }, + "504": { + "description": "Request to a backend service timed out.", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ErrorGet" + } + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + } + } + }, + "/v0/wallets/{wallet_id}/licensed-items/{licensed_item_id}/checkout": { + "post": { + "tags": [ + "wallets" + ], + "summary": "Checkout Licensed Item", + "description": "Checkout licensed item", + "operationId": "checkout_licensed_item", + "security": [ + { + "HTTPBasic": [] + } + ], + "parameters": [ + { + "name": "wallet_id", + "in": "path", + "required": true, + "schema": { + "type": "integer", + "title": "Wallet Id" + } + }, + { + "name": "licensed_item_id", + "in": "path", + "required": true, + "schema": { + "type": "string", + "format": "uuid", + "title": "Licensed Item Id" + } + } + ], + "requestBody": { + "required": true, + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/LicensedItemCheckoutData" + } + } + } + }, + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/LicensedItemCheckoutGet" + } + } + } + }, + "404": { + "description": "Wallet not found", + "content": { + "application/json": { + 
"schema": { + "$ref": "#/components/schemas/ErrorGet" + } + } + } + }, + "403": { + "description": "Access to wallet is not allowed", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ErrorGet" + } + } + } + }, + "429": { + "description": "Too many requests", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ErrorGet" + } + } + } + }, + "500": { + "description": "Internal server error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ErrorGet" + } + } + } + }, + "502": { + "description": "Unexpected error when communicating with backend service", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ErrorGet" + } + } + } + }, + "503": { + "description": "Service unavailable", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ErrorGet" + } + } + } + }, + "504": { + "description": "Request to a backend service timed out.", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ErrorGet" + } + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + } + } + }, "/v0/credits/price": { "get": { "tags": [ "credits" ], - "summary": "Get Credits Price", - "description": "New in *version 0.6.0*", - "operationId": "get_credits_price", + "summary": "Get Credits Price", + "description": "New in *version 0.6.0*", + "operationId": "get_credits_price", + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/GetCreditPriceLegacy" + } + } + } + } + }, + "security": [ + { + "HTTPBasic": [] + } + ] + } + }, + "/v0/licensed-items": { + "get": { + "tags": [ + "licensed-items" + ], + "summary": "Get Licensed Items", + "description": "Get all licensed items", + "operationId": "get_licensed_items", + "security": [ + { + "HTTPBasic": [] + } + ], + "parameters": [ + { + "name": "limit", + "in": "query", + "required": false, + "schema": { + "type": "integer", + "maximum": 50, + "minimum": 1, + "default": 20, + "title": "Limit" + } + }, + { + "name": "offset", + "in": "query", + "required": false, + "schema": { + "type": "integer", + "minimum": 0, + "default": 0, + "title": "Offset" + } + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/Page_LicensedItemGet_" + } + } + } + }, + "429": { + "description": "Too many requests", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ErrorGet" + } + } + } + }, + "500": { + "description": "Internal server error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ErrorGet" + } + } + } + }, + "502": { + "description": "Unexpected error when communicating with backend service", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ErrorGet" + } + } + } + }, + "503": { + "description": "Service unavailable", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ErrorGet" + } + } + } + }, + "504": { + "description": "Request to a backend service timed out.", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ErrorGet" + } + } + } + }, + "422": { + "description": "Validation Error", + "content": 
{ + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + } + } + }, + "/v0/licensed-items/{licensed_item_id}/checked-out-items/{licensed_item_checkout_id}/release": { + "post": { + "tags": [ + "licensed-items" + ], + "summary": "Release Licensed Item", + "description": "Release previously checked out licensed item", + "operationId": "release_licensed_item", + "security": [ + { + "HTTPBasic": [] + } + ], + "parameters": [ + { + "name": "licensed_item_id", + "in": "path", + "required": true, + "schema": { + "type": "string", + "format": "uuid", + "title": "Licensed Item Id" + } + }, + { + "name": "licensed_item_checkout_id", + "in": "path", + "required": true, + "schema": { + "type": "string", + "format": "uuid", + "title": "Licensed Item Checkout Id" + } + } + ], "responses": { "200": { "description": "Successful Response", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/GetCreditPriceLegacy" + "$ref": "#/components/schemas/LicensedItemCheckoutGet" + } + } + } + }, + "429": { + "description": "Too many requests", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ErrorGet" + } + } + } + }, + "500": { + "description": "Internal server error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ErrorGet" + } + } + } + }, + "502": { + "description": "Unexpected error when communicating with backend service", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ErrorGet" + } + } + } + }, + "503": { + "description": "Service unavailable", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ErrorGet" + } + } + } + }, + "504": { + "description": "Request to a backend service timed out.", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ErrorGet" + } + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" } } } } - }, - "security": [ - { - "HTTPBasic": [] - } - ] + } } } }, @@ -5960,131 +6460,460 @@ "int": "42", "str": "hej med dig" } - } + } + }, + "JobOutputs": { + "properties": { + "job_id": { + "type": "string", + "format": "uuid", + "title": "Job Id", + "description": "Job that produced this output" + }, + "results": { + "additionalProperties": { + "anyOf": [ + { + "$ref": "#/components/schemas/File" + }, + { + "type": "number" + }, + { + "type": "integer" + }, + { + "type": "boolean" + }, + { + "type": "string" + }, + { + "items": {}, + "type": "array" + }, + { + "type": "null" + } + ] + }, + "type": "object", + "title": "Results" + } + }, + "type": "object", + "required": [ + "job_id", + "results" + ], + "title": "JobOutputs", + "example": { + "job_id": "99d9ac65-9f10-4e2f-a433-b5e412bb037b", + "results": { + "enabled": false, + "maxSAR": 4.33, + "n": 55, + "output_file": { + "filename": "sar_matrix.txt", + "id": "0a3b2c56-dbcd-4871-b93b-d454b7883f9f" + }, + "title": "Specific Absorption Rate" + } + } + }, + "JobStatus": { + "properties": { + "job_id": { + "type": "string", + "format": "uuid", + "title": "Job Id" + }, + "state": { + "$ref": "#/components/schemas/RunningState" + }, + "progress": { + "type": "integer", + "maximum": 100, + "minimum": 0, + "title": "Progress", + "default": 0 + }, + "submitted_at": { + "type": "string", + "format": "date-time", + "title": "Submitted At", + "description": "Last modification timestamp of the 
solver job" + }, + "started_at": { + "anyOf": [ + { + "type": "string", + "format": "date-time" + }, + { + "type": "null" + } + ], + "title": "Started At", + "description": "Timestamp that indicate the moment the solver starts execution or None if the event did not occur" + }, + "stopped_at": { + "anyOf": [ + { + "type": "string", + "format": "date-time" + }, + { + "type": "null" + } + ], + "title": "Stopped At", + "description": "Timestamp at which the solver finished or killed execution or None if the event did not occur" + } + }, + "type": "object", + "required": [ + "job_id", + "state", + "submitted_at" + ], + "title": "JobStatus", + "example": { + "job_id": "145beae4-a3a8-4fde-adbb-4e8257c2c083", + "progress": 3, + "started_at": "2021-04-01 07:16:43.670610", + "state": "STARTED", + "submitted_at": "2021-04-01 07:15:54.631007" + } + }, + "LicensedItemCheckoutData": { + "properties": { + "number_of_seats": { + "type": "integer", + "exclusiveMinimum": true, + "title": "Number Of Seats", + "minimum": 0 + }, + "service_run_id": { + "type": "string", + "title": "Service Run Id" + } + }, + "type": "object", + "required": [ + "number_of_seats", + "service_run_id" + ], + "title": "LicensedItemCheckoutData" + }, + "LicensedItemCheckoutGet": { + "properties": { + "licensed_item_checkout_id": { + "type": "string", + "format": "uuid", + "title": "Licensed Item Checkout Id" + }, + "licensed_item_id": { + "type": "string", + "format": "uuid", + "title": "Licensed Item Id" + }, + "key": { + "type": "string", + "title": "Key" + }, + "version": { + "type": "string", + "pattern": "^\\d+\\.\\d+\\.\\d+$", + "title": "Version" + }, + "wallet_id": { + "type": "integer", + "exclusiveMinimum": true, + "title": "Wallet Id", + "minimum": 0 + }, + "user_id": { + "type": "integer", + "exclusiveMinimum": true, + "title": "User Id", + "minimum": 0 + }, + "product_name": { + "type": "string", + "title": "Product Name" + }, + "started_at": { + "type": "string", + "format": "date-time", + "title": "Started At" + }, + "stopped_at": { + "anyOf": [ + { + "type": "string", + "format": "date-time" + }, + { + "type": "null" + } + ], + "title": "Stopped At" + }, + "num_of_seats": { + "type": "integer", + "title": "Num Of Seats" + } + }, + "type": "object", + "required": [ + "licensed_item_checkout_id", + "licensed_item_id", + "key", + "version", + "wallet_id", + "user_id", + "product_name", + "started_at", + "stopped_at", + "num_of_seats" + ], + "title": "LicensedItemCheckoutGet" }, - "JobOutputs": { + "LicensedItemGet": { "properties": { - "job_id": { + "licensed_item_id": { "type": "string", "format": "uuid", - "title": "Job Id", - "description": "Job that produced this output" + "title": "Licensed Item Id" }, - "results": { - "additionalProperties": { - "anyOf": [ - { - "$ref": "#/components/schemas/File" - }, - { - "type": "number" - }, - { - "type": "integer" - }, - { - "type": "boolean" - }, - { - "type": "string" - }, - { - "items": {}, - "type": "array" - }, - { - "type": "null" - } - ] + "key": { + "type": "string", + "title": "Key" + }, + "version": { + "type": "string", + "pattern": "^\\d+\\.\\d+\\.\\d+$", + "title": "Version" + }, + "display_name": { + "type": "string", + "title": "Display Name" + }, + "licensed_resource_type": { + "$ref": "#/components/schemas/LicensedResourceType" + }, + "licensed_resources": { + "items": { + "$ref": "#/components/schemas/LicensedResource" }, - "type": "object", - "title": "Results" + "type": "array", + "title": "Licensed Resources" + }, + "pricing_plan_id": { + "type": 
"integer", + "exclusiveMinimum": true, + "title": "Pricing Plan Id", + "minimum": 0 + }, + "is_hidden_on_market": { + "type": "boolean", + "title": "Is Hidden On Market" + }, + "created_at": { + "type": "string", + "format": "date-time", + "title": "Created At" + }, + "modified_at": { + "type": "string", + "format": "date-time", + "title": "Modified At" } }, "type": "object", "required": [ - "job_id", - "results" + "licensed_item_id", + "key", + "version", + "display_name", + "licensed_resource_type", + "licensed_resources", + "pricing_plan_id", + "is_hidden_on_market", + "created_at", + "modified_at" ], - "title": "JobOutputs", - "example": { - "job_id": "99d9ac65-9f10-4e2f-a433-b5e412bb037b", - "results": { - "enabled": false, - "maxSAR": 4.33, - "n": 55, - "output_file": { - "filename": "sar_matrix.txt", - "id": "0a3b2c56-dbcd-4871-b93b-d454b7883f9f" - }, - "title": "Specific Absorption Rate" - } - } + "title": "LicensedItemGet" }, - "JobStatus": { + "LicensedResource": { "properties": { - "job_id": { + "source": { + "$ref": "#/components/schemas/LicensedResourceSource" + }, + "category_id": { "type": "string", - "format": "uuid", - "title": "Job Id" + "maxLength": 100, + "minLength": 1, + "title": "Category Id" }, - "state": { - "$ref": "#/components/schemas/RunningState" + "category_display": { + "type": "string", + "title": "Category Display" }, - "progress": { + "terms_of_use_url": { + "anyOf": [ + { + "type": "string", + "maxLength": 2083, + "minLength": 1, + "format": "uri" + }, + { + "type": "null" + } + ], + "title": "Terms Of Use Url" + } + }, + "type": "object", + "required": [ + "source", + "category_id", + "category_display", + "terms_of_use_url" + ], + "title": "LicensedResource" + }, + "LicensedResourceSource": { + "properties": { + "id": { "type": "integer", - "maximum": 100, - "minimum": 0, - "title": "Progress", - "default": 0 + "title": "Id" }, - "submitted_at": { + "description": { "type": "string", - "format": "date-time", - "title": "Submitted At", - "description": "Last modification timestamp of the solver job" + "title": "Description" }, - "started_at": { + "thumbnail": { + "type": "string", + "title": "Thumbnail" + }, + "features": { + "$ref": "#/components/schemas/LicensedResourceSourceFeaturesDict" + }, + "doi": { "anyOf": [ { - "type": "string", - "format": "date-time" + "type": "string" }, { "type": "null" } ], - "title": "Started At", - "description": "Timestamp that indicate the moment the solver starts execution or None if the event did not occur" + "title": "Doi" }, - "stopped_at": { + "license_key": { + "type": "string", + "title": "License Key" + }, + "license_version": { + "type": "string", + "title": "License Version" + }, + "protection": { + "type": "string", + "enum": [ + "Code", + "PayPal" + ], + "title": "Protection" + }, + "available_from_url": { "anyOf": [ { "type": "string", - "format": "date-time" + "maxLength": 2083, + "minLength": 1, + "format": "uri" }, { "type": "null" } ], - "title": "Stopped At", - "description": "Timestamp at which the solver finished or killed execution or None if the event did not occur" + "title": "Available From Url" } }, "type": "object", "required": [ - "job_id", - "state", - "submitted_at" + "id", + "description", + "thumbnail", + "features", + "doi", + "license_key", + "license_version", + "protection", + "available_from_url" + ], + "title": "LicensedResourceSource" + }, + "LicensedResourceSourceFeaturesDict": { + "properties": { + "age": { + "type": "string", + "title": "Age" + }, + "date": { + "type": 
"string", + "format": "date", + "title": "Date" + }, + "ethnicity": { + "type": "string", + "title": "Ethnicity" + }, + "functionality": { + "type": "string", + "title": "Functionality" + }, + "height": { + "type": "string", + "title": "Height" + }, + "name": { + "type": "string", + "title": "Name" + }, + "sex": { + "type": "string", + "title": "Sex" + }, + "species": { + "type": "string", + "title": "Species" + }, + "version": { + "type": "string", + "title": "Version" + }, + "weight": { + "type": "string", + "title": "Weight" + } + }, + "type": "object", + "required": [ + "date" ], - "title": "JobStatus", - "example": { - "job_id": "145beae4-a3a8-4fde-adbb-4e8257c2c083", - "progress": 3, - "started_at": "2021-04-01 07:16:43.670610", - "state": "STARTED", - "submitted_at": "2021-04-01 07:15:54.631007" - } + "title": "LicensedResourceSourceFeaturesDict" + }, + "LicensedResourceType": { + "type": "string", + "enum": [ + "VIP_MODEL" + ], + "title": "LicensedResourceType" }, "Links": { "properties": { @@ -6409,6 +7238,65 @@ ], "title": "Page[Job]" }, + "Page_LicensedItemGet_": { + "properties": { + "items": { + "items": { + "$ref": "#/components/schemas/LicensedItemGet" + }, + "type": "array", + "title": "Items" + }, + "total": { + "anyOf": [ + { + "type": "integer", + "minimum": 0 + }, + { + "type": "null" + } + ], + "title": "Total" + }, + "limit": { + "anyOf": [ + { + "type": "integer", + "minimum": 1 + }, + { + "type": "null" + } + ], + "title": "Limit" + }, + "offset": { + "anyOf": [ + { + "type": "integer", + "minimum": 0 + }, + { + "type": "null" + } + ], + "title": "Offset" + }, + "links": { + "$ref": "#/components/schemas/Links" + } + }, + "type": "object", + "required": [ + "items", + "total", + "limit", + "offset", + "links" + ], + "title": "Page[LicensedItemGet]" + }, "Page_Study_": { "properties": { "items": { @@ -6489,7 +7377,7 @@ "title": "Unitname" }, "unitExtraInfo": { - "$ref": "#/components/schemas/UnitExtraInfo" + "$ref": "#/components/schemas/UnitExtraInfoTier" }, "currentCostPerUnit": { "type": "number", @@ -6898,7 +7786,7 @@ "kind": "input" } }, - "UnitExtraInfo": { + "UnitExtraInfoTier": { "properties": { "CPU": { "type": "integer", @@ -6923,7 +7811,7 @@ "RAM", "VRAM" ], - "title": "UnitExtraInfo", + "title": "UnitExtraInfoTier", "description": "Custom information that is propagated to the frontend. Defined fields are mandatory." 
}, "UploadLinks": { diff --git a/services/api-server/requirements/_base.txt b/services/api-server/requirements/_base.txt index 2539c60c7fe..3df605fe67f 100644 --- a/services/api-server/requirements/_base.txt +++ b/services/api-server/requirements/_base.txt @@ -187,8 +187,11 @@ fastapi==0.115.6 # via # -r requirements/../../../packages/service-library/requirements/_fastapi.in # -r requirements/_base.in + # fastapi-lifespan-manager fastapi-cli==0.0.6 # via fastapi +fastapi-lifespan-manager==0.1.4 + # via -r requirements/../../../packages/service-library/requirements/_fastapi.in fastapi-pagination==0.12.32 # via -r requirements/_base.in faststream==0.5.33 @@ -249,7 +252,6 @@ httpx==0.27.2 # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt - # -c requirements/./constraints.txt # -r requirements/../../../packages/service-library/requirements/_fastapi.in # -r requirements/_base.in # fastapi @@ -527,6 +529,8 @@ psycopg2-binary==2.9.10 # sqlalchemy pycparser==2.22 # via cffi +pycryptodome==3.21.0 + # via stream-zip pydantic==2.10.3 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -615,6 +619,34 @@ pydantic-extra-types==2.10.0 # fastapi pydantic-settings==2.6.1 # via + # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt # -r requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in @@ -797,6 +829,10 @@ starlette==0.41.3 # -c requirements/../../../requirements/constraints.txt # fastapi # prometheus-fastapi-instrumentator +stream-zip==0.0.83 + # via + # -r requirements/../../../packages/service-library/requirements/_base.in + # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in tenacity==9.0.0 # via # -r requirements/../../../packages/service-library/requirements/_base.in diff --git a/services/api-server/requirements/_test.in b/services/api-server/requirements/_test.in index 119a67d3f24..718feaeb205 100644 --- a/services/api-server/requirements/_test.in +++ b/services/api-server/requirements/_test.in @@ -19,6 +19,7 @@ docker faker jsonref moto[server] # mock out tests based on AWS-S3 +pact-python pyinstrument pytest pytest-asyncio diff --git 
a/services/api-server/requirements/_test.txt b/services/api-server/requirements/_test.txt index 39e78546e02..e4a95cfd8ec 100644 --- a/services/api-server/requirements/_test.txt +++ b/services/api-server/requirements/_test.txt @@ -17,10 +17,15 @@ alembic==1.14.0 # via # -c requirements/_base.txt # -r requirements/_test.in +annotated-types==0.7.0 + # via + # -c requirements/_base.txt + # pydantic anyio==4.7.0 # via # -c requirements/_base.txt # httpx + # starlette asgi-lifespan==2.1.0 # via -r requirements/_test.in attrs==24.2.0 @@ -48,7 +53,7 @@ botocore==1.35.99 # boto3 # moto # s3transfer -botocore-stubs==1.36.6 +botocore-stubs==1.37.4 # via types-boto3 certifi==2024.8.30 # via @@ -61,6 +66,7 @@ cffi==1.17.1 # via # -c requirements/_base.txt # cryptography + # pact-python cfn-lint==0.72.0 # via # -c requirements/./constraints.txt @@ -74,7 +80,9 @@ click==8.1.7 # -c requirements/_base.txt # -r requirements/_test.in # flask -coverage==7.6.10 + # pact-python + # uvicorn +coverage==7.6.12 # via pytest-cov cryptography==44.0.0 # via @@ -92,13 +100,17 @@ ecdsa==0.19.0 # moto # python-jose # sshpubkeys -faker==35.0.0 +faker==36.1.1 # via -r requirements/_test.in +fastapi==0.115.6 + # via + # -c requirements/_base.txt + # pact-python flask==2.1.3 # via # flask-cors # moto -flask-cors==5.0.0 +flask-cors==5.0.1 # via moto frozenlist==1.5.0 # via @@ -115,6 +127,7 @@ h11==0.14.0 # via # -c requirements/_base.txt # httpcore + # uvicorn httpcore==1.0.7 # via # -c requirements/_base.txt @@ -154,7 +167,7 @@ jsondiff==2.2.1 # via moto jsonpatch==1.33 # via cfn-lint -jsonpickle==4.0.1 +jsonpickle==4.0.2 # via jschema-to-python jsonpointer==3.0.0 # via jsonpatch @@ -183,7 +196,6 @@ markupsafe==3.0.2 # moto moto==4.0.1 # via - # -c requirements/../../../requirements/constraints.txt # -c requirements/./constraints.txt # -r requirements/_test.in multidict==6.1.0 @@ -191,7 +203,7 @@ multidict==6.1.0 # -c requirements/_base.txt # aiohttp # yarl -mypy==1.14.1 +mypy==1.15.0 # via sqlalchemy mypy-extensions==1.0.0 # via mypy @@ -206,7 +218,9 @@ packaging==24.2 # -c requirements/_base.txt # aioresponses # pytest -pbr==6.1.0 +pact-python==2.3.1 + # via -r requirements/_test.in +pbr==6.1.1 # via # jschema-to-python # sarif-om @@ -217,7 +231,11 @@ propcache==0.2.1 # -c requirements/_base.txt # aiohttp # yarl -pyasn1==0.6.1 +psutil==6.1.0 + # via + # -c requirements/_base.txt + # pact-python +pyasn1==0.4.8 # via # python-jose # rsa @@ -225,6 +243,15 @@ pycparser==2.22 # via # -c requirements/_base.txt # cffi +pydantic==2.10.3 + # via + # -c requirements/../../../requirements/constraints.txt + # -c requirements/_base.txt + # fastapi +pydantic-core==2.27.1 + # via + # -c requirements/_base.txt + # pydantic pyinstrument==5.0.0 # via # -c requirements/_base.txt @@ -235,7 +262,7 @@ pyrsistent==0.20.0 # via # -c requirements/_base.txt # jsonschema -pytest==8.3.4 +pytest==8.3.5 # via # -r requirements/_test.in # pytest-asyncio @@ -248,7 +275,7 @@ pytest-asyncio==0.23.8 # -r requirements/_test.in pytest-cov==6.0.0 # via -r requirements/_test.in -pytest-docker==3.1.1 +pytest-docker==3.2.0 # via -r requirements/_test.in pytest-mock==3.14.0 # via -r requirements/_test.in @@ -258,11 +285,10 @@ python-dateutil==2.9.0.post0 # via # -c requirements/_base.txt # botocore - # faker # moto -python-jose==3.3.0 +python-jose==3.4.0 # via moto -pytz==2024.2 +pytz==2025.1 # via moto pyyaml==6.0.2 # via @@ -278,6 +304,7 @@ requests==2.32.3 # -c requirements/_base.txt # docker # moto + # pact-python # responses responses==0.25.6 # via moto @@ 
-297,12 +324,14 @@ setuptools==75.6.0 # jsonschema # moto # openapi-spec-validator + # pbr six==1.17.0 # via # -c requirements/_base.txt # ecdsa # jsonschema # junit-xml + # pact-python # python-dateutil sniffio==1.3.1 # via @@ -320,23 +349,32 @@ sqlalchemy2-stubs==0.0.2a38 # via sqlalchemy sshpubkeys==3.3.1 # via moto +starlette==0.41.3 + # via + # -c requirements/../../../requirements/constraints.txt + # -c requirements/_base.txt + # fastapi types-aiofiles==24.1.0.20241221 # via -r requirements/_test.in -types-awscrt==0.23.7 +types-awscrt==0.23.10 # via botocore-stubs -types-boto3==1.36.6 +types-boto3==1.37.4 # via -r requirements/_test.in -types-s3transfer==0.11.2 +types-s3transfer==0.11.3 # via types-boto3 typing-extensions==4.12.2 # via # -c requirements/_base.txt # alembic # anyio - # faker + # fastapi # mypy + # pydantic + # pydantic-core # sqlalchemy2-stubs # types-boto3 +tzdata==2025.1 + # via faker urllib3==2.2.3 # via # -c requirements/../../../requirements/constraints.txt @@ -345,9 +383,14 @@ urllib3==2.2.3 # docker # requests # responses +uvicorn==0.32.1 + # via + # -c requirements/_base.txt + # pact-python werkzeug==2.1.2 # via # flask + # flask-cors # moto wrapt==1.17.0 # via @@ -359,3 +402,4 @@ yarl==1.18.3 # via # -c requirements/_base.txt # aiohttp + # pact-python diff --git a/services/api-server/requirements/_tools.txt b/services/api-server/requirements/_tools.txt index df593cf1519..4aa7b67f2d2 100644 --- a/services/api-server/requirements/_tools.txt +++ b/services/api-server/requirements/_tools.txt @@ -1,6 +1,6 @@ astroid==3.3.8 # via pylint -black==24.10.0 +black==25.1.0 # via -r requirements/../../../requirements/devenv.txt build==1.2.2.post1 # via pip-tools @@ -22,9 +22,9 @@ distlib==0.3.9 # via virtualenv filelock==3.17.0 # via virtualenv -identify==2.6.6 +identify==2.6.8 # via pre-commit -isort==5.13.2 +isort==6.0.1 # via # -r requirements/../../../requirements/devenv.txt # pylint @@ -41,7 +41,7 @@ markupsafe==3.0.2 # jinja2 mccabe==0.7.0 # via pylint -mypy==1.14.1 +mypy==1.15.0 # via # -c requirements/_test.txt # -r requirements/../../../requirements/devenv.txt @@ -60,7 +60,7 @@ packaging==24.2 # build pathspec==0.12.1 # via black -pip==25.0 +pip==25.0.1 # via pip-tools pip-tools==7.4.1 # via -r requirements/../../../requirements/devenv.txt @@ -72,7 +72,7 @@ platformdirs==4.3.6 # virtualenv pre-commit==4.1.0 # via -r requirements/../../../requirements/devenv.txt -pylint==3.3.3 +pylint==3.3.4 # via -r requirements/../../../requirements/devenv.txt pyproject-hooks==1.2.0 # via @@ -85,7 +85,7 @@ pyyaml==6.0.2 # -c requirements/_test.txt # pre-commit # watchdog -ruff==0.9.3 +ruff==0.9.9 # via -r requirements/../../../requirements/devenv.txt setuptools==75.6.0 # via @@ -99,7 +99,7 @@ typing-extensions==4.12.2 # -c requirements/_base.txt # -c requirements/_test.txt # mypy -virtualenv==20.29.1 +virtualenv==20.29.2 # via pre-commit watchdog==6.0.0 # via -r requirements/_tools.in diff --git a/services/api-server/src/simcore_service_api_server/api/routes/files.py b/services/api-server/src/simcore_service_api_server/api/routes/files.py index 5bfa3ab030d..0821d81abab 100644 --- a/services/api-server/src/simcore_service_api_server/api/routes/files.py +++ b/services/api-server/src/simcore_service_api_server/api/routes/files.py @@ -10,7 +10,11 @@ from fastapi import Header, Request, UploadFile, status from fastapi.exceptions import HTTPException from fastapi_pagination.api import create_page -from models_library.api_schemas_storage import ETag, FileUploadCompletionBody, LinkType 
+from models_library.api_schemas_storage.storage_schemas import ( + ETag, + FileUploadCompletionBody, + LinkType, +) from models_library.basic_types import SHA256Str from pydantic import AnyUrl, ByteSize, PositiveInt, TypeAdapter, ValidationError from servicelib.fastapi.requests_decorators import cancel_on_disconnect diff --git a/services/api-server/src/simcore_service_api_server/api/routes/licensed_items.py b/services/api-server/src/simcore_service_api_server/api/routes/licensed_items.py index 58c2a695f90..39d5dbc4394 100644 --- a/services/api-server/src/simcore_service_api_server/api/routes/licensed_items.py +++ b/services/api-server/src/simcore_service_api_server/api/routes/licensed_items.py @@ -1,7 +1,7 @@ from typing import Annotated, Any from fastapi import APIRouter, Depends, HTTPException, status -from models_library.licensed_items import LicensedItemID +from models_library.licenses import LicensedItemID from models_library.resource_tracker_licensed_items_checkouts import ( LicensedItemCheckoutID, ) @@ -31,7 +31,6 @@ status_code=status.HTTP_200_OK, responses=_LICENSE_ITEMS_STATUS_CODES, description="Get all licensed items", - include_in_schema=False, ) async def get_licensed_items( page_params: Annotated[PaginationParams, Depends()], @@ -49,7 +48,6 @@ async def get_licensed_items( status_code=status.HTTP_200_OK, responses=_LICENSE_ITEMS_STATUS_CODES, description="Release previously checked out licensed item", - include_in_schema=False, ) async def release_licensed_item( web_api_rpc: Annotated[WbApiRpcClient, Depends(get_wb_api_rpc_client)], diff --git a/services/api-server/src/simcore_service_api_server/api/routes/wallets.py b/services/api-server/src/simcore_service_api_server/api/routes/wallets.py index e992d94704a..eff00142cdc 100644 --- a/services/api-server/src/simcore_service_api_server/api/routes/wallets.py +++ b/services/api-server/src/simcore_service_api_server/api/routes/wallets.py @@ -2,7 +2,7 @@ from typing import Annotated, Any from fastapi import APIRouter, Depends, status -from models_library.licensed_items import LicensedItemID +from models_library.licenses import LicensedItemID from pydantic import PositiveInt from ...api.dependencies.authentication import get_current_user_id, get_product_name @@ -68,7 +68,6 @@ async def get_wallet( status_code=status.HTTP_200_OK, responses=WALLET_STATUS_CODES, description="Get all available licensed items for a given wallet", - include_in_schema=False, ) async def get_available_licensed_items_for_wallet( wallet_id: int, @@ -91,7 +90,6 @@ async def get_available_licensed_items_for_wallet( status_code=status.HTTP_200_OK, responses=WALLET_STATUS_CODES, description="Checkout licensed item", - include_in_schema=False, ) async def checkout_licensed_item( wallet_id: int, diff --git a/services/api-server/src/simcore_service_api_server/core/application.py b/services/api-server/src/simcore_service_api_server/core/application.py index 4de017698a6..cde8280313a 100644 --- a/services/api-server/src/simcore_service_api_server/core/application.py +++ b/services/api-server/src/simcore_service_api_server/core/application.py @@ -4,8 +4,8 @@ from fastapi_pagination import add_pagination from models_library.basic_types import BootModeEnum from packaging.version import Version -from servicelib.fastapi.profiler_middleware import ProfilerMiddleware -from servicelib.fastapi.tracing import setup_tracing +from servicelib.fastapi.profiler import initialize_profiler +from servicelib.fastapi.tracing import initialize_tracing from servicelib.logging_utils import 
config_all_loggers from .. import exceptions @@ -84,7 +84,7 @@ def init_app(settings: ApplicationSettings | None = None) -> FastAPI: setup_rabbitmq(app) if settings.API_SERVER_TRACING: - setup_tracing(app, settings.API_SERVER_TRACING, APP_NAME) + initialize_tracing(app, settings.API_SERVER_TRACING, APP_NAME) if settings.API_SERVER_WEBSERVER: webserver.setup( @@ -123,7 +123,7 @@ def init_app(settings: ApplicationSettings | None = None) -> FastAPI: ) if settings.API_SERVER_PROFILING: - app.add_middleware(ProfilerMiddleware) + initialize_profiler(app) if app.state.settings.API_SERVER_PROMETHEUS_INSTRUMENTATION_ENABLED: setup_prometheus_instrumentation(app) diff --git a/services/api-server/src/simcore_service_api_server/db/repositories/api_keys.py b/services/api-server/src/simcore_service_api_server/db/repositories/api_keys.py index e970f819ccc..400b4e8b1c1 100644 --- a/services/api-server/src/simcore_service_api_server/db/repositories/api_keys.py +++ b/services/api-server/src/simcore_service_api_server/db/repositories/api_keys.py @@ -4,7 +4,7 @@ import sqlalchemy as sa from models_library.products import ProductName from pydantic.types import PositiveInt -from simcore_postgres_database.errors import DatabaseError +from simcore_postgres_database.aiopg_errors import DatabaseError from .. import tables as tbl from ._base import BaseRepository diff --git a/services/api-server/src/simcore_service_api_server/db/repositories/groups_extra_properties.py b/services/api-server/src/simcore_service_api_server/db/repositories/groups_extra_properties.py index 8193201baee..847ae692674 100644 --- a/services/api-server/src/simcore_service_api_server/db/repositories/groups_extra_properties.py +++ b/services/api-server/src/simcore_service_api_server/db/repositories/groups_extra_properties.py @@ -1,7 +1,7 @@ import logging from models_library.users import UserID -from simcore_postgres_database.errors import DatabaseError +from simcore_postgres_database.aiopg_errors import DatabaseError from simcore_postgres_database.utils_groups_extra_properties import ( GroupExtraPropertiesRepo, ) diff --git a/services/api-server/src/simcore_service_api_server/exceptions/backend_errors.py b/services/api-server/src/simcore_service_api_server/exceptions/backend_errors.py index 11c3e65a28e..3653038ed28 100644 --- a/services/api-server/src/simcore_service_api_server/exceptions/backend_errors.py +++ b/services/api-server/src/simcore_service_api_server/exceptions/backend_errors.py @@ -108,7 +108,7 @@ class ProjectAlreadyStartedError(BaseBackEndError): class InsufficientNumberOfSeatsError(BaseBackEndError): - msg_template = "Not enough available seats. Current available seats {num_of_seats} for license item {licensed_item_id}" + msg_template = "Not enough available seats for license item {licensed_item_id}" status_code = status.HTTP_409_CONFLICT diff --git a/services/api-server/src/simcore_service_api_server/main.py b/services/api-server/src/simcore_service_api_server/main.py index 493874ee6eb..8b636ac4315 100644 --- a/services/api-server/src/simcore_service_api_server/main.py +++ b/services/api-server/src/simcore_service_api_server/main.py @@ -1,8 +1,7 @@ """Main application to be deployed in for example uvicorn. 
""" from fastapi import FastAPI - -from .core.application import init_app +from simcore_service_api_server.core.application import init_app # SINGLETON FastAPI app the_app: FastAPI = init_app() diff --git a/services/api-server/src/simcore_service_api_server/models/domain/projects.py b/services/api-server/src/simcore_service_api_server/models/domain/projects.py index ba57e923109..ae74533546b 100644 --- a/services/api-server/src/simcore_service_api_server/models/domain/projects.py +++ b/services/api-server/src/simcore_service_api_server/models/domain/projects.py @@ -3,14 +3,12 @@ from models_library.projects_access import AccessRights from models_library.projects_nodes import InputTypes, Node, OutputTypes from models_library.projects_nodes_io import SimCoreFileLink -from models_library.projects_ui import StudyUI assert AccessRights # nosec assert InputTypes # nosec assert Node # nosec assert OutputTypes # nosec assert SimCoreFileLink # nosec -assert StudyUI # nosec __all__: tuple[str, ...] = ( "AccessRights", @@ -18,5 +16,4 @@ "Node", "OutputTypes", "SimCoreFileLink", - "StudyUI", ) diff --git a/services/api-server/src/simcore_service_api_server/models/pagination.py b/services/api-server/src/simcore_service_api_server/models/pagination.py index c8f883f05e4..935cd044f20 100644 --- a/services/api-server/src/simcore_service_api_server/models/pagination.py +++ b/services/api-server/src/simcore_service_api_server/models/pagination.py @@ -1,4 +1,4 @@ -""" Overrides models in fastapi_pagination +"""Overrides models in fastapi_pagination Usage: from fastapi_pagination.api import create_page @@ -11,7 +11,6 @@ from fastapi import Query from fastapi_pagination.customization import CustomizedPage, UseName, UseParamsFields -from fastapi_pagination.limit_offset import LimitOffsetParams as _LimitOffsetParams from fastapi_pagination.links import LimitOffsetPage as _LimitOffsetPage from models_library.rest_pagination import ( DEFAULT_NUMBER_OF_ITEMS_PER_PAGE, @@ -43,7 +42,7 @@ UseName(name="Page"), ] -PaginationParams: TypeAlias = _LimitOffsetParams +PaginationParams: TypeAlias = Page.__params_type__ # type: ignore class OnePage(BaseModel, Generic[T]): diff --git a/services/api-server/src/simcore_service_api_server/models/schemas/files.py b/services/api-server/src/simcore_service_api_server/models/schemas/files.py index 78651edfef1..29cc9aacf0a 100644 --- a/services/api-server/src/simcore_service_api_server/models/schemas/files.py +++ b/services/api-server/src/simcore_service_api_server/models/schemas/files.py @@ -7,7 +7,7 @@ import aiofiles from fastapi import UploadFile -from models_library.api_schemas_storage import ETag +from models_library.api_schemas_storage.storage_schemas import ETag from models_library.basic_types import SHA256Str from models_library.projects_nodes_io import StorageFileID from pydantic import ( diff --git a/services/api-server/src/simcore_service_api_server/models/schemas/model_adapter.py b/services/api-server/src/simcore_service_api_server/models/schemas/model_adapter.py index bd933fb883e..8e0bcb3cba1 100644 --- a/services/api-server/src/simcore_service_api_server/models/schemas/model_adapter.py +++ b/services/api-server/src/simcore_service_api_server/models/schemas/model_adapter.py @@ -1,8 +1,8 @@ # Models added here "cover" models from within the deployment in order to restore backwards compatibility -from datetime import datetime +from datetime import date, datetime from decimal import Decimal -from typing import Annotated, Any +from typing import Annotated, Literal, NotRequired 
from models_library.api_schemas_api_server.pricing_plans import ( ServicePricingPlanGet as _ServicePricingPlanGet, @@ -10,11 +10,20 @@ from models_library.api_schemas_webserver.licensed_items import ( LicensedItemRpcGet as _LicensedItemGet, ) +from models_library.api_schemas_webserver.licensed_items import ( + LicensedResource as _LicensedResource, +) +from models_library.api_schemas_webserver.licensed_items import ( + LicensedResourceSource as _LicensedResourceSource, +) +from models_library.api_schemas_webserver.licensed_items import ( + LicensedResourceSourceFeaturesDict as _LicensedResourceSourceFeaturesDict, +) from models_library.api_schemas_webserver.licensed_items_checkouts import ( LicensedItemCheckoutRpcGet as _LicensedItemCheckoutRpcGet, ) -from models_library.api_schemas_webserver.product import ( - GetCreditPrice as _GetCreditPrice, +from models_library.api_schemas_webserver.products import ( + CreditPriceGet as _GetCreditPrice, ) from models_library.api_schemas_webserver.resource_usage import ( PricingUnitGet as _PricingUnitGet, @@ -24,13 +33,18 @@ ) from models_library.basic_types import IDStr, NonNegativeDecimal from models_library.groups import GroupID -from models_library.licensed_items import LicensedItemID, LicensedResourceType +from models_library.licenses import ( + LicensedItemID, + LicensedItemKey, + LicensedItemVersion, + LicensedResourceType, +) from models_library.products import ProductName from models_library.resource_tracker import ( PricingPlanClassification, PricingPlanId, PricingUnitId, - UnitExtraInfo, + UnitExtraInfoTier, ) from models_library.resource_tracker_licensed_items_checkouts import ( LicensedItemCheckoutID, @@ -41,10 +55,12 @@ BaseModel, ConfigDict, Field, + HttpUrl, NonNegativeFloat, NonNegativeInt, PlainSerializer, ) +from typing_extensions import TypedDict class GetCreditPriceLegacy(BaseModel): @@ -72,7 +88,7 @@ class GetCreditPriceLegacy(BaseModel): ) -assert set(GetCreditPriceLegacy.model_fields.keys()) == set( +assert set(GetCreditPriceLegacy.model_fields.keys()) == set( # nosec _GetCreditPrice.model_fields.keys() ) @@ -80,7 +96,9 @@ class GetCreditPriceLegacy(BaseModel): class PricingUnitGetLegacy(BaseModel): pricing_unit_id: PricingUnitId = Field(alias="pricingUnitId") unit_name: str = Field(alias="unitName") - unit_extra_info: UnitExtraInfo = Field(alias="unitExtraInfo") + unit_extra_info: UnitExtraInfoTier = Field( + alias="unitExtraInfo" + ) # <-- NOTE: API Server is interested only in the TIER type current_cost_per_unit: Annotated[ Decimal, PlainSerializer(float, return_type=NonNegativeFloat, when_used="json") ] = Field(alias="currentCostPerUnit") @@ -90,7 +108,7 @@ class PricingUnitGetLegacy(BaseModel): ) -assert set(PricingUnitGetLegacy.model_fields.keys()) == set( +assert set(PricingUnitGetLegacy.model_fields.keys()) == set( # nosec _PricingUnitGet.model_fields.keys() ) @@ -112,7 +130,7 @@ class WalletGetWithAvailableCreditsLegacy(BaseModel): ) -assert set(WalletGetWithAvailableCreditsLegacy.model_fields.keys()) == set( +assert set(WalletGetWithAvailableCreditsLegacy.model_fields.keys()) == set( # nosec _WalletGetWithAvailableCredits.model_fields.keys() ) @@ -130,17 +148,81 @@ class ServicePricingPlanGetLegacy(BaseModel): ) -assert set(ServicePricingPlanGetLegacy.model_fields.keys()) == set( +assert set(ServicePricingPlanGetLegacy.model_fields.keys()) == set( # nosec _ServicePricingPlanGet.model_fields.keys() ) +class LicensedResourceSourceFeaturesDict(TypedDict): + age: NotRequired[str] + date: date + ethnicity: NotRequired[str] 
+ functionality: NotRequired[str] + height: NotRequired[str] + name: NotRequired[str] + sex: NotRequired[str] + species: NotRequired[str] + version: NotRequired[str] + weight: NotRequired[str] + + +assert set(LicensedResourceSourceFeaturesDict.__annotations__.keys()) == set( # nosec + _LicensedResourceSourceFeaturesDict.__annotations__.keys() +), "LicensedResourceSourceFeaturesDict keys do not match" + +for key in LicensedResourceSourceFeaturesDict.__annotations__: + assert ( # nosec + LicensedResourceSourceFeaturesDict.__annotations__[key] + == _LicensedResourceSourceFeaturesDict.__annotations__[key] + ), f"Type of {key} in LicensedResourceSourceFeaturesDict does not match" + + +class LicensedResourceSource(BaseModel): + id: int + description: str + thumbnail: str + features: LicensedResourceSourceFeaturesDict + doi: str | None + license_key: str + license_version: str + protection: Literal["Code", "PayPal"] + available_from_url: HttpUrl | None + + +assert set(LicensedResourceSource.model_fields.keys()) == set( # nosec + _LicensedResourceSource.model_fields.keys() +), "LicensedResourceSource keys do not match" + +for key in LicensedResourceSource.model_fields.keys(): + if key == "features": + continue + assert ( # nosec + LicensedResourceSource.__annotations__[key] + == _LicensedResourceSource.__annotations__[key] + ), f"Type of {key} in LicensedResourceSource does not match" + + +class LicensedResource(BaseModel): + source: LicensedResourceSource + category_id: IDStr + category_display: str + terms_of_use_url: HttpUrl | None + + +assert set(LicensedResource.__annotations__.keys()) == set( # nosec + _LicensedResource.__annotations__.keys() +), "LicensedResource keys do not match" + + class LicensedItemGet(BaseModel): licensed_item_id: LicensedItemID + key: LicensedItemKey + version: LicensedItemVersion display_name: str licensed_resource_type: LicensedResourceType - licensed_resource_data: dict[str, Any] + licensed_resources: list[LicensedResource] pricing_plan_id: PricingPlanId + is_hidden_on_market: bool created_at: datetime modified_at: datetime model_config = ConfigDict( @@ -148,7 +230,7 @@ class LicensedItemGet(BaseModel): ) -assert set(LicensedItemGet.model_fields.keys()) == set( +assert set(LicensedItemGet.model_fields.keys()) == set( # nosec _LicensedItemGet.model_fields.keys() ) @@ -156,6 +238,8 @@ class LicensedItemGet(BaseModel): class LicensedItemCheckoutGet(BaseModel): licensed_item_checkout_id: LicensedItemCheckoutID licensed_item_id: LicensedItemID + key: LicensedItemKey + version: LicensedItemVersion wallet_id: WalletID user_id: UserID product_name: ProductName @@ -164,6 +248,6 @@ class LicensedItemCheckoutGet(BaseModel): num_of_seats: int -assert set(LicensedItemCheckoutGet.model_fields.keys()) == set( +assert set(LicensedItemCheckoutGet.model_fields.keys()) == set( # nosec _LicensedItemCheckoutRpcGet.model_fields.keys() ) diff --git a/services/api-server/src/simcore_service_api_server/services_http/solver_job_models_converters.py b/services/api-server/src/simcore_service_api_server/services_http/solver_job_models_converters.py index 44c5ca993aa..86c3f8bc53d 100644 --- a/services/api-server/src/simcore_service_api_server/services_http/solver_job_models_converters.py +++ b/services/api-server/src/simcore_service_api_server/services_http/solver_job_models_converters.py @@ -11,12 +11,13 @@ import arrow from models_library.api_schemas_webserver.projects import ProjectCreateNew, ProjectGet +from models_library.api_schemas_webserver.projects_ui import StudyUI from 
models_library.basic_types import KeyIDStr from models_library.projects_nodes import InputID from pydantic import TypeAdapter from ..models.basic_types import VersionStr -from ..models.domain.projects import InputTypes, Node, SimCoreFileLink, StudyUI +from ..models.domain.projects import InputTypes, Node, SimCoreFileLink from ..models.schemas.files import File from ..models.schemas.jobs import ( ArgumentTypes, @@ -167,7 +168,7 @@ def create_new_project_for_job( }, }, slideshow={}, - currentNodeId=solver_id, # type: ignore[arg-type] + current_node_id=solver_id, # type: ignore[arg-type] annotations={}, ), accessRights={}, # type: ignore[call-arg] # This MUST be called with alias diff --git a/services/api-server/src/simcore_service_api_server/services_http/storage.py b/services/api-server/src/simcore_service_api_server/services_http/storage.py index 9616b1541bd..52d3c8e8ddb 100644 --- a/services/api-server/src/simcore_service_api_server/services_http/storage.py +++ b/services/api-server/src/simcore_service_api_server/services_http/storage.py @@ -8,9 +8,14 @@ from fastapi import FastAPI from fastapi.encoders import jsonable_encoder -from models_library.api_schemas_storage import FileMetaDataArray -from models_library.api_schemas_storage import FileMetaDataGet as StorageFileMetaData -from models_library.api_schemas_storage import FileUploadSchema, PresignedLink +from models_library.api_schemas_storage.storage_schemas import FileMetaDataArray +from models_library.api_schemas_storage.storage_schemas import ( + FileMetaDataGet as StorageFileMetaData, +) +from models_library.api_schemas_storage.storage_schemas import ( + FileUploadSchema, + PresignedLink, +) from models_library.basic_types import SHA256Str from models_library.generics import Envelope from pydantic import AnyUrl, PositiveInt diff --git a/services/api-server/src/simcore_service_api_server/services_rpc/resource_usage_tracker.py b/services/api-server/src/simcore_service_api_server/services_rpc/resource_usage_tracker.py index 371263a9880..82e3ea1d369 100644 --- a/services/api-server/src/simcore_service_api_server/services_rpc/resource_usage_tracker.py +++ b/services/api-server/src/simcore_service_api_server/services_rpc/resource_usage_tracker.py @@ -44,6 +44,8 @@ async def get_licensed_item_checkout( return LicensedItemCheckoutGet( licensed_item_checkout_id=_licensed_item_checkout.licensed_item_checkout_id, licensed_item_id=_licensed_item_checkout.licensed_item_id, + key=_licensed_item_checkout.key, + version=_licensed_item_checkout.version, wallet_id=_licensed_item_checkout.wallet_id, user_id=_licensed_item_checkout.user_id, product_name=_licensed_item_checkout.product_name, diff --git a/services/api-server/src/simcore_service_api_server/services_rpc/wb_api_server.py b/services/api-server/src/simcore_service_api_server/services_rpc/wb_api_server.py index 6eaa5b60c42..0cee0a7aef1 100644 --- a/services/api-server/src/simcore_service_api_server/services_rpc/wb_api_server.py +++ b/services/api-server/src/simcore_service_api_server/services_rpc/wb_api_server.py @@ -5,7 +5,7 @@ from fastapi import FastAPI from fastapi_pagination import create_page from models_library.api_schemas_webserver.licensed_items import LicensedItemRpcGetPage -from models_library.licensed_items import LicensedItemID +from models_library.licenses import LicensedItemID from models_library.resource_tracker_licensed_items_checkouts import ( LicensedItemCheckoutID, ) @@ -46,7 +46,11 @@ ) from ..exceptions.service_errors_utils import service_exception_mapper from 
..models.pagination import Page, PaginationParams -from ..models.schemas.model_adapter import LicensedItemCheckoutGet, LicensedItemGet +from ..models.schemas.model_adapter import ( + LicensedItemCheckoutGet, + LicensedItemGet, + LicensedResource, +) _exception_mapper = partial(service_exception_mapper, service_name="WebApiServer") @@ -58,10 +62,16 @@ def _create_licensed_items_get_page( [ LicensedItemGet( licensed_item_id=elm.licensed_item_id, + key=elm.key, + version=elm.version, display_name=elm.display_name, licensed_resource_type=elm.licensed_resource_type, - licensed_resource_data=elm.licensed_resource_data, + licensed_resources=[ + LicensedResource.model_validate(res.model_dump()) + for res in elm.licensed_resources + ], pricing_plan_id=elm.pricing_plan_id, + is_hidden_on_market=elm.is_hidden_on_market, created_at=elm.created_at, modified_at=elm.modified_at, ) @@ -118,6 +128,7 @@ async def get_available_licensed_items_for_wallet( NotEnoughAvailableSeatsError: InsufficientNumberOfSeatsError, CanNotCheckoutNotEnoughAvailableSeatsError: InsufficientNumberOfSeatsError, _CanNotCheckoutServiceIsNotRunningError: CanNotCheckoutServiceIsNotRunningError, + # NOTE: missing WalletAccessForbiddenError } ) async def checkout_licensed_item_for_wallet( @@ -142,6 +153,8 @@ async def checkout_licensed_item_for_wallet( return LicensedItemCheckoutGet( licensed_item_checkout_id=licensed_item_checkout_get.licensed_item_checkout_id, licensed_item_id=licensed_item_checkout_get.licensed_item_id, + key=licensed_item_checkout_get.key, + version=licensed_item_checkout_get.version, wallet_id=licensed_item_checkout_get.wallet_id, user_id=licensed_item_checkout_get.user_id, product_name=licensed_item_checkout_get.product_name, @@ -171,6 +184,8 @@ async def release_licensed_item_for_wallet( return LicensedItemCheckoutGet( licensed_item_checkout_id=licensed_item_checkout_get.licensed_item_checkout_id, licensed_item_id=licensed_item_checkout_get.licensed_item_id, + key=licensed_item_checkout_get.key, + version=licensed_item_checkout_get.version, wallet_id=licensed_item_checkout_get.wallet_id, user_id=licensed_item_checkout_get.user_id, product_name=licensed_item_checkout_get.product_name, diff --git a/services/api-server/tests/unit/_with_db/data/docker-compose.yml b/services/api-server/tests/unit/_with_db/data/docker-compose.yml index ae76474af7c..fdba1f9bb90 100644 --- a/services/api-server/tests/unit/_with_db/data/docker-compose.yml +++ b/services/api-server/tests/unit/_with_db/data/docker-compose.yml @@ -1,6 +1,6 @@ services: postgres: - image: postgres:14.5-alpine@sha256:db802f226b620fc0b8adbeca7859eb203c8d3c9ce5d84870fadee05dea8f50ce + image: "postgres:17.2-alpine3.21@sha256:17143ad87797f511036cf8f50ada164aeb371f0d8068a172510549fb5d2cd65f" environment: - POSTGRES_USER=${POSTGRES_USER:-test} - POSTGRES_PASSWORD=${POSTGRES_PASSWORD:-test} diff --git a/services/api-server/tests/unit/api_solvers/conftest.py b/services/api-server/tests/unit/api_solvers/conftest.py index 4d008380aaf..82c125139b6 100644 --- a/services/api-server/tests/unit/api_solvers/conftest.py +++ b/services/api-server/tests/unit/api_solvers/conftest.py @@ -60,7 +60,7 @@ def mocked_catalog_service_api( json=[ # one solver faker_catalog.create_service_out( - key="simcore/services/comp/Foo", name="Foo" + key="simcore/services/comp/foo", name="foo" ), # two version of the same solver faker_catalog.create_service_out(version="0.0.1"), diff --git a/services/api-server/tests/unit/conftest.py b/services/api-server/tests/unit/conftest.py index 
0706a5b1d8f..83db84a5d2c 100644 --- a/services/api-server/tests/unit/conftest.py +++ b/services/api-server/tests/unit/conftest.py @@ -27,7 +27,7 @@ TaskProgress, TaskStatus, ) -from models_library.api_schemas_storage import HealthCheck +from models_library.api_schemas_storage.storage_schemas import HealthCheck from models_library.api_schemas_webserver.projects import ProjectGet from models_library.app_diagnostics import AppStatusCheck from models_library.generics import Envelope @@ -121,15 +121,18 @@ async def client( # # LifespanManager will trigger app's startup&shutown event handlers - async with LifespanManager( - app, - startup_timeout=None if is_pdb_enabled else MAX_TIME_FOR_APP_TO_STARTUP, - shutdown_timeout=None if is_pdb_enabled else MAX_TIME_FOR_APP_TO_SHUTDOWN, - ), httpx.AsyncClient( - base_url="http://api.testserver.io", - headers={"Content-Type": "application/json"}, - transport=ASGITransport(app=app), - ) as httpx_async_client: + async with ( + LifespanManager( + app, + startup_timeout=None if is_pdb_enabled else MAX_TIME_FOR_APP_TO_STARTUP, + shutdown_timeout=None if is_pdb_enabled else MAX_TIME_FOR_APP_TO_SHUTDOWN, + ), + httpx.AsyncClient( + base_url="http://api.testserver.io", + headers={"Content-Type": "application/json"}, + transport=ASGITransport(app=app), + ) as httpx_async_client, + ): assert isinstance(httpx_async_client, httpx.AsyncClient) yield httpx_async_client @@ -242,11 +245,8 @@ def webserver_service_openapi_specs( def storage_service_openapi_specs( osparc_simcore_services_dir: Path, ) -> dict[str, Any]: - openapi_path = ( - osparc_simcore_services_dir - / "storage/src/simcore_service_storage/api/v0/openapi.yaml" - ) - return yaml.safe_load(openapi_path.read_text()) + openapi_path = osparc_simcore_services_dir / "storage" / "openapi.json" + return json.loads(openapi_path.read_text()) @pytest.fixture @@ -355,9 +355,9 @@ def mocked_storage_service_api_base( base_url=settings.API_SERVER_STORAGE.base_url, assert_all_called=False, ) as respx_mock: - assert openapi["paths"]["/v0/"]["get"]["operationId"] == "health_check" + assert openapi["paths"]["/v0/"]["get"]["operationId"] == "get_health_v0__get" - respx_mock.get(path="/v0/", name="health_check").respond( + respx_mock.get(path="/v0/", name="get_health_v0__get").respond( status.HTTP_200_OK, json=Envelope[HealthCheck]( data={ @@ -369,8 +369,11 @@ def mocked_storage_service_api_base( ).model_dump(), ) - assert openapi["paths"]["/v0/status"]["get"]["operationId"] == "get_status" - respx_mock.get(path="/v0/status", name="get_status").respond( + assert ( + openapi["paths"]["/v0/status"]["get"]["operationId"] + == "get_status_v0_status_get" + ) + respx_mock.get(path="/v0/status", name="get_status_v0_status_get").respond( status.HTTP_200_OK, json=Envelope[AppStatusCheck]( data={ diff --git a/services/api-server/tests/unit/pact_broker/README.md b/services/api-server/tests/unit/pact_broker/README.md new file mode 100644 index 00000000000..19620b24009 --- /dev/null +++ b/services/api-server/tests/unit/pact_broker/README.md @@ -0,0 +1,19 @@ +# Contract testing (PACT) + +Maintainer @matusdrobuliak66 + +```bash +PACT_BROKER_URL= PACT_BROKER_USERNAME= PACT_BROKER_PASSWORD= make test-pacts +``` + +## Install and Publish new contract to Broker +Contracts are generated by Consumer (ex. 
Sim4Life) +TODO: add reference to Sim4life repo where they can be generated +### Install +```bash +npm install @pact-foundation/pact-cli +``` +### Publish +```bash +pact-broker publish ./pacts/05_licensed_items.json --tag licensed_items --consumer-app-version 8.2.1 --broker-base-url= --broker-username= --broker-password= +``` diff --git a/services/api-server/tests/unit/pact_broker/conftest.py b/services/api-server/tests/unit/pact_broker/conftest.py new file mode 100644 index 00000000000..e63b68a2012 --- /dev/null +++ b/services/api-server/tests/unit/pact_broker/conftest.py @@ -0,0 +1,110 @@ +import os +from threading import Thread +from time import sleep + +import pytest +import uvicorn +from fastapi import FastAPI +from servicelib.utils import unused_port +from simcore_service_api_server.api.dependencies.authentication import ( + Identity, + get_current_identity, +) + + +def pytest_addoption(parser: pytest.Parser) -> None: + group = parser.getgroup( + "Pact broker contract test", + description="Pact broker contract test specific parameters", + ) + group.addoption( + "--pact-broker-url", + action="store", + default=None, + help="URL pointing to the deployment to be tested", + ) + group.addoption( + "--pact-broker-username", + action="store", + default=None, + help="User name for logging into the deployment", + ) + group.addoption( + "--pact-broker-password", + action="store", + default=None, + help="Password for logging into the deployment", + ) + + +@pytest.fixture() +def pact_broker_credentials( + request: pytest.FixtureRequest, +): + # Get credentials from either CLI arguments or environment variables + broker_url = request.config.getoption("--pact-broker-url", None) or os.getenv( + "PACT_BROKER_URL" + ) + broker_username = request.config.getoption("--pact-broker-username", None) or os.getenv( + "PACT_BROKER_USERNAME" + ) + broker_password = request.config.getoption("--pact-broker-password", None) or os.getenv( + "PACT_BROKER_PASSWORD" + ) + + # Identify missing credentials + missing = [ + name + for name, value in { + "PACT_BROKER_URL": broker_url, + "PACT_BROKER_USERNAME": broker_username, + "PACT_BROKER_PASSWORD": broker_password, + }.items() + if not value + ] + + if missing: + pytest.fail( + f"Missing Pact Broker credentials: {', '.join(missing)}. Set them as environment variables or pass them as CLI arguments." + ) + + return broker_url, broker_username, broker_password + + +def mock_get_current_identity() -> Identity: + return Identity(user_id=1, product_name="osparc", email="test@itis.swiss") + + +@pytest.fixture() +def running_test_server_url( + app: FastAPI, +): + """ + Spins up a FastAPI server in a background thread and yields its base URL. + The mock fixtures (e.g. 'mock_wb_api_server_rpc') must be requested before + this one, so the app is already patched by the time we start the server.
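+ The server is shut down after the test by setting server.should_exit and + joining the server thread.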
+ """ + # Override + app.dependency_overrides[get_current_identity] = mock_get_current_identity + + port = unused_port() + base_url = f"http://localhost:{port}" + + config = uvicorn.Config( + app, + host="localhost", + port=port, + log_level="info", + ) + server = uvicorn.Server(config) + + thread = Thread(target=server.run, daemon=True) + thread.start() + + # Wait a bit for the server to be ready + sleep(1) + + yield base_url # , before_server_start + + server.should_exit = True + thread.join() diff --git a/services/api-server/tests/unit/pact_broker/pacts/01_checkout_release.json b/services/api-server/tests/unit/pact_broker/pacts/01_checkout_release.json new file mode 100644 index 00000000000..9fd2d5b7d12 --- /dev/null +++ b/services/api-server/tests/unit/pact_broker/pacts/01_checkout_release.json @@ -0,0 +1,81 @@ +{ + "consumer": { + "name": "Sim4Life" + }, + "provider": { + "name": "OsparcApiServerCheckoutRelease" + }, + "interactions": [ + { + "description": "Checkout one license", + "request": { + "method": "POST", + "path": "/v0/wallets/35/licensed-items/99580844-77fa-41bb-ad70-02dfaf1e3965/checkout", + "headers": { + "Accept": "application/json", + "Content-Type": "application/json" + }, + "body": { + "number_of_seats": 1, + "service_run_id": "1740149365_21a9352a-1d46-41f9-9a9b-42ac888f5afb" + } + }, + "response": { + "status": 200, + "headers": { + "Content-Length": "294", + "Content-Type": "application/json", + "Server": "uvicorn" + }, + "body": { + "key": "MODEL_IX_HEAD", + "licensed_item_checkout_id": "25262183-392c-4268-9311-3c4256c46012", + "licensed_item_id": "99580844-77fa-41bb-ad70-02dfaf1e3965", + "num_of_seats": 1, + "product_name": "s4l", + "started_at": "2025-02-21T15:04:47.673828Z", + "stopped_at": null, + "user_id": 425, + "version": "1.0.0", + "wallet_id": 35 + } + } + }, + { + "description": "Release item", + "request": { + "method": "POST", + "path": "/v0/licensed-items/99580844-77fa-41bb-ad70-02dfaf1e3965/checked-out-items/25262183-392c-4268-9311-3c4256c46012/release", + "headers": { + "Accept": "application/json", + "Content-Type": "application/json" + } + }, + "response": { + "status": 200, + "headers": { + "Content-Length": "319", + "Content-Type": "application/json", + "Server": "uvicorn" + }, + "body": { + "key": "MODEL_IX_HEAD", + "licensed_item_checkout_id": "25262183-392c-4268-9311-3c4256c46012", + "licensed_item_id": "99580844-77fa-41bb-ad70-02dfaf1e3965", + "num_of_seats": 1, + "product_name": "s4l", + "started_at": "2025-02-21T15:04:47.673828Z", + "stopped_at": "2025-02-21T15:04:47.901169Z", + "user_id": 425, + "version": "1.0.0", + "wallet_id": 35 + } + } + } + ], + "metadata": { + "pactSpecification": { + "version": "3.0.0" + } + } +} diff --git a/services/api-server/tests/unit/pact_broker/pacts/05_licensed_items.json b/services/api-server/tests/unit/pact_broker/pacts/05_licensed_items.json new file mode 100644 index 00000000000..4d67be3ff33 --- /dev/null +++ b/services/api-server/tests/unit/pact_broker/pacts/05_licensed_items.json @@ -0,0 +1,156 @@ +{ + "consumer": { + "name": "Sim4Life" + }, + "provider": { + "name": "OsparcApiServerLicensedItems" + }, + "interactions": [ + { + "description": "List all available licensed items", + "request": { + "method": "GET", + "path": "/v0/licensed-items", + "headers": { + "Accept": "application/json", + "Content-Type": "application/json" + } + }, + "response": { + "status": 200, + "headers": { + "Content-Type": "application/json", + "Server": "uvicorn" + }, + "body": { + "items": [ + { + "created_at": 
"2025-02-19T13:46:30.258102Z", + "display_name": "3 Week Male Mouse V1.0", + "is_hidden_on_market": false, + "key": "MODEL_MOUSE_3W_M_POSABLE", + "licensed_item_id": "f26587de-abad-49cb-9b4f-e6e1fad7f5c1", + "licensed_resource_type": "VIP_MODEL", + "licensed_resources": [ + { + "category_display": "Animal", + "category_id": "AnimalWholeBody", + "source": { + "available_from_url": null, + "description": "Animal Models - 3 Week Male Mouse (B6C3F1) V1.0", + "doi": "10.13099/VIP91206-01-0", + "features": { + "age": "3 weeks", + "date": "2021-03-16", + "functionality": "Posable", + "height": "70 mm", + "name": "B6C3F1N Male 3W", + "sex": "male", + "species": "Mouse", + "version": "1.0", + "weight": "12.3 g" + }, + "id": 138, + "license_key": "MODEL_MOUSE_3W_M_POSABLE", + "license_version": "V1.0", + "protection": "Code", + "thumbnail": "https://itis.swiss/assets/images/Virtual-Population/Animals-Cropped/3WeekMouse.png" + }, + "terms_of_use_url": "https://raw.githubusercontent.com/ITISFoundation/licenses/refs/heads/main/models/User%20License%20Animal%20Models%20v1.x.md" + } + ], + "modified_at": "2025-02-19T13:46:30.258102Z", + "pricing_plan_id": 21, + "version": "1.0.0" + }, + { + "created_at": "2025-02-19T13:46:30.302673Z", + "display_name": "Big Male Rat V1.0", + "is_hidden_on_market": false, + "key": "MODEL_RAT567_M", + "licensed_item_id": "0713928d-9e36-444e-b720-26e97ad7d861", + "licensed_resource_type": "VIP_MODEL", + "licensed_resources": [ + { + "category_display": "Animal", + "category_id": "AnimalWholeBody", + "source": { + "available_from_url": null, + "description": "Animal Models - Big Male Rat V1-x", + "doi": "10.13099/VIP91101-01-0", + "features": { + "date": "2012-01-01", + "functionality": "Static", + "height": "260 mm", + "name": "Big Male Rat", + "sex": "male", + "species": "Rat", + "version": "1.0", + "weight": "567 g" + }, + "id": 21, + "license_key": "MODEL_RAT567_M", + "license_version": "V1.0", + "protection": "Code", + "thumbnail": "https://itis.swiss/assets/images/Virtual-Population/Animals-Cropped/BigMaleRat567g.png" + }, + "terms_of_use_url": "https://raw.githubusercontent.com/ITISFoundation/licenses/refs/heads/main/models/User%20License%20Animal%20Models%20v1.x.md" + }, + { + "category_display": "Animal", + "category_id": "AnimalWholeBody", + "source": { + "available_from_url": null, + "description": "Animal Models - Posable Big Male Rat V1-x", + "doi": "10.13099/VIP91101-01-1", + "features": { + "date": "2018-01-22", + "functionality": "Posable", + "height": "260 mm", + "name": "Big Male Rat", + "sex": "male", + "species": "Rat", + "version": "1.0", + "weight": "567 g" + }, + "id": 111, + "license_key": "MODEL_RAT567_M", + "license_version": "V1.0", + "protection": "Code", + "thumbnail": "https://itis.swiss/assets/images/Virtual-Population/Animals-Cropped/BigMaleRat567g.png" + }, + "terms_of_use_url": "https://raw.githubusercontent.com/ITISFoundation/licenses/refs/heads/main/models/User%20License%20Animal%20Models%20v1.x.md" + } + ], + "modified_at": "2025-02-19T13:46:30.302673Z", + "pricing_plan_id": 21, + "version": "1.0.0" + } + ], + "limit": 20, + "links": { + "first": "/v0/licensed-items?offset=0", + "last": "/v0/licensed-items?offset=0", + "next": null, + "prev": null, + "self": "/v0/licensed-items" + }, + "offset": 0, + "total": 2 + }, + "matchingRules": { + "headers": { + "$.Date": { + "match": "type" + } + } + } + } + } + ], + "metadata": { + "pactSpecification": { + "version": "3.0.0" + } + } +} diff --git 
a/services/api-server/tests/unit/pact_broker/test_pact_checkout_release.py b/services/api-server/tests/unit/pact_broker/test_pact_checkout_release.py new file mode 100644 index 00000000000..62ff031ad78 --- /dev/null +++ b/services/api-server/tests/unit/pact_broker/test_pact_checkout_release.py @@ -0,0 +1,132 @@ +# pylint: disable=redefined-outer-name +# pylint: disable=unused-argument +# pylint: disable=unused-variable +# pylint: disable=too-many-arguments + + +import os + +import pytest +from fastapi import FastAPI +from models_library.api_schemas_webserver.licensed_items_checkouts import ( + LicensedItemCheckoutRpcGet, +) +from pact.v3 import Verifier +from pytest_mock import MockerFixture +from simcore_service_api_server._meta import API_VERSION +from simcore_service_api_server.api.dependencies.resource_usage_tracker_rpc import ( + get_resource_usage_tracker_client, +) +from simcore_service_api_server.api.dependencies.webserver_rpc import ( + get_wb_api_rpc_client, +) +from simcore_service_api_server.services_rpc.resource_usage_tracker import ( + ResourceUsageTrackerClient, +) +from simcore_service_api_server.services_rpc.wb_api_server import WbApiRpcClient + +# Fake response based on values from 01_checkout_release.json +EXPECTED_CHECKOUT = LicensedItemCheckoutRpcGet.model_validate( + { + "key": "MODEL_IX_HEAD", + "licensed_item_checkout_id": "25262183-392c-4268-9311-3c4256c46012", + "licensed_item_id": "99580844-77fa-41bb-ad70-02dfaf1e3965", + "num_of_seats": 1, + "product_name": "s4l", + "started_at": "2025-02-21T15:04:47.673828Z", + "stopped_at": None, + "user_id": 425, + "version": "1.0.0", + "wallet_id": 35, + } +) +assert EXPECTED_CHECKOUT.stopped_at is None + + +EXPECTED_RELEASE = LicensedItemCheckoutRpcGet.model_validate( + { + "key": "MODEL_IX_HEAD", + "licensed_item_checkout_id": "25262183-392c-4268-9311-3c4256c46012", + "licensed_item_id": "99580844-77fa-41bb-ad70-02dfaf1e3965", + "num_of_seats": 1, + "product_name": "s4l", + "started_at": "2025-02-21T15:04:47.673828Z", + "stopped_at": "2025-02-21T15:04:47.901169Z", + "user_id": 425, + "version": "1.0.0", + "wallet_id": 35, + } +) +assert EXPECTED_RELEASE.stopped_at is not None + + +class DummyRpcClient: + pass + + +@pytest.fixture +async def mock_wb_api_server_rpc(app: FastAPI, mocker: MockerFixture) -> None: + + app.dependency_overrides[get_wb_api_rpc_client] = lambda: WbApiRpcClient( + _client=DummyRpcClient() + ) + + mocker.patch( + "simcore_service_api_server.services_rpc.wb_api_server._checkout_licensed_item_for_wallet", + return_value=EXPECTED_CHECKOUT, + ) + + mocker.patch( + "simcore_service_api_server.services_rpc.wb_api_server._release_licensed_item_for_wallet", + return_value=EXPECTED_RELEASE, + ) + + +@pytest.fixture +async def mock_rut_server_rpc(app: FastAPI, mocker: MockerFixture) -> None: + + app.dependency_overrides[get_resource_usage_tracker_client] = ( + lambda: ResourceUsageTrackerClient(_client=DummyRpcClient()) + ) + + mocker.patch( + "simcore_service_api_server.services_rpc.resource_usage_tracker._get_licensed_item_checkout", + return_value=EXPECTED_CHECKOUT, + ) + + +@pytest.mark.skipif( + not os.getenv("PACT_BROKER_URL"), + reason="This test runs only if PACT_BROKER_URL is provided", +) +def test_provider_against_pact( + pact_broker_credentials: tuple[str, str, str], + mock_wb_api_server_rpc: None, + mock_rut_server_rpc: None, + running_test_server_url: str, +) -> None: + """ + Use the Pact Verifier to check the real provider + against the generated contract. 
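+ Skipped unless PACT_BROKER_URL is set; credentials are read from the + PACT_BROKER_* environment variables or the corresponding --pact-broker-* + CLI options (see conftest.py and README.md).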
+ """ + broker_url, broker_username, broker_password = pact_broker_credentials + + broker_builder = ( + Verifier("OsparcApiServerCheckoutRelease") + .add_transport(url=running_test_server_url) + .broker_source( + broker_url, + username=broker_username, + password=broker_password, + selector=True, + ) + ) + + # NOTE: If you want to filter/test against specific contract use tags + verifier = broker_builder.consumer_tags( + "checkout_release" # <-- Here you define which pact to verify + ).build() + + # Set API version and run verification + verifier.set_publish_options(version=API_VERSION, tags=None, branch=None) + verifier.verify() diff --git a/services/api-server/tests/unit/pact_broker/test_pact_licensed_items.py b/services/api-server/tests/unit/pact_broker/test_pact_licensed_items.py new file mode 100644 index 00000000000..f04e4bd4737 --- /dev/null +++ b/services/api-server/tests/unit/pact_broker/test_pact_licensed_items.py @@ -0,0 +1,185 @@ +# pylint: disable=redefined-outer-name +# pylint: disable=unused-argument +# pylint: disable=unused-variable +# pylint: disable=too-many-arguments + +import os + +import pytest +from fastapi import FastAPI +from models_library.api_schemas_webserver.licensed_items import ( + LicensedItemRpcGet, + LicensedItemRpcGetPage, +) +from pact.v3 import Verifier +from pytest_mock import MockerFixture +from simcore_service_api_server._meta import API_VERSION +from simcore_service_api_server.api.dependencies.webserver_rpc import ( + get_wb_api_rpc_client, +) +from simcore_service_api_server.services_rpc.wb_api_server import WbApiRpcClient + +# Fake response based on values from 05_licensed_items.json +EXPECTED_LICENSED_ITEMS = [ + { + "created_at": "2025-02-19T13:46:30.258102Z", + "display_name": "3 Week Male Mouse V1.0", + "is_hidden_on_market": False, + "key": "MODEL_MOUSE_3W_M_POSABLE", + "licensed_item_id": "f26587de-abad-49cb-9b4f-e6e1fad7f5c1", + "licensed_resource_type": "VIP_MODEL", + "licensed_resources": [ + { + "category_display": "Animal", + "category_id": "AnimalWholeBody", + "source": { + "available_from_url": None, + "description": "Animal Models - 3 Week Male Mouse (B6C3F1) V1.0", + "doi": "10.13099/VIP91206-01-0", + "features": { + "age": "3 weeks", + "date": "2021-03-16", + "functionality": "Posable", + "height": "70 mm", + "name": "B6C3F1N Male 3W", + "sex": "male", + "species": "Mouse", + "version": "1.0", + "weight": "12.3 g", + }, + "id": 138, + "license_key": "MODEL_MOUSE_3W_M_POSABLE", + "license_version": "V1.0", + "protection": "Code", + "thumbnail": "https://itis.swiss/assets/images/Virtual-Population/Animals-Cropped/3WeekMouse.png", + }, + "terms_of_use_url": "https://raw.githubusercontent.com/ITISFoundation/licenses/refs/heads/main/models/User%20License%20Animal%20Models%20v1.x.md", + } + ], + "modified_at": "2025-02-19T13:46:30.258102Z", + "pricing_plan_id": 21, + "version": "1.0.0", + }, + { + "created_at": "2025-02-19T13:46:30.302673Z", + "display_name": "Big Male Rat V1.0", + "is_hidden_on_market": False, + "key": "MODEL_RAT567_M", + "licensed_item_id": "0713928d-9e36-444e-b720-26e97ad7d861", + "licensed_resource_type": "VIP_MODEL", + "licensed_resources": [ + { + "category_display": "Animal", + "category_id": "AnimalWholeBody", + "source": { + "available_from_url": None, + "description": "Animal Models - Big Male Rat V1-x", + "doi": "10.13099/VIP91101-01-0", + "features": { + "date": "2012-01-01", + "functionality": "Static", + "height": "260 mm", + "name": "Big Male Rat", + "sex": "male", + "species": "Rat", + "version": "1.0", + 
"weight": "567 g", + }, + "id": 21, + "license_key": "MODEL_RAT567_M", + "license_version": "V1.0", + "protection": "Code", + "thumbnail": "https://itis.swiss/assets/images/Virtual-Population/Animals-Cropped/BigMaleRat567g.png", + }, + "terms_of_use_url": "https://raw.githubusercontent.com/ITISFoundation/licenses/refs/heads/main/models/User%20License%20Animal%20Models%20v1.x.md", + }, + { + "category_display": "Animal", + "category_id": "AnimalWholeBody", + "source": { + "available_from_url": None, + "description": "Animal Models - Posable Big Male Rat V1-x", + "doi": "10.13099/VIP91101-01-1", + "features": { + "date": "2018-01-22", + "functionality": "Posable", + "height": "260 mm", + "name": "Big Male Rat", + "sex": "male", + "species": "Rat", + "version": "1.0", + "weight": "567 g", + }, + "id": 111, + "license_key": "MODEL_RAT567_M", + "license_version": "V1.0", + "protection": "Code", + "thumbnail": "https://itis.swiss/assets/images/Virtual-Population/Animals-Cropped/BigMaleRat567g.png", + }, + "terms_of_use_url": "https://raw.githubusercontent.com/ITISFoundation/licenses/refs/heads/main/models/User%20License%20Animal%20Models%20v1.x.md", + }, + ], + "modified_at": "2025-02-19T13:46:30.302673Z", + "pricing_plan_id": 21, + "version": "1.0.0", + }, +] + + +EXPECTED_LICENSED_ITEMS_PAGE = LicensedItemRpcGetPage( + items=[LicensedItemRpcGet.model_validate(item) for item in EXPECTED_LICENSED_ITEMS], + total=len(EXPECTED_LICENSED_ITEMS), +) + + +class DummyRpcClient: + pass + + +@pytest.fixture +async def mock_wb_api_server_rpc(app: FastAPI, mocker: MockerFixture) -> None: + + app.dependency_overrides[get_wb_api_rpc_client] = lambda: WbApiRpcClient( + _client=DummyRpcClient() + ) + + mocker.patch( + "simcore_service_api_server.services_rpc.wb_api_server._get_licensed_items", + return_value=EXPECTED_LICENSED_ITEMS_PAGE, + ) + + +@pytest.mark.skipif( + not os.getenv("PACT_BROKER_URL"), + reason="This test runs only if PACT_BROKER_URL is provided", +) +def test_provider_against_pact( + pact_broker_credentials: tuple[str, str, str], + mock_wb_api_server_rpc: None, + running_test_server_url: str, +) -> None: + """ + Use the Pact Verifier to check the real provider + against the generated contract. 
+ """ + broker_url, broker_username, broker_password = pact_broker_credentials + + broker_builder = ( + Verifier("OsparcApiServerLicensedItems") + .add_transport(url=running_test_server_url) + .broker_source( + broker_url, + username=broker_username, + password=broker_password, + selector=True, + ) + ) + + # NOTE: If you want to filter/test against specific contract use tags + verifier = broker_builder.consumer_tags( + "licensed_items" # <-- Here you define which pact to verify + ).build() + + # Set API version and run verification + verifier.set_publish_options(version=API_VERSION, tags=None, branch=None) + verifier.verify() diff --git a/services/api-server/tests/unit/test__models_examples.py b/services/api-server/tests/unit/test__models_examples.py index 225b4b01c95..78931fd264f 100644 --- a/services/api-server/tests/unit/test__models_examples.py +++ b/services/api-server/tests/unit/test__models_examples.py @@ -1,11 +1,13 @@ -import json from itertools import chain from typing import Any import pytest import simcore_service_api_server.models.schemas from pydantic import BaseModel -from pytest_simcore.pydantic_models import walk_model_examples_in_package +from pytest_simcore.pydantic_models import ( + assert_validation_model, + walk_model_examples_in_package, +) @pytest.mark.parametrize( @@ -13,8 +15,8 @@ chain(walk_model_examples_in_package(simcore_service_api_server.models)), ) def test_all_models_library_models_config_examples( - model_cls: type[BaseModel], example_name: int, example_data: Any + model_cls: type[BaseModel], example_name: str, example_data: Any ): - assert model_cls.model_validate( - example_data - ), f"Failed {example_name} : {json.dumps(example_data)}" + assert_validation_model( + model_cls, example_name=example_name, example_data=example_data + ) diff --git a/services/api-server/tests/unit/test_api__study_workflows.py b/services/api-server/tests/unit/test_api__study_workflows.py index b0b7d306e77..dc6abbdadaf 100644 --- a/services/api-server/tests/unit/test_api__study_workflows.py +++ b/services/api-server/tests/unit/test_api__study_workflows.py @@ -3,11 +3,11 @@ # pylint: disable=unused-variable # pylint: disable=too-many-arguments +import asyncio import functools import io import json import textwrap -import time from contextlib import suppress from pathlib import Path from typing import TypedDict @@ -293,7 +293,7 @@ async def test_run_study_workflow( ) print(f"Status: [{job_status.state}]") - time.sleep(1) + await asyncio.sleep(1) print(await studies_api.inspect_study_job(study_id=template_id, job_id=new_job.id)) diff --git a/services/api-server/tests/unit/test_api_files.py b/services/api-server/tests/unit/test_api_files.py index 6b522e737ea..323332f1f5b 100644 --- a/services/api-server/tests/unit/test_api_files.py +++ b/services/api-server/tests/unit/test_api_files.py @@ -18,7 +18,7 @@ from fastapi import status from fastapi.encoders import jsonable_encoder from httpx import AsyncClient -from models_library.api_schemas_storage import ( +from models_library.api_schemas_storage.storage_schemas import ( ETag, FileUploadCompletionBody, UploadedPart, diff --git a/services/api-server/tests/unit/test_credits.py b/services/api-server/tests/unit/test_credits.py index 3630e218754..f9548949b81 100644 --- a/services/api-server/tests/unit/test_credits.py +++ b/services/api-server/tests/unit/test_credits.py @@ -2,7 +2,7 @@ from fastapi import status from httpx import AsyncClient, BasicAuth -from models_library.api_schemas_webserver.product import GetCreditPrice +from 
models_library.api_schemas_webserver.products import CreditPriceGet from pytest_simcore.helpers.httpx_calls_capture_models import CreateRespxMockCallback from simcore_service_api_server._meta import API_VTAG @@ -23,4 +23,4 @@ async def test_get_credits_price( response = await client.get(f"{API_VTAG}/credits/price", auth=auth) assert response.status_code == status.HTTP_200_OK - _ = GetCreditPrice.model_validate(response.json()) + _ = CreditPriceGet.model_validate(response.json()) diff --git a/services/api-server/tests/unit/test_licensed_items.py b/services/api-server/tests/unit/test_licensed_items.py index 6542589cb32..f396f1f397b 100644 --- a/services/api-server/tests/unit/test_licensed_items.py +++ b/services/api-server/tests/unit/test_licensed_items.py @@ -21,7 +21,7 @@ from models_library.api_schemas_webserver.licensed_items_checkouts import ( LicensedItemCheckoutRpcGet, ) -from models_library.licensed_items import LicensedItemID +from models_library.licenses import LicensedItemID from models_library.resource_tracker_licensed_items_checkouts import ( LicensedItemCheckoutID, ) diff --git a/services/api-server/tests/unit/test_models.py b/services/api-server/tests/unit/test_models.py index b3e1f48a57a..0df9f63fc70 100644 --- a/services/api-server/tests/unit/test_models.py +++ b/services/api-server/tests/unit/test_models.py @@ -3,13 +3,15 @@ # pylint: disable=too-many-arguments # pylint: disable=unused-argument # pylint: disable=unused-variable -import json from typing import Any import pytest import simcore_service_api_server.models from pydantic import BaseModel -from pytest_simcore.pydantic_models import walk_model_examples_in_package +from pytest_simcore.pydantic_models import ( + assert_validation_model, + walk_model_examples_in_package, +) from simcore_postgres_database.models.users import UserRole from simcore_service_api_server.models.schemas.profiles import UserRoleEnum @@ -21,9 +23,9 @@ def test_api_server_model_examples( model_cls: type[BaseModel], example_name: int, example_data: Any ): - assert model_cls.model_validate( - example_data - ), f"Failed {example_name} : {json.dumps(example_data)}" + assert_validation_model( + model_cls, example_name=example_name, example_data=example_data + ) def test_enums_in_sync(): diff --git a/services/api-server/tests/unit/test_models_schemas_files.py b/services/api-server/tests/unit/test_models_schemas_files.py index 578216dd253..bd7cfddfaf8 100644 --- a/services/api-server/tests/unit/test_models_schemas_files.py +++ b/services/api-server/tests/unit/test_models_schemas_files.py @@ -11,7 +11,9 @@ import pytest from fastapi import UploadFile -from models_library.api_schemas_storage import FileMetaDataGet as StorageFileMetaData +from models_library.api_schemas_storage.storage_schemas import ( + FileMetaDataGet as StorageFileMetaData, +) from models_library.basic_types import SHA256Str from models_library.projects_nodes_io import StorageFileID from pydantic import TypeAdapter, ValidationError @@ -81,7 +83,7 @@ async def test_create_filemetadata_from_starlette_uploadfile( def test_convert_between_file_models(): storage_file_meta = StorageFileMetaData( - **StorageFileMetaData.model_config["json_schema_extra"]["examples"][1] + **StorageFileMetaData.model_json_schema()["examples"][1] ) storage_file_meta.file_id = TypeAdapter(StorageFileID).validate_python( f"api/{uuid4()}/extensionless" diff --git a/services/autoscaling/docker/boot.sh b/services/autoscaling/docker/boot.sh index c97dedd7afa..a9adc4cda70 100755 --- a/services/autoscaling/docker/boot.sh 
+++ b/services/autoscaling/docker/boot.sh @@ -24,14 +24,14 @@ if [ "${SC_BUILD_TARGET}" = "development" ]; then command -v python | sed 's/^/ /' cd services/autoscaling - uv pip --quiet --no-cache-dir sync requirements/dev.txt + uv pip --quiet sync requirements/dev.txt cd - uv pip list fi if [ "${SC_BOOT_MODE}" = "debug" ]; then # NOTE: production does NOT pre-installs debugpy - uv pip install --no-cache-dir debugpy + uv pip install debugpy fi # diff --git a/services/autoscaling/requirements/_base.txt b/services/autoscaling/requirements/_base.txt index 0f13c0e7eaf..bffe841d719 100644 --- a/services/autoscaling/requirements/_base.txt +++ b/services/autoscaling/requirements/_base.txt @@ -87,7 +87,32 @@ attrs==24.2.0 # jsonschema # referencing boto3==1.35.36 - # via aiobotocore + # via + # -c requirements/../../../packages/aws-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt + # aiobotocore botocore==1.35.36 # via # aiobotocore @@ -165,6 +190,9 @@ fastapi==0.115.6 # via # -r requirements/../../../packages/service-library/requirements/_fastapi.in # -r requirements/_base.in + # fastapi-lifespan-manager +fastapi-lifespan-manager==0.1.4 + # via -r requirements/../../../packages/service-library/requirements/_fastapi.in faststream==0.5.33 # via # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in @@ -451,6 +479,8 @@ psutil==6.1.0 # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in # distributed +pycryptodome==3.21.0 + # via stream-zip pydantic==2.10.3 # via # -c requirements/../../../packages/aws-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -526,6 +556,30 @@ pydantic-extra-types==2.10.0 # -r requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in pydantic-settings==2.6.1 # via + # -c requirements/../../../packages/aws-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt # -r requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in @@ -607,6 +661,30 @@ redis==5.2.1 # -r requirements/../../../packages/service-library/requirements/_base.in referencing==0.35.1 # via + # -c requirements/../../../packages/aws-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt # jsonschema # jsonschema-specifications requests==2.32.3 @@ -664,6 +742,10 @@ starlette==0.41.3 # -c requirements/../../../requirements/constraints.txt # fastapi # prometheus-fastapi-instrumentator +stream-zip==0.0.83 + # via + # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/_base.in tblib==3.0.0 # via # -c requirements/../../../services/dask-sidecar/requirements/_dask-distributed.txt @@ -694,13 +776,13 @@ typer==0.15.1 # -r requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/settings-library/requirements/_base.in -types-aiobotocore==2.15.2.post3 +types-aiobotocore==2.19.0 # via -r requirements/../../../packages/aws-library/requirements/_base.in -types-aiobotocore-ec2==2.15.2 +types-aiobotocore-ec2==2.19.0 # via types-aiobotocore -types-aiobotocore-s3==2.15.2.post1 +types-aiobotocore-s3==2.19.0 # via types-aiobotocore -types-aiobotocore-ssm==2.15.2 +types-aiobotocore-ssm==2.19.0 # via types-aiobotocore types-awscrt==0.23.3 # via botocore-stubs diff --git a/services/autoscaling/requirements/_test.txt b/services/autoscaling/requirements/_test.txt index 9cfd356c089..bffd28ffda7 100644 --- a/services/autoscaling/requirements/_test.txt +++ b/services/autoscaling/requirements/_test.txt @@ -15,7 +15,7 @@ attrs==24.2.0 # -c requirements/_base.txt # jsonschema # referencing -aws-sam-translator==1.94.0 +aws-sam-translator==1.95.0 # via cfn-lint aws-xray-sdk==2.14.0 # via moto @@ -47,7 +47,7 @@ certifi==2024.8.30 # requests cffi==1.17.1 # via cryptography -cfn-lint==1.22.7 +cfn-lint==1.27.0 # via moto charset-normalizer==3.4.0 # via @@ -57,24 +57,24 @@ click==8.1.7 # via # -c requirements/_base.txt # flask -coverage==7.6.10 +coverage==7.6.12 # via # -r requirements/_test.in # pytest-cov -cryptography==44.0.0 +cryptography==44.0.2 # via # -c requirements/../../../requirements/constraints.txt # joserfc # moto -deepdiff==8.1.1 +deepdiff==8.2.0 # via -r requirements/_test.in docker==7.1.0 # via # -r requirements/_test.in # moto -faker==35.0.0 +faker==36.1.1 # via -r requirements/_test.in -fakeredis==2.26.2 +fakeredis==2.27.0 # via -r requirements/_test.in flaky==3.8.1 # via -r requirements/_test.in @@ -82,7 +82,7 @@ flask==3.1.0 # via # flask-cors # moto -flask-cors==5.0.0 +flask-cors==5.0.1 # via moto graphql-core==3.2.6 # via moto @@ -123,9 +123,7 @@ jmespath==1.0.1 # -c requirements/_base.txt # boto3 # botocore -joserfc==1.0.2 - # via moto -jsondiff==2.2.1 +joserfc==1.0.4 # via moto jsonpatch==1.33 # via cfn-lint @@ -155,10 +153,8 @@ markupsafe==3.0.2 # -c requirements/_base.txt # jinja2 # werkzeug -moto==5.0.20 - # via - # -c requirements/../../../requirements/constraints.txt - # -r requirements/_test.in +moto==5.1.1 + # via -r requirements/_test.in mpmath==1.3.0 # via sympy networkx==3.4.2 @@ -167,7 +163,7 @@ openapi-schema-validator==0.6.3 # via openapi-spec-validator openapi-spec-validator==0.7.1 # via moto -orderly-set==5.2.3 +orderly-set==5.3.0 # via deepdiff packaging==24.2 # via @@ -186,7 +182,7 @@ psutil==6.1.0 # via # -c requirements/_base.txt # -r requirements/_test.in 
-py-partiql-parser==0.5.6 +py-partiql-parser==0.6.1 # via moto pycparser==2.22 # via cffi @@ -201,7 +197,7 @@ pydantic-core==2.27.1 # pydantic pyparsing==3.2.1 # via moto -pytest==8.3.4 +pytest==8.3.5 # via # -r requirements/_test.in # pytest-asyncio @@ -227,7 +223,6 @@ python-dateutil==2.9.0.post0 # via # -c requirements/_base.txt # botocore - # faker # moto python-dotenv==1.0.1 # via @@ -238,7 +233,6 @@ pyyaml==6.0.2 # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt # cfn-lint - # jsondiff # jsonschema-path # moto # responses @@ -278,7 +272,7 @@ s3transfer==0.10.4 # via # -c requirements/_base.txt # boto3 -setuptools==75.8.0 +setuptools==75.8.2 # via moto six==1.17.0 # via @@ -298,21 +292,21 @@ sympy==1.13.3 # via cfn-lint termcolor==2.5.0 # via pytest-sugar -types-aiobotocore==2.15.2.post3 +types-aiobotocore==2.19.0 # via # -c requirements/_base.txt # -r requirements/_test.in -types-aiobotocore-ec2==2.15.2 +types-aiobotocore-ec2==2.19.0 # via # -c requirements/_base.txt # types-aiobotocore -types-aiobotocore-iam==2.15.2 +types-aiobotocore-iam==2.19.0 # via types-aiobotocore -types-aiobotocore-s3==2.15.2.post1 +types-aiobotocore-s3==2.19.0 # via # -c requirements/_base.txt # types-aiobotocore -types-aiobotocore-ssm==2.15.2 +types-aiobotocore-ssm==2.19.0 # via # -c requirements/_base.txt # types-aiobotocore @@ -328,7 +322,6 @@ typing-extensions==4.12.2 # anyio # aws-sam-translator # cfn-lint - # faker # pydantic # pydantic-core # types-aiobotocore @@ -336,6 +329,8 @@ typing-extensions==4.12.2 # types-aiobotocore-iam # types-aiobotocore-s3 # types-aiobotocore-ssm +tzdata==2025.1 + # via faker urllib3==2.2.3 # via # -c requirements/../../../requirements/constraints.txt @@ -347,6 +342,7 @@ urllib3==2.2.3 werkzeug==3.1.3 # via # flask + # flask-cors # moto wrapt==1.17.0 # via diff --git a/services/autoscaling/requirements/_tools.txt b/services/autoscaling/requirements/_tools.txt index 473aca0228f..6c7b9431172 100644 --- a/services/autoscaling/requirements/_tools.txt +++ b/services/autoscaling/requirements/_tools.txt @@ -1,6 +1,6 @@ astroid==3.3.8 # via pylint -black==24.10.0 +black==25.1.0 # via -r requirements/../../../requirements/devenv.txt build==1.2.2.post1 # via pip-tools @@ -20,15 +20,15 @@ distlib==0.3.9 # via virtualenv filelock==3.17.0 # via virtualenv -identify==2.6.6 +identify==2.6.8 # via pre-commit -isort==5.13.2 +isort==6.0.1 # via # -r requirements/../../../requirements/devenv.txt # pylint mccabe==0.7.0 # via pylint -mypy==1.14.1 +mypy==1.15.0 # via -r requirements/../../../requirements/devenv.txt mypy-extensions==1.0.0 # via @@ -44,7 +44,7 @@ packaging==24.2 # build pathspec==0.12.1 # via black -pip==25.0 +pip==25.0.1 # via pip-tools pip-tools==7.4.1 # via -r requirements/../../../requirements/devenv.txt @@ -55,7 +55,7 @@ platformdirs==4.3.6 # virtualenv pre-commit==4.1.0 # via -r requirements/../../../requirements/devenv.txt -pylint==3.3.3 +pylint==3.3.4 # via -r requirements/../../../requirements/devenv.txt pyproject-hooks==1.2.0 # via @@ -68,9 +68,9 @@ pyyaml==6.0.2 # -c requirements/_test.txt # pre-commit # watchdog -ruff==0.9.3 +ruff==0.9.9 # via -r requirements/../../../requirements/devenv.txt -setuptools==75.8.0 +setuptools==75.8.2 # via # -c requirements/_test.txt # pip-tools @@ -81,7 +81,7 @@ typing-extensions==4.12.2 # -c requirements/_base.txt # -c requirements/_test.txt # mypy -virtualenv==20.29.1 +virtualenv==20.29.2 # via pre-commit watchdog==6.0.0 # via -r requirements/_tools.in diff --git 
a/services/autoscaling/src/simcore_service_autoscaling/core/application.py b/services/autoscaling/src/simcore_service_autoscaling/core/application.py index 6261232bce5..f70a8be583c 100644 --- a/services/autoscaling/src/simcore_service_autoscaling/core/application.py +++ b/services/autoscaling/src/simcore_service_autoscaling/core/application.py @@ -1,7 +1,7 @@ import logging from fastapi import FastAPI -from servicelib.fastapi.tracing import setup_tracing +from servicelib.fastapi.tracing import initialize_tracing from .._meta import ( API_VERSION, @@ -71,7 +71,7 @@ def create_app(settings: ApplicationSettings) -> FastAPI: setup_auto_scaler_background_task(app) setup_buffer_machines_pool_task(app) if app.state.settings.AUTOSCALING_TRACING: - setup_tracing(app, app.state.settings.AUTOSCALING_TRACING, APP_NAME) + initialize_tracing(app, app.state.settings.AUTOSCALING_TRACING, APP_NAME) # ERROR HANDLERS diff --git a/services/autoscaling/tests/manual/docker-compose.yml b/services/autoscaling/tests/manual/docker-compose.yml index 1120fad39be..a28712fb0af 100644 --- a/services/autoscaling/tests/manual/docker-compose.yml +++ b/services/autoscaling/tests/manual/docker-compose.yml @@ -1,6 +1,6 @@ services: rabbit: - image: itisfoundation/rabbitmq:3.11.2-management + image: itisfoundation/rabbitmq:3.13.7-management init: true hostname: "{{.Node.Hostname}}-{{.Service.Name}}-{{.Task.Slot}}" ports: diff --git a/services/autoscaling/tests/unit/conftest.py b/services/autoscaling/tests/unit/conftest.py index 4ae3f0ccd13..9f82282ad11 100644 --- a/services/autoscaling/tests/unit/conftest.py +++ b/services/autoscaling/tests/unit/conftest.py @@ -38,7 +38,9 @@ DockerLabelKey, StandardSimcoreDockerLabels, ) -from models_library.generated_models.docker_rest_api import Availability +from models_library.generated_models.docker_rest_api import ( + Availability, +) from models_library.generated_models.docker_rest_api import Node as DockerNode from models_library.generated_models.docker_rest_api import ( NodeDescription, @@ -757,7 +759,7 @@ async def _() -> None: ), f"service {found_service['Spec']['Name']}'s task is {service_task['Status']['State']}" ctx.logger.info( "%s", - f"service {found_service['Spec']['Name']} is now {service_task['Status']['State']} {'.'*number_of_success['count']}", + f"service {found_service['Spec']['Name']} is now {service_task['Status']['State']} {'.' * number_of_success['count']}", ) number_of_success["count"] += 1 assert (number_of_success["count"] * WAIT_TIME) >= SUCCESS_STABLE_TIME_S @@ -774,7 +776,6 @@ def aws_allowed_ec2_instance_type_names() -> list[InstanceTypeType]: return [ "t2.xlarge", "t2.2xlarge", - "g3.4xlarge", "g4dn.2xlarge", "g4dn.8xlarge", "r5n.4xlarge", diff --git a/services/autoscaling/tests/unit/test_modules_auto_scaling_computational.py b/services/autoscaling/tests/unit/test_modules_auto_scaling_computational.py index bad4215a65e..207296dc1b5 100644 --- a/services/autoscaling/tests/unit/test_modules_auto_scaling_computational.py +++ b/services/autoscaling/tests/unit/test_modules_auto_scaling_computational.py @@ -85,8 +85,7 @@ def minimal_configuration( disable_dynamic_service_background_task: None, disable_buffers_pool_background_task: None, mocked_redis_server: None, -) -> None: - ... +) -> None: ... 
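The conftest hunk above drops g3.4xlarge from the allowed EC2 instance types, so the parametrized scale-up tests below recompute their expected instance counts for the replacement types (r5n.4xlarge: 16 vCPUs, 128 GiB, no GPU; g4dn.2xlarge: 1 GPU, 8 vCPUs, 32 GiB). A minimal sketch of the arithmetic behind those expectations, assuming whole tasks are bin-packed per instance (the helper below is illustrative only, not part of the test suite):

import math

def expected_num_instances(
    num_tasks: int,
    task_cpus: float,
    task_ram_gib: float,
    instance_cpus: int,
    instance_ram_gib: int,
) -> int:
    # whole tasks that fit on one instance, limited by CPU and by RAM
    tasks_per_instance = min(
        int(instance_cpus // task_cpus), int(instance_ram_gib // task_ram_gib)
    )
    return math.ceil(num_tasks / tasks_per_instance)

# "isolve" / "sim4life-light" params: 10 tasks x (5 CPUs, 36 GiB) on
# r5n.4xlarge (16 vCPUs, 128 GiB) -> 3 tasks per instance -> 4 instances
assert expected_num_instances(10, 5, 36, 16, 128) == 4

The g4dn.2xlarge cases below expect 10 instances for 12 one-task-per-instance workloads, presumably because the test settings cap the maximum number of EC2 instances rather than because all tasks fit.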
@pytest.fixture @@ -582,11 +581,11 @@ async def test_cluster_scaling_up_and_down( # noqa: PLR0915 available=with_drain_nodes_labelled, ) # update our fake node - fake_attached_node.spec.labels[ - _OSPARC_SERVICES_READY_DATETIME_LABEL_KEY - ] = mock_docker_tag_node.call_args_list[0][1]["tags"][ - _OSPARC_SERVICES_READY_DATETIME_LABEL_KEY - ] + fake_attached_node.spec.labels[_OSPARC_SERVICES_READY_DATETIME_LABEL_KEY] = ( + mock_docker_tag_node.call_args_list[0][1]["tags"][ + _OSPARC_SERVICES_READY_DATETIME_LABEL_KEY + ] + ) # check the activate time is later than attach time assert arrow.get( mock_docker_tag_node.call_args_list[1][1]["tags"][ @@ -611,11 +610,11 @@ async def test_cluster_scaling_up_and_down( # noqa: PLR0915 available=True, ) # update our fake node - fake_attached_node.spec.labels[ - _OSPARC_SERVICES_READY_DATETIME_LABEL_KEY - ] = mock_docker_tag_node.call_args_list[1][1]["tags"][ - _OSPARC_SERVICES_READY_DATETIME_LABEL_KEY - ] + fake_attached_node.spec.labels[_OSPARC_SERVICES_READY_DATETIME_LABEL_KEY] = ( + mock_docker_tag_node.call_args_list[1][1]["tags"][ + _OSPARC_SERVICES_READY_DATETIME_LABEL_KEY + ] + ) mock_docker_tag_node.reset_mock() mock_docker_set_node_availability.assert_not_called() mock_rabbitmq_post_message.assert_called_once() @@ -756,9 +755,9 @@ async def test_cluster_scaling_up_and_down( # noqa: PLR0915 # we artificially set the node to drain fake_attached_node.spec.availability = Availability.drain fake_attached_node.spec.labels[_OSPARC_SERVICE_READY_LABEL_KEY] = "false" - fake_attached_node.spec.labels[ - _OSPARC_SERVICES_READY_DATETIME_LABEL_KEY - ] = datetime.datetime.now(tz=datetime.UTC).isoformat() + fake_attached_node.spec.labels[_OSPARC_SERVICES_READY_DATETIME_LABEL_KEY] = ( + datetime.datetime.now(tz=datetime.UTC).isoformat() + ) # the node will not be terminated before the timeout triggers assert app_settings.AUTOSCALING_EC2_INSTANCES @@ -960,7 +959,7 @@ async def test_cluster_does_not_scale_up_if_defined_instance_is_not_fitting_reso cpus=5, ram=TypeAdapter(ByteSize).validate_python("36Gib") ), num_tasks=10, - expected_instance_type="g3.4xlarge", + expected_instance_type="r5n.4xlarge", # 16 cpus, 128GiB expected_num_instances=4, ), id="isolve", @@ -1429,12 +1428,12 @@ async def test_long_pending_ec2_is_detected_as_broken_terminated_and_restarted( [ pytest.param( _ScaleUpParams( - imposed_instance_type="g3.4xlarge", # 1 GPU, 16 CPUs, 122GiB + imposed_instance_type="g4dn.2xlarge", # 1 GPU, 8 CPUs, 32GiB task_resources=Resources( - cpus=16, ram=TypeAdapter(ByteSize).validate_python("30Gib") + cpus=8, ram=TypeAdapter(ByteSize).validate_python("15Gib") ), num_tasks=12, - expected_instance_type="g3.4xlarge", # 1 GPU, 16 CPUs, 122GiB + expected_instance_type="g4dn.2xlarge", # 1 GPU, 8 CPUs, 32GiB expected_num_instances=10, ), _ScaleUpParams( @@ -1446,7 +1445,7 @@ async def test_long_pending_ec2_is_detected_as_broken_terminated_and_restarted( expected_instance_type="g4dn.8xlarge", # 32CPUs, 128GiB expected_num_instances=7, ), - id="A batch of services requiring g3.4xlarge and a batch requiring g4dn.8xlarge", + id="A batch of services requiring g4dn.2xlarge and a batch requiring g4dn.8xlarge", ), ], ) diff --git a/services/autoscaling/tests/unit/test_modules_auto_scaling_dynamic.py b/services/autoscaling/tests/unit/test_modules_auto_scaling_dynamic.py index 6bb3a865bbe..6cae25e72ff 100644 --- a/services/autoscaling/tests/unit/test_modules_auto_scaling_dynamic.py +++ b/services/autoscaling/tests/unit/test_modules_auto_scaling_dynamic.py @@ -204,8 +204,7 @@
def minimal_configuration( disable_dynamic_service_background_task: None, disable_buffers_pool_background_task: None, mocked_redis_server: None, -) -> None: - ... +) -> None: ... def _assert_rabbit_autoscaling_message_sent( @@ -626,11 +625,11 @@ async def _assert_wait_for_ec2_instances_running() -> list[InstanceTypeDef]: ) # update our fake node fake_attached_node.spec.labels[_OSPARC_SERVICE_READY_LABEL_KEY] = "true" - fake_attached_node.spec.labels[ - _OSPARC_SERVICES_READY_DATETIME_LABEL_KEY - ] = mock_docker_tag_node.call_args_list[2][1]["tags"][ - _OSPARC_SERVICES_READY_DATETIME_LABEL_KEY - ] + fake_attached_node.spec.labels[_OSPARC_SERVICES_READY_DATETIME_LABEL_KEY] = ( + mock_docker_tag_node.call_args_list[2][1]["tags"][ + _OSPARC_SERVICES_READY_DATETIME_LABEL_KEY + ] + ) # check the activate time is later than attach time assert arrow.get( mock_docker_tag_node.call_args_list[1][1]["tags"][ @@ -661,11 +660,11 @@ async def _assert_wait_for_ec2_instances_running() -> list[InstanceTypeDef]: available=True, ) # update our fake node - fake_attached_node.spec.labels[ - _OSPARC_SERVICES_READY_DATETIME_LABEL_KEY - ] = mock_docker_tag_node.call_args_list[1][1]["tags"][ - _OSPARC_SERVICES_READY_DATETIME_LABEL_KEY - ] + fake_attached_node.spec.labels[_OSPARC_SERVICES_READY_DATETIME_LABEL_KEY] = ( + mock_docker_tag_node.call_args_list[1][1]["tags"][ + _OSPARC_SERVICES_READY_DATETIME_LABEL_KEY + ] + ) mock_docker_tag_node.reset_mock() mock_docker_set_node_availability.assert_not_called() @@ -852,9 +851,9 @@ async def _assert_wait_for_ec2_instances_running() -> list[InstanceTypeDef]: if not with_drain_nodes_labelled: fake_attached_node.spec.availability = Availability.drain fake_attached_node.spec.labels[_OSPARC_SERVICE_READY_LABEL_KEY] = "false" - fake_attached_node.spec.labels[ - _OSPARC_SERVICES_READY_DATETIME_LABEL_KEY - ] = datetime.datetime.now(tz=datetime.UTC).isoformat() + fake_attached_node.spec.labels[_OSPARC_SERVICES_READY_DATETIME_LABEL_KEY] = ( + datetime.datetime.now(tz=datetime.UTC).isoformat() + ) # the node will not be terminated before the timeout triggers assert app_settings.AUTOSCALING_EC2_INSTANCES @@ -1150,7 +1149,7 @@ async def test_cluster_scaling_up_and_down_against_aws( cpus=5, ram=TypeAdapter(ByteSize).validate_python("36Gib") ), num_services=10, - expected_instance_type="g3.4xlarge", # 1 GPU, 16 CPUs, 122GiB + expected_instance_type="r5n.4xlarge", # 16 CPUs, 128GiB expected_num_instances=4, ), id="sim4life-light", @@ -1238,12 +1237,12 @@ async def test_cluster_scaling_up_starts_multiple_instances( [ pytest.param( _ScaleUpParams( - imposed_instance_type="g3.4xlarge", # 1 GPU, 16 CPUs, 122GiB + imposed_instance_type="g4dn.2xlarge", # 1 GPU, 8 CPUs, 32GiB service_resources=Resources( - cpus=16, ram=TypeAdapter(ByteSize).validate_python("30Gib") + cpus=8, ram=TypeAdapter(ByteSize).validate_python("15Gib") ), num_services=12, - expected_instance_type="g3.4xlarge", # 1 GPU, 16 CPUs, 122GiB + expected_instance_type="g4dn.2xlarge", # 1 GPU, 8 CPUs, 32GiB expected_num_instances=10, ), _ScaleUpParams( @@ -2300,9 +2299,9 @@ async def test_warm_buffers_only_replace_hot_buffer_if_service_is_started_issue7 # simulate that one of the hot buffers is not drained anymore and took the pending service random_fake_node = random.choice(fake_hot_buffer_nodes) # noqa: S311 random_fake_node.spec.labels[_OSPARC_SERVICE_READY_LABEL_KEY] = "true" - random_fake_node.spec.labels[ - _OSPARC_SERVICES_READY_DATETIME_LABEL_KEY - ] = arrow.utcnow().isoformat() +
random_fake_node.spec.labels[_OSPARC_SERVICES_READY_DATETIME_LABEL_KEY] = ( + arrow.utcnow().isoformat() + ) random_fake_node.spec.availability = Availability.active # simulate the fact that the warm buffer that just started is not yet visible mock_find_node_with_name_returns_fake_node.return_value = None diff --git a/services/catalog/VERSION b/services/catalog/VERSION index bcaffe19b5b..8adc70fdd9d 100644 --- a/services/catalog/VERSION +++ b/services/catalog/VERSION @@ -1 +1 @@ -0.7.0 \ No newline at end of file +0.8.0 \ No newline at end of file diff --git a/services/catalog/docker/boot.sh b/services/catalog/docker/boot.sh index f7f01aec46e..ae506626b73 100755 --- a/services/catalog/docker/boot.sh +++ b/services/catalog/docker/boot.sh @@ -19,7 +19,7 @@ if [ "${SC_BUILD_TARGET}" = "development" ]; then command -v python | sed 's/^/ /' cd services/catalog - uv pip --quiet --no-cache-dir sync requirements/dev.txt + uv pip --quiet sync requirements/dev.txt cd - echo "$INFO" "PIP :" uv pip list @@ -27,7 +27,7 @@ fi if [ "${SC_BOOT_MODE}" = "debug" ]; then # NOTE: production does NOT pre-installs debugpy - uv pip install --no-cache-dir debugpy + uv pip install debugpy fi # RUNNING application ---------------------------------------- diff --git a/services/catalog/openapi.json b/services/catalog/openapi.json index 3389e912cf8..9fbd4987898 100644 --- a/services/catalog/openapi.json +++ b/services/catalog/openapi.json @@ -3,7 +3,7 @@ "info": { "title": "simcore-service-catalog", "description": "Manages and maintains a catalog of all published components (e.g. macro-algorithms, scripts, etc)", - "version": "0.7.0" + "version": "0.8.0" }, "paths": { "/": { @@ -603,92 +603,6 @@ } } } - }, - "patch": { - "tags": [ - "services" - ], - "summary": "Update Service", - "operationId": "update_service_v0_services__service_key___service_version__patch", - "parameters": [ - { - "name": "service_key", - "in": "path", - "required": true, - "schema": { - "type": "string", - "pattern": "^simcore/services/((comp|dynamic|frontend))/([a-z0-9][a-z0-9_.-]*/)*([a-z0-9-_]+[a-z0-9])$", - "title": "Service Key" - } - }, - { - "name": "service_version", - "in": "path", - "required": true, - "schema": { - "type": "string", - "pattern": "^(0|[1-9]\\d*)(\\.(0|[1-9]\\d*)){2}(-(0|[1-9]\\d*|\\d*[-a-zA-Z][-\\da-zA-Z]*)(\\.(0|[1-9]\\d*|\\d*[-a-zA-Z][-\\da-zA-Z]*))*)?(\\+[-\\da-zA-Z]+(\\.[-\\da-zA-Z-]+)*)?$", - "title": "Service Version" - } - }, - { - "name": "user_id", - "in": "query", - "required": true, - "schema": { - "type": "integer", - "title": "User Id" - } - }, - { - "name": "x-simcore-products-name", - "in": "header", - "required": false, - "schema": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "title": "X-Simcore-Products-Name" - } - } - ], - "requestBody": { - "required": true, - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/ServiceUpdate" - } - } - } - }, - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/ServiceGet" - } - } - } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/HTTPValidationError" - } - } - } - } - } } } }, @@ -2693,6 +2607,18 @@ "description": "Display name: short, human readable name for the node" }, "thumbnail": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "title": "Thumbnail", + "description": "URL to the service 
thumbnail" + }, + "icon": { "anyOf": [ { "type": "string", @@ -2704,8 +2630,8 @@ "type": "null" } ], - "title": "Thumbnail", - "description": "url to the thumbnail" + "title": "Icon", + "description": "URL to the service icon" }, "description": { "type": "string", @@ -2728,7 +2654,7 @@ } ], "title": "Version Display", - "description": "A user-friendly or marketing name for the release. This can be used to reference the release in a more readable and recognizable format, such as 'Matterhorn Release,' 'Spring Update,' or 'Holiday Edition.' This name is not used for version comparison but is useful for communication and documentation purposes." + "description": "A user-friendly or marketing name for the release.This can be used to reference the release in a more readable and recognizable format, such as 'Matterhorn Release,' 'Spring Update,' or 'Holiday Edition.' This name is not used for version comparison but is useful for communication and documentation purposes." }, "deprecated": { "anyOf": [ @@ -3368,193 +3294,6 @@ ], "title": "ServiceType" }, - "ServiceUpdate": { - "properties": { - "accessRights": { - "anyOf": [ - { - "additionalProperties": { - "$ref": "#/components/schemas/ServiceGroupAccessRights" - }, - "type": "object" - }, - { - "type": "null" - } - ], - "title": "Accessrights", - "description": "service access rights per group id" - }, - "name": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "title": "Name" - }, - "thumbnail": { - "anyOf": [ - { - "type": "string", - "maxLength": 2083, - "minLength": 1, - "format": "uri" - }, - { - "type": "null" - } - ], - "title": "Thumbnail" - }, - "description": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "title": "Description" - }, - "description_ui": { - "type": "boolean", - "title": "Description Ui", - "default": false - }, - "version_display": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "title": "Version Display" - }, - "deprecated": { - "anyOf": [ - { - "type": "string", - "format": "date-time" - }, - { - "type": "null" - } - ], - "title": "Deprecated", - "description": "Owner can set the date to retire the service. 
Three possibilities:If None, the service is marked as `published`;If now=deprecated, the service is retired" - }, - "classifiers": { - "anyOf": [ - { - "items": { - "type": "string" - }, - "type": "array" - }, - { - "type": "null" - } - ], - "title": "Classifiers" - }, - "quality": { - "type": "object", - "title": "Quality", - "default": {} - } - }, - "type": "object", - "required": [ - "name", - "thumbnail", - "description", - "classifiers" - ], - "title": "ServiceUpdate", - "example": { - "accessRights": { - "1": { - "execute_access": false, - "write_access": false - }, - "2": { - "execute_access": true, - "write_access": true - }, - "44": { - "execute_access": false, - "write_access": false - } - }, - "classifiers": [ - "RRID:SCR_018997", - "RRID:SCR_019001" - ], - "description": "An interesting service that does something", - "name": "My Human Readable Service Name", - "quality": { - "annotations": { - "certificationLink": "", - "certificationStatus": "Uncertified", - "documentation": "", - "limitations": "", - "purpose": "", - "standards": "", - "vandv": "" - }, - "enabled": true, - "tsr": { - "r01": { - "level": 3, - "references": "" - }, - "r02": { - "level": 2, - "references": "" - }, - "r03": { - "level": 0, - "references": "" - }, - "r04": { - "level": 0, - "references": "" - }, - "r05": { - "level": 2, - "references": "" - }, - "r06": { - "level": 0, - "references": "" - }, - "r07": { - "level": 0, - "references": "" - }, - "r08": { - "level": 1, - "references": "" - }, - "r09": { - "level": 0, - "references": "" - }, - "r10": { - "level": 0, - "references": "" - } - } - } - } - }, "Spread": { "properties": { "SpreadDescriptor": { diff --git a/services/catalog/requirements/_base.txt b/services/catalog/requirements/_base.txt index e8500375a38..f003df17d88 100644 --- a/services/catalog/requirements/_base.txt +++ b/services/catalog/requirements/_base.txt @@ -1,4 +1,4 @@ -aio-pika==9.5.3 +aio-pika==9.5.5 # via -r requirements/../../../packages/service-library/requirements/_base.in aiocache==0.12.3 # via @@ -10,9 +10,9 @@ aiodocker==0.24.0 # via -r requirements/../../../packages/service-library/requirements/_base.in aiofiles==24.1.0 # via -r requirements/../../../packages/service-library/requirements/_base.in -aiohappyeyeballs==2.4.4 +aiohappyeyeballs==2.6.1 # via aiohttp -aiohttp==3.11.10 +aiohttp==3.11.13 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -31,13 +31,13 @@ aiohttp==3.11.10 # aiodocker aiormq==6.8.1 # via aio-pika -aiosignal==1.3.1 +aiosignal==1.3.2 # via aiohttp -alembic==1.14.0 +alembic==1.15.1 # via -r requirements/../../../packages/postgres-database/requirements/_base.in annotated-types==0.7.0 # via pydantic -anyio==4.7.0 +anyio==4.8.0 # via # fast-depends # faststream @@ -55,12 +55,12 @@ asyncpg==0.30.0 # via # -r requirements/_base.in # sqlalchemy -attrs==24.2.0 +attrs==25.2.0 # via # aiohttp # jsonschema # referencing -certifi==2024.8.30 +certifi==2025.1.31 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -79,14 +79,14 @@ certifi==2024.8.30 # httpcore # httpx # requests -charset-normalizer==3.4.0 +charset-normalizer==3.4.1 # via requests 
-click==8.1.7 +click==8.1.8 # via # rich-toolkit # typer # uvicorn -deprecated==1.2.15 +deprecated==1.2.18 # via # opentelemetry-api # opentelemetry-exporter-otlp-proto-grpc @@ -102,25 +102,28 @@ exceptiongroup==1.2.2 # via aio-pika fast-depends==2.4.12 # via faststream -fastapi==0.115.6 +fastapi==0.115.11 # via # -r requirements/../../../packages/service-library/requirements/_fastapi.in # -r requirements/_base.in -fastapi-cli==0.0.6 + # fastapi-lifespan-manager +fastapi-cli==0.0.7 # via fastapi -faststream==0.5.33 +fastapi-lifespan-manager==0.1.4 + # via -r requirements/../../../packages/service-library/requirements/_fastapi.in +faststream==0.5.35 # via -r requirements/../../../packages/service-library/requirements/_base.in frozenlist==1.5.0 # via # aiohttp # aiosignal -googleapis-common-protos==1.66.0 +googleapis-common-protos==1.69.1 # via # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http greenlet==3.1.1 # via sqlalchemy -grpcio==1.68.1 +grpcio==1.71.0 # via opentelemetry-exporter-otlp-proto-grpc h11==0.14.0 # via @@ -130,7 +133,7 @@ httpcore==1.0.7 # via httpx httptools==0.6.4 # via uvicorn -httpx==0.27.2 +httpx==0.28.1 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -156,11 +159,11 @@ idna==3.10 # httpx # requests # yarl -importlib-metadata==8.5.0 +importlib-metadata==8.6.1 # via opentelemetry-api itsdangerous==2.2.0 # via fastapi -jinja2==3.1.4 +jinja2==3.1.6 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -183,7 +186,7 @@ jsonschema==4.23.0 # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in jsonschema-specifications==2024.10.1 # via jsonschema -mako==1.3.7 +mako==1.3.9 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -214,7 +217,7 @@ multidict==6.1.0 # via # aiohttp # yarl -opentelemetry-api==1.28.2 +opentelemetry-api==1.31.0 # via # -r requirements/../../../packages/service-library/requirements/_base.in # opentelemetry-exporter-otlp-proto-grpc @@ -229,17 +232,17 @@ opentelemetry-api==1.28.2 # opentelemetry-instrumentation-requests # opentelemetry-sdk # opentelemetry-semantic-conventions -opentelemetry-exporter-otlp==1.28.2 +opentelemetry-exporter-otlp==1.31.0 # via -r requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-exporter-otlp-proto-common==1.28.2 +opentelemetry-exporter-otlp-proto-common==1.31.0 # via # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http -opentelemetry-exporter-otlp-proto-grpc==1.28.2 +opentelemetry-exporter-otlp-proto-grpc==1.31.0 # via opentelemetry-exporter-otlp -opentelemetry-exporter-otlp-proto-http==1.28.2 +opentelemetry-exporter-otlp-proto-http==1.31.0 # via opentelemetry-exporter-otlp -opentelemetry-instrumentation==0.49b2 +opentelemetry-instrumentation==0.52b0 # via # opentelemetry-instrumentation-asgi # opentelemetry-instrumentation-asyncpg @@ -248,31 +251,31 @@ 
opentelemetry-instrumentation==0.49b2 # opentelemetry-instrumentation-logging # opentelemetry-instrumentation-redis # opentelemetry-instrumentation-requests -opentelemetry-instrumentation-asgi==0.49b2 +opentelemetry-instrumentation-asgi==0.52b0 # via opentelemetry-instrumentation-fastapi -opentelemetry-instrumentation-asyncpg==0.49b2 +opentelemetry-instrumentation-asyncpg==0.52b0 # via -r requirements/../../../packages/postgres-database/requirements/_base.in -opentelemetry-instrumentation-fastapi==0.49b2 +opentelemetry-instrumentation-fastapi==0.52b0 # via -r requirements/../../../packages/service-library/requirements/_fastapi.in -opentelemetry-instrumentation-httpx==0.49b2 +opentelemetry-instrumentation-httpx==0.52b0 # via -r requirements/../../../packages/service-library/requirements/_fastapi.in -opentelemetry-instrumentation-logging==0.49b2 +opentelemetry-instrumentation-logging==0.52b0 # via -r requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-instrumentation-redis==0.49b2 +opentelemetry-instrumentation-redis==0.52b0 # via -r requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-instrumentation-requests==0.49b2 +opentelemetry-instrumentation-requests==0.52b0 # via -r requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-proto==1.28.2 +opentelemetry-proto==1.31.0 # via # opentelemetry-exporter-otlp-proto-common # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http -opentelemetry-sdk==1.28.2 +opentelemetry-sdk==1.31.0 # via # -r requirements/../../../packages/service-library/requirements/_base.in # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http -opentelemetry-semantic-conventions==0.49b2 +opentelemetry-semantic-conventions==0.52b0 # via # opentelemetry-instrumentation # opentelemetry-instrumentation-asgi @@ -282,13 +285,13 @@ opentelemetry-semantic-conventions==0.49b2 # opentelemetry-instrumentation-redis # opentelemetry-instrumentation-requests # opentelemetry-sdk -opentelemetry-util-http==0.49b2 +opentelemetry-util-http==0.52b0 # via # opentelemetry-instrumentation-asgi # opentelemetry-instrumentation-fastapi # opentelemetry-instrumentation-httpx # opentelemetry-instrumentation-requests -orjson==3.10.12 +orjson==3.10.15 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -324,21 +327,23 @@ prometheus-client==0.21.1 # via # -r requirements/../../../packages/service-library/requirements/_fastapi.in # prometheus-fastapi-instrumentator -prometheus-fastapi-instrumentator==7.0.0 +prometheus-fastapi-instrumentator==7.0.2 # via -r requirements/../../../packages/service-library/requirements/_fastapi.in -propcache==0.2.1 +propcache==0.3.0 # via # aiohttp # yarl -protobuf==5.29.1 +protobuf==5.29.3 # via # googleapis-common-protos # opentelemetry-proto -psutil==6.1.0 +psutil==7.0.0 # via -r requirements/../../../packages/service-library/requirements/_base.in psycopg2-binary==2.9.10 # via sqlalchemy -pydantic==2.10.3 +pycryptodome==3.21.0 + # via stream-zip +pydantic==2.10.6 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -372,9 +377,9 
@@ pydantic==2.10.3 # fastapi # pydantic-extra-types # pydantic-settings -pydantic-core==2.27.1 +pydantic-core==2.27.2 # via pydantic -pydantic-extra-types==2.10.0 +pydantic-extra-types==2.10.3 # via # -r requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in @@ -386,16 +391,30 @@ pydantic-extra-types==2.10.0 # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in # fastapi -pydantic-settings==2.6.1 +pydantic-settings==2.7.0 # via + # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt # -r requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/settings-library/requirements/_base.in # fastapi -pygments==2.18.0 +pygments==2.19.1 # via rich -pyinstrument==5.0.0 +pyinstrument==5.0.1 # via -r requirements/../../../packages/service-library/requirements/_base.in python-dateutil==2.9.0.post0 # via arrow @@ -403,7 +422,7 @@ python-dotenv==1.0.1 # via # pydantic-settings # uvicorn -python-multipart==0.0.19 +python-multipart==0.0.20 # via fastapi pyyaml==6.0.2 # via @@ -445,6 +464,20 @@ redis==5.2.1 # aiocache referencing==0.35.1 # via + # -c 
requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt # jsonschema # jsonschema-specifications requests==2.32.3 @@ -455,9 +488,9 @@ rich==13.9.4 # -r requirements/../../../packages/settings-library/requirements/_base.in # rich-toolkit # typer -rich-toolkit==0.12.0 +rich-toolkit==0.13.2 # via fastapi-cli -rpds-py==0.22.3 +rpds-py==0.23.1 # via # jsonschema # referencing @@ -466,9 +499,7 @@ shellingham==1.5.4 six==1.17.0 # via python-dateutil sniffio==1.3.1 - # via - # anyio - # httpx + # via anyio sqlalchemy==1.4.54 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -487,7 +518,7 @@ sqlalchemy==1.4.54 # -c requirements/../../../requirements/constraints.txt # -r requirements/../../../packages/postgres-database/requirements/_base.in # alembic -starlette==0.41.3 +starlette==0.46.1 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -505,6 +536,8 @@ starlette==0.41.3 # -c requirements/../../../requirements/constraints.txt # fastapi # prometheus-fastapi-instrumentator +stream-zip==0.0.83 + # via -r requirements/../../../packages/service-library/requirements/_base.in tenacity==9.0.0 # via # -r requirements/../../../packages/service-library/requirements/_base.in @@ -513,7 +546,7 @@ toolz==1.0.0 # via -r requirements/../../../packages/service-library/requirements/_base.in tqdm==4.67.1 # via -r requirements/../../../packages/service-library/requirements/_base.in -typer==0.15.1 +typer==0.15.2 # via # -r 
requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/settings-library/requirements/_base.in @@ -550,7 +583,7 @@ ujson==5.10.0 # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt # fastapi -urllib3==2.2.3 +urllib3==2.3.0 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -567,18 +600,18 @@ urllib3==2.2.3 # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt # requests -uvicorn==0.32.1 +uvicorn==0.34.0 # via # -r requirements/../../../packages/service-library/requirements/_fastapi.in # fastapi # fastapi-cli uvloop==0.21.0 # via uvicorn -watchfiles==1.0.0 +watchfiles==1.0.4 # via uvicorn -websockets==14.1 +websockets==15.0.1 # via uvicorn -wrapt==1.17.0 +wrapt==1.17.2 # via # deprecated # opentelemetry-instrumentation diff --git a/services/catalog/requirements/_test.txt b/services/catalog/requirements/_test.txt index be2c436b3a4..4507c72f4c6 100644 --- a/services/catalog/requirements/_test.txt +++ b/services/catalog/requirements/_test.txt @@ -1,53 +1,53 @@ -aiohappyeyeballs==2.4.4 +aiohappyeyeballs==2.6.1 # via # -c requirements/_base.txt # aiohttp -aiohttp==3.11.10 +aiohttp==3.11.13 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt # pytest-aiohttp -aiosignal==1.3.1 +aiosignal==1.3.2 # via # -c requirements/_base.txt # aiohttp -alembic==1.14.0 +alembic==1.15.1 # via # -c requirements/_base.txt # -r requirements/_test.in -anyio==4.7.0 +anyio==4.8.0 # via # -c requirements/_base.txt # httpx asgi-lifespan==2.1.0 # via -r requirements/_test.in -attrs==24.2.0 +attrs==25.2.0 # via # -c requirements/_base.txt # aiohttp # jsonschema # pytest-docker # referencing -certifi==2024.8.30 +certifi==2025.1.31 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt # httpcore # httpx # requests -charset-normalizer==3.4.0 +charset-normalizer==3.4.1 # via # -c requirements/_base.txt # requests -click==8.1.7 +click==8.1.8 # via # -c requirements/_base.txt # -r requirements/_test.in -coverage==7.6.10 +coverage==7.6.12 # via pytest-cov docker==7.1.0 # via -r requirements/_test.in -faker==35.0.0 +faker==37.0.0 # via -r requirements/_test.in frozenlist==1.5.0 # via @@ -66,7 +66,7 @@ httpcore==1.0.7 # via # -c requirements/_base.txt # httpx -httpx==0.27.2 +httpx==0.28.1 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt @@ -88,7 +88,7 @@ jsonschema-specifications==2024.10.1 # via # -c requirements/_base.txt # jsonschema -mako==1.3.7 +mako==1.3.9 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt @@ -102,7 +102,7 @@ multidict==6.1.0 # -c requirements/_base.txt # aiohttp # yarl -mypy==1.14.1 +mypy==1.15.0 # via sqlalchemy mypy-extensions==1.0.0 # via mypy @@ -112,7 +112,7 @@ packaging==24.2 # pytest pluggy==1.5.0 # via pytest -propcache==0.2.1 +propcache==0.3.0 # via # -c requirements/_base.txt # aiohttp @@ -121,7 +121,7 @@ ptvsd==4.3.2 # via -r requirements/_test.in py-cpuinfo==9.0.0 # via pytest-benchmark -pytest==8.3.4 +pytest==8.3.5 # via # -r 
requirements/_test.in # pytest-aiohttp @@ -140,16 +140,12 @@ pytest-benchmark==5.1.0 # via -r requirements/_test.in pytest-cov==6.0.0 # via -r requirements/_test.in -pytest-docker==3.1.1 +pytest-docker==3.2.0 # via -r requirements/_test.in pytest-mock==3.14.0 # via -r requirements/_test.in pytest-runner==6.0.1 # via -r requirements/_test.in -python-dateutil==2.9.0.post0 - # via - # -c requirements/_base.txt - # faker referencing==0.35.1 # via # -c requirements/../../../requirements/constraints.txt @@ -162,21 +158,16 @@ requests==2.32.3 # docker respx==0.22.0 # via -r requirements/_test.in -rpds-py==0.22.3 +rpds-py==0.23.1 # via # -c requirements/_base.txt # jsonschema # referencing -six==1.17.0 - # via - # -c requirements/_base.txt - # python-dateutil sniffio==1.3.1 # via # -c requirements/_base.txt # anyio # asgi-lifespan - # httpx sqlalchemy==1.4.54 # via # -c requirements/../../../requirements/constraints.txt @@ -194,10 +185,11 @@ typing-extensions==4.12.2 # -c requirements/_base.txt # alembic # anyio - # faker # mypy # sqlalchemy2-stubs -urllib3==2.2.3 +tzdata==2025.1 + # via faker +urllib3==2.3.0 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt diff --git a/services/catalog/requirements/_tools.txt b/services/catalog/requirements/_tools.txt index d3e4f84003d..4ef3f43c67d 100644 --- a/services/catalog/requirements/_tools.txt +++ b/services/catalog/requirements/_tools.txt @@ -1,6 +1,6 @@ -astroid==3.3.8 +astroid==3.3.9 # via pylint -black==24.10.0 +black==25.1.0 # via -r requirements/../../../requirements/devenv.txt build==1.2.2.post1 # via pip-tools @@ -8,7 +8,7 @@ bump2version==1.0.1 # via -r requirements/../../../requirements/devenv.txt cfgv==3.4.0 # via pre-commit -click==8.1.7 +click==8.1.8 # via # -c requirements/_base.txt # -c requirements/_test.txt @@ -20,15 +20,15 @@ distlib==0.3.9 # via virtualenv filelock==3.17.0 # via virtualenv -identify==2.6.6 +identify==2.6.9 # via pre-commit -isort==5.13.2 +isort==6.0.1 # via # -r requirements/../../../requirements/devenv.txt # pylint mccabe==0.7.0 # via pylint -mypy==1.14.1 +mypy==1.15.0 # via # -c requirements/_test.txt # -r requirements/../../../requirements/devenv.txt @@ -47,7 +47,7 @@ packaging==24.2 # build pathspec==0.12.1 # via black -pip==25.0 +pip==25.0.1 # via pip-tools pip-tools==7.4.1 # via -r requirements/../../../requirements/devenv.txt @@ -58,7 +58,7 @@ platformdirs==4.3.6 # virtualenv pre-commit==4.1.0 # via -r requirements/../../../requirements/devenv.txt -pylint==3.3.3 +pylint==3.3.5 # via -r requirements/../../../requirements/devenv.txt pyproject-hooks==1.2.0 # via @@ -69,9 +69,9 @@ pyyaml==6.0.2 # -c requirements/_base.txt # pre-commit # watchdog -ruff==0.9.3 +ruff==0.9.10 # via -r requirements/../../../requirements/devenv.txt -setuptools==75.8.0 +setuptools==76.0.0 # via pip-tools tomlkit==0.13.2 # via pylint @@ -80,7 +80,7 @@ typing-extensions==4.12.2 # -c requirements/_base.txt # -c requirements/_test.txt # mypy -virtualenv==20.29.1 +virtualenv==20.29.3 # via pre-commit watchdog==6.0.0 # via -r requirements/_tools.in diff --git a/services/catalog/setup.cfg b/services/catalog/setup.cfg index 401431f420a..031198b2fdd 100644 --- a/services/catalog/setup.cfg +++ b/services/catalog/setup.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 0.7.0 +current_version = 0.8.0 commit = True message = services/catalog version: {current_version} → {new_version} tag = False diff --git a/services/catalog/src/simcore_service_catalog/api/rest/_services.py 
b/services/catalog/src/simcore_service_catalog/api/rest/_services.py index 78362d63733..4f77c0f8a49 100644 --- a/services/catalog/src/simcore_service_catalog/api/rest/_services.py +++ b/services/catalog/src/simcore_service_catalog/api/rest/_services.py @@ -2,13 +2,12 @@ import asyncio import logging -import urllib.parse from typing import Annotated, Any, TypeAlias, cast from aiocache import cached # type: ignore[import-untyped] from fastapi import APIRouter, Depends, Header, HTTPException, status -from models_library.api_schemas_catalog.services import ServiceGet, ServiceUpdate -from models_library.services import ServiceKey, ServiceType, ServiceVersion +from models_library.api_schemas_catalog.services import ServiceGet +from models_library.services import ServiceType from models_library.services_authoring import Author from models_library.services_metadata_published import ServiceMetaDataPublished from pydantic import ValidationError @@ -23,9 +22,8 @@ ) from ...db.repositories.groups import GroupsRepository from ...db.repositories.services import ServicesRepository -from ...models.services_db import ServiceAccessRightsAtDB, ServiceMetaDataAtDB +from ...models.services_db import ServiceAccessRightsAtDB, ServiceMetaDataDBGet from ...services.director import DirectorApi -from ...services.function_services import is_function_service from ..dependencies.database import get_repository from ..dependencies.director import get_director_api from ..dependencies.services import get_service_from_manifest @@ -37,7 +35,7 @@ def _compose_service_details( service_in_registry: dict[str, Any], # published part - service_in_db: ServiceMetaDataAtDB, # editable part + service_in_db: ServiceMetaDataDBGet, # editable part service_access_rights_in_db: list[ServiceAccessRightsAtDB], service_owner: str | None, ) -> ServiceGet | None: @@ -154,7 +152,7 @@ async def cached_registry_services() -> dict[str, Any]: services_owner_emails, ) = await asyncio.gather( cached_registry_services(), - services_repo.list_services_access_rights( + services_repo.batch_get_services_access_rights( key_versions=services_in_db, product_name=x_simcore_products_name, ), @@ -266,121 +264,3 @@ async def get_service( | service_in_db.model_dump(exclude_unset=True, exclude={"owner"}) ) return service_data - - -@router.patch( - "/{service_key:path}/{service_version}", - response_model=ServiceGet, - **RESPONSE_MODEL_POLICY, -) -async def update_service( - # pylint: disable=too-many-arguments - user_id: int, - service_key: ServiceKey, - service_version: ServiceVersion, - updated_service: ServiceUpdate, - director_client: Annotated[DirectorApi, Depends(get_director_api)], - groups_repository: Annotated[ - GroupsRepository, Depends(get_repository(GroupsRepository)) - ], - services_repo: Annotated[ - ServicesRepository, Depends(get_repository(ServicesRepository)) - ], - x_simcore_products_name: Annotated[str | None, Header()] = None, -): - if is_function_service(service_key): - # NOTE: this is a temporary decision after discussing with OM - raise HTTPException( - status_code=status.HTTP_403_FORBIDDEN, - detail="Cannot update front-end services", - ) - - # check the service exists - await director_client.get( - f"/services/{urllib.parse.quote_plus(service_key)}/{service_version}" - ) - # the director client already raises an exception if not found - - # get the user groups - user_groups = await groups_repository.list_user_groups(user_id) - if not user_groups: - # deny access - raise HTTPException( - status_code=status.HTTP_403_FORBIDDEN, - detail="You 
have unsufficient rights to access the service", - ) - # check the user has write access to this service - writable_service = await services_repo.get_service( - service_key, - service_version, - gids=[group.gid for group in user_groups], - write_access=True, - product_name=x_simcore_products_name, - ) - if not writable_service: - # deny access - raise HTTPException( - status_code=status.HTTP_403_FORBIDDEN, - detail="You have unsufficient rights to modify the service", - ) - - # let's modify the service then - await services_repo.update_service( - ServiceMetaDataAtDB( - key=service_key, - version=service_version, - **updated_service.model_dump(exclude_unset=True), - ) - ) - # let's modify the service access rights (they can be added/removed/modified) - current_gids_in_db = [ - r.gid - for r in await services_repo.get_service_access_rights( - service_key, service_version, product_name=x_simcore_products_name - ) - ] - - if updated_service.access_rights: - # start by updating/inserting new entries - assert x_simcore_products_name # nosec - new_access_rights = [ - ServiceAccessRightsAtDB( - key=service_key, - version=service_version, - gid=gid, - execute_access=rights.execute_access, - write_access=rights.write_access, - product_name=x_simcore_products_name, - ) - for gid, rights in updated_service.access_rights.items() - ] - await services_repo.upsert_service_access_rights(new_access_rights) - - # then delete the ones that were removed - removed_gids = [ - gid - for gid in current_gids_in_db - if gid not in updated_service.access_rights - ] - deleted_access_rights = [ - ServiceAccessRightsAtDB( - key=service_key, - version=service_version, - gid=gid, - product_name=x_simcore_products_name, - ) - for gid in removed_gids - ] - await services_repo.delete_service_access_rights(deleted_access_rights) - - # now return the service - assert x_simcore_products_name # nosec - return await get_service( - user_id=user_id, - service_in_manifest=await get_service_from_manifest( - service_key, service_version, director_client - ), - groups_repository=groups_repository, - services_repo=services_repo, - x_simcore_products_name=x_simcore_products_name, - ) diff --git a/services/catalog/src/simcore_service_catalog/api/rpc/_services.py b/services/catalog/src/simcore_service_catalog/api/rpc/_services.py index cb102301ecc..7568b18e351 100644 --- a/services/catalog/src/simcore_service_catalog/api/rpc/_services.py +++ b/services/catalog/src/simcore_service_catalog/api/rpc/_services.py @@ -4,6 +4,7 @@ from fastapi import FastAPI from models_library.api_schemas_catalog.services import ( + MyServiceGet, PageRpcServicesGetV2, ServiceGetV2, ServiceUpdateV2, @@ -12,7 +13,7 @@ from models_library.rpc_pagination import DEFAULT_NUMBER_OF_ITEMS_PER_PAGE, PageLimitInt from models_library.services_types import ServiceKey, ServiceVersion from models_library.users import UserID -from pydantic import NonNegativeInt +from pydantic import NonNegativeInt, ValidationError, validate_call from pyinstrument import Profiler from servicelib.logging_utils import log_decorator from servicelib.rabbitmq import RPCRouter @@ -20,6 +21,7 @@ CatalogForbiddenError, CatalogItemNotFoundError, ) +from simcore_service_catalog.db.repositories.groups import GroupsRepository from ...db.repositories.services import ServicesRepository from ...services import services_api @@ -51,9 +53,9 @@ async def _wrapper(app: FastAPI, **kwargs): return _wrapper -@router.expose(reraise_if_error_type=(CatalogForbiddenError,)) -@log_decorator(_logger, level=logging.DEBUG) 
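The decorator changes in this RPC hunk pair pydantic's @validate_call with ValidationError in reraise_if_error_type, so malformed payloads are rejected at the service boundary and reported to the RPC caller instead of failing inside the handler. A minimal sketch, assuming pydantic v2; FakeApp is an illustrative stand-in for fastapi.FastAPI, which is why arbitrary_types_allowed is needed:

import asyncio
from pydantic import ValidationError, validate_call

class FakeApp:  # stand-in for fastapi.FastAPI, a type pydantic cannot validate
    pass

@validate_call(config={"arbitrary_types_allowed": True})
async def get_service(app: FakeApp, *, user_id: int, service_key: str) -> str:
    return f"{service_key} requested by user {user_id}"

async def _demo() -> None:
    print(await get_service(FakeApp(), user_id=42, service_key="simcore/services/comp/demo"))
    try:
        await get_service(FakeApp(), user_id="not-an-int", service_key="demo")  # type: ignore[arg-type]
    except ValidationError as err:
        print(f"rejected before the handler body ran: {err.error_count()} error(s)")

asyncio.run(_demo())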
+@router.expose(reraise_if_error_type=(CatalogForbiddenError, ValidationError)) @_profile_rpc_call +@validate_call(config={"arbitrary_types_allowed": True}) async def list_services_paginated( app: FastAPI, *, @@ -64,7 +66,7 @@ async def list_services_paginated( ) -> PageRpcServicesGetV2: assert app.state.engine # nosec - total_count, items = await services_api.list_services_paginated( + total_count, items = await services_api.list_latest_services( repo=ServicesRepository(app.state.engine), director_api=get_director_api(app), product_name=product_name, @@ -87,9 +89,16 @@ async def list_services_paginated( ) -@router.expose(reraise_if_error_type=(CatalogItemNotFoundError, CatalogForbiddenError)) +@router.expose( + reraise_if_error_type=( + CatalogItemNotFoundError, + CatalogForbiddenError, + ValidationError, + ) +) @log_decorator(_logger, level=logging.DEBUG) @_profile_rpc_call +@validate_call(config={"arbitrary_types_allowed": True}) async def get_service( app: FastAPI, *, @@ -115,8 +124,15 @@ async def get_service( return service -@router.expose(reraise_if_error_type=(CatalogItemNotFoundError, CatalogForbiddenError)) +@router.expose( + reraise_if_error_type=( + CatalogItemNotFoundError, + CatalogForbiddenError, + ValidationError, + ) +) @log_decorator(_logger, level=logging.DEBUG) +@validate_call(config={"arbitrary_types_allowed": True}) async def update_service( app: FastAPI, *, @@ -146,8 +162,15 @@ async def update_service( return service -@router.expose(reraise_if_error_type=(CatalogItemNotFoundError, CatalogForbiddenError)) +@router.expose( + reraise_if_error_type=( + CatalogItemNotFoundError, + CatalogForbiddenError, + ValidationError, + ) +) @log_decorator(_logger, level=logging.DEBUG) +@validate_call(config={"arbitrary_types_allowed": True}) async def check_for_service( app: FastAPI, *, @@ -166,3 +189,33 @@ async def check_for_service( service_key=service_key, service_version=service_version, ) + + +@router.expose(reraise_if_error_type=(CatalogForbiddenError, ValidationError)) +@log_decorator(_logger, level=logging.DEBUG) +@validate_call(config={"arbitrary_types_allowed": True}) +async def batch_get_my_services( + app: FastAPI, + *, + product_name: ProductName, + user_id: UserID, + ids: list[ + tuple[ + ServiceKey, + ServiceVersion, + ] + ], +) -> list[MyServiceGet]: + assert app.state.engine # nosec + + services = await services_api.batch_get_my_services( + repo=ServicesRepository(app.state.engine), + groups_repo=GroupsRepository(app.state.engine), + product_name=product_name, + user_id=user_id, + ids=ids, + ) + + assert [(sv.key, sv.release.version) for sv in services] == ids # nosec + + return services diff --git a/services/catalog/src/simcore_service_catalog/core/application.py b/services/catalog/src/simcore_service_catalog/core/application.py index a736891c736..7bedab76a31 100644 --- a/services/catalog/src/simcore_service_catalog/core/application.py +++ b/services/catalog/src/simcore_service_catalog/core/application.py @@ -5,11 +5,11 @@ from models_library.basic_types import BootModeEnum from servicelib.fastapi import timing_middleware from servicelib.fastapi.openapi import override_fastapi_openapi_method -from servicelib.fastapi.profiler_middleware import ProfilerMiddleware +from servicelib.fastapi.profiler import initialize_profiler from servicelib.fastapi.prometheus_instrumentation import ( setup_prometheus_instrumentation, ) -from servicelib.fastapi.tracing import setup_tracing +from servicelib.fastapi.tracing import initialize_tracing from starlette.middleware.base import 
BaseHTTPMiddleware from .._meta import API_VERSION, API_VTAG, APP_NAME, PROJECT_NAME, SUMMARY @@ -23,8 +23,25 @@ _logger = logging.getLogger(__name__) +_LOG_LEVEL_STEP = logging.CRITICAL - logging.ERROR +_NOISY_LOGGERS = ( + "aio_pika", + "aiobotocore", + "aiormq", + "botocore", + "httpcore", + "werkzeug", +) + def create_app(settings: ApplicationSettings | None = None) -> FastAPI: + # keep mostly quiet noisy loggers + quiet_level: int = max( + min(logging.root.level + _LOG_LEVEL_STEP, logging.CRITICAL), logging.WARNING + ) + for name in _NOISY_LOGGERS: + logging.getLogger(name).setLevel(quiet_level) + if settings is None: settings = ApplicationSettings.create_from_envs() @@ -47,7 +64,7 @@ def create_app(settings: ApplicationSettings | None = None) -> FastAPI: app.state.settings = settings if settings.CATALOG_TRACING: - setup_tracing(app, settings.CATALOG_TRACING, APP_NAME) + initialize_tracing(app, settings.CATALOG_TRACING, APP_NAME) # STARTUP-EVENT app.add_event_handler("startup", create_on_startup(app)) @@ -61,7 +78,7 @@ def create_app(settings: ApplicationSettings | None = None) -> FastAPI: # MIDDLEWARES if app.state.settings.CATALOG_PROFILING: - app.add_middleware(ProfilerMiddleware) + initialize_profiler(app) if settings.SC_BOOT_MODE != BootModeEnum.PRODUCTION: # middleware to time requests (ONLY for development) diff --git a/services/catalog/src/simcore_service_catalog/core/background_tasks.py b/services/catalog/src/simcore_service_catalog/core/background_tasks.py index 5e513246732..cb269ee3919 100644 --- a/services/catalog/src/simcore_service_catalog/core/background_tasks.py +++ b/services/catalog/src/simcore_service_catalog/core/background_tasks.py @@ -28,7 +28,7 @@ from ..db.repositories.groups import GroupsRepository from ..db.repositories.projects import ProjectsRepository from ..db.repositories.services import ServicesRepository -from ..models.services_db import ServiceAccessRightsAtDB, ServiceMetaDataAtDB +from ..models.services_db import ServiceAccessRightsAtDB, ServiceMetaDataDBCreate from ..services import access_rights _logger = logging.getLogger(__name__) @@ -89,7 +89,9 @@ def _by_version(t: tuple[ServiceKey, ServiceVersion]) -> Version: # set the service in the DB await services_repo.create_or_update_service( - ServiceMetaDataAtDB(**service_metadata.model_dump(), owner=owner_gid), + ServiceMetaDataDBCreate( + **service_metadata.model_dump(exclude_unset=True), owner=owner_gid + ), service_access_rights, ) diff --git a/services/catalog/src/simcore_service_catalog/db/repositories/_services_sql.py b/services/catalog/src/simcore_service_catalog/db/repositories/_services_sql.py index 971e9339eb9..d83dffbdb22 100644 --- a/services/catalog/src/simcore_service_catalog/db/repositories/_services_sql.py +++ b/services/catalog/src/simcore_service_catalog/db/repositories/_services_sql.py @@ -4,11 +4,13 @@ from models_library.products import ProductName from models_library.services_types import ServiceKey, ServiceVersion from models_library.users import UserID +from simcore_postgres_database.utils_repos import get_columns_from_db_model from sqlalchemy.dialects.postgresql import ARRAY, INTEGER, array_agg from sqlalchemy.sql import and_, or_ from sqlalchemy.sql.expression import func from sqlalchemy.sql.selectable import Select +from ...models.services_db import ServiceMetaDataDBGet from ..tables import ( services_access_rights, services_compatibility, @@ -17,6 +19,10 @@ users, ) +SERVICES_META_DATA_COLS = get_columns_from_db_model( + services_meta_data, ServiceMetaDataDBGet +) + def 
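The logger-quieting block added to create_app above is a small reusable pattern: third-party loggers are raised one severity step above the root level, clamped to [WARNING, CRITICAL], so verbose dependencies stay quiet without hiding real problems. The same arithmetic in isolation:

import logging

_LOG_LEVEL_STEP = logging.CRITICAL - logging.ERROR  # one standard step, i.e. 10
_NOISY_LOGGERS = ("aiormq", "botocore", "httpcore")

def quiet_noisy_loggers() -> None:
    quiet_level = max(
        min(logging.root.level + _LOG_LEVEL_STEP, logging.CRITICAL),
        logging.WARNING,
    )
    for name in _NOISY_LOGGERS:
        logging.getLogger(name).setLevel(quiet_level)

logging.basicConfig(level=logging.INFO)
quiet_noisy_loggers()
assert logging.getLogger("botocore").level == logging.WARNING  # INFO (20) + 10 -> 30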
list_services_stmt( *, @@ -26,7 +32,7 @@ def list_services_stmt( combine_access_with_and: bool | None = True, product_name: str | None = None, ) -> Select: - stmt = sa.select(services_meta_data) + stmt = sa.select(*SERVICES_META_DATA_COLS) if gids or execute_access or write_access: conditions: list[Any] = [] @@ -50,13 +56,9 @@ def list_services_stmt( if product_name: conditions.append(services_access_rights.c.product_name == product_name) - stmt = ( - sa.select( - services_meta_data, - ) - .distinct(services_meta_data.c.key, services_meta_data.c.version) - .select_from(services_meta_data.join(services_access_rights)) - ) + stmt = stmt.distinct( + services_meta_data.c.key, services_meta_data.c.version + ).select_from(services_meta_data.join(services_access_rights)) if conditions: stmt = stmt.where(and_(*conditions)) stmt = stmt.order_by(services_meta_data.c.key, services_meta_data.c.version) @@ -133,7 +135,7 @@ def total_count_stmt( ) -def list_latest_services_with_history_stmt( +def list_latest_services_stmt( *, product_name: ProductName, user_id: UserID, @@ -172,7 +174,7 @@ def list_latest_services_with_history_stmt( ) # get all information of latest's services listed in CTE - latest_query = ( + latest_stmt = ( sa.select( services_meta_data.c.key, services_meta_data.c.version, @@ -181,6 +183,7 @@ def list_latest_services_with_history_stmt( services_meta_data.c.description, services_meta_data.c.description_ui, services_meta_data.c.thumbnail, + services_meta_data.c.icon, services_meta_data.c.version_display, services_meta_data.c.classifiers, services_meta_data.c.created, @@ -203,124 +206,26 @@ def list_latest_services_with_history_stmt( .subquery("latest_sq") ) - # get history for every unique service-key in CTE - _accessible_sq = ( - sa.select( - services_meta_data.c.key, - services_meta_data.c.version, - ) - .distinct() - .select_from( - services_meta_data.join( - cte, - services_meta_data.c.key == cte.c.key, - ) - # joins because access-rights might change per version - .join( - services_access_rights, - (services_meta_data.c.key == services_access_rights.c.key) - & (services_meta_data.c.version == services_access_rights.c.version) - & (services_access_rights.c.product_name == product_name), - ) - .join( - user_to_groups, - (user_to_groups.c.gid == services_access_rights.c.gid) - & (user_to_groups.c.uid == user_id), - ) - .outerjoin( - services_compatibility, - (services_meta_data.c.key == services_compatibility.c.key) - & (services_meta_data.c.version == services_compatibility.c.version), - ) - ) - .where(access_rights) - .subquery("accessible_sq") - ) - - history_subquery = ( - sa.select( - services_meta_data.c.key, - services_meta_data.c.version, - services_meta_data.c.version_display, - services_meta_data.c.deprecated, - services_meta_data.c.created, - services_compatibility.c.custom_policy, # CompatiblePolicyDict | None - ) - .select_from( - services_meta_data.join( - _accessible_sq, - (services_meta_data.c.key == _accessible_sq.c.key) - & (services_meta_data.c.version == _accessible_sq.c.version), - ).outerjoin( - services_compatibility, - (services_meta_data.c.key == services_compatibility.c.key) - & (services_meta_data.c.version == services_compatibility.c.version), - ) - ) - .order_by( - services_meta_data.c.key, - sa.desc(_version(services_meta_data.c.version)), # latest version first - ) - .subquery("history_sq") - ) - - return ( - sa.select( - latest_query.c.key, - latest_query.c.version, - # display - latest_query.c.name, - latest_query.c.description, - 
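SERVICES_META_DATA_COLS above narrows every SELECT to the columns that the read model declares, so a new table column cannot silently leak into responses. A hedged sketch of the idea; the real get_columns_from_db_model helper may differ in signature:

import sqlalchemy as sa
from pydantic import BaseModel

metadata = sa.MetaData()
services_meta_data = sa.Table(
    "services_meta_data",
    metadata,
    sa.Column("key", sa.String, primary_key=True),
    sa.Column("version", sa.String, primary_key=True),
    sa.Column("name", sa.String),
    sa.Column("internal_notes", sa.String),  # hypothetical column NOT exposed by the model
)

class ServiceMetaDataDBGet(BaseModel):  # trimmed stand-in for the real model
    key: str
    version: str
    name: str

def columns_from_db_model(table: sa.Table, model_cls: type[BaseModel]) -> list[sa.Column]:
    # keep only the table columns with a counterpart field on the model
    return [table.columns[f] for f in model_cls.model_fields if f in table.columns]

print(sa.select(*columns_from_db_model(services_meta_data, ServiceMetaDataDBGet)))
# SELECT lists key, version, name -- internal_notes is never selected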
latest_query.c.description_ui, - latest_query.c.thumbnail, - latest_query.c.version_display, - # ownership - latest_query.c.owner_email, - # tags - latest_query.c.classifiers, - latest_query.c.quality, - # lifetime - latest_query.c.created, - latest_query.c.modified, - latest_query.c.deprecated, - # releases (NOTE: at some points we should limit this list?) - array_agg( - func.json_build_object( - "version", - history_subquery.c.version, - "version_display", - history_subquery.c.version_display, - "deprecated", - history_subquery.c.deprecated, - "created", - history_subquery.c.created, - "compatibility_policy", # NOTE: this is the `policy` - history_subquery.c.custom_policy, - ) - ).label("history"), - ) - .join( - history_subquery, - latest_query.c.key == history_subquery.c.key, - ) - .group_by( - history_subquery.c.key, - latest_query.c.key, - latest_query.c.version, - latest_query.c.owner_email, - latest_query.c.name, - latest_query.c.description, - latest_query.c.description_ui, - latest_query.c.thumbnail, - latest_query.c.version_display, - latest_query.c.classifiers, - latest_query.c.created, - latest_query.c.modified, - latest_query.c.deprecated, - latest_query.c.quality, - ) - .order_by(history_subquery.c.key) - ) + return sa.select( + latest_stmt.c.key, + latest_stmt.c.version, + # display + latest_stmt.c.name, + latest_stmt.c.description, + latest_stmt.c.description_ui, + latest_stmt.c.thumbnail, + latest_stmt.c.icon, + latest_stmt.c.version_display, + # ownership + latest_stmt.c.owner_email, + # tags + latest_stmt.c.classifiers, + latest_stmt.c.quality, + # lifetime + latest_stmt.c.created, + latest_stmt.c.modified, + latest_stmt.c.deprecated, + ).order_by(latest_stmt.c.key) def can_get_service_stmt( @@ -374,6 +279,7 @@ def get_service_stmt( services_meta_data.c.description, services_meta_data.c.description_ui, services_meta_data.c.thumbnail, + services_meta_data.c.icon, services_meta_data.c.version_display, # ownership owner_subquery.label("owner_email"), diff --git a/services/catalog/src/simcore_service_catalog/db/repositories/products.py b/services/catalog/src/simcore_service_catalog/db/repositories/products.py index 57b036150d2..ea59f9dab05 100644 --- a/services/catalog/src/simcore_service_catalog/db/repositories/products.py +++ b/services/catalog/src/simcore_service_catalog/db/repositories/products.py @@ -5,6 +5,6 @@ class ProductsRepository(BaseRepository): async def get_default_product_name(self) -> str: - async with self.db_engine.begin() as conn: + async with self.db_engine.connect() as conn: product_name: str = await get_default_product_name(conn) return product_name diff --git a/services/catalog/src/simcore_service_catalog/db/repositories/services.py b/services/catalog/src/simcore_service_catalog/db/repositories/services.py index 7cb1b72e333..509a23d68d6 100644 --- a/services/catalog/src/simcore_service_catalog/db/repositories/services.py +++ b/services/catalog/src/simcore_service_catalog/db/repositories/services.py @@ -17,26 +17,28 @@ from psycopg2.errors import ForeignKeyViolation from pydantic import PositiveInt, TypeAdapter, ValidationError from simcore_postgres_database.utils_services import create_select_latest_services_query -from sqlalchemy import literal_column from sqlalchemy.dialects.postgresql import insert as pg_insert from sqlalchemy.sql import and_, or_ from sqlalchemy.sql.expression import tuple_ from ...models.services_db import ( - ReleaseFromDB, + ReleaseDBGet, ServiceAccessRightsAtDB, - ServiceMetaDataAtDB, - ServiceWithHistoryFromDB, + 
ServiceMetaDataDBCreate, + ServiceMetaDataDBGet, + ServiceMetaDataDBPatch, + ServiceWithHistoryDBGet, ) from ...models.services_specifications import ServiceSpecificationsAtDB from ..tables import services_access_rights, services_meta_data, services_specifications from ._base import BaseRepository from ._services_sql import ( + SERVICES_META_DATA_COLS, AccessRightsClauses, can_get_service_stmt, get_service_history_stmt, get_service_stmt, - list_latest_services_with_history_stmt, + list_latest_services_stmt, list_services_stmt, total_count_stmt, ) @@ -79,11 +81,11 @@ async def list_services( write_access: bool | None = None, combine_access_with_and: bool | None = True, product_name: str | None = None, - ) -> list[ServiceMetaDataAtDB]: + ) -> list[ServiceMetaDataDBGet]: async with self.db_engine.connect() as conn: return [ - ServiceMetaDataAtDB.model_validate(row) + ServiceMetaDataDBGet.model_validate(row) async for row in await conn.stream( list_services_stmt( gids=gids, @@ -102,7 +104,7 @@ async def list_service_releases( major: int | None = None, minor: int | None = None, limit_count: int | None = None, - ) -> list[ServiceMetaDataAtDB]: + ) -> list[ServiceMetaDataDBGet]: """Lists LAST n releases of a given service, sorted from latest first major, minor is used to filter as major.minor.* or major.* @@ -124,7 +126,7 @@ async def list_service_releases( search_condition &= services_meta_data.c.version.like(f"{major}.%") query = ( - sa.select(services_meta_data) + sa.select(*SERVICES_META_DATA_COLS) .where(search_condition) .order_by(sa.desc(services_meta_data.c.version)) ) @@ -134,22 +136,22 @@ async def list_service_releases( async with self.db_engine.connect() as conn: releases = [ - ServiceMetaDataAtDB.model_validate(row) + ServiceMetaDataDBGet.model_validate(row) async for row in await conn.stream(query) ] # Now sort naturally from latest first: (This is lame, the sorting should be done in the db) - def _by_version(x: ServiceMetaDataAtDB) -> packaging.version.Version: + def _by_version(x: ServiceMetaDataDBGet) -> packaging.version.Version: return packaging.version.parse(x.version) return sorted(releases, key=_by_version, reverse=True) - async def get_latest_release(self, key: str) -> ServiceMetaDataAtDB | None: + async def get_latest_release(self, key: str) -> ServiceMetaDataDBGet | None: """Returns last release or None if service was never released""" services_latest = create_select_latest_services_query().alias("services_latest") query = ( - sa.select(services_meta_data) + sa.select(SERVICES_META_DATA_COLS) .select_from( services_latest.join( services_meta_data, @@ -163,7 +165,7 @@ async def get_latest_release(self, key: str) -> ServiceMetaDataAtDB | None: result = await conn.execute(query) row = result.first() if row: - return ServiceMetaDataAtDB.model_validate(row) + return ServiceMetaDataDBGet.model_validate(row) return None # mypy async def get_service( @@ -175,18 +177,11 @@ async def get_service( execute_access: bool | None = None, write_access: bool | None = None, product_name: str | None = None, - ) -> ServiceMetaDataAtDB | None: + ) -> ServiceMetaDataDBGet | None: - query = sa.select(services_meta_data).where( - (services_meta_data.c.key == key) - & (services_meta_data.c.version == version) - ) - if gids or execute_access or write_access: - - query = sa.select(services_meta_data).select_from( - services_meta_data.join(services_access_rights) - ) + query = sa.select(*SERVICES_META_DATA_COLS) + if gids or execute_access or write_access: conditions = [ services_meta_data.c.key == 
key, services_meta_data.c.version == version, @@ -202,20 +197,27 @@ async def get_service( if product_name: conditions.append(services_access_rights.c.product_name == product_name) - query = query.where(and_(*conditions)) + query = query.select_from( + services_meta_data.join(services_access_rights) + ).where(and_(*conditions)) + else: + query = query.where( + (services_meta_data.c.key == key) + & (services_meta_data.c.version == version) + ) async with self.db_engine.connect() as conn: result = await conn.execute(query) row = result.first() if row: - return ServiceMetaDataAtDB.model_validate(row) + return ServiceMetaDataDBGet.model_validate(row) return None # mypy async def create_or_update_service( self, - new_service: ServiceMetaDataAtDB, + new_service: ServiceMetaDataDBCreate, new_service_access_rights: list[ServiceAccessRightsAtDB], - ) -> ServiceMetaDataAtDB: + ) -> ServiceMetaDataDBGet: for access_rights in new_service_access_rights: if ( access_rights.key != new_service.key @@ -229,12 +231,12 @@ async def create_or_update_service( result = await conn.execute( # pylint: disable=no-value-for-parameter services_meta_data.insert() - .values(**new_service.model_dump(by_alias=True)) - .returning(literal_column("*")) + .values(**new_service.model_dump(exclude_unset=True)) + .returning(*SERVICES_META_DATA_COLS) ) row = result.first() assert row # nosec - created_service = ServiceMetaDataAtDB.model_validate(row) + created_service = ServiceMetaDataDBGet.model_validate(row) for access_rights in new_service_access_rights: insert_stmt = pg_insert(services_access_rights).values( @@ -243,13 +245,18 @@ async def create_or_update_service( await conn.execute(insert_stmt) return created_service - async def update_service(self, patched_service: ServiceMetaDataAtDB) -> None: + async def update_service( + self, + service_key: ServiceKey, + service_version: ServiceVersion, + patched_service: ServiceMetaDataDBPatch, + ) -> None: stmt_update = ( services_meta_data.update() .where( - (services_meta_data.c.key == patched_service.key) - & (services_meta_data.c.version == patched_service.version) + (services_meta_data.c.key == service_key) + & (services_meta_data.c.version == service_version) ) .values( **patched_service.model_dump( @@ -313,7 +320,7 @@ async def get_service_with_history( # get args key: ServiceKey, version: ServiceVersion, - ) -> ServiceWithHistoryFromDB | None: + ) -> ServiceWithHistoryDBGet | None: stmt_get = get_service_stmt( product_name=product_name, @@ -338,13 +345,14 @@ async def get_service_with_history( result = await conn.execute(stmt_history) row_h = result.one_or_none() - return ServiceWithHistoryFromDB( + return ServiceWithHistoryDBGet( key=row.key, version=row.version, # display name=row.name, description=row.description, description_ui=row.description_ui, + icon=row.icon, thumbnail=row.thumbnail, version_display=row.version_display, # ownership @@ -370,7 +378,7 @@ async def list_latest_services( # list args: pagination limit: int | None = None, offset: int | None = None, - ) -> tuple[PositiveInt, list[ServiceWithHistoryFromDB]]: + ) -> tuple[PositiveInt, list[ServiceWithHistoryDBGet]]: # get page stmt_total = total_count_stmt( @@ -378,7 +386,7 @@ async def list_latest_services( user_id=user_id, access_rights=AccessRightsClauses.can_read, ) - stmt_page = list_latest_services_with_history_stmt( + stmt_page = list_latest_services_stmt( product_name=product_name, user_id=user_id, access_rights=AccessRightsClauses.can_read, @@ -386,7 +394,7 @@ async def list_latest_services( 
offset=offset, ) - async with self.db_engine.begin() as conn: + async with self.db_engine.connect() as conn: result = await conn.execute(stmt_total) total_count = result.scalar() or 0 @@ -396,7 +404,7 @@ async def list_latest_services( # compose history with latest items_page = [ - ServiceWithHistoryFromDB( + ServiceWithHistoryDBGet( key=r.key, version=r.version, # display @@ -404,6 +412,7 @@ async def list_latest_services( description=r.description, description_ui=r.description_ui, thumbnail=r.thumbnail, + icon=r.icon, version_display=r.version_display, # ownership owner_email=r.owner_email, @@ -415,7 +424,7 @@ async def list_latest_services( modified=r.modified, deprecated=r.deprecated, # releases - history=r.history, + history=[], # NOTE: for listing we will not add history. Only get service will produce history ) for r in rows ] @@ -429,7 +438,7 @@ async def get_service_history( user_id: UserID, # get args key: ServiceKey, - ) -> list[ReleaseFromDB] | None: + ) -> list[ReleaseDBGet] | None: stmt_history = get_service_history_stmt( product_name=product_name, @@ -437,12 +446,12 @@ async def get_service_history( access_rights=AccessRightsClauses.can_read, service_key=key, ) - async with self.db_engine.begin() as conn: + async with self.db_engine.connect() as conn: result = await conn.execute(stmt_history) row = result.one_or_none() return ( - TypeAdapter(list[ReleaseFromDB]).validate_python(row.history) + TypeAdapter(list[ReleaseDBGet]).validate_python(row.history) if row else None ) @@ -472,7 +481,7 @@ async def get_service_access_rights( async for row in await conn.stream(query) ] - async def list_services_access_rights( + async def batch_get_services_access_rights( self, key_versions: Iterable[tuple[str, str]], product_name: str | None = None, diff --git a/services/catalog/src/simcore_service_catalog/models/services_db.py b/services/catalog/src/simcore_service_catalog/models/services_db.py index 89a61af2e7a..2ad800d2b44 100644 --- a/services/catalog/src/simcore_service_catalog/models/services_db.py +++ b/services/catalog/src/simcore_service_catalog/models/services_db.py @@ -1,63 +1,188 @@ from datetime import datetime from typing import Annotated, Any +from common_library.basic_types import DEFAULT_FACTORY +from models_library.basic_types import IdInt +from models_library.groups import GroupID from models_library.products import ProductName from models_library.services_access import ServiceGroupAccessRights from models_library.services_base import ServiceKeyVersion -from models_library.services_metadata_editable import ServiceMetaDataEditable from models_library.services_types import ServiceKey, ServiceVersion -from pydantic import BaseModel, ConfigDict, Field, HttpUrl -from pydantic.types import PositiveInt +from models_library.utils.common_validators import empty_str_to_none_pre_validator +from pydantic import ( + BaseModel, + BeforeValidator, + ConfigDict, + Field, + HttpUrl, + field_validator, +) +from pydantic.config import JsonDict from simcore_postgres_database.models.services_compatibility import CompatiblePolicyDict -class ServiceMetaDataAtDB(ServiceKeyVersion, ServiceMetaDataEditable): - # for a partial update all Editable members must be Optional - name: str | None = None - thumbnail: Annotated[str, HttpUrl] | None = None - description: str | None = None +class ServiceMetaDataDBGet(BaseModel): + # primary-keys + key: ServiceKey + version: ServiceVersion - classifiers: Annotated[list[str] | None, Field(default_factory=list)] - owner: PositiveInt | None = None + # ownership + 
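The repository reads above switch from engine.begin() to engine.connect(): begin() opens, and on exit commits, a transaction that plain SELECTs do not need. A runnable sketch of the difference, assuming SQLAlchemy's asyncio extension with the aiosqlite driver installed:

import asyncio
import sqlalchemy as sa
from sqlalchemy.ext.asyncio import create_async_engine
from sqlalchemy.pool import StaticPool

async def _demo() -> None:
    engine = create_async_engine("sqlite+aiosqlite:///:memory:", poolclass=StaticPool)
    async with engine.begin() as conn:  # transactional: the INSERT commits on exit
        await conn.execute(sa.text("CREATE TABLE t (x INTEGER)"))
        await conn.execute(sa.text("INSERT INTO t VALUES (1)"))
    async with engine.connect() as conn:  # plain connection: enough for reads
        result = await conn.execute(sa.text("SELECT x FROM t"))
        print(result.scalar())  # -> 1
    await engine.dispose()

asyncio.run(_demo())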
owner: GroupID | None - model_config = ConfigDict( - from_attributes=True, - json_schema_extra={ - "example": { - "key": "simcore/services/dynamic/sim4life", - "version": "1.0.9", - "owner": 8, - "name": "sim4life", - "description": "s4l web", - "description_ui": 0, - "thumbnail": "http://thumbnailit.org/image", - "version_display": "S4L X", - "created": "2021-01-18 12:46:57.7315", - "modified": "2021-01-19 12:45:00", - "deprecated": "2099-01-19 12:45:00", - "quality": { - "enabled": True, - "tsr_target": { - f"r{n:02d}": {"level": 4, "references": ""} - for n in range(1, 11) - }, - "annotations": { - "vandv": "", - "limitations": "", - "certificationLink": "", - "certificationStatus": "Uncertified", - }, - "tsr_current": { - f"r{n:02d}": {"level": 0, "references": ""} - for n in range(1, 11) + # display + name: str + description: str + description_ui: bool + thumbnail: str | None + icon: str | None + version_display: str | None + + # tagging + classifiers: list[str] + quality: dict[str, Any] + + # lifecycle + created: datetime + modified: datetime + deprecated: datetime | None + + @staticmethod + def _update_json_schema_extra(schema: JsonDict) -> None: + schema.update( + { + "example": { + "key": "simcore/services/dynamic/reading", + "version": "1.0.9", + "owner": 8, + "name": "reading", + "description": "example for service metadata db GET", + "description_ui": False, + "thumbnail": None, + "icon": "https://picsum.photos/50", + "version_display": "S4L X", + "classifiers": ["foo", "bar"], + "quality": { + "enabled": True, + "tsr_target": { + f"r{n:02d}": {"level": 4, "references": ""} + for n in range(1, 11) + }, + "annotations": { + "vandv": "", + "limitations": "", + "certificationLink": "", + "certificationStatus": "Uncertified", + }, + "tsr_current": { + f"r{n:02d}": {"level": 0, "references": ""} + for n in range(1, 11) + }, }, - }, + "created": "2021-01-18 12:46:57.7315", + "modified": "2021-01-19 12:45:00", + "deprecated": "2099-01-19 12:45:00", + } } - }, + ) + + model_config = ConfigDict( + from_attributes=True, json_schema_extra=_update_json_schema_extra ) -class ReleaseFromDB(BaseModel): +def _httpurl_to_str(value: HttpUrl | str | None) -> str | None: + if isinstance(value, HttpUrl): + return f"{value}" + return value + + +class ServiceMetaDataDBCreate(BaseModel): + # primary-keys + key: ServiceKey + version: ServiceVersion + + # ownership + owner: IdInt | None = None + + # display + name: str + description: str + description_ui: bool = False + thumbnail: str | None = None + icon: Annotated[str | None, BeforeValidator(_httpurl_to_str)] = None + version_display: str | None = None + + # tagging + classifiers: Annotated[list[str], Field(default_factory=list)] = DEFAULT_FACTORY + quality: Annotated[dict[str, Any], Field(default_factory=dict)] = DEFAULT_FACTORY + + # lifecycle + deprecated: datetime | None = None + + @staticmethod + def _update_json_schema_extra(schema: JsonDict) -> None: + schema.update( + { + "examples": [ + # minimal w/ required values + { + "key": "simcore/services/dynamic/creating", + "version": "1.0.9", + "name": "creating", + "description": "example for service metadata db CREATE", + } + ] + } + ) + + model_config = ConfigDict(json_schema_extra=_update_json_schema_extra) + + _prevent_empty_strings_in_nullable_string_cols = field_validator( + "icon", "thumbnail", "version_display", mode="before" + )(empty_str_to_none_pre_validator) + + +class ServiceMetaDataDBPatch(BaseModel): + # ownership + owner: IdInt | None = None + + # display + name: str | None = None + 
description: str | None = None + description_ui: bool = False + version_display: str | None = None + thumbnail: str | None = None + icon: str | None = None + + # tagging + classifiers: Annotated[list[str], Field(default_factory=list)] = DEFAULT_FACTORY + quality: Annotated[dict[str, Any], Field(default_factory=dict)] = DEFAULT_FACTORY + + # lifecycle + deprecated: datetime | None = None + + @staticmethod + def _update_json_schema_extra(schema: JsonDict) -> None: + schema.update( + { + "example": { + "name": "patching", + "description": "example for service metadata db PATCH", + "thumbnail": "https://picsum.photos/200", + "icon": "https://picsum.photos/50", + "version_display": "S4L X", + } + } + ) + + model_config = ConfigDict(json_schema_extra=_update_json_schema_extra) + + _prevent_empty_strings_in_nullable_string_cols = field_validator( + "icon", "thumbnail", "version_display", mode="before" + )(empty_str_to_none_pre_validator) + + +class ReleaseDBGet(BaseModel): version: ServiceVersion version_display: str | None deprecated: datetime | None @@ -65,7 +190,7 @@ class ReleaseFromDB(BaseModel): compatibility_policy: CompatiblePolicyDict | None -class ServiceWithHistoryFromDB(BaseModel): +class ServiceWithHistoryDBGet(BaseModel): key: ServiceKey version: ServiceVersion # display @@ -73,6 +198,7 @@ class ServiceWithHistoryFromDB(BaseModel): description: str description_ui: bool thumbnail: str | None + icon: str | None version_display: str | None # ownership owner_email: str | None @@ -84,31 +210,37 @@ class ServiceWithHistoryFromDB(BaseModel): modified: datetime deprecated: datetime | None # releases - history: list[ReleaseFromDB] + history: list[ReleaseDBGet] assert ( # nosec - set(ReleaseFromDB.model_fields) + set(ReleaseDBGet.model_fields) .difference({"compatibility_policy"}) - .issubset(set(ServiceWithHistoryFromDB.model_fields)) + .issubset(set(ServiceWithHistoryDBGet.model_fields)) ) class ServiceAccessRightsAtDB(ServiceKeyVersion, ServiceGroupAccessRights): - gid: PositiveInt + gid: GroupID product_name: ProductName - model_config = ConfigDict( - from_attributes=True, - json_schema_extra={ - "example": { - "key": "simcore/services/dynamic/sim4life", - "version": "1.0.9", - "gid": 8, - "execute_access": True, - "write_access": True, - "product_name": "osparc", - "created": "2021-01-18 12:46:57.7315", - "modified": "2021-01-19 12:45:00", + + @staticmethod + def _update_json_schema_extra(schema: JsonDict) -> None: + schema.update( + { + "example": { + "key": "simcore/services/dynamic/sim4life", + "version": "1.0.9", + "gid": 8, + "execute_access": True, + "write_access": True, + "product_name": "osparc", + "created": "2021-01-18 12:46:57.7315", + "modified": "2021-01-19 12:45:00", + } } - }, + ) + + model_config = ConfigDict( + from_attributes=True, json_schema_extra=_update_json_schema_extra ) diff --git a/services/catalog/src/simcore_service_catalog/services/compatibility.py b/services/catalog/src/simcore_service_catalog/services/compatibility.py index 9c21e8b7ea7..db8483e11c9 100644 --- a/services/catalog/src/simcore_service_catalog/services/compatibility.py +++ b/services/catalog/src/simcore_service_catalog/services/compatibility.py @@ -11,7 +11,7 @@ from simcore_service_catalog.utils.versioning import as_version from ..db.repositories.services import ServicesRepository -from ..models.services_db import ReleaseFromDB +from ..models.services_db import ReleaseDBGet def _get_default_compatibility_specs(target: ServiceVersion | Version) -> SpecifierSet: @@ -41,7 +41,7 @@ def 
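The model split above replaces the single ServiceMetaDataAtDB with per-operation schemas: the Get model is fully required, the Create model carries defaults, and the Patch model is all-optional so model_dump(exclude_unset=True) yields a partial UPDATE payload. Its before-validators normalize HttpUrl values to str and map empty strings to None for nullable columns. A compact sketch of both ideas with illustrative field names:

from typing import Annotated
from pydantic import BaseModel, BeforeValidator, HttpUrl, field_validator

def _empty_str_to_none(value):
    return None if isinstance(value, str) and not value.strip() else value

def _httpurl_to_str(value):
    return str(value) if isinstance(value, HttpUrl) else value

class ThingDBGet(BaseModel):  # read: row guarantees, all required
    key: str
    name: str
    icon: str | None

class ThingDBCreate(BaseModel):  # create: server-side defaults allowed
    key: str
    name: str
    icon: Annotated[str | None, BeforeValidator(_httpurl_to_str)] = None

    _nullable = field_validator("icon", mode="before")(_empty_str_to_none)

class ThingDBPatch(BaseModel):  # patch: all optional -> partial updates
    name: str | None = None
    icon: str | None = None

print(ThingDBCreate(key="k", name="n", icon="  ").icon)  # -> None
print(ThingDBPatch(name="renamed").model_dump(exclude_unset=True))  # -> {'name': 'renamed'}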
_get_latest_compatible_version( return max(compatible_versions, default=None) -def _convert_to_versions(service_history: list[ReleaseFromDB]) -> list[Version]: +def _convert_to_versions(service_history: list[ReleaseDBGet]) -> list[Version]: return sorted( (as_version(h.version) for h in service_history if not h.deprecated), reverse=True, # latest first @@ -94,11 +94,15 @@ async def evaluate_service_compatibility_map( repo: ServicesRepository, product_name: ProductName, user_id: UserID, - service_release_history: list[ReleaseFromDB], + service_release_history: list[ReleaseDBGet], ) -> dict[ServiceVersion, Compatibility | None]: - released_versions = _convert_to_versions(service_release_history) - result: dict[ServiceVersion, Compatibility | None] = {} + """ + Evaluates the compatibility among a list of service releases for a given product and user. + """ + compatibility_map: dict[ServiceVersion, Compatibility | None] = {} + + released_versions = _convert_to_versions(service_release_history) for release in service_release_history: compatibility = None if release.compatibility_policy: @@ -108,7 +112,7 @@ async def evaluate_service_compatibility_map( repo=repo, target_version=release.version, released_versions=released_versions, - compatibility_policy={**release.compatibility_policy}, + compatibility_policy=dict(release.compatibility_policy), ) elif latest_version := _get_latest_compatible_version( release.version, @@ -117,6 +121,6 @@ async def evaluate_service_compatibility_map( compatibility = Compatibility( can_update_to=CompatibleService(version=f"{latest_version}") ) - result[release.version] = compatibility + compatibility_map[release.version] = compatibility - return result + return compatibility_map diff --git a/services/catalog/src/simcore_service_catalog/services/function_services.py b/services/catalog/src/simcore_service_catalog/services/function_services.py index 93abd9466f8..7ed546f251b 100644 --- a/services/catalog/src/simcore_service_catalog/services/function_services.py +++ b/services/catalog/src/simcore_service_catalog/services/function_services.py @@ -20,9 +20,9 @@ def _as_dict(model_instance: ServiceMetaDataPublished) -> dict[str, Any]: def get_function_service(key, version) -> ServiceMetaDataPublished: try: return next( - s - for s in iter_service_docker_data() - if s.key == key and s.version == version + sc + for sc in iter_service_docker_data() + if sc.key == key and sc.version == version ) except StopIteration as err: raise HTTPException( diff --git a/services/catalog/src/simcore_service_catalog/services/manifest.py b/services/catalog/src/simcore_service_catalog/services/manifest.py index bf7c26a6b63..5cfbb1d961b 100644 --- a/services/catalog/src/simcore_service_catalog/services/manifest.py +++ b/services/catalog/src/simcore_service_catalog/services/manifest.py @@ -1,4 +1,4 @@ -""" Services Manifest API Documentation +"""Services Manifest API Documentation The `services.manifest` module provides a read-only API to access the services catalog. The term "Manifest" refers to a detailed, finalized list, traditionally used to denote items that are recorded as part of an official inventory or log, emphasizing the immutable nature of the data. 
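Back on compatibility.py above: evaluate_service_compatibility_map reduces to version arithmetic with the packaging library. Each release either carries a custom compatibility_policy specifier or falls back to a default one, and the newest non-deprecated release matching it becomes can_update_to. A sketch with a plausible default rule (newer patch within the same major.minor; the project's actual specifier may differ):

from packaging.specifiers import SpecifierSet
from packaging.version import Version

def default_compatibility_specs(target: Version) -> SpecifierSet:
    # assumed default: any release newer than target within the same major.minor
    return SpecifierSet(f"~={target.major}.{target.minor}.{target.micro}, >{target}")

released = [Version(v) for v in ("1.0.0", "1.0.1", "1.0.5", "1.1.0", "2.0.0")]
target = Version("1.0.1")
compatible = [v for v in released if v in default_compatibility_specs(target)]
print(max(compatible, default=None))  # -> 1.0.5 (1.1.0 and 2.0.0 fall outside ~=1.0.1)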
@@ -60,7 +60,7 @@ async def get_services_map( # NOTE: functional-services are services w/o associated image services: ServiceMetaDataPublishedDict = { - (s.key, s.version): s for s in iter_service_docker_data() + (sc.key, sc.version): sc for sc in iter_service_docker_data() } for service in services_in_registry: try: diff --git a/services/catalog/src/simcore_service_catalog/services/services_api.py b/services/catalog/src/simcore_service_catalog/services/services_api.py index 4122a035b0f..843a91fc713 100644 --- a/services/catalog/src/simcore_service_catalog/services/services_api.py +++ b/services/catalog/src/simcore_service_catalog/services/services_api.py @@ -1,6 +1,13 @@ import logging +from contextlib import suppress -from models_library.api_schemas_catalog.services import ServiceGetV2, ServiceUpdateV2 +from models_library.api_schemas_catalog.services import ( + LatestServiceGet, + MyServiceGet, + ServiceGetV2, + ServiceUpdateV2, +) +from models_library.groups import GroupID from models_library.products import ProductName from models_library.rest_pagination import PageLimitInt from models_library.services_access import ServiceGroupAccessRightsV2 @@ -13,12 +20,13 @@ CatalogForbiddenError, CatalogItemNotFoundError, ) +from simcore_service_catalog.db.repositories.groups import GroupsRepository from ..db.repositories.services import ServicesRepository from ..models.services_db import ( ServiceAccessRightsAtDB, - ServiceMetaDataAtDB, - ServiceWithHistoryFromDB, + ServiceMetaDataDBPatch, + ServiceWithHistoryDBGet, ) from ..services import manifest from ..services.director import DirectorApi @@ -28,60 +36,96 @@ _logger = logging.getLogger(__name__) -def _db_to_api_model( - service_db: ServiceWithHistoryFromDB, +def _aggregate( + service_db: ServiceWithHistoryDBGet, access_rights_db: list[ServiceAccessRightsAtDB], service_manifest: ServiceMetaDataPublished, - compatibility_map: dict[ServiceVersion, Compatibility | None] | None = None, -) -> ServiceGetV2: - compatibility_map = compatibility_map or {} - - return ServiceGetV2( - key=service_db.key, - version=service_db.version, - name=service_db.name, - thumbnail=HttpUrl(service_db.thumbnail) if service_db.thumbnail else None, - description=service_db.description, - description_ui=service_db.description_ui, - version_display=service_db.version_display, - type=service_manifest.service_type, - contact=service_manifest.contact, - authors=service_manifest.authors, - owner=(service_db.owner_email if service_db.owner_email else None), - inputs=service_manifest.inputs or {}, - outputs=service_manifest.outputs or {}, - boot_options=service_manifest.boot_options, - min_visible_inputs=service_manifest.min_visible_inputs, - access_rights={ +) -> dict: + return { + "key": service_db.key, + "version": service_db.version, + "name": service_db.name, + "thumbnail": HttpUrl(service_db.thumbnail) if service_db.thumbnail else None, + "icon": HttpUrl(service_db.icon) if service_db.icon else None, + "description": service_db.description, + "description_ui": service_db.description_ui, + "version_display": service_db.version_display, + "service_type": service_manifest.service_type, + "contact": service_manifest.contact, + "authors": service_manifest.authors, + "owner": (service_db.owner_email if service_db.owner_email else None), + "inputs": service_manifest.inputs or {}, + "outputs": service_manifest.outputs or {}, + "boot_options": service_manifest.boot_options, + "min_visible_inputs": service_manifest.min_visible_inputs, + "access_rights": { a.gid: 
ServiceGroupAccessRightsV2.model_construct( execute=a.execute_access, write=a.write_access, ) for a in access_rights_db }, - classifiers=service_db.classifiers, - quality=service_db.quality, - history=[ - ServiceRelease.model_construct( - version=h.version, - version_display=h.version_display, - released=h.created, - retired=h.deprecated, - compatibility=compatibility_map.get(h.version), - ) - for h in service_db.history - ], + "classifiers": service_db.classifiers, + "quality": service_db.quality, + # NOTE: history/release field is removed + } + + +def _to_latest_get_schema( + service_db: ServiceWithHistoryDBGet, + access_rights_db: list[ServiceAccessRightsAtDB], + service_manifest: ServiceMetaDataPublished, +) -> LatestServiceGet: + + assert len(service_db.history) == 0 # nosec + + return LatestServiceGet.model_validate( + { + **_aggregate(service_db, access_rights_db, service_manifest), + "release": ServiceRelease.model_construct( + version=service_db.version, + version_display=service_db.version_display, + released=service_db.created, + retired=service_db.deprecated, + compatibility=None, + ), + } + ) + + +def _to_get_schema( + service_db: ServiceWithHistoryDBGet, + access_rights_db: list[ServiceAccessRightsAtDB], + service_manifest: ServiceMetaDataPublished, + compatibility_map: dict[ServiceVersion, Compatibility | None] | None = None, +) -> ServiceGetV2: + compatibility_map = compatibility_map or {} + + return ServiceGetV2.model_validate( + { + **_aggregate(service_db, access_rights_db, service_manifest), + "history": [ + ServiceRelease.model_construct( + version=h.version, + version_display=h.version_display, + released=h.created, + retired=h.deprecated, + compatibility=compatibility_map.get(h.version), + ) + for h in service_db.history + ], + } ) -async def list_services_paginated( +async def list_latest_services( repo: ServicesRepository, director_api: DirectorApi, product_name: ProductName, user_id: UserID, limit: PageLimitInt | None, offset: NonNegativeInt = 0, -) -> tuple[NonNegativeInt, list[ServiceGetV2]]: +) -> tuple[NonNegativeInt, list[LatestServiceGet]]: # defines the order total_count, services = await repo.list_latest_services( @@ -90,10 +134,10 @@ async def list_services_paginated( if services: # injects access-rights - access_rights: dict[ - tuple[str, str], list[ServiceAccessRightsAtDB] - ] = await repo.list_services_access_rights( - ((s.key, s.version) for s in services), product_name=product_name + access_rights: dict[tuple[str, str], list[ServiceAccessRightsAtDB]] = ( + await repo.batch_get_services_access_rights( + ((sc.key, sc.version) for sc in services), product_name=product_name + ) ) if not access_rights: raise CatalogForbiddenError( @@ -104,31 +148,29 @@ async def list_services_paginated( # get manifest of those with access rights got = await manifest.get_batch_services( - [(s.key, s.version) for s in services if access_rights.get((s.key, s.version))], + [ + (sc.key, sc.version) + for sc in services + if access_rights.get((sc.key, sc.version)) + ], director_api, ) service_manifest = { - (s.key, s.version): s for s in got if isinstance(s, ServiceMetaDataPublished) + (sc.key, sc.version): sc + for sc in got + if isinstance(sc, ServiceMetaDataPublished) } items = [ - _db_to_api_model( - service_db=s, access_rights_db=ar, service_manifest=sm, compatibility_map=cm + _to_latest_get_schema( + service_db=sc, + access_rights_db=ar, + service_manifest=sm, ) - for s in services + for sc in services if ( - (ar := access_rights.get((s.key, s.version))) - and (sm := 
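The _db_to_api_model -> _aggregate refactor above builds one plain dict of the fields shared by both response schemas, then validates it into either the latest-service or the full-service shape, which differ only in their release/history tail. A reduced sketch with stand-in models:

from pydantic import BaseModel

class ServiceRelease(BaseModel):
    version: str

class LatestServiceGet(BaseModel):
    key: str
    name: str
    release: ServiceRelease

class ServiceGetV2(BaseModel):
    key: str
    name: str
    history: list[ServiceRelease]

def _aggregate(key: str, name: str) -> dict:
    return {"key": key, "name": name}  # shared fields only

common = _aggregate("simcore/services/comp/demo", "demo")
latest = LatestServiceGet.model_validate({**common, "release": {"version": "2.0.0"}})
full = ServiceGetV2.model_validate(
    {**common, "history": [{"version": "1.0.0"}, {"version": "2.0.0"}]}
)
print(latest.release.version, len(full.history))  # -> 2.0.0 2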
service_manifest.get((s.key, s.version))) - and ( - # NOTE: This operation might be resource-intensive. - # It is temporarily implemented on a trial basis. - cm := await evaluate_service_compatibility_map( - repo, - product_name=product_name, - user_id=user_id, - service_release_history=s.history, - ) - ) + (ar := access_rights.get((sc.key, sc.version))) + and (sm := service_manifest.get((sc.key, sc.version))) ) ] @@ -187,7 +229,7 @@ async def get_service( service_release_history=service.history, ) - return _db_to_api_model(service, access_rights, service_manifest, compatibility_map) + return _to_get_schema(service, access_rights, service_manifest, compatibility_map) async def update_service( @@ -239,11 +281,13 @@ async def update_service( # Updates service_meta_data await repo.update_service( - ServiceMetaDataAtDB( - key=service_key, - version=service_version, - **update.model_dump(exclude_unset=True), - ) + service_key, + service_version, + ServiceMetaDataDBPatch.model_validate( + update.model_dump( + exclude_unset=True, exclude={"access_rights"}, mode="json" + ), + ), ) # Updates service_access_rights (they can be added/removed/modified) @@ -330,3 +374,93 @@ async def check_for_service( user_id=user_id, product_name=product_name, ) + + +async def batch_get_my_services( + repo: ServicesRepository, + groups_repo: GroupsRepository, + *, + product_name: ProductName, + user_id: UserID, + ids: list[ + tuple[ + ServiceKey, + ServiceVersion, + ] + ], +) -> list[MyServiceGet]: + + services_access_rights = await repo.batch_get_services_access_rights( + key_versions=ids, product_name=product_name + ) + + user_groups = await groups_repo.list_user_groups(user_id=user_id) + my_group_ids = {g.gid for g in user_groups} + + my_services = [] + for service_key, service_version in ids: + + # Evaluate user's access-rights to this service key:version + access_rights = services_access_rights.get((service_key, service_version), []) + my_access_rights = ServiceGroupAccessRightsV2(execute=False, write=False) + for ar in access_rights: + if ar.gid in my_group_ids: + my_access_rights.execute |= ar.execute_access + my_access_rights.write |= ar.write_access + + # Get service metadata + service_db = await repo.get_service( + product_name=product_name, + key=service_key, + version=service_version, + ) + assert service_db # nosec + + # Find service owner (if defined!) + owner: GroupID | None = service_db.owner + if not owner: + # NOTE can be more than one. Just get first. 
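batch_get_my_services folds the user's effective rights with |= across all of their groups' rows and, per the NOTE above, falls back to the first group holding both write and execute when no owner is recorded (the suppress(StopIteration) just below). The same logic in isolation:

from contextlib import suppress
from dataclasses import dataclass

@dataclass
class AccessRightsRow:  # reduced stand-in for ServiceAccessRightsAtDB
    gid: int
    execute_access: bool
    write_access: bool

rows = [AccessRightsRow(3, True, False), AccessRightsRow(9, False, True)]
my_group_ids = {3, 9}

execute = write = False
for ar in rows:
    if ar.gid in my_group_ids:
        execute |= ar.execute_access
        write |= ar.write_access
print(execute, write)  # True True: rights accumulate across groups

owner = None
with suppress(StopIteration):  # next() on an exhausted generator raises StopIteration
    owner = next(ar.gid for ar in rows if ar.write_access and ar.execute_access)
print(owner)  # None: no single group holds both, so no fallback owner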
+ with suppress(StopIteration): + owner = next( + ar.gid + for ar in access_rights + if ar.write_access and ar.execute_access + ) + + # Evaluate `compatibility` + compatibility: Compatibility | None = None + if my_access_rights.execute or my_access_rights.write: + history = await repo.get_service_history( + # NOTE: that the service history might be different for each user + # since access rights are defined on a k:v basis + product_name=product_name, + user_id=user_id, + key=service_key, + ) + assert history # nosec + + compatibility_map = await evaluate_service_compatibility_map( + repo, + product_name=product_name, + user_id=user_id, + service_release_history=history, + ) + + compatibility = compatibility_map.get(service_db.version) + + my_services.append( + MyServiceGet( + key=service_db.key, + release=ServiceRelease( + version=service_db.version, + version_display=service_db.version_display, + released=service_db.created, + retired=service_db.deprecated, + compatibility=compatibility, + ), + owner=owner, + my_access_rights=my_access_rights, + ) + ) + + return my_services diff --git a/services/catalog/tests/unit/test__model_examples.py b/services/catalog/tests/unit/test__model_examples.py index 7592b8d21f1..05d1d177033 100644 --- a/services/catalog/tests/unit/test__model_examples.py +++ b/services/catalog/tests/unit/test__model_examples.py @@ -4,13 +4,15 @@ # pylint: disable=unused-argument # pylint: disable=unused-variable -import json from typing import Any import pytest import simcore_service_catalog.models -from pydantic import BaseModel, ValidationError -from pytest_simcore.pydantic_models import walk_model_examples_in_package +from pydantic import BaseModel +from pytest_simcore.pydantic_models import ( + assert_validation_model, + walk_model_examples_in_package, +) @pytest.mark.parametrize( @@ -18,11 +20,8 @@ walk_model_examples_in_package(simcore_service_catalog.models), ) def test_catalog_service_model_examples( - model_cls: type[BaseModel], example_name: int, example_data: Any + model_cls: type[BaseModel], example_name: str, example_data: Any ): - try: - assert model_cls.model_validate(example_data) is not None - except ValidationError as err: - pytest.fail( - f"\n{example_name}: {json.dumps(example_data, indent=1)}\nError: {err}" - ) + assert_validation_model( + model_cls, example_name=example_name, example_data=example_data + ) diff --git a/services/catalog/tests/unit/test_db_repositories_services_sql.py b/services/catalog/tests/unit/test_db_repositories_services_sql.py index 46993f5a772..09891d5fe5e 100644 --- a/services/catalog/tests/unit/test_db_repositories_services_sql.py +++ b/services/catalog/tests/unit/test_db_repositories_services_sql.py @@ -9,7 +9,7 @@ can_get_service_stmt, get_service_history_stmt, get_service_stmt, - list_latest_services_with_history_stmt, + list_latest_services_stmt, total_count_stmt, ) @@ -61,7 +61,7 @@ def _check(func_smt, **kwargs): ) _check( - list_latest_services_with_history_stmt, + list_latest_services_stmt, product_name=product_name, user_id=user_id, access_rights=AccessRightsClauses.can_read, diff --git a/services/catalog/tests/unit/test_services_compatibility.py b/services/catalog/tests/unit/test_services_compatibility.py index 04ef4bafd4d..1211c25a97b 100644 --- a/services/catalog/tests/unit/test_services_compatibility.py +++ b/services/catalog/tests/unit/test_services_compatibility.py @@ -12,7 +12,7 @@ from packaging.version import Version from pytest_mock import MockerFixture, MockType from 
simcore_service_catalog.db.repositories.services import ServicesRepository -from simcore_service_catalog.models.services_db import ReleaseFromDB +from simcore_service_catalog.models.services_db import ReleaseDBGet from simcore_service_catalog.services.compatibility import ( _get_latest_compatible_version, evaluate_service_compatibility_map, @@ -178,10 +178,10 @@ async def test_evaluate_service_compatibility_map_with_default_policy( mock_repo: MockType, user_id: UserID ): service_release_history = [ - _create_as(ReleaseFromDB, version="1.0.0"), - _create_as(ReleaseFromDB, version="1.0.1"), - _create_as(ReleaseFromDB, version="1.1.0"), - _create_as(ReleaseFromDB, version="2.0.0"), + _create_as(ReleaseDBGet, version="1.0.0"), + _create_as(ReleaseDBGet, version="1.0.1"), + _create_as(ReleaseDBGet, version="1.1.0"), + _create_as(ReleaseDBGet, version="2.0.0"), ] compatibility_map = await evaluate_service_compatibility_map( @@ -199,14 +199,14 @@ async def test_evaluate_service_compatibility_map_with_custom_policy( mock_repo: MockType, user_id: UserID ): service_release_history = [ - _create_as(ReleaseFromDB, version="1.0.0"), + _create_as(ReleaseDBGet, version="1.0.0"), _create_as( - ReleaseFromDB, + ReleaseDBGet, version="1.0.1", compatibility_policy={"versions_specifier": ">1.1.0,<=2.0.0"}, ), - _create_as(ReleaseFromDB, version="1.2.0"), - _create_as(ReleaseFromDB, version="2.0.0"), + _create_as(ReleaseDBGet, version="1.2.0"), + _create_as(ReleaseDBGet, version="2.0.0"), ] compatibility_map = await evaluate_service_compatibility_map( @@ -228,9 +228,9 @@ async def test_evaluate_service_compatibility_map_with_other_service( mock_repo: MockType, user_id: UserID ): service_release_history = [ - _create_as(ReleaseFromDB, version="1.0.0"), + _create_as(ReleaseDBGet, version="1.0.0"), _create_as( - ReleaseFromDB, + ReleaseDBGet, version="1.0.1", compatibility_policy={ "other_service_key": "simcore/services/comp/other_service", @@ -240,9 +240,9 @@ async def test_evaluate_service_compatibility_map_with_other_service( ] mock_repo.get_service_history.return_value = [ - _create_as(ReleaseFromDB, version="5.0.0"), - _create_as(ReleaseFromDB, version="5.1.0"), - _create_as(ReleaseFromDB, version="5.2.0"), + _create_as(ReleaseDBGet, version="5.0.0"), + _create_as(ReleaseDBGet, version="5.1.0"), + _create_as(ReleaseDBGet, version="5.2.0"), ] compatibility_map = await evaluate_service_compatibility_map( @@ -265,10 +265,10 @@ async def test_evaluate_service_compatibility_map_with_deprecated_versions( mock_repo: MockType, user_id: UserID ): service_release_history = [ - _create_as(ReleaseFromDB, version="1.0.0"), - _create_as(ReleaseFromDB, version="1.0.1", deprecated=arrow.now().datetime), - _create_as(ReleaseFromDB, version="1.2.0"), - _create_as(ReleaseFromDB, version="1.2.5"), + _create_as(ReleaseDBGet, version="1.0.0"), + _create_as(ReleaseDBGet, version="1.0.1", deprecated=arrow.now().datetime), + _create_as(ReleaseDBGet, version="1.2.0"), + _create_as(ReleaseDBGet, version="1.2.5"), ] compatibility_map = await evaluate_service_compatibility_map( diff --git a/services/catalog/tests/unit/with_dbs/conftest.py b/services/catalog/tests/unit/with_dbs/conftest.py index 1bd0bb27e50..74daa97804d 100644 --- a/services/catalog/tests/unit/with_dbs/conftest.py +++ b/services/catalog/tests/unit/with_dbs/conftest.py @@ -18,9 +18,11 @@ from models_library.services import ServiceMetaDataPublished from models_library.users import UserID from pydantic import ConfigDict, TypeAdapter +from 
pytest_simcore.helpers.catalog_services import CreateFakeServiceDataCallable from pytest_simcore.helpers.faker_factories import ( random_service_access_rights, random_service_meta_data, + random_user, ) from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict from pytest_simcore.helpers.postgres_tools import ( @@ -159,6 +161,24 @@ async def user( yield row +@pytest.fixture +async def other_user( + user_id: UserID, + sqlalchemy_async_engine: AsyncEngine, + faker: Faker, +) -> AsyncIterator[dict[str, Any]]: + + _user = random_user(fake=faker, id=user_id + 1) + async with insert_and_get_row_lifespan( # pylint:disable=contextmanager-generator-missing-cleanup + sqlalchemy_async_engine, + table=users, + values=_user, + pk_col=users.c.id, + pk_value=_user["id"], + ) as row: + yield row + + @pytest.fixture() async def user_groups_ids( sqlalchemy_async_engine: AsyncEngine, user: dict[str, Any] @@ -354,12 +374,12 @@ def _fake_factory(**overrides): return _fake_factory -@pytest.fixture() +@pytest.fixture async def create_fake_service_data( user_groups_ids: list[int], products_names: list[str], faker: Faker, -) -> Callable: +) -> CreateFakeServiceDataCallable: """Returns a fake factory that creates catalog DATA that can be used to fill both services_meta_data and services_access_rights tables @@ -376,11 +396,11 @@ async def create_fake_service_data( owner_access, team_access, everyone_access = fake_access_rights """ - everyone_gid, user_gid, team_gid = user_groups_ids + everyone_gid, user_primary_gid, team_standard_gid = user_groups_ids def _random_service(**overrides) -> dict[str, Any]: return random_service_meta_data( - owner_primary_gid=user_gid, + owner_primary_gid=user_primary_gid, fake=faker, **overrides, ) @@ -396,9 +416,9 @@ def _random_access(service, **overrides) -> dict[str, Any]: def _fake_factory( key, version, - team_access=None, - everyone_access=None, - product=products_names[0], + team_access: str | None = None, + everyone_access: str | None = None, + product: ProductName = products_names[0], deprecated: datetime | None = None, ) -> tuple[dict[str, Any], ...]: service = _random_service(key=key, version=version, deprecated=deprecated) @@ -420,7 +440,7 @@ def _fake_factory( fakes.append( _random_access( service, - gid=team_gid, + gid=team_standard_gid, execute_access="x" in team_access, write_access="w" in team_access, product_name=product, diff --git a/services/catalog/tests/unit/with_dbs/test_api_rest_services__list.py b/services/catalog/tests/unit/with_dbs/test_api_rest_services__list.py index 8c2071fea23..732dda730c1 100644 --- a/services/catalog/tests/unit/with_dbs/test_api_rest_services__list.py +++ b/services/catalog/tests/unit/with_dbs/test_api_rest_services__list.py @@ -56,9 +56,9 @@ async def test_list_services_with_details( url = URL("/v0/services").with_query({"user_id": user_id, "details": "true"}) # now fake the director such that it returns half the services - fake_registry_service_data = ServiceMetaDataPublished.model_config[ - "json_schema_extra" - ]["examples"][0] + fake_registry_service_data = ServiceMetaDataPublished.model_json_schema()[ + "examples" + ][0] mocked_director_service_api_base.get("/services", name="list_services").respond( 200, @@ -240,7 +240,9 @@ async def test_list_services_that_are_deprecated( ): # injects fake data in db - deprecation_date = datetime.utcnow() + timedelta(days=1) + deprecation_date = datetime.utcnow() + timedelta( # NOTE: old offset-naive column + days=1 + ) deprecated_service = create_fake_service_data( 
"simcore/services/dynamic/jupyterlab", "1.0.1", @@ -262,9 +264,9 @@ async def test_list_services_that_are_deprecated( assert received_service.deprecated == deprecation_date # for details, the director must return the same service - fake_registry_service_data = ServiceMetaDataPublished.model_config[ - "json_schema_extra" - ]["examples"][0] + fake_registry_service_data = ServiceMetaDataPublished.model_json_schema()[ + "examples" + ][0] mocked_director_service_api_base.get("/services", name="list_services").respond( 200, json={ diff --git a/services/catalog/tests/unit/with_dbs/test_api_rpc.py b/services/catalog/tests/unit/with_dbs/test_api_rpc.py index 3192eabbfe6..830650729ba 100644 --- a/services/catalog/tests/unit/with_dbs/test_api_rpc.py +++ b/services/catalog/tests/unit/with_dbs/test_api_rpc.py @@ -5,7 +5,7 @@ # pylint: disable=unused-variable -from collections.abc import AsyncIterator, Callable +from collections.abc import Callable from typing import Any import pytest @@ -16,9 +16,8 @@ from models_library.services_types import ServiceKey, ServiceVersion from models_library.users import UserID from pydantic import ValidationError -from pytest_simcore.helpers.faker_factories import random_user +from pytest_simcore.helpers.faker_factories import random_icon_url from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict -from pytest_simcore.helpers.postgres_tools import insert_and_get_row_lifespan from pytest_simcore.helpers.typing_env import EnvVarsDict from respx.router import MockRouter from servicelib.rabbitmq import RabbitMQRPCClient @@ -27,13 +26,12 @@ CatalogItemNotFoundError, ) from servicelib.rabbitmq.rpc_interfaces.catalog.services import ( + batch_get_my_services, check_for_service, get_service, list_services_paginated, update_service, ) -from simcore_postgres_database.models.users import users -from sqlalchemy.ext.asyncio import AsyncEngine pytest_simcore_core_services_selection = [ "rabbit", @@ -134,6 +132,7 @@ async def test_rpc_catalog_client( product_name: ProductName, user_id: UserID, app: FastAPI, + faker: Faker, ): assert app @@ -163,8 +162,8 @@ async def test_rpc_catalog_client( assert got.key == service_key assert got.version == service_version - assert got == next( - item + assert got.model_dump(exclude={"history"}) == next( + item.model_dump(exclude={"release"}) for item in page.data if (item.key == service_key and item.version == service_version) ) @@ -178,6 +177,7 @@ async def test_rpc_catalog_client( update={ "name": "foo", "description": "bar", + "icon": random_icon_url(faker), "version_display": "this is a nice version", "description_ui": True, # owner activates wiki view }, # type: ignore @@ -189,6 +189,7 @@ async def test_rpc_catalog_client( assert updated.description == "bar" assert updated.description_ui assert updated.version_display == "this is a nice version" + assert updated.icon is not None assert not updated.classifiers got = await get_service( @@ -257,24 +258,6 @@ async def test_rpc_check_for_service( ) -@pytest.fixture -async def other_user( - user_id: UserID, - sqlalchemy_async_engine: AsyncEngine, - faker: Faker, -) -> AsyncIterator[dict[str, Any]]: - - _user = random_user(fake=faker, id=user_id + 1) - async with insert_and_get_row_lifespan( # pylint:disable=contextmanager-generator-missing-cleanup - sqlalchemy_async_engine, - table=users, - values=_user, - pk_col=users.c.id, - pk_value=_user["id"], - ) as row: - yield row - - async def test_rpc_get_service_access_rights( background_sync_task_mocked: None, mocked_director_service_api: 
MockRouter, @@ -415,3 +398,82 @@ async def test_rpc_get_service_access_rights( "name": "foo", "description": "bar", } + + +async def test_rpc_batch_get_my_services( + background_sync_task_mocked: None, + mocked_director_service_api: MockRouter, + rpc_client: RabbitMQRPCClient, + product_name: ProductName, + user: dict[str, Any], + user_id: UserID, + app: FastAPI, + create_fake_service_data: Callable, + services_db_tables_injector: Callable, +): + # Create fake services data + service_key = "simcore/services/comp/test-batch-service" + service_version_1 = "1.0.0" + service_version_2 = "1.0.5" + + other_service_key = "simcore/services/comp/other-batch-service" + other_service_version = "1.0.0" + + fake_service_1 = create_fake_service_data( + service_key, + service_version_1, + team_access=None, + everyone_access=None, + product=product_name, + ) + fake_service_2 = create_fake_service_data( + service_key, + service_version_2, + team_access="x", + everyone_access=None, + product=product_name, + ) + fake_service_3 = create_fake_service_data( + other_service_key, + other_service_version, + team_access=None, + everyone_access=None, + product=product_name, + ) + + # Inject fake services into the database + await services_db_tables_injector([fake_service_1, fake_service_2, fake_service_3]) + + # Batch get my services: project with two, not three + ids = [ + (service_key, service_version_1), + (other_service_key, other_service_version), + ] + + my_services = await batch_get_my_services( + rpc_client, + product_name=product_name, + user_id=user_id, + ids=ids, + ) + + assert len(my_services) == 2 + + # Check access rights to all of them + assert my_services[0].my_access_rights.model_dump() == { + "execute": True, + "write": True, + } + assert my_services[0].owner == user["primary_gid"] + assert my_services[0].key == service_key + assert my_services[0].release.version == service_version_1 + assert my_services[0].release.compatibility + assert ( + my_services[0].release.compatibility.can_update_to.version == service_version_2 + ) + + assert my_services[1].my_access_rights.model_dump() == { + "execute": True, + "write": True, + } + assert my_services[1].owner == user["primary_gid"] diff --git a/services/catalog/tests/unit/with_dbs/test_db_repositories.py b/services/catalog/tests/unit/with_dbs/test_db_repositories.py index 8c4053c4ca6..e8990527c04 100644 --- a/services/catalog/tests/unit/with_dbs/test_db_repositories.py +++ b/services/catalog/tests/unit/with_dbs/test_db_repositories.py @@ -1,7 +1,9 @@ # pylint: disable=redefined-outer-name # pylint: disable=unused-argument # pylint: disable=unused-variable +# pylint: disable=too-many-arguments +from collections import Counter from collections.abc import Callable from dataclasses import dataclass, field @@ -9,12 +11,13 @@ from models_library.products import ProductName from models_library.users import UserID from packaging import version -from packaging.version import Version -from pydantic import EmailStr, TypeAdapter +from pydantic import EmailStr, HttpUrl, TypeAdapter from simcore_service_catalog.db.repositories.services import ServicesRepository from simcore_service_catalog.models.services_db import ( ServiceAccessRightsAtDB, - ServiceMetaDataAtDB, + ServiceMetaDataDBCreate, + ServiceMetaDataDBGet, + ServiceMetaDataDBPatch, ) from simcore_service_catalog.utils.versioning import is_patch_release from sqlalchemy.ext.asyncio import AsyncEngine @@ -109,19 +112,44 @@ async def test_create_services( ) # validation - service = 
ServiceMetaDataAtDB.model_validate(fake_service) + service_db_create = ServiceMetaDataDBCreate.model_validate(fake_service) service_access_rights = [ ServiceAccessRightsAtDB.model_validate(a) for a in fake_access_rights ] new_service = await services_repo.create_or_update_service( - service, service_access_rights + service_db_create, service_access_rights ) - assert ( - new_service.model_dump(include=set(fake_service.keys())) == service.model_dump() + assert new_service.model_dump( + include=service_db_create.model_fields_set + ) == service_db_create.model_dump(exclude_unset=True) + + +@pytest.mark.parametrize( + "url_object", + [ + "https://github.com/some/path/to/image.png?raw=true", + TypeAdapter(HttpUrl).validate_python( + "https://github.com/some/path/to/image.png?raw=true" + ), + "", + None, + ], +) +async def test_regression_service_meta_data_db_create( + create_fake_service_data: Callable, url_object: str | HttpUrl | None +): + fake_service, *_ = create_fake_service_data( + "simcore/services/dynamic/jupyterlab", + "1.0.0", + team_access="x", + everyone_access="x", ) + fake_service["icon"] = url_object + assert ServiceMetaDataDBCreate.model_validate(fake_service) + async def test_read_services( services_repo: ServicesRepository, @@ -201,7 +229,7 @@ async def test_list_service_releases( fake_catalog_with_jupyterlab: FakeCatalogInfo, services_repo: ServicesRepository, ): - services: list[ServiceMetaDataAtDB] = await services_repo.list_service_releases( + services: list[ServiceMetaDataDBGet] = await services_repo.list_service_releases( "simcore/services/dynamic/jupyterlab" ) assert len(services) == fake_catalog_with_jupyterlab.expected_services_count @@ -238,19 +266,19 @@ async def test_list_service_releases_version_filtered( assert latest assert latest.version == fake_catalog_with_jupyterlab.expected_latest - releases_1_1_x: list[ - ServiceMetaDataAtDB - ] = await services_repo.list_service_releases( - "simcore/services/dynamic/jupyterlab", major=1, minor=1 + releases_1_1_x: list[ServiceMetaDataDBGet] = ( + await services_repo.list_service_releases( + "simcore/services/dynamic/jupyterlab", major=1, minor=1 + ) ) assert [ s.version for s in releases_1_1_x ] == fake_catalog_with_jupyterlab.expected_1_1_x - expected_0_x_x: list[ - ServiceMetaDataAtDB - ] = await services_repo.list_service_releases( - "simcore/services/dynamic/jupyterlab", major=0 + expected_0_x_x: list[ServiceMetaDataDBGet] = ( + await services_repo.list_service_releases( + "simcore/services/dynamic/jupyterlab", major=0 + ) ) assert [ s.version for s in expected_0_x_x @@ -282,16 +310,13 @@ async def test_list_all_services_and_history( assert len(services_items) == 1 assert total_count == 1 + # latest assert services_items[0].key == "simcore/services/dynamic/jupyterlab" - history = services_items[0].history - assert len(history) == fake_catalog_with_jupyterlab.expected_services_count - - # latest, ..., first version - assert history[0].version == fake_catalog_with_jupyterlab.expected_latest + assert services_items[0].version == fake_catalog_with_jupyterlab.expected_latest - # check sorted - got_versions = [Version(h.version) for h in history] - assert got_versions == sorted(got_versions, reverse=True) + assert ( + len(services_items[0].history) == 0 + ), "list_latest_service does NOT show history" async def test_listing_with_no_services( @@ -338,7 +363,7 @@ async def test_list_all_services_and_history_with_pagination( assert total_count == num_services for service in services_items: - assert len(service.history) == 
num_versions_per_service + assert len(service.history) == 0, "Do not show history in listing" assert service.version == expected_latest_version _, services_items = await services_repo.list_latest_services( @@ -347,14 +372,22 @@ assert len(services_items) == 2 for service in services_items: - assert len(service.history) == num_versions_per_service + assert len(service.history) == 0, "Do not show history in listing" assert TypeAdapter(EmailStr).validate_python( service.owner_email ), "resolved owner's email" - expected_latest_version = service.history[0].version # latest service is first - assert service.version == expected_latest_version + duplicates = [ + service_key + for service_key, count in Counter( + service.key for service in services_items + ).items() + if count > 1 + ] + assert ( + not duplicates + ), f"list of latest versions of services cannot have duplicates, found: {duplicates}" async def test_get_and_update_service_meta_data( @@ -386,15 +419,15 @@ assert got.version == service_version await services_repo.update_service( - ServiceMetaDataAtDB.model_construct( - key=service_key, version=service_version, name="foo" - ), + service_key, + service_version, + ServiceMetaDataDBPatch(name="foo"), ) updated = await services_repo.get_service(service_key, service_version) + assert updated - assert got.model_copy(update={"name": "foo"}) == updated - - assert await services_repo.get_service(service_key, service_version) == updated + expected = got.model_copy(update={"name": "foo", "modified": updated.modified}) + assert updated == expected async def test_can_get_service( diff --git a/services/catalog/tests/unit/with_dbs/test_services_services_api.py b/services/catalog/tests/unit/with_dbs/test_services_services_api.py index bcfae48d319..7deccf3be15 100644 --- a/services/catalog/tests/unit/with_dbs/test_services_services_api.py +++ b/services/catalog/tests/unit/with_dbs/test_services_services_api.py @@ -1,16 +1,23 @@ # pylint: disable=redefined-outer-name # pylint: disable=unused-argument # pylint: disable=unused-variable +# pylint: disable=too-many-arguments + from collections.abc import Callable +from datetime import datetime, timedelta from typing import Any import pytest from fastapi import FastAPI +from models_library.api_schemas_catalog.services import MyServiceGet from models_library.products import ProductName from models_library.users import UserID +from pydantic import TypeAdapter +from pytest_simcore.helpers.catalog_services import CreateFakeServiceDataCallable from respx.router import MockRouter from simcore_service_catalog.api.dependencies.director import get_director_api +from simcore_service_catalog.db.repositories.groups import GroupsRepository from simcore_service_catalog.db.repositories.services import ServicesRepository from simcore_service_catalog.services import manifest, services_api from simcore_service_catalog.services.director import DirectorApi @@ -29,6 +36,11 @@ def services_repo(sqlalchemy_async_engine: AsyncEngine): return ServicesRepository(sqlalchemy_async_engine) +@pytest.fixture +def groups_repo(sqlalchemy_async_engine: AsyncEngine): + return GroupsRepository(sqlalchemy_async_engine) + + @pytest.fixture def num_services() -> int: return 5 @@ -78,7 +90,10 @@ async def background_sync_task_mocked( services_db_tables_injector: Callable, fake_services_data: list, ) -> None: - # inject db services (typically done by the sync background task) + """ +
Emulates a sync background task that injects + some services in the db + """ await services_db_tables_injector(fake_services_data) @@ -110,7 +125,7 @@ async def test_list_services_paginated( assert not mocked_director_service_api["get_service"].called - total_count, page_items = await services_api.list_services_paginated( + total_count, page_items = await services_api.list_latest_services( services_repo, director_client, product_name=target_product, @@ -128,7 +143,6 @@ for item in page_items: assert item.access_rights assert item.owner is not None - assert item.history[0].version == item.version got = await services_api.get_service( services_repo, @@ -139,7 +153,123 @@ service_version=item.version, ) - assert got == item + assert got.model_dump(exclude={"history"}) == item.model_dump( + exclude={"release"} + ) + assert item.release in got.history # since it is cached, it should only call it `limit` times assert mocked_director_service_api["get_service"].call_count == limit + + +async def test_batch_get_my_services( + background_tasks_setup_disabled: None, + rabbitmq_and_rpc_setup_disabled: None, + mocked_director_service_api: MockRouter, + target_product: ProductName, + services_repo: ServicesRepository, + groups_repo: GroupsRepository, + user_id: UserID, + user: dict[str, Any], + other_user: dict[str, Any], + create_fake_service_data: CreateFakeServiceDataCallable, + services_db_tables_injector: Callable, +): + # catalog + service_key = "simcore/services/comp/some-service" + service_version_1 = "1.0.0" # can upgrade to 1.0.10 + service_version_2 = "1.0.10" # latest + + other_service_key = "simcore/services/comp/other-service" + other_service_version = "2.1.2" + + expected_retirement = datetime.utcnow() + timedelta( + days=1 + ) # NOTE: old offset-naive column + + # Owned by user + fake_service_1 = create_fake_service_data( + service_key, + service_version_1, + team_access=None, + everyone_access=None, + product=target_product, + deprecated=expected_retirement, + ) + fake_service_2 = create_fake_service_data( + service_key, + service_version_2, + team_access="x", + everyone_access=None, + product=target_product, + ) + + # Owned by other-user + fake_service_3 = create_fake_service_data( + other_service_key, + other_service_version, + team_access=None, + everyone_access=None, + product=target_product, + ) + _service, _owner_access = fake_service_3 + _service["owner"] = other_user["primary_gid"] + _owner_access["gid"] = other_user["primary_gid"] + + # Inject fake services into the database + await services_db_tables_injector([fake_service_1, fake_service_2, fake_service_3]) + + # UNDER TEST ------------------------------- + + # Batch get services e.g.
services in a project + services_ids = [ + (service_key, service_version_1), + (other_service_key, other_service_version), + ] + + my_services = await services_api.batch_get_my_services( + services_repo, + groups_repo, + product_name=target_product, + user_id=user_id, + ids=services_ids, + ) + + # CHECKS ------------------------------- + + # assert returned order and length as ids + assert services_ids == [(sc.key, sc.release.version) for sc in my_services] + + assert my_services == TypeAdapter(list[MyServiceGet]).validate_python( + [ + { + "key": "simcore/services/comp/some-service", + "release": { + "version": service_version_1, + "version_display": None, + "released": my_services[0].release.released, + "retired": expected_retirement, + "compatibility": { + "can_update_to": {"version": service_version_2} + }, # can be updated + }, + "owner": user["primary_gid"], + "my_access_rights": {"execute": True, "write": True}, # full access + }, + { + "key": "simcore/services/comp/other-service", + "release": { + "version": other_service_version, + "version_display": None, + "released": my_services[1].release.released, + "retired": None, + "compatibility": None, # cannot be updated + }, + "owner": other_user["primary_gid"], # needs to request access + "my_access_rights": { + "execute": False, + "write": False, + }, + }, + ] + ) diff --git a/services/clusters-keeper/docker/boot.sh b/services/clusters-keeper/docker/boot.sh index 7623ff78e43..e25a2bb280c 100755 --- a/services/clusters-keeper/docker/boot.sh +++ b/services/clusters-keeper/docker/boot.sh @@ -24,7 +24,7 @@ if [ "${SC_BUILD_TARGET}" = "development" ]; then command -v python | sed 's/^/ /' cd services/clusters-keeper - uv pip --quiet --no-cache-dir sync requirements/dev.txt + uv pip --quiet sync requirements/dev.txt cd - echo "$INFO" "PIP :" uv pip list @@ -32,7 +32,7 @@ fi if [ "${SC_BOOT_MODE}" = "debug" ]; then # NOTE: production does NOT pre-installs debugpy - uv pip install --no-cache-dir debugpy + uv pip install debugpy fi # diff --git a/services/clusters-keeper/requirements/_base.txt b/services/clusters-keeper/requirements/_base.txt index 9284af99ada..d39407abe82 100644 --- a/services/clusters-keeper/requirements/_base.txt +++ b/services/clusters-keeper/requirements/_base.txt @@ -85,7 +85,32 @@ attrs==24.2.0 # jsonschema # referencing boto3==1.35.36 - # via aiobotocore + # via + # -c requirements/../../../packages/aws-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt + # aiobotocore botocore==1.35.36 # via # aiobotocore @@ -163,6 +188,9 @@ fastapi==0.115.6 # via # -r requirements/../../../packages/service-library/requirements/_fastapi.in # -r requirements/_base.in + # fastapi-lifespan-manager +fastapi-lifespan-manager==0.1.4 + # via -r requirements/../../../packages/service-library/requirements/_fastapi.in faststream==0.5.33 # via # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in @@ -449,6 +477,8 @@ psutil==6.1.0 # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in # distributed +pycryptodome==3.21.0 + # via stream-zip pydantic==2.10.3 # via # -c 
requirements/../../../packages/aws-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -524,6 +554,30 @@ pydantic-extra-types==2.10.0 # -r requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in pydantic-settings==2.6.1 # via + # -c requirements/../../../packages/aws-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt # -r requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in @@ -605,6 +659,30 @@ redis==5.2.1 # -r requirements/../../../packages/service-library/requirements/_base.in referencing==0.35.1 # via + # -c requirements/../../../packages/aws-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt # jsonschema # jsonschema-specifications requests==2.32.3 @@ -662,6 +740,10 @@ starlette==0.41.3 # -c requirements/../../../requirements/constraints.txt # fastapi # prometheus-fastapi-instrumentator +stream-zip==0.0.83 + # via + # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/_base.in tblib==3.0.0 # via # -c requirements/../../../services/dask-sidecar/requirements/_dask-distributed.txt @@ -692,13 +774,13 @@ typer==0.15.1 # -r requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/settings-library/requirements/_base.in -types-aiobotocore==2.15.2.post3 +types-aiobotocore==2.19.0 # via -r requirements/../../../packages/aws-library/requirements/_base.in -types-aiobotocore-ec2==2.15.2 +types-aiobotocore-ec2==2.19.0 # via types-aiobotocore -types-aiobotocore-s3==2.15.2.post1 +types-aiobotocore-s3==2.19.0 # via types-aiobotocore -types-aiobotocore-ssm==2.15.2 +types-aiobotocore-ssm==2.19.0 # via types-aiobotocore types-awscrt==0.23.3 # via botocore-stubs diff --git a/services/clusters-keeper/requirements/_test.txt b/services/clusters-keeper/requirements/_test.txt index 554ed0d38d2..275b2725230 100644 --- a/services/clusters-keeper/requirements/_test.txt +++ b/services/clusters-keeper/requirements/_test.txt @@ -33,7 +33,7 @@ attrs==24.2.0 # aiohttp # jsonschema # referencing -aws-sam-translator==1.94.0 +aws-sam-translator==1.95.0 # via cfn-lint aws-xray-sdk==2.14.0 # via moto @@ -61,7 +61,7 @@ certifi==2024.8.30 # requests cffi==1.17.1 # via cryptography -cfn-lint==1.22.7 +cfn-lint==1.27.0 # via moto charset-normalizer==3.4.0 # via @@ -71,32 +71,32 @@ click==8.1.7 # via # -c requirements/_base.txt # flask -coverage==7.6.10 +coverage==7.6.12 # via # -r requirements/_test.in # pytest-cov -cryptography==44.0.0 +cryptography==44.0.2 # via # -c requirements/../../../requirements/constraints.txt # joserfc # moto 
debugpy==1.8.12 # via -r requirements/_test.in -deepdiff==8.1.1 +deepdiff==8.2.0 # via -r requirements/_test.in docker==7.1.0 # via # -r requirements/_test.in # moto -faker==35.0.0 +faker==36.1.1 # via -r requirements/_test.in -fakeredis==2.26.2 +fakeredis==2.27.0 # via -r requirements/_test.in flask==3.1.0 # via # flask-cors # moto -flask-cors==5.0.0 +flask-cors==5.0.1 # via moto frozenlist==1.5.0 # via @@ -141,9 +141,7 @@ jmespath==1.0.1 # -c requirements/_base.txt # boto3 # botocore -joserfc==1.0.2 - # via moto -jsondiff==2.2.1 +joserfc==1.0.4 # via moto jsonpatch==1.33 # via cfn-lint @@ -173,10 +171,8 @@ markupsafe==3.0.2 # -c requirements/_base.txt # jinja2 # werkzeug -moto==5.0.20 - # via - # -c requirements/../../../requirements/constraints.txt - # -r requirements/_test.in +moto==5.1.1 + # via -r requirements/_test.in mpmath==1.3.0 # via sympy multidict==6.1.0 @@ -190,7 +186,7 @@ openapi-schema-validator==0.6.3 # via openapi-spec-validator openapi-spec-validator==0.7.1 # via moto -orderly-set==5.2.3 +orderly-set==5.3.0 # via deepdiff packaging==24.2 # via @@ -213,7 +209,7 @@ psutil==6.1.0 # via # -c requirements/_base.txt # -r requirements/_test.in -py-partiql-parser==0.5.6 +py-partiql-parser==0.6.1 # via moto pycparser==2.22 # via cffi @@ -228,7 +224,7 @@ pydantic-core==2.27.1 # pydantic pyparsing==3.2.1 # via moto -pytest==8.3.4 +pytest==8.3.5 # via # -r requirements/_test.in # pytest-asyncio @@ -248,7 +244,6 @@ python-dateutil==2.9.0.post0 # via # -c requirements/_base.txt # botocore - # faker # moto python-dotenv==1.0.1 # via @@ -259,7 +254,6 @@ pyyaml==6.0.2 # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt # cfn-lint - # jsondiff # jsonschema-path # moto # responses @@ -299,7 +293,7 @@ s3transfer==0.10.4 # via # -c requirements/_base.txt # boto3 -setuptools==75.8.0 +setuptools==75.8.2 # via moto six==1.17.0 # via @@ -325,9 +319,10 @@ typing-extensions==4.12.2 # anyio # aws-sam-translator # cfn-lint - # faker # pydantic # pydantic-core +tzdata==2025.1 + # via faker urllib3==2.2.3 # via # -c requirements/../../../requirements/constraints.txt @@ -339,6 +334,7 @@ urllib3==2.2.3 werkzeug==3.1.3 # via # flask + # flask-cors # moto wrapt==1.17.0 # via diff --git a/services/clusters-keeper/requirements/_tools.txt b/services/clusters-keeper/requirements/_tools.txt index 473aca0228f..6c7b9431172 100644 --- a/services/clusters-keeper/requirements/_tools.txt +++ b/services/clusters-keeper/requirements/_tools.txt @@ -1,6 +1,6 @@ astroid==3.3.8 # via pylint -black==24.10.0 +black==25.1.0 # via -r requirements/../../../requirements/devenv.txt build==1.2.2.post1 # via pip-tools @@ -20,15 +20,15 @@ distlib==0.3.9 # via virtualenv filelock==3.17.0 # via virtualenv -identify==2.6.6 +identify==2.6.8 # via pre-commit -isort==5.13.2 +isort==6.0.1 # via # -r requirements/../../../requirements/devenv.txt # pylint mccabe==0.7.0 # via pylint -mypy==1.14.1 +mypy==1.15.0 # via -r requirements/../../../requirements/devenv.txt mypy-extensions==1.0.0 # via @@ -44,7 +44,7 @@ packaging==24.2 # build pathspec==0.12.1 # via black -pip==25.0 +pip==25.0.1 # via pip-tools pip-tools==7.4.1 # via -r requirements/../../../requirements/devenv.txt @@ -55,7 +55,7 @@ platformdirs==4.3.6 # virtualenv pre-commit==4.1.0 # via -r requirements/../../../requirements/devenv.txt -pylint==3.3.3 +pylint==3.3.4 # via -r requirements/../../../requirements/devenv.txt pyproject-hooks==1.2.0 # via @@ -68,9 +68,9 @@ pyyaml==6.0.2 # -c requirements/_test.txt # pre-commit # watchdog -ruff==0.9.3 
+ruff==0.9.9 # via -r requirements/../../../requirements/devenv.txt -setuptools==75.8.0 +setuptools==75.8.2 # via # -c requirements/_test.txt # pip-tools @@ -81,7 +81,7 @@ typing-extensions==4.12.2 # -c requirements/_base.txt # -c requirements/_test.txt # mypy -virtualenv==20.29.1 +virtualenv==20.29.2 # via pre-commit watchdog==6.0.0 # via -r requirements/_tools.in diff --git a/services/clusters-keeper/src/simcore_service_clusters_keeper/core/application.py b/services/clusters-keeper/src/simcore_service_clusters_keeper/core/application.py index ac3955a3f25..ad354a2c8b1 100644 --- a/services/clusters-keeper/src/simcore_service_clusters_keeper/core/application.py +++ b/services/clusters-keeper/src/simcore_service_clusters_keeper/core/application.py @@ -4,7 +4,7 @@ from servicelib.fastapi.prometheus_instrumentation import ( setup_prometheus_instrumentation, ) -from servicelib.fastapi.tracing import setup_tracing +from servicelib.fastapi.tracing import initialize_tracing from .._meta import ( API_VERSION, @@ -45,7 +45,7 @@ def create_app(settings: ApplicationSettings) -> FastAPI: if app.state.settings.CLUSTERS_KEEPER_PROMETHEUS_INSTRUMENTATION_ENABLED: setup_prometheus_instrumentation(app) if app.state.settings.CLUSTERS_KEEPER_TRACING: - setup_tracing( + initialize_tracing( app, app.state.settings.CLUSTERS_KEEPER_TRACING, APP_NAME, diff --git a/services/dask-sidecar/docker/boot.sh b/services/dask-sidecar/docker/boot.sh index 2cfdbf349d7..89a4d14afc6 100755 --- a/services/dask-sidecar/docker/boot.sh +++ b/services/dask-sidecar/docker/boot.sh @@ -25,7 +25,7 @@ if [ "${SC_BUILD_TARGET}" = "development" ]; then python --version | sed 's/^/ /' command -v python | sed 's/^/ /' cd services/dask-sidecar - uv pip sync --quiet --no-cache-dir requirements/dev.txt + uv pip --quiet sync requirements/dev.txt cd - print_info "PIP :" uv pip list @@ -33,7 +33,7 @@ fi if [ "${SC_BOOT_MODE}" = "debug" ]; then # NOTE: production does NOT pre-installs debugpy - uv pip install --no-cache-dir debugpy + uv pip install debugpy fi # RUNNING application ---------------------------------------- diff --git a/services/dask-sidecar/requirements/_base.txt b/services/dask-sidecar/requirements/_base.txt index 7ba73695252..e06cb5585ee 100644 --- a/services/dask-sidecar/requirements/_base.txt +++ b/services/dask-sidecar/requirements/_base.txt @@ -317,6 +317,8 @@ psutil==6.1.0 # via # -r requirements/../../../packages/service-library/requirements/_base.in # distributed +pycryptodome==3.21.0 + # via stream-zip pydantic==2.10.3 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -376,6 +378,24 @@ pydantic-extra-types==2.10.0 # -r requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in pydantic-settings==2.6.1 # via + # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/dask-task-models-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/dask-task-models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt # -r requirements/../../../packages/dask-task-models-library/requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/dask-task-models-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/models-library/requirements/_base.in @@ -442,6 +462,24 @@ redis==5.2.1 # -r requirements/../../../packages/service-library/requirements/_base.in referencing==0.35.1 # via + # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/dask-task-models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt # jsonschema # jsonschema-specifications repro-zipfile==0.3.1 @@ -468,6 +506,8 @@ sniffio==1.3.1 # via anyio sortedcontainers==2.4.0 # via distributed +stream-zip==0.0.83 + # via -r requirements/../../../packages/service-library/requirements/_base.in tblib==3.0.0 # via distributed tenacity==9.0.0 diff --git a/services/dask-sidecar/requirements/_test.txt b/services/dask-sidecar/requirements/_test.txt index b7de11f2e76..cac785dd3e5 100644 --- a/services/dask-sidecar/requirements/_test.txt +++ b/services/dask-sidecar/requirements/_test.txt @@ -9,7 +9,7 @@ attrs==24.2.0 # -c requirements/_base.txt # jsonschema # referencing -aws-sam-translator==1.94.0 +aws-sam-translator==1.95.0 # via cfn-lint aws-xray-sdk==2.14.0 # via moto @@ -34,7 +34,7 @@ certifi==2024.8.30 # requests cffi==1.17.1 # via cryptography -cfn-lint==1.22.7 +cfn-lint==1.27.0 # via moto charset-normalizer==3.4.0 # via @@ -44,11 +44,11 @@ click==8.1.7 # via # -c requirements/_base.txt # flask -coverage==7.6.10 +coverage==7.6.12 # via # -r requirements/_test.in # pytest-cov -cryptography==44.0.0 +cryptography==44.0.2 # via # -c requirements/../../../requirements/constraints.txt # joserfc @@ -58,13 +58,13 @@ docker==7.1.0 # via # -r requirements/_test.in # moto -faker==35.0.0 +faker==36.1.1 # via -r requirements/_test.in flask==3.1.0 # via # flask-cors # moto -flask-cors==5.0.0 +flask-cors==5.0.1 # via moto graphql-core==3.2.6 # via moto @@ -89,9 +89,7 @@ jmespath==1.0.1 # -c requirements/_base.txt # boto3 # botocore -joserfc==1.0.2 - # via moto -jsondiff==2.2.1 +joserfc==1.0.4 # via moto jsonpatch==1.33 # via cfn-lint @@ -119,10 +117,8 @@ markupsafe==3.0.2 # -c requirements/_base.txt # jinja2 # werkzeug -moto==5.0.20 - # via - # -c requirements/../../../requirements/constraints.txt - # -r requirements/_test.in +moto==5.1.1 + # via -r requirements/_test.in mpmath==1.3.0 # via sympy networkx==3.4.2 @@ -144,7 +140,7 @@ ply==3.11 # via 
jsonpath-ng pprintpp==0.4.0 # via pytest-icdiff -py-partiql-parser==0.5.6 +py-partiql-parser==0.6.1 # via moto pycparser==2.22 # via cffi @@ -163,7 +159,7 @@ pyopenssl==25.0.0 # via pytest-localftpserver pyparsing==3.2.1 # via moto -pytest==8.3.4 +pytest==8.3.5 # via # -r requirements/_test.in # pytest-asyncio @@ -193,7 +189,6 @@ python-dateutil==2.9.0.post0 # via # -c requirements/_base.txt # botocore - # faker # moto python-dotenv==1.0.1 # via @@ -204,7 +199,6 @@ pyyaml==6.0.2 # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt # cfn-lint - # jsondiff # jsonschema-path # moto # responses @@ -235,7 +229,7 @@ rpds-py==0.22.3 # referencing s3transfer==0.10.4 # via boto3 -setuptools==75.8.0 +setuptools==75.8.2 # via moto six==1.17.0 # via @@ -253,10 +247,13 @@ typing-extensions==4.12.2 # -c requirements/_base.txt # aws-sam-translator # cfn-lint - # faker # pydantic # pydantic-core # pyopenssl +tzdata==2024.2 + # via + # -c requirements/_base.txt + # faker urllib3==2.2.3 # via # -c requirements/../../../requirements/constraints.txt @@ -268,6 +265,7 @@ urllib3==2.2.3 werkzeug==3.1.3 # via # flask + # flask-cors # moto wrapt==1.17.0 # via diff --git a/services/dask-sidecar/requirements/_tools.txt b/services/dask-sidecar/requirements/_tools.txt index 473aca0228f..6c7b9431172 100644 --- a/services/dask-sidecar/requirements/_tools.txt +++ b/services/dask-sidecar/requirements/_tools.txt @@ -1,6 +1,6 @@ astroid==3.3.8 # via pylint -black==24.10.0 +black==25.1.0 # via -r requirements/../../../requirements/devenv.txt build==1.2.2.post1 # via pip-tools @@ -20,15 +20,15 @@ distlib==0.3.9 # via virtualenv filelock==3.17.0 # via virtualenv -identify==2.6.6 +identify==2.6.8 # via pre-commit -isort==5.13.2 +isort==6.0.1 # via # -r requirements/../../../requirements/devenv.txt # pylint mccabe==0.7.0 # via pylint -mypy==1.14.1 +mypy==1.15.0 # via -r requirements/../../../requirements/devenv.txt mypy-extensions==1.0.0 # via @@ -44,7 +44,7 @@ packaging==24.2 # build pathspec==0.12.1 # via black -pip==25.0 +pip==25.0.1 # via pip-tools pip-tools==7.4.1 # via -r requirements/../../../requirements/devenv.txt @@ -55,7 +55,7 @@ platformdirs==4.3.6 # virtualenv pre-commit==4.1.0 # via -r requirements/../../../requirements/devenv.txt -pylint==3.3.3 +pylint==3.3.4 # via -r requirements/../../../requirements/devenv.txt pyproject-hooks==1.2.0 # via @@ -68,9 +68,9 @@ pyyaml==6.0.2 # -c requirements/_test.txt # pre-commit # watchdog -ruff==0.9.3 +ruff==0.9.9 # via -r requirements/../../../requirements/devenv.txt -setuptools==75.8.0 +setuptools==75.8.2 # via # -c requirements/_test.txt # pip-tools @@ -81,7 +81,7 @@ typing-extensions==4.12.2 # -c requirements/_base.txt # -c requirements/_test.txt # mypy -virtualenv==20.29.1 +virtualenv==20.29.2 # via pre-commit watchdog==6.0.0 # via -r requirements/_tools.in diff --git a/services/dask-sidecar/src/simcore_service_dask_sidecar/computational_sidecar/core.py b/services/dask-sidecar/src/simcore_service_dask_sidecar/computational_sidecar/core.py index b6ae0b25611..126485b2645 100644 --- a/services/dask-sidecar/src/simcore_service_dask_sidecar/computational_sidecar/core.py +++ b/services/dask-sidecar/src/simcore_service_dask_sidecar/computational_sidecar/core.py @@ -16,7 +16,6 @@ from dask_task_models_library.container_tasks.errors import ServiceRuntimeError from dask_task_models_library.container_tasks.io import FileUrl, TaskOutputData from dask_task_models_library.container_tasks.protocol import ContainerTaskParameters -from 
models_library.basic_types import IDStr from models_library.progress_bar import ProgressReport from packaging import version from pydantic import ValidationError @@ -181,7 +180,7 @@ async def run(self, command: list[str]) -> TaskOutputData: num_steps=3, step_weights=[5 / 100, 90 / 100, 5 / 100], progress_report_cb=self.task_publishers.publish_progress, - description=IDStr("running"), + description="running", ) as progress_bar: # PRE-PROCESSING await pull_image( @@ -222,7 +221,7 @@ async def run(self, command: list[str]) -> TaskOutputData: config, name=f"{self.task_parameters.image.split(sep='/')[-1]}_{run_id}", ) as container, progress_bar.sub_progress( - 100, description=IDStr("processing") + 100, description="processing" ) as processing_progress_bar, managed_monitor_container_log_task( container=container, progress_regexp=image_labels.get_progress_regexp(), diff --git a/services/datcore-adapter/docker/boot.sh b/services/datcore-adapter/docker/boot.sh index 74f8d84a7aa..848a70e4733 100755 --- a/services/datcore-adapter/docker/boot.sh +++ b/services/datcore-adapter/docker/boot.sh @@ -24,7 +24,7 @@ if [ "${SC_BUILD_TARGET}" = "development" ]; then command -v python | sed 's/^/ /' cd services/datcore-adapter - uv pip --quiet --no-cache-dir sync requirements/dev.txt + uv pip --quiet sync requirements/dev.txt cd - echo "$INFO" "PIP :" uv pip list @@ -32,7 +32,7 @@ fi if [ "${SC_BOOT_MODE}" = "debug" ]; then # NOTE: production does NOT pre-installs debugpy - uv pip install --no-cache-dir debugpy + uv pip install debugpy fi # RUNNING application ---------------------------------------- diff --git a/services/datcore-adapter/requirements/_base.txt b/services/datcore-adapter/requirements/_base.txt index eef071269ca..4b278297517 100644 --- a/services/datcore-adapter/requirements/_base.txt +++ b/services/datcore-adapter/requirements/_base.txt @@ -53,7 +53,20 @@ attrs==23.2.0 # jsonschema # referencing boto3==1.34.75 - # via -r requirements/_base.in + # via + # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + 
# -c requirements/../../../requirements/constraints.txt + # -r requirements/_base.in botocore==1.34.75 # via # boto3 @@ -97,7 +110,10 @@ fastapi==0.115.5 # via # -r requirements/../../../packages/service-library/requirements/_fastapi.in # -r requirements/_base.in + # fastapi-lifespan-manager # prometheus-fastapi-instrumentator +fastapi-lifespan-manager==0.1.4 + # via -r requirements/../../../packages/service-library/requirements/_fastapi.in fastapi-pagination==0.12.31 # via -r requirements/_base.in faststream==0.5.31 @@ -273,6 +289,8 @@ protobuf==4.25.4 # opentelemetry-proto psutil==6.0.0 # via -r requirements/../../../packages/service-library/requirements/_base.in +pycryptodome==3.21.0 + # via stream-zip pydantic==2.10.2 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -318,6 +336,18 @@ pydantic-extra-types==2.9.0 # -r requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in pydantic-settings==2.6.1 # via + # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt # -r requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in @@ -369,6 +399,18 @@ redis==5.2.1 # -r requirements/../../../packages/service-library/requirements/_base.in referencing==0.29.3 # via + # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt # jsonschema # jsonschema-specifications requests==2.32.3 @@ -409,6 +451,8 @@ starlette==0.41.0 # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt # fastapi +stream-zip==0.0.83 + # via -r requirements/../../../packages/service-library/requirements/_base.in tenacity==8.5.0 # via -r requirements/../../../packages/service-library/requirements/_base.in toolz==0.12.1 diff --git a/services/datcore-adapter/requirements/_test.in b/services/datcore-adapter/requirements/_test.in index f7b499f2966..be147167572 100644 --- a/services/datcore-adapter/requirements/_test.in +++ b/services/datcore-adapter/requirements/_test.in @@ -6,6 +6,8 @@ asgi_lifespan +botocore-stubs +boto3-stubs coverage faker pytest @@ -19,5 +21,3 @@ pytest-sugar pytest-xdist requests respx -types-boto3 -types-botocore diff --git a/services/datcore-adapter/requirements/_test.txt b/services/datcore-adapter/requirements/_test.txt index bf40c4a5f46..2f03edb0fa2 100644 --- a/services/datcore-adapter/requirements/_test.txt +++ b/services/datcore-adapter/requirements/_test.txt @@ -4,10 +4,12 @@ anyio==4.3.0 # httpx asgi-lifespan==2.1.0 # via -r requirements/_test.in -botocore-stubs==1.36.6 +boto3-stubs==1.37.4 + # via -r requirements/_test.in +botocore-stubs==1.37.4 # via - # types-boto3 - # types-botocore + # -r requirements/_test.in + # boto3-stubs certifi==2024.2.2 # via # -c requirements/../../../requirements/constraints.txt @@ -19,13 +21,13 @@ charset-normalizer==3.3.2 # via # -c requirements/_base.txt # requests -coverage==7.6.10 +coverage==7.6.12 # via # -r requirements/_test.in # pytest-cov execnet==2.1.1 # via pytest-xdist -faker==35.0.0 +faker==36.1.1 # via -r requirements/_test.in h11==0.14.0 # via @@ -58,7 +60,7 @@ pluggy==1.5.0 # via pytest pprintpp==0.4.0 # via pytest-icdiff -pytest==8.3.4 +pytest==8.3.5 # via # -r requirements/_test.in # pytest-asyncio @@ -86,20 +88,12 @@ pytest-sugar==1.0.0 # via -r requirements/_test.in pytest-xdist==3.6.1 # via -r requirements/_test.in -python-dateutil==2.9.0.post0 - # via - # -c requirements/_base.txt - # faker requests==2.32.3 # via # -c requirements/_base.txt # -r requirements/_test.in respx==0.22.0 # via -r requirements/_test.in 
-six==1.16.0 - # via - # -c requirements/_base.txt - # python-dateutil sniffio==1.3.1 # via # -c requirements/_base.txt @@ -108,19 +102,16 @@ sniffio==1.3.1 # httpx termcolor==2.5.0 # via pytest-sugar -types-awscrt==0.23.7 +types-awscrt==0.23.10 # via botocore-stubs -types-boto3==1.36.6 - # via -r requirements/_test.in -types-botocore==1.0.2 - # via -r requirements/_test.in -types-s3transfer==0.11.2 - # via types-boto3 +types-s3transfer==0.11.3 + # via boto3-stubs typing-extensions==4.12.2 # via # -c requirements/_base.txt - # faker - # types-boto3 + # boto3-stubs +tzdata==2025.1 + # via faker urllib3==2.2.3 # via # -c requirements/../../../requirements/constraints.txt diff --git a/services/datcore-adapter/requirements/_tools.txt b/services/datcore-adapter/requirements/_tools.txt index 54a6c8fbb2a..68ae37614ad 100644 --- a/services/datcore-adapter/requirements/_tools.txt +++ b/services/datcore-adapter/requirements/_tools.txt @@ -1,6 +1,6 @@ astroid==3.3.8 # via pylint -black==24.10.0 +black==25.1.0 # via -r requirements/../../../requirements/devenv.txt build==1.2.2.post1 # via pip-tools @@ -19,15 +19,15 @@ distlib==0.3.9 # via virtualenv filelock==3.17.0 # via virtualenv -identify==2.6.6 +identify==2.6.8 # via pre-commit -isort==5.13.2 +isort==6.0.1 # via # -r requirements/../../../requirements/devenv.txt # pylint mccabe==0.7.0 # via pylint -mypy==1.14.1 +mypy==1.15.0 # via -r requirements/../../../requirements/devenv.txt mypy-extensions==1.0.0 # via @@ -42,7 +42,7 @@ packaging==24.2 # build pathspec==0.12.1 # via black -pip==25.0 +pip==25.0.1 # via pip-tools pip-tools==7.4.1 # via -r requirements/../../../requirements/devenv.txt @@ -53,7 +53,7 @@ platformdirs==4.3.6 # virtualenv pre-commit==4.1.0 # via -r requirements/../../../requirements/devenv.txt -pylint==3.3.3 +pylint==3.3.4 # via -r requirements/../../../requirements/devenv.txt pyproject-hooks==1.2.0 # via @@ -65,7 +65,7 @@ pyyaml==6.0.1 # -c requirements/_base.txt # pre-commit # watchdog -ruff==0.9.3 +ruff==0.9.9 # via -r requirements/../../../requirements/devenv.txt setuptools==74.0.0 # via @@ -78,7 +78,7 @@ typing-extensions==4.12.2 # -c requirements/_base.txt # -c requirements/_test.txt # mypy -virtualenv==20.29.1 +virtualenv==20.29.2 # via pre-commit watchdog==6.0.0 # via -r requirements/_tools.in diff --git a/services/datcore-adapter/src/simcore_service_datcore_adapter/api/dependencies/application.py b/services/datcore-adapter/src/simcore_service_datcore_adapter/api/dependencies/application.py index 473879a2ac8..337738ecf46 100644 --- a/services/datcore-adapter/src/simcore_service_datcore_adapter/api/dependencies/application.py +++ b/services/datcore-adapter/src/simcore_service_datcore_adapter/api/dependencies/application.py @@ -6,6 +6,6 @@ assert get_app # nosec __all__: tuple[str, ...] 
= ( - "get_reverse_url_mapper", "get_app", + "get_reverse_url_mapper", ) diff --git a/services/datcore-adapter/src/simcore_service_datcore_adapter/api/dependencies/pennsieve.py b/services/datcore-adapter/src/simcore_service_datcore_adapter/api/dependencies/pennsieve.py index 026a7d1c6cc..c7bc55f1561 100644 --- a/services/datcore-adapter/src/simcore_service_datcore_adapter/api/dependencies/pennsieve.py +++ b/services/datcore-adapter/src/simcore_service_datcore_adapter/api/dependencies/pennsieve.py @@ -1,4 +1,4 @@ -from typing import cast +from typing import Annotated, cast from fastapi import Depends, FastAPI from fastapi.requests import Request @@ -11,7 +11,7 @@ def _get_app(request: Request) -> FastAPI: def get_pennsieve_api_client( - app: FastAPI = Depends(_get_app), + app: Annotated[FastAPI, Depends(_get_app)], ) -> PennsieveApiClient: client = PennsieveApiClient.get_instance(app) assert client # nosec diff --git a/services/datcore-adapter/src/simcore_service_datcore_adapter/api/errors/http_error.py b/services/datcore-adapter/src/simcore_service_datcore_adapter/api/errors/http_error.py deleted file mode 100644 index bcf8cdec9c6..00000000000 --- a/services/datcore-adapter/src/simcore_service_datcore_adapter/api/errors/http_error.py +++ /dev/null @@ -1,36 +0,0 @@ -from typing import Callable - -from fastapi import HTTPException -from fastapi.encoders import jsonable_encoder -from starlette.requests import Request -from starlette.responses import JSONResponse - - -async def http_error_handler(_: Request, exc: Exception) -> JSONResponse: - assert isinstance(exc, HTTPException) # nosec - return JSONResponse( - content=jsonable_encoder({"errors": [exc.detail]}), status_code=exc.status_code - ) - - -def make_http_error_handler_for_exception( - status_code: int, - exception_cls: type[BaseException], - *, - override_detail_message: str | None = None, -) -> Callable: - """ - Produces a handler for BaseException-type exceptions which converts them - into an error JSON response with a given status code - - SEE https://docs.python.org/3/library/exceptions.html#concrete-exceptions - """ - - async def _http_error_handler(_: Request, exc: type[BaseException]) -> JSONResponse: - assert isinstance(exc, exception_cls) # nosec - details = override_detail_message or f"{exc}" - return JSONResponse( - content=jsonable_encoder({"errors": [details]}), status_code=status_code - ) - - return _http_error_handler diff --git a/services/datcore-adapter/src/simcore_service_datcore_adapter/api/errors/validation_error.py b/services/datcore-adapter/src/simcore_service_datcore_adapter/api/errors/validation_error.py deleted file mode 100644 index 3770d62cb23..00000000000 --- a/services/datcore-adapter/src/simcore_service_datcore_adapter/api/errors/validation_error.py +++ /dev/null @@ -1,28 +0,0 @@ -from fastapi.encoders import jsonable_encoder -from fastapi.exceptions import RequestValidationError -from fastapi.openapi.constants import REF_PREFIX -from fastapi.openapi.utils import validation_error_response_definition -from pydantic import ValidationError -from starlette.requests import Request -from starlette.responses import JSONResponse -from starlette.status import HTTP_422_UNPROCESSABLE_ENTITY - - -async def http422_error_handler( - _: Request, - exc: Exception, -) -> JSONResponse: - assert isinstance(exc, RequestValidationError | ValidationError) # nosec - return JSONResponse( - content=jsonable_encoder({"errors": exc.errors()}), - status_code=HTTP_422_UNPROCESSABLE_ENTITY, - ) - - 
-validation_error_response_definition["properties"] = { - "errors": { - "title": "Validation errors", - "type": "array", - "items": {"$ref": f"{REF_PREFIX}ValidationError"}, - }, -} diff --git a/services/datcore-adapter/src/simcore_service_datcore_adapter/api/module_setup.py b/services/datcore-adapter/src/simcore_service_datcore_adapter/api/module_setup.py deleted file mode 100644 index d1c1d8e8410..00000000000 --- a/services/datcore-adapter/src/simcore_service_datcore_adapter/api/module_setup.py +++ /dev/null @@ -1,31 +0,0 @@ -""" - api app module -""" -from botocore.exceptions import ClientError -from fastapi import APIRouter, FastAPI -from fastapi.exceptions import HTTPException, RequestValidationError -from servicelib.fastapi.timing_middleware import add_process_time_header - -from .._meta import API_VTAG -from .errors.http_error import http_error_handler -from .errors.pennsieve_error import botocore_exceptions_handler -from .errors.validation_error import http422_error_handler -from .routes import datasets, files, health, user - - -def setup_api(app: FastAPI): - router = APIRouter() - - app.include_router(router, prefix=f"/{API_VTAG}") - app.include_router(health.router, tags=["healthcheck"], prefix=f"/{API_VTAG}") - app.include_router(user.router, tags=["user"], prefix=f"/{API_VTAG}") - app.include_router(datasets.router, tags=["datasets"], prefix=f"/{API_VTAG}") - app.include_router(files.router, tags=["files"], prefix=f"/{API_VTAG}") - - # exception handlers - app.add_exception_handler(HTTPException, http_error_handler) - app.add_exception_handler(RequestValidationError, http422_error_handler) - app.add_exception_handler(ClientError, botocore_exceptions_handler) - - # middlewares - app.middleware("http")(add_process_time_header) diff --git a/services/storage/src/simcore_service_storage/datcore_adapter/__init__.py b/services/datcore-adapter/src/simcore_service_datcore_adapter/api/rest/__init__.py similarity index 100% rename from services/storage/src/simcore_service_storage/datcore_adapter/__init__.py rename to services/datcore-adapter/src/simcore_service_datcore_adapter/api/rest/__init__.py diff --git a/services/datcore-adapter/src/simcore_service_datcore_adapter/api/routes/datasets.py b/services/datcore-adapter/src/simcore_service_datcore_adapter/api/rest/datasets.py similarity index 81% rename from services/datcore-adapter/src/simcore_service_datcore_adapter/api/routes/datasets.py rename to services/datcore-adapter/src/simcore_service_datcore_adapter/api/rest/datasets.py index 7c36b01c3fc..2090a22938c 100644 --- a/services/datcore-adapter/src/simcore_service_datcore_adapter/api/routes/datasets.py +++ b/services/datcore-adapter/src/simcore_service_datcore_adapter/api/rest/datasets.py @@ -6,10 +6,13 @@ from fastapi_pagination import Page, Params from fastapi_pagination.api import create_page, resolve_params from fastapi_pagination.bases import RawParams +from models_library.api_schemas_datcore_adapter.datasets import ( + DatasetMetaData, + FileMetaData, +) from servicelib.fastapi.requests_decorators import cancel_on_disconnect from starlette import status -from ...models.domains.datasets import DatasetsOut, FileMetaDataOut from ...modules.pennsieve import PennsieveApiClient from ..dependencies.pennsieve import get_pennsieve_api_client @@ -26,7 +29,7 @@ "/datasets", summary="list datasets", status_code=status.HTTP_200_OK, - response_model=Page[DatasetsOut], + response_model=Page[DatasetMetaData], ) @cancel_on_disconnect @cached( @@ -39,7 +42,7 @@ async def list_datasets( 
x_datcore_api_secret: Annotated[str, Header(..., description="Datcore API Secret")], pennsieve_client: Annotated[PennsieveApiClient, Depends(get_pennsieve_api_client)], params: Annotated[Params, Depends()], -) -> Page[DatasetsOut]: +) -> Page[DatasetMetaData]: assert request # nosec raw_params: RawParams = resolve_params(params).to_raw_params() assert raw_params.limit is not None # nosec @@ -53,11 +56,36 @@ async def list_datasets( return create_page(datasets, total=total, params=params) # type: ignore[return-value] +@router.get( + "/datasets/{dataset_id}", + status_code=status.HTTP_200_OK, + response_model=DatasetMetaData, +) +@cancel_on_disconnect +async def get_dataset( + request: Request, + x_datcore_api_key: Annotated[str, Header(..., description="Datcore API Key")], + x_datcore_api_secret: Annotated[str, Header(..., description="Datcore API Secret")], + pennsieve_client: Annotated[PennsieveApiClient, Depends(get_pennsieve_api_client)], + params: Annotated[Params, Depends()], + dataset_id: str, +) -> DatasetMetaData: + assert request # nosec + raw_params: RawParams = resolve_params(params).to_raw_params() + assert raw_params.limit is not None # nosec + assert raw_params.offset is not None # nosec + return await pennsieve_client.get_dataset( + api_key=x_datcore_api_key, + api_secret=x_datcore_api_secret, + dataset_id=dataset_id, + ) + + @router.get( "/datasets/{dataset_id}/files", summary="list top level files/folders in a dataset", status_code=status.HTTP_200_OK, - response_model=Page[FileMetaDataOut], + response_model=Page[FileMetaData], ) @cancel_on_disconnect @cached( @@ -71,7 +99,7 @@ async def list_dataset_top_level_files( x_datcore_api_secret: Annotated[str, Header(..., description="Datcore API Secret")], pennsieve_client: Annotated[PennsieveApiClient, Depends(get_pennsieve_api_client)], params: Annotated[Params, Depends()], -) -> Page[FileMetaDataOut]: +) -> Page[FileMetaData]: assert request # nosec raw_params: RawParams = resolve_params(params).to_raw_params() @@ -91,7 +119,7 @@ async def list_dataset_top_level_files( "/datasets/{dataset_id}/files/{collection_id}", summary="list top level files/folders in a collection in a dataset", status_code=status.HTTP_200_OK, - response_model=Page[FileMetaDataOut], + response_model=Page[FileMetaData], ) @cancel_on_disconnect @cached( @@ -106,7 +134,7 @@ async def list_dataset_collection_files( x_datcore_api_secret: Annotated[str, Header(..., description="Datcore API Secret")], pennsieve_client: Annotated[PennsieveApiClient, Depends(get_pennsieve_api_client)], params: Annotated[Params, Depends()], -) -> Page[FileMetaDataOut]: +) -> Page[FileMetaData]: assert request # nosec raw_params: RawParams = resolve_params(params).to_raw_params() assert raw_params.limit is not None # nosec @@ -126,7 +154,7 @@ async def list_dataset_collection_files( "/datasets/{dataset_id}/files_legacy", summary="list all file meta data in dataset", status_code=status.HTTP_200_OK, - response_model=list[FileMetaDataOut], + response_model=list[FileMetaData], ) @cancel_on_disconnect @cached( @@ -139,7 +167,7 @@ async def list_dataset_files_legacy( x_datcore_api_key: Annotated[str, Header(..., description="Datcore API Key")], x_datcore_api_secret: Annotated[str, Header(..., description="Datcore API Secret")], pennsieve_client: Annotated[PennsieveApiClient, Depends(get_pennsieve_api_client)], -) -> list[FileMetaDataOut]: +) -> list[FileMetaData]: assert request # nosec return await pennsieve_client.list_all_dataset_files( api_key=x_datcore_api_key, diff --git 
a/services/datcore-adapter/src/simcore_service_datcore_adapter/api/routes/files.py b/services/datcore-adapter/src/simcore_service_datcore_adapter/api/rest/files.py similarity index 86% rename from services/datcore-adapter/src/simcore_service_datcore_adapter/api/routes/files.py rename to services/datcore-adapter/src/simcore_service_datcore_adapter/api/rest/files.py index 2234c17d3dc..c69cb6d0e0c 100644 --- a/services/datcore-adapter/src/simcore_service_datcore_adapter/api/routes/files.py +++ b/services/datcore-adapter/src/simcore_service_datcore_adapter/api/rest/files.py @@ -1,17 +1,18 @@ import logging -from typing import Annotated, Any +from typing import Annotated from fastapi import APIRouter, Depends, Header, Request +from models_library.api_schemas_datcore_adapter.datasets import PackageMetaData from pydantic import AnyUrl, TypeAdapter from servicelib.fastapi.requests_decorators import cancel_on_disconnect from starlette import status -from ...models.domains.files import FileDownloadOut +from ...models.files import FileDownloadOut from ...modules.pennsieve import PennsieveApiClient from ..dependencies.pennsieve import get_pennsieve_api_client router = APIRouter() -log = logging.getLogger(__file__) +_logger = logging.getLogger(__file__) @router.get( @@ -62,7 +63,7 @@ async def delete_file( "/packages/{package_id}/files", summary="returns a package (i.e. a file)", status_code=status.HTTP_200_OK, - response_model=list[dict[str, Any]], + response_model=list[PackageMetaData], ) @cancel_on_disconnect async def get_package( @@ -71,12 +72,15 @@ async def get_package( x_datcore_api_key: Annotated[str, Header(..., description="Datcore API Key")], x_datcore_api_secret: Annotated[str, Header(..., description="Datcore API Secret")], pennsieve_client: Annotated[PennsieveApiClient, Depends(get_pennsieve_api_client)], -) -> list[dict[str, Any]]: +) -> list[PackageMetaData]: assert request # nosec - return await pennsieve_client.get_package_files( + + data = await pennsieve_client.get_package_files( api_key=x_datcore_api_key, api_secret=x_datcore_api_secret, package_id=package_id, limit=1, offset=0, + fill_path=True, ) + return [_.to_api_model() for _ in data] diff --git a/services/datcore-adapter/src/simcore_service_datcore_adapter/api/routes/health.py b/services/datcore-adapter/src/simcore_service_datcore_adapter/api/rest/health.py similarity index 92% rename from services/datcore-adapter/src/simcore_service_datcore_adapter/api/routes/health.py rename to services/datcore-adapter/src/simcore_service_datcore_adapter/api/rest/health.py index 9db40d3acf9..120767f3d11 100644 --- a/services/datcore-adapter/src/simcore_service_datcore_adapter/api/routes/health.py +++ b/services/datcore-adapter/src/simcore_service_datcore_adapter/api/rest/health.py @@ -1,6 +1,6 @@ import logging from collections.abc import Callable -from datetime import datetime, timezone +from datetime import UTC, datetime from typing import Annotated from fastapi import APIRouter, Depends @@ -24,7 +24,7 @@ status_code=status.HTTP_200_OK, ) async def get_service_alive(): - return f"{__name__}@{datetime.now(timezone.utc).isoformat()}" + return f"{__name__}@{datetime.now(UTC).isoformat()}" @router.get("/ready", status_code=status.HTTP_200_OK, response_model=AppStatusCheck) diff --git a/services/datcore-adapter/src/simcore_service_datcore_adapter/api/routes/user.py b/services/datcore-adapter/src/simcore_service_datcore_adapter/api/rest/user.py similarity index 95% rename from 
services/datcore-adapter/src/simcore_service_datcore_adapter/api/routes/user.py rename to services/datcore-adapter/src/simcore_service_datcore_adapter/api/rest/user.py index 82f004042fd..dea213f5ec7 100644 --- a/services/datcore-adapter/src/simcore_service_datcore_adapter/api/routes/user.py +++ b/services/datcore-adapter/src/simcore_service_datcore_adapter/api/rest/user.py @@ -5,7 +5,7 @@ from servicelib.fastapi.requests_decorators import cancel_on_disconnect from starlette import status -from ...models.domains.user import Profile +from ...models.user import Profile from ...modules.pennsieve import PennsieveApiClient from ..dependencies.pennsieve import get_pennsieve_api_client diff --git a/services/datcore-adapter/src/simcore_service_datcore_adapter/api/routes.py b/services/datcore-adapter/src/simcore_service_datcore_adapter/api/routes.py new file mode 100644 index 00000000000..d316434bc98 --- /dev/null +++ b/services/datcore-adapter/src/simcore_service_datcore_adapter/api/routes.py @@ -0,0 +1,18 @@ +""" +api app module +""" + +from fastapi import APIRouter, FastAPI + +from .._meta import API_VTAG +from .rest import datasets, files, health, user + + +def setup_rest_api_routes(app: FastAPI) -> None: + router = APIRouter() + + app.include_router(router, prefix=f"/{API_VTAG}") + app.include_router(health.router, tags=["healthcheck"], prefix=f"/{API_VTAG}") + app.include_router(user.router, tags=["user"], prefix=f"/{API_VTAG}") + app.include_router(datasets.router, tags=["datasets"], prefix=f"/{API_VTAG}") + app.include_router(files.router, tags=["files"], prefix=f"/{API_VTAG}") diff --git a/services/datcore-adapter/src/simcore_service_datcore_adapter/cli.py b/services/datcore-adapter/src/simcore_service_datcore_adapter/cli.py index b3b704b110f..60839168e97 100644 --- a/services/datcore-adapter/src/simcore_service_datcore_adapter/cli.py +++ b/services/datcore-adapter/src/simcore_service_datcore_adapter/cli.py @@ -1,9 +1,9 @@ import logging import typer -from settings_library.utils_cli import create_settings_command +from settings_library.utils_cli import create_settings_command, create_version_callback -from ._meta import PROJECT_NAME +from ._meta import PROJECT_NAME, __version__ from .core.settings import ApplicationSettings log = logging.getLogger(__name__) @@ -12,13 +12,14 @@ main = typer.Typer(name=PROJECT_NAME) main.command()(create_settings_command(settings_cls=ApplicationSettings, logger=log)) +main.callback()(create_version_callback(__version__)) @main.command() -def run(): +def run() -> None: """Runs application""" typer.secho("Sorry, this entrypoint is intentionally disabled. 
Use instead") typer.secho( - "$ uvicorn simcore_service_datcore_adapter.main:the_app", + f"$ uvicorn {PROJECT_NAME}.main:the_app", fg=typer.colors.BLUE, ) diff --git a/services/datcore-adapter/src/simcore_service_datcore_adapter/core/application.py b/services/datcore-adapter/src/simcore_service_datcore_adapter/core/application.py index d653dc6a090..5ecac86b882 100644 --- a/services/datcore-adapter/src/simcore_service_datcore_adapter/core/application.py +++ b/services/datcore-adapter/src/simcore_service_datcore_adapter/core/application.py @@ -1,18 +1,20 @@ import logging -from fastapi import FastAPI, HTTPException -from fastapi.exceptions import RequestValidationError +from common_library.basic_types import BootModeEnum +from fastapi import FastAPI +from fastapi.middleware.gzip import GZipMiddleware +from fastapi_pagination import add_pagination +from servicelib.fastapi import timing_middleware +from servicelib.fastapi.http_error import set_app_default_http_error_handlers from servicelib.fastapi.openapi import override_fastapi_openapi_method from servicelib.fastapi.prometheus_instrumentation import ( setup_prometheus_instrumentation, ) -from servicelib.fastapi.tracing import setup_tracing -from servicelib.logging_utils import config_all_loggers +from servicelib.fastapi.tracing import initialize_tracing +from starlette.middleware.base import BaseHTTPMiddleware from .._meta import API_VERSION, API_VTAG, APP_NAME -from ..api.errors.http_error import http_error_handler -from ..api.errors.validation_error import http422_error_handler -from ..api.module_setup import setup_api +from ..api.routes import setup_rest_api_routes from ..modules import pennsieve from .events import ( create_start_app_handler, @@ -29,22 +31,10 @@ "hpack", ) -logger = logging.getLogger(__name__) +_logger = logging.getLogger(__name__) -def create_app(settings: ApplicationSettings | None = None) -> FastAPI: - if settings is None: - settings = ApplicationSettings.create_from_envs() - assert settings # nosec - - logging.basicConfig(level=settings.LOG_LEVEL.value) - logging.root.setLevel(settings.LOG_LEVEL.value) - config_all_loggers( - log_format_local_dev_enabled=settings.DATCORE_ADAPTER_LOG_FORMAT_LOCAL_DEV_ENABLED, - logger_filter_mapping=settings.DATCORE_ADAPTER_LOG_FILTER_MAPPING, - tracing_settings=settings.DATCORE_ADAPTER_TRACING, - ) - +def create_app(settings: ApplicationSettings) -> FastAPI: # keep mostly quiet noisy loggers quiet_level: int = max( min(logging.root.level + LOG_LEVEL_STEP, logging.CRITICAL), logging.WARNING @@ -52,11 +42,13 @@ def create_app(settings: ApplicationSettings | None = None) -> FastAPI: for name in NOISY_LOGGERS: logging.getLogger(name).setLevel(quiet_level) - logger.debug("App settings:\n%s", settings.model_dump_json(indent=2)) + + _logger.debug("App settings:\n%s", settings.model_dump_json(indent=1)) app = FastAPI( - debug=settings.debug, - title="Datcore Adapter Service", + debug=settings.SC_BOOT_MODE + in [BootModeEnum.DEBUG, BootModeEnum.DEVELOPMENT, BootModeEnum.LOCAL], + title=APP_NAME, description="Interfaces with Pennsieve storage service", version=API_VERSION, openapi_url=f"/api/{API_VTAG}/openapi.json", @@ -64,18 +56,26 @@ def create_app(settings: ApplicationSettings | None = None) -> FastAPI: redoc_url=None, # default disabled ) override_fastapi_openapi_method(app) + add_pagination(app) app.state.settings = settings if app.state.settings.DATCORE_ADAPTER_PROMETHEUS_INSTRUMENTATION_ENABLED: setup_prometheus_instrumentation(app) if app.state.settings.DATCORE_ADAPTER_TRACING: - 
setup_tracing( + initialize_tracing( app, app.state.settings.DATCORE_ADAPTER_TRACING, APP_NAME, ) + if settings.SC_BOOT_MODE != BootModeEnum.PRODUCTION: + # middleware to time requests (ONLY for development) + app.add_middleware( + BaseHTTPMiddleware, dispatch=timing_middleware.add_process_time_header + ) + app.add_middleware(GZipMiddleware) + # events app.add_event_handler("startup", on_startup) app.add_event_handler("startup", create_start_app_handler(app)) @@ -83,12 +83,11 @@ def create_app(settings: ApplicationSettings | None = None) -> FastAPI: app.add_event_handler("shutdown", on_shutdown) # Routing - setup_api(app) + setup_rest_api_routes(app) if settings.PENNSIEVE.PENNSIEVE_ENABLED: pennsieve.setup(app, settings.PENNSIEVE) - app.add_exception_handler(HTTPException, http_error_handler) - app.add_exception_handler(RequestValidationError, http422_error_handler) + set_app_default_http_error_handlers(app) return app diff --git a/services/datcore-adapter/src/simcore_service_datcore_adapter/core/settings.py b/services/datcore-adapter/src/simcore_service_datcore_adapter/core/settings.py index c95f13f0ecc..98f091c76e9 100644 --- a/services/datcore-adapter/src/simcore_service_datcore_adapter/core/settings.py +++ b/services/datcore-adapter/src/simcore_service_datcore_adapter/core/settings.py @@ -1,8 +1,7 @@ -from functools import cached_property from typing import Annotated from common_library.basic_types import DEFAULT_FACTORY -from models_library.basic_types import BootModeEnum, LogLevel +from models_library.basic_types import LogLevel from pydantic import AliasChoices, Field, TypeAdapter, field_validator from pydantic.networks import AnyUrl from servicelib.logging_utils_filtering import LoggerName, MessageSubstring @@ -69,15 +68,6 @@ class ApplicationSettings(BaseApplicationSettings, MixinLoggingSettings): ), ] - @cached_property - def debug(self) -> bool: - """If True, debug tracebacks should be returned on errors.""" - return self.SC_BOOT_MODE in [ - BootModeEnum.DEBUG, - BootModeEnum.DEVELOPMENT, - BootModeEnum.LOCAL, - ] - @field_validator("LOG_LEVEL", mode="before") @classmethod def _validate_loglevel(cls, value: str) -> str: diff --git a/services/storage/tests/fixtures/__init__.py b/services/datcore-adapter/src/simcore_service_datcore_adapter/errors/__init__.py similarity index 100% rename from services/storage/tests/fixtures/__init__.py rename to services/datcore-adapter/src/simcore_service_datcore_adapter/errors/__init__.py diff --git a/services/datcore-adapter/src/simcore_service_datcore_adapter/api/errors/pennsieve_error.py b/services/datcore-adapter/src/simcore_service_datcore_adapter/errors/handlers.py similarity index 62% rename from services/datcore-adapter/src/simcore_service_datcore_adapter/api/errors/pennsieve_error.py rename to services/datcore-adapter/src/simcore_service_datcore_adapter/errors/handlers.py index c1101961b34..90561e459f6 100644 --- a/services/datcore-adapter/src/simcore_service_datcore_adapter/api/errors/pennsieve_error.py +++ b/services/datcore-adapter/src/simcore_service_datcore_adapter/errors/handlers.py @@ -1,5 +1,7 @@ from botocore.exceptions import ClientError +from fastapi import FastAPI from fastapi.encoders import jsonable_encoder +from servicelib.fastapi.http_error import set_app_default_http_error_handlers from starlette.requests import Request from starlette.responses import JSONResponse from starlette.status import HTTP_401_UNAUTHORIZED, HTTP_500_INTERNAL_SERVER_ERROR @@ -12,12 +14,19 @@ async def botocore_exceptions_handler( assert 
isinstance(exc, ClientError) # nosec assert "Error" in exc.response # nosec assert "Code" in exc.response["Error"] # nosec + error_content = {"errors": [f"{exc}"]} if exc.response["Error"]["Code"] == "NotAuthorizedException": return JSONResponse( - content=jsonable_encoder({"errors": exc.response["Error"]}), + content=jsonable_encoder({"error": error_content}), status_code=HTTP_401_UNAUTHORIZED, ) return JSONResponse( - content=jsonable_encoder({"errors": exc.response["Error"]}), + content=jsonable_encoder({"error": error_content}), status_code=HTTP_500_INTERNAL_SERVER_ERROR, ) + + +def set_exception_handlers(app: FastAPI) -> None: + set_app_default_http_error_handlers(app) + + app.add_exception_handler(ClientError, botocore_exceptions_handler) diff --git a/services/datcore-adapter/src/simcore_service_datcore_adapter/main.py b/services/datcore-adapter/src/simcore_service_datcore_adapter/main.py index d163148a200..7bd6a787163 100644 --- a/services/datcore-adapter/src/simcore_service_datcore_adapter/main.py +++ b/services/datcore-adapter/src/simcore_service_datcore_adapter/main.py @@ -1,7 +1,22 @@ -"""Main application to be deployed in for example uvicorn -""" +"""Main application to be deployed in for example uvicorn""" + +import logging + from fastapi import FastAPI +from servicelib.logging_utils import config_all_loggers from simcore_service_datcore_adapter.core.application import create_app +from simcore_service_datcore_adapter.core.settings import ApplicationSettings + +_the_settings = ApplicationSettings.create_from_envs() + +# SEE https://github.com/ITISFoundation/osparc-simcore/issues/3148 +logging.basicConfig(level=_the_settings.log_level) # NOSONAR +logging.root.setLevel(_the_settings.log_level) +config_all_loggers( + log_format_local_dev_enabled=_the_settings.DATCORE_ADAPTER_LOG_FORMAT_LOCAL_DEV_ENABLED, + logger_filter_mapping=_the_settings.DATCORE_ADAPTER_LOG_FILTER_MAPPING, + tracing_settings=_the_settings.DATCORE_ADAPTER_TRACING, +) # SINGLETON FastAPI app -the_app: FastAPI = create_app() +the_app: FastAPI = create_app(_the_settings) diff --git a/services/datcore-adapter/src/simcore_service_datcore_adapter/models/domains/datasets.py b/services/datcore-adapter/src/simcore_service_datcore_adapter/models/domains/datasets.py deleted file mode 100644 index e91d632d30d..00000000000 --- a/services/datcore-adapter/src/simcore_service_datcore_adapter/models/domains/datasets.py +++ /dev/null @@ -1,6 +0,0 @@ -from ..schemas.datasets import DatasetMetaData, FileMetaData - - -DatasetsOut = DatasetMetaData - -FileMetaDataOut = FileMetaData diff --git a/services/datcore-adapter/src/simcore_service_datcore_adapter/models/domains/files.py b/services/datcore-adapter/src/simcore_service_datcore_adapter/models/domains/files.py deleted file mode 100644 index a125faaa5fd..00000000000 --- a/services/datcore-adapter/src/simcore_service_datcore_adapter/models/domains/files.py +++ /dev/null @@ -1,5 +0,0 @@ -from pydantic import AnyUrl, BaseModel - - -class FileDownloadOut(BaseModel): - link: AnyUrl diff --git a/services/datcore-adapter/src/simcore_service_datcore_adapter/models/files.py b/services/datcore-adapter/src/simcore_service_datcore_adapter/models/files.py new file mode 100644 index 00000000000..8275315b42b --- /dev/null +++ b/services/datcore-adapter/src/simcore_service_datcore_adapter/models/files.py @@ -0,0 +1,36 @@ +import datetime +from pathlib import Path +from typing import Annotated + +from models_library.api_schemas_datcore_adapter.datasets import PackageMetaData +from pydantic import 
AnyUrl, BaseModel, ByteSize, Field + + +class FileDownloadOut(BaseModel): + link: AnyUrl + + +class DatCorePackageMetaData(BaseModel): + id: int + path: Path + display_path: Path + package_id: Annotated[str, Field(alias="packageId")] + name: str + filename: str + s3_bucket: Annotated[str, Field(alias="s3bucket")] + size: ByteSize + created_at: Annotated[datetime.datetime, Field(alias="createdAt")] + updated_at: Annotated[datetime.datetime, Field(alias="updatedAt")] + + def to_api_model(self) -> PackageMetaData: + return PackageMetaData( + path=self.path, + display_path=self.display_path, + package_id=self.package_id, + name=self.name, + filename=self.filename, + s3_bucket=self.s3_bucket, + size=self.size, + created_at=self.created_at, + updated_at=self.updated_at, + ) diff --git a/services/datcore-adapter/src/simcore_service_datcore_adapter/models/schemas/datasets.py b/services/datcore-adapter/src/simcore_service_datcore_adapter/models/schemas/datasets.py deleted file mode 100644 index 5a10a88dfcb..00000000000 --- a/services/datcore-adapter/src/simcore_service_datcore_adapter/models/schemas/datasets.py +++ /dev/null @@ -1,60 +0,0 @@ -from datetime import datetime -from enum import Enum, unique -from pathlib import Path -from typing import Any - -from pydantic import BaseModel - - -class DatasetMetaData(BaseModel): - id: str - display_name: str - - -@unique -class DataType(str, Enum): - FILE = "FILE" - FOLDER = "FOLDER" - - -class FileMetaData(BaseModel): - dataset_id: str - package_id: str - id: str - name: str - type: str - path: Path - size: int - created_at: datetime - last_modified_at: datetime - data_type: DataType - - @classmethod - def from_pennsieve_package( - cls, package: dict[str, Any], files: list[dict[str, Any]], base_path: Path - ): - """creates a FileMetaData from a pennsieve data structure.""" - pck_name: str = package["content"]["name"] - if "extension" in package and not pck_name.endswith(package["extension"]): - pck_name += ".".join((pck_name, package["extension"])) - - file_size = 0 - if package["content"]["packageType"] != "Collection" and files: - file_size = files[0]["content"]["size"] - - return cls( - dataset_id=package["content"]["datasetNodeId"], - package_id=package["content"]["nodeId"], - id=f"{package['content']['id']}", - name=pck_name, - path=base_path / pck_name, - type=package["content"]["packageType"], - size=file_size, - created_at=package["content"]["createdAt"], - last_modified_at=package["content"]["updatedAt"], - data_type=( - DataType.FOLDER - if package["content"]["packageType"] == "Collection" - else DataType.FILE - ), - ) diff --git a/services/datcore-adapter/src/simcore_service_datcore_adapter/models/domains/user.py b/services/datcore-adapter/src/simcore_service_datcore_adapter/models/user.py similarity index 100% rename from services/datcore-adapter/src/simcore_service_datcore_adapter/models/domains/user.py rename to services/datcore-adapter/src/simcore_service_datcore_adapter/models/user.py diff --git a/services/datcore-adapter/src/simcore_service_datcore_adapter/modules/pennsieve.py b/services/datcore-adapter/src/simcore_service_datcore_adapter/modules/pennsieve.py index edd8a4f381c..d1189f6c76c 100644 --- a/services/datcore-adapter/src/simcore_service_datcore_adapter/modules/pennsieve.py +++ b/services/datcore-adapter/src/simcore_service_datcore_adapter/modules/pennsieve.py @@ -9,6 +9,12 @@ import boto3 from aiocache import SimpleMemoryCache # type: ignore[import-untyped] from fastapi.applications import FastAPI +from 
models_library.api_schemas_datcore_adapter.datasets import ( + DatasetMetaData, + DataType, + FileMetaData, +) +from pydantic import ByteSize from servicelib.logging_utils import log_context from servicelib.utils import logged_gather from starlette import status @@ -19,8 +25,8 @@ from tenacity.stop import stop_after_attempt from ..core.settings import PennsieveSettings -from ..models.domains.user import Profile -from ..models.schemas.datasets import DatasetMetaData, FileMetaData +from ..models.files import DatCorePackageMetaData +from ..models.user import Profile from ..utils.client_base import BaseServiceClientApi, setup_client_instance logger = logging.getLogger(__name__) @@ -29,6 +35,36 @@ _GATHER_MAX_CONCURRENCY = 10 +def _to_file_meta_data( + package: dict[str, Any], files: list[DatCorePackageMetaData], base_path: Path +) -> FileMetaData: + """creates a FileMetaData from a pennsieve data structure.""" + pck_name: str = package["content"]["name"] + if "extension" in package and not pck_name.endswith(package["extension"]): + pck_name += ".".join((pck_name, package["extension"])) + + file_size = 0 + if package["content"]["packageType"] != "Collection" and files: + file_size = files[0].size + + return FileMetaData( + dataset_id=package["content"]["datasetNodeId"], + package_id=package["content"]["nodeId"], + id=f"{package['content']['id']}", + name=pck_name, + path=base_path / pck_name, + type=package["content"]["packageType"], + size=file_size, + created_at=package["content"]["createdAt"], + last_modified_at=package["content"]["updatedAt"], + data_type=( + DataType.FOLDER + if package["content"]["packageType"] == "Collection" + else DataType.FILE + ), + ) + + def _compute_file_path( all_packages: dict[str, dict[str, Any]], pck: dict[str, Any] ) -> Path: @@ -46,9 +82,9 @@ class PennsieveAuthorizationHeaders(TypedDict): Authorization: str -_TTL_CACHE_AUTHORIZATION_HEADERS_SECONDS: Final[ - int -] = 3530 # NOTE: observed while developing this code, pennsieve authorizes 3600 seconds, so we cache a bit less +_TTL_CACHE_AUTHORIZATION_HEADERS_SECONDS: Final[int] = ( + 3530 # NOTE: observed while developing this code, pennsieve authorizes 3600 seconds, so we cache a bit less +) ExpirationTimeSecs = int @@ -215,27 +251,66 @@ async def _get_package( ) async def get_package_files( - self, api_key: str, api_secret: str, package_id: str, limit: int, offset: int - ) -> list[dict[str, Any]]: - return cast( - list[dict[str, Any]], - await self._request( - api_key, - api_secret, - "GET", - f"/packages/{package_id}/files", - params={"limit": limit, "offset": offset}, - ), + self, + *, + api_key: str, + api_secret: str, + package_id: str, + limit: int, + offset: int, + fill_path: bool, + ) -> list[DatCorePackageMetaData]: + raw_data = await self._request( + api_key, + api_secret, + "GET", + f"/packages/{package_id}/files", + params={"limit": limit, "offset": offset}, ) + path = display_path = Path() + if fill_path: + package_info = await self._get_package(api_key, api_secret, package_id) + dataset_id = package_info["content"]["datasetId"] + dataset = await self._get_dataset(api_key, api_secret, dataset_id) + + path = ( + Path(dataset_id) + / Path( + "/".join( + ancestor["content"]["id"] + for ancestor in package_info.get("ancestors", []) + ) + ) + / Path(package_info["content"]["name"]) + ) + display_path = ( + Path(dataset["content"]["name"]) + / Path( + "/".join( + ancestor["content"]["name"] + for ancestor in package_info.get("ancestors", []) + ) + ) + / Path(package_info["content"]["name"]) + ) + + 
return [ + DatCorePackageMetaData(**_["content"], path=path, display_path=display_path) + for _ in raw_data + ] async def _get_pck_id_files( self, api_key: str, api_secret: str, pck_id: str, pck: dict[str, Any] - ) -> tuple[str, list[dict[str, Any]]]: - + ) -> tuple[str, list[DatCorePackageMetaData]]: return ( pck_id, await self.get_package_files( - api_key, api_secret, pck["content"]["nodeId"], limit=1, offset=0 + api_key=api_key, + api_secret=api_secret, + package_id=pck["content"]["nodeId"], + limit=1, + offset=0, + fill_path=False, ), ) @@ -272,12 +347,25 @@ async def list_datasets( DatasetMetaData( id=d["content"]["id"], display_name=d["content"]["name"], + size=ByteSize(d["storage"]) if d["storage"] > 0 else None, ) for d in dataset_page["datasets"] ], dataset_page["totalCount"], ) + async def get_dataset( + self, api_key: str, api_secret: str, dataset_id: str + ) -> DatasetMetaData: + dataset_pck = await self._get_dataset(api_key, api_secret, dataset_id) + return DatasetMetaData( + id=dataset_pck["content"]["id"], + display_name=dataset_pck["content"]["name"], + size=( + ByteSize(dataset_pck["storage"]) if dataset_pck["storage"] > 0 else None + ), + ) + async def list_packages_in_dataset( self, api_key: str, @@ -293,7 +381,7 @@ async def list_packages_in_dataset( for pck in islice(dataset_pck["children"], offset, offset + limit) if pck["content"]["packageType"] != "Collection" ] - package_files = dict( + package_files: dict[str, list[DatCorePackageMetaData]] = dict( await logged_gather( *package_files_tasks, log=logger, @@ -302,7 +390,7 @@ async def list_packages_in_dataset( ) return ( [ - FileMetaData.from_pennsieve_package( + _to_file_meta_data( pck, ( package_files[pck["content"]["id"]] @@ -353,7 +441,7 @@ async def list_packages_in_collection( return ( [ - FileMetaData.from_pennsieve_package( + _to_file_meta_data( pck, ( package_files[pck["content"]["id"]] @@ -433,7 +521,7 @@ async def list_all_dataset_files( file_path = base_path / _compute_file_path(all_packages, package) file_meta_data.append( - FileMetaData.from_pennsieve_package( + _to_file_meta_data( package, package_files[package_id], file_path.parent ) ) @@ -445,11 +533,16 @@ async def get_presigned_download_link( ) -> URL: """returns the presigned download link of the first file in the package""" files = await self.get_package_files( - api_key, api_secret, package_id, limit=1, offset=0 + api_key=api_key, + api_secret=api_secret, + package_id=package_id, + limit=1, + offset=0, + fill_path=False, ) # NOTE: this was done like this in the original dsm. 
we might encounter a problem when there are more than one files assert len(files) == 1 # nosec - file_id = files[0]["content"]["id"] + file_id = files[0].id file_link = cast( dict[str, Any], await self._request( diff --git a/services/datcore-adapter/tests/unit/conftest.py b/services/datcore-adapter/tests/unit/conftest.py index e58076ad127..6090efe85ae 100644 --- a/services/datcore-adapter/tests/unit/conftest.py +++ b/services/datcore-adapter/tests/unit/conftest.py @@ -15,6 +15,7 @@ import simcore_service_datcore_adapter from asgi_lifespan import LifespanManager from fastapi.applications import FastAPI +from models_library.utils.fastapi_encoders import jsonable_encoder from pytest_mock import MockFixture from pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict, setenvs_from_dict from simcore_service_datcore_adapter.modules.pennsieve import ( @@ -24,6 +25,7 @@ from starlette.testclient import TestClient pytest_plugins = [ + "pytest_simcore.cli_runner", "pytest_simcore.environment_configs", "pytest_simcore.repository_paths", "pytest_simcore.pytest_global_environs", @@ -229,7 +231,13 @@ def pennsieve_random_fake_datasets( ) -> dict[str, Any]: return { "datasets": [ - {"content": {"id": create_pennsieve_fake_dataset_id(), "name": fake.text()}} + { + "content": { + "id": create_pennsieve_fake_dataset_id(), + "name": fake.text(), + }, + "storage": fake.pyint(), + } for _ in range(10) ], "totalCount": 20, @@ -306,7 +314,11 @@ async def pennsieve_subsystem_mock( ).respond( status.HTTP_200_OK, json={ - "content": {"name": "Some dataset name that is awesome"}, + "content": { + "name": "Some dataset name that is awesome", + "id": pennsieve_dataset_id, + }, + "storage": fake.pyint(), "children": pennsieve_mock_dataset_packages["packages"], }, ) @@ -317,15 +329,40 @@ async def pennsieve_subsystem_mock( # get collection packages mock.get( - f"https://api.pennsieve.io/packages/{pennsieve_collection_id}" + rf"https://api.pennsieve.io/packages/{pennsieve_collection_id}" ).respond( status.HTTP_200_OK, json={ "content": {"name": "this package name is also awesome"}, "children": pennsieve_mock_dataset_packages["packages"], "ancestors": [ - {"content": {"name": "Bigger guy"}}, - {"content": {"name": "Big guy"}}, + { + "content": { + "name": "Bigger guy", + } + }, + { + "content": { + "name": "Big guy", + } + }, + ], + }, + ) + # get package ancestry + mock.get( + url__regex=rf"https://api.pennsieve.io/packages/{pennsieve_file_id}\?includeAncestors=(?P.+)$" + ).respond( + status.HTTP_200_OK, + json={ + "content": { + "datasetId": pennsieve_dataset_id, + "name": pennsieve_file_id, + }, + "ancestors": [ + {"content": {"id": faker.pystr(), "name": faker.name()}}, + {"content": {"id": faker.pystr(), "name": faker.name()}}, + {"content": {"id": faker.pystr(), "name": faker.name()}}, ], }, ) @@ -334,7 +371,22 @@ async def pennsieve_subsystem_mock( url__regex=r"https://api.pennsieve.io/packages/.+/files\?limit=1&offset=0$" ).respond( status.HTTP_200_OK, - json=[{"content": {"size": 12345, "id": "fake_file_id"}}], + json=[ + jsonable_encoder( + { + "content": { + "size": 12345, + "id": faker.pyint(), + "packageId": "N:package:475beff2-03c8-4dca-a221-d1d02e17f064", + "name": faker.file_name(), + "filename": faker.file_name(), + "s3bucket": faker.pystr(), + "createdAt": faker.date_time(), + "updatedAt": faker.date_time(), + } + } + ) + ], ) # download file diff --git a/services/datcore-adapter/tests/unit/test_cli.py b/services/datcore-adapter/tests/unit/test_cli.py new file mode 100644 index 
00000000000..ef7b2b8a4f6 --- /dev/null +++ b/services/datcore-adapter/tests/unit/test_cli.py @@ -0,0 +1,35 @@ +# pylint:disable=unused-variable +# pylint:disable=unused-argument +# pylint:disable=redefined-outer-name + +import os + +from pytest_simcore.helpers.typing_env import EnvVarsDict +from simcore_service_datcore_adapter._meta import API_VERSION +from simcore_service_datcore_adapter.cli import main +from simcore_service_datcore_adapter.core.settings import ApplicationSettings +from typer.testing import CliRunner + + +def test_cli_help_and_version(cli_runner: CliRunner): + result = cli_runner.invoke(main, "--help") + assert result.exit_code == os.EX_OK, result.output + + result = cli_runner.invoke(main, "--version") + assert result.exit_code == os.EX_OK, result.output + assert result.stdout.strip() == API_VERSION + + +def test_settings(cli_runner: CliRunner, app_environment: EnvVarsDict): + result = cli_runner.invoke(main, ["settings", "--show-secrets", "--as-json"]) + assert result.exit_code == os.EX_OK + + print(result.output) + settings = ApplicationSettings(result.output) + assert settings.model_dump() == ApplicationSettings.create_from_envs().model_dump() + + +def test_run(cli_runner: CliRunner): + result = cli_runner.invoke(main, ["run"]) + assert result.exit_code == 0 + assert "disabled" in result.stdout diff --git a/services/datcore-adapter/tests/unit/test_exceptions_handlers.py b/services/datcore-adapter/tests/unit/test_exceptions_handlers.py new file mode 100644 index 00000000000..53a28bb736c --- /dev/null +++ b/services/datcore-adapter/tests/unit/test_exceptions_handlers.py @@ -0,0 +1,162 @@ +# pylint: disable=protected-access +# pylint: disable=redefined-outer-name +# pylint: disable=too-many-arguments +# pylint: disable=unused-argument +# pylint: disable=unused-variable + + +from collections.abc import AsyncIterator + +import httpx +import pytest +from botocore.exceptions import ClientError +from fastapi import FastAPI, HTTPException, status +from fastapi.exceptions import RequestValidationError +from httpx import AsyncClient +from pydantic import ValidationError +from pytest_simcore.helpers.httpx_assert_checks import assert_status +from simcore_service_datcore_adapter.errors.handlers import set_exception_handlers + + +@pytest.fixture +def initialized_app() -> FastAPI: + app = FastAPI() + set_exception_handlers(app) + return app + + +@pytest.fixture +async def client(initialized_app: FastAPI) -> AsyncIterator[AsyncClient]: + async with AsyncClient( + transport=httpx.ASGITransport(app=initialized_app), + base_url="http://test", + headers={"Content-Type": "application/json"}, + ) as client: + yield client + + +@pytest.mark.parametrize( + "exception, status_code", + [ + ( + ClientError( + { + "Status": "pytest status", + "StatusReason": "pytest", + "Error": { + "Code": "NotAuthorizedException", + "Message": "pytest message", + }, + }, + operation_name="pytest operation", + ), + status.HTTP_401_UNAUTHORIZED, + ), + ( + ClientError( + { + "Status": "pytest status", + "StatusReason": "pytest", + "Error": { + "Code": "Whatever", + "Message": "pytest message", + }, + }, + operation_name="pytest operation", + ), + status.HTTP_500_INTERNAL_SERVER_ERROR, + ), + ( + NotImplementedError("pytest not implemented error"), + status.HTTP_501_NOT_IMPLEMENTED, + ), + ], + ids=str, +) +async def test_exception_handlers( + initialized_app: FastAPI, + client: AsyncClient, + exception: Exception, + status_code: int, +): + @initialized_app.get("/test") + async def test_endpoint(): + raise 
exception + + response = await client.get("/test") + assert_status( + response, + status_code, + None, + expected_msg=f"{exception}".replace("(", "\\(").replace(")", "\\)"), + ) + + +async def test_generic_http_exception_handler( + initialized_app: FastAPI, client: AsyncClient +): + @initialized_app.get("/test") + async def test_endpoint(): + raise HTTPException(status_code=status.HTTP_410_GONE) + + response = await client.get("/test") + assert_status(response, status.HTTP_410_GONE, None, expected_msg="Gone") + + +async def test_request_validation_error_handler( + initialized_app: FastAPI, client: AsyncClient +): + _error_msg = "pytest request validation error" + + @initialized_app.get("/test") + async def test_endpoint(): + raise RequestValidationError(errors=[_error_msg]) + + response = await client.get("/test") + assert_status( + response, + status.HTTP_422_UNPROCESSABLE_ENTITY, + None, + expected_msg=_error_msg, + ) + assert response.status_code == status.HTTP_422_UNPROCESSABLE_ENTITY + + +async def test_validation_error_handler(initialized_app: FastAPI, client: AsyncClient): + _error_msg = "pytest request validation error" + + @initialized_app.get("/test") + async def test_endpoint(): + raise ValidationError.from_exception_data( + _error_msg, + line_errors=[], + ) + + response = await client.get("/test") + assert_status( + response, + status.HTTP_500_INTERNAL_SERVER_ERROR, + None, + expected_msg=f"0 validation errors for {_error_msg}", + ) + + +@pytest.mark.xfail( + reason="Generic exception handler is not working as expected as shown in https://github.com/ITISFoundation/osparc-simcore/blob/5732a12e07e63d5ce55010ede9b9ab543bb9b278/packages/service-library/tests/fastapi/test_exceptions_utils.py" +) +async def test_generic_exception_handler(initialized_app: FastAPI, client: AsyncClient): + _error_msg = "Generic pytest exception" + + @initialized_app.get("/test") + async def test_endpoint(): + raise Exception( # pylint: disable=broad-exception-raised # noqa: TRY002 + _error_msg + ) + + response = await client.get("/test") + assert_status( + response, + status.HTTP_500_INTERNAL_SERVER_ERROR, + None, + expected_msg=_error_msg, + ) diff --git a/services/datcore-adapter/tests/unit/test_route_datasets.py b/services/datcore-adapter/tests/unit/test_route_datasets.py index 2a0d7dc85d6..913cb578a96 100644 --- a/services/datcore-adapter/tests/unit/test_route_datasets.py +++ b/services/datcore-adapter/tests/unit/test_route_datasets.py @@ -6,14 +6,31 @@ import httpx import respx from fastapi_pagination import Page -from pydantic import TypeAdapter -from simcore_service_datcore_adapter.models.schemas.datasets import ( +from models_library.api_schemas_datcore_adapter.datasets import ( DatasetMetaData, FileMetaData, ) +from pydantic import TypeAdapter from starlette import status +async def test_get_dataset_entrypoint( + async_client: httpx.AsyncClient, + pennsieve_dataset_id: str, + pennsieve_subsystem_mock: respx.MockRouter | None, + pennsieve_api_headers: dict[str, str], +): + response = await async_client.get( + f"v0/datasets/{pennsieve_dataset_id}", + headers=pennsieve_api_headers, + ) + + assert response.status_code == status.HTTP_200_OK + data = response.json() + assert data + TypeAdapter(DatasetMetaData).validate_python(data) + + async def test_list_datasets_entrypoint( async_client: httpx.AsyncClient, pennsieve_subsystem_mock: respx.MockRouter | None, diff --git a/services/datcore-adapter/tests/unit/test_route_files.py b/services/datcore-adapter/tests/unit/test_route_files.py index 
cbaa09704fa..1a083d71daa 100644 --- a/services/datcore-adapter/tests/unit/test_route_files.py +++ b/services/datcore-adapter/tests/unit/test_route_files.py @@ -6,7 +6,7 @@ import httpx from pydantic import TypeAdapter -from simcore_service_datcore_adapter.models.domains.files import FileDownloadOut +from simcore_service_datcore_adapter.models.files import FileDownloadOut from starlette import status diff --git a/services/datcore-adapter/tests/unit/test_route_health.py b/services/datcore-adapter/tests/unit/test_route_health.py index 3f0b1712f7e..65f04aece0c 100644 --- a/services/datcore-adapter/tests/unit/test_route_health.py +++ b/services/datcore-adapter/tests/unit/test_route_health.py @@ -17,8 +17,7 @@ async def test_live_entrypoint(async_client: httpx.AsyncClient): assert response.text assert datetime.fromisoformat(response.text.split("@")[1]) assert ( - response.text.split("@")[0] - == "simcore_service_datcore_adapter.api.routes.health" + response.text.split("@")[0] == "simcore_service_datcore_adapter.api.rest.health" ) diff --git a/services/director-v2/docker-compose-extra.yml b/services/director-v2/docker-compose-extra.yml index a997c7eab6a..a20199fc14e 100644 --- a/services/director-v2/docker-compose-extra.yml +++ b/services/director-v2/docker-compose-extra.yml @@ -1,6 +1,6 @@ services: postgres: - image: postgres:14.5-alpine@sha256:db802f226b620fc0b8adbeca7859eb203c8d3c9ce5d84870fadee05dea8f50ce + image: "postgres:17.2-alpine3.21@sha256:17143ad87797f511036cf8f50ada164aeb371f0d8068a172510549fb5d2cd65f" init: true environment: - POSTGRES_USER=${POSTGRES_USER:-test} @@ -24,7 +24,7 @@ services: "log_line_prefix=[%p] [%a] [%c] [%x] " ] rabbit: - image: itisfoundation/rabbitmq:3.11.2-management + image: itisfoundation/rabbitmq:3.13.7-management init: true environment: - RABBITMQ_DEFAULT_USER=${RABBIT_USER} diff --git a/services/director-v2/docker/boot.sh b/services/director-v2/docker/boot.sh index a96161d9660..e8761c085a8 100755 --- a/services/director-v2/docker/boot.sh +++ b/services/director-v2/docker/boot.sh @@ -24,7 +24,7 @@ if [ "${SC_BUILD_TARGET}" = "development" ]; then command -v python | sed 's/^/ /' cd services/director-v2 - uv pip --quiet --no-cache-dir sync requirements/dev.txt + uv pip --quiet sync requirements/dev.txt cd - echo "$INFO" "PIP :" uv pip list @@ -32,7 +32,7 @@ fi if [ "${SC_BOOT_MODE}" = "debug" ]; then # NOTE: production does NOT pre-install debugpy - uv pip install --no-cache-dir debugpy + uv pip install debugpy fi # diff --git a/services/director-v2/openapi.json b/services/director-v2/openapi.json index c769aff191a..d3391890fd5 100644 --- a/services/director-v2/openapi.json +++ b/services/director-v2/openapi.json @@ -1458,13 +1458,17 @@ }, "url": { "type": "string", + "minLength": 1, + "format": "uri", "title": "Url", "description": "the link where to get the status of the task" }, "stop_url": { "anyOf": [ { - "type": "string" + "type": "string", + "minLength": 1, + "format": "uri" }, { "type": "null" @@ -3120,7 +3124,9 @@ "download_link": { "anyOf": [ { - "type": "string" + "type": "string", + "minLength": 1, + "format": "uri" }, { "type": "null" diff --git a/services/director-v2/requirements/_base.txt b/services/director-v2/requirements/_base.txt index 4f4bfb4730a..985117ea940 100644 --- a/services/director-v2/requirements/_base.txt +++ b/services/director-v2/requirements/_base.txt @@ -1,9 +1,9 @@ -aio-pika==9.4.1 +aio-pika==9.5.5 # via # -r requirements/../../../packages/service-library/requirements/_base.in # -r
requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/_base.in -aiocache==0.12.2 +aiocache==0.12.3 # via # -r requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in @@ -13,17 +13,19 @@ aiodebug==2.3.0 # via # -r requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in -aiodocker==0.21.0 +aiodocker==0.24.0 # via # -r requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/_base.in -aiofiles==23.2.1 +aiofiles==24.1.0 # via # -r requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/_base.in -aiohttp==3.9.5 +aiohappyeyeballs==2.5.0 + # via aiohttp +aiohttp==3.11.13 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -65,25 +67,23 @@ aiopg==1.4.0 # via # -r requirements/../../../packages/simcore-sdk/requirements/_base.in # -r requirements/_base.in -aiormq==6.8.0 +aiormq==6.8.1 # via aio-pika -aiosignal==1.3.1 +aiosignal==1.3.2 # via aiohttp -alembic==1.13.1 +alembic==1.15.1 # via # -r requirements/../../../packages/postgres-database/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/_base.in annotated-types==0.7.0 # via pydantic -anyio==4.3.0 +anyio==4.8.0 # via # fast-depends # faststream # httpx # starlette # watchfiles -appdirs==1.4.4 - # via pint arrow==1.3.0 # via # -r requirements/../../../packages/dask-task-models-library/requirements/../../../packages/models-library/requirements/_base.in @@ -96,12 +96,10 @@ arrow==1.3.0 asgiref==3.8.1 # via opentelemetry-instrumentation-asgi async-timeout==4.0.3 - # via - # aiopg - # asyncpg -asyncpg==0.29.0 + # via aiopg +asyncpg==0.30.0 # via sqlalchemy -attrs==23.2.0 +attrs==25.1.0 # via # aiohttp # jsonschema @@ -110,7 +108,7 @@ bidict==0.23.1 # via python-socketio blosc==1.11.2 # via -r requirements/../../../services/dask-sidecar/requirements/_dask-distributed.txt -certifi==2024.2.2 +certifi==2025.1.31 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -149,13 +147,14 @@ certifi==2024.2.2 # httpcore # httpx # requests -charset-normalizer==3.3.2 +charset-normalizer==3.4.1 # via requests click==8.1.7 # via # -r requirements/../../../services/dask-sidecar/requirements/_dask-distributed.txt # dask # distributed + # rich-toolkit # typer # uvicorn cloudpickle==3.1.0 @@ -168,7 +167,7 @@ dask==2024.12.0 # -r requirements/../../../packages/dask-task-models-library/requirements/_base.in # -r requirements/../../../services/dask-sidecar/requirements/_dask-distributed.txt # distributed 
-deprecated==1.2.14 +deprecated==1.2.18 # via # opentelemetry-api # opentelemetry-exporter-otlp-proto-grpc @@ -178,30 +177,34 @@ distributed==2024.12.0 # via # -r requirements/../../../services/dask-sidecar/requirements/_dask-distributed.txt # dask -dnspython==2.6.1 +dnspython==2.7.0 # via email-validator -email-validator==2.1.1 +email-validator==2.2.0 # via # fastapi # pydantic +exceptiongroup==1.2.2 + # via aio-pika fast-depends==2.4.12 # via faststream -fastapi==0.115.5 +fastapi==0.115.6 # via # -r requirements/../../../packages/service-library/requirements/_fastapi.in # -r requirements/_base.in - # prometheus-fastapi-instrumentator -fastapi-cli==0.0.5 + # fastapi-lifespan-manager +fastapi-cli==0.0.7 # via fastapi -faststream==0.5.31 +fastapi-lifespan-manager==0.1.4 + # via -r requirements/../../../packages/service-library/requirements/_fastapi.in +faststream==0.5.35 # via # -r requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in flexcache==0.3 # via pint -flexparser==0.3.1 +flexparser==0.4 # via pint -frozenlist==1.4.1 +frozenlist==1.5.0 # via # aiohttp # aiosignal @@ -209,24 +212,24 @@ fsspec==2024.10.0 # via # -r requirements/../../../services/dask-sidecar/requirements/_dask-distributed.txt # dask -googleapis-common-protos==1.65.0 +googleapis-common-protos==1.69.1 # via # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http -greenlet==3.0.3 +greenlet==3.1.1 # via sqlalchemy -grpcio==1.66.0 +grpcio==1.70.0 # via opentelemetry-exporter-otlp-proto-grpc h11==0.14.0 # via # httpcore # uvicorn # wsproto -httpcore==1.0.5 +httpcore==1.0.7 # via httpx -httptools==0.6.1 +httptools==0.6.4 # via uvicorn -httpx==0.27.0 +httpx==0.28.1 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -265,7 +268,7 @@ httpx==0.27.0 # -r requirements/../../../packages/service-library/requirements/_fastapi.in # -r requirements/_base.in # fastapi -idna==3.7 +idna==3.10 # via # anyio # email-validator @@ -318,14 +321,14 @@ jinja2==3.1.4 # -r requirements/../../../services/dask-sidecar/requirements/_dask-distributed.txt # distributed # fastapi -jsonschema==4.22.0 +jsonschema==4.23.0 # via # -r requirements/../../../packages/dask-task-models-library/requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in -jsonschema-specifications==2023.7.1 +jsonschema-specifications==2024.10.1 # via jsonschema locket==1.0.0 # via @@ -334,7 +337,7 @@ locket==1.0.0 # partd lz4==4.3.3 # via -r requirements/../../../services/dask-sidecar/requirements/_dask-distributed.txt -mako==1.3.5 +mako==1.3.9 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c 
requirements/../../../packages/dask-task-models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -385,15 +388,15 @@ msgpack==1.1.0 # -r requirements/../../../services/dask-sidecar/requirements/_dask-distributed.txt # aiocache # distributed -multidict==6.0.5 +multidict==6.1.0 # via # aiohttp # yarl -networkx==3.3 +networkx==3.4.2 # via -r requirements/_base.in numpy==2.1.3 # via -r requirements/../../../services/dask-sidecar/requirements/_dask-distributed.txt -opentelemetry-api==1.28.2 +opentelemetry-api==1.30.0 # via # -r requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in @@ -411,19 +414,19 @@ opentelemetry-api==1.28.2 # opentelemetry-instrumentation-requests # opentelemetry-sdk # opentelemetry-semantic-conventions -opentelemetry-exporter-otlp==1.28.2 +opentelemetry-exporter-otlp==1.30.0 # via # -r requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-exporter-otlp-proto-common==1.28.2 +opentelemetry-exporter-otlp-proto-common==1.30.0 # via # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http -opentelemetry-exporter-otlp-proto-grpc==1.28.2 +opentelemetry-exporter-otlp-proto-grpc==1.30.0 # via opentelemetry-exporter-otlp -opentelemetry-exporter-otlp-proto-http==1.28.2 +opentelemetry-exporter-otlp-proto-http==1.30.0 # via opentelemetry-exporter-otlp -opentelemetry-instrumentation==0.49b2 +opentelemetry-instrumentation==0.51b0 # via # opentelemetry-instrumentation-aiopg # opentelemetry-instrumentation-asgi @@ -434,44 +437,44 @@ opentelemetry-instrumentation==0.49b2 # opentelemetry-instrumentation-logging # opentelemetry-instrumentation-redis # opentelemetry-instrumentation-requests -opentelemetry-instrumentation-aiopg==0.49b2 +opentelemetry-instrumentation-aiopg==0.51b0 # via -r requirements/../../../packages/simcore-sdk/requirements/_base.in -opentelemetry-instrumentation-asgi==0.49b2 +opentelemetry-instrumentation-asgi==0.51b0 # via opentelemetry-instrumentation-fastapi -opentelemetry-instrumentation-asyncpg==0.49b2 +opentelemetry-instrumentation-asyncpg==0.51b0 # via # -r requirements/../../../packages/postgres-database/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/_base.in -opentelemetry-instrumentation-dbapi==0.49b2 +opentelemetry-instrumentation-dbapi==0.51b0 # via opentelemetry-instrumentation-aiopg -opentelemetry-instrumentation-fastapi==0.49b2 +opentelemetry-instrumentation-fastapi==0.51b0 # via -r requirements/../../../packages/service-library/requirements/_fastapi.in -opentelemetry-instrumentation-httpx==0.49b2 +opentelemetry-instrumentation-httpx==0.51b0 # via -r requirements/../../../packages/service-library/requirements/_fastapi.in -opentelemetry-instrumentation-logging==0.49b2 +opentelemetry-instrumentation-logging==0.51b0 # via # -r requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-instrumentation-redis==0.49b2 +opentelemetry-instrumentation-redis==0.51b0 # via # -r requirements/../../../packages/service-library/requirements/_base.in # -r 
requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-instrumentation-requests==0.49b2 +opentelemetry-instrumentation-requests==0.51b0 # via # -r requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-proto==1.28.2 +opentelemetry-proto==1.30.0 # via # opentelemetry-exporter-otlp-proto-common # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http -opentelemetry-sdk==1.28.2 +opentelemetry-sdk==1.30.0 # via # -r requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http -opentelemetry-semantic-conventions==0.49b2 +opentelemetry-semantic-conventions==0.51b0 # via # opentelemetry-instrumentation # opentelemetry-instrumentation-asgi @@ -482,7 +485,7 @@ opentelemetry-semantic-conventions==0.49b2 # opentelemetry-instrumentation-redis # opentelemetry-instrumentation-requests # opentelemetry-sdk -opentelemetry-util-http==0.49b2 +opentelemetry-util-http==0.51b0 # via # opentelemetry-instrumentation-asgi # opentelemetry-instrumentation-fastapi @@ -490,7 +493,7 @@ opentelemetry-util-http==0.49b2 # opentelemetry-instrumentation-requests ordered-set==4.1.0 # via -r requirements/_base.in -orjson==3.10.3 +orjson==3.10.15 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -563,15 +566,21 @@ partd==1.4.2 # via # -r requirements/../../../services/dask-sidecar/requirements/_dask-distributed.txt # dask -pint==0.24.3 +pint==0.24.4 # via -r requirements/../../../packages/simcore-sdk/requirements/_base.in -prometheus-client==0.20.0 +platformdirs==4.3.6 + # via pint +prometheus-client==0.21.1 # via # -r requirements/../../../packages/service-library/requirements/_fastapi.in # prometheus-fastapi-instrumentator -prometheus-fastapi-instrumentator==6.1.0 +prometheus-fastapi-instrumentator==7.0.2 # via -r requirements/../../../packages/service-library/requirements/_fastapi.in -protobuf==5.29.0 +propcache==0.3.0 + # via + # aiohttp + # yarl +protobuf==5.29.3 # via # googleapis-common-protos # opentelemetry-proto @@ -581,11 +590,13 @@ psutil==6.1.0 # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../services/dask-sidecar/requirements/_dask-distributed.txt # distributed -psycopg2-binary==2.9.9 +psycopg2-binary==2.9.10 # via # aiopg # sqlalchemy -pydantic==2.10.2 +pycryptodome==3.21.0 + # via stream-zip +pydantic==2.10.6 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -659,9 +670,9 @@ pydantic==2.10.2 # fastapi # pydantic-extra-types # pydantic-settings -pydantic-core==2.27.1 +pydantic-core==2.27.2 # via pydantic -pydantic-extra-types==2.10.0 +pydantic-extra-types==2.10.2 # via # -r requirements/../../../packages/common-library/requirements/_base.in # -r 
requirements/../../../packages/dask-task-models-library/requirements/../../../packages/common-library/requirements/_base.in @@ -686,8 +697,42 @@ pydantic-extra-types==2.10.0 # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in # fastapi -pydantic-settings==2.6.1 +pydantic-settings==2.7.0 # via + # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/dask-task-models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/simcore-sdk/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt # -r requirements/../../../packages/dask-task-models-library/requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/dask-task-models-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/models-library/requirements/_base.in @@ -699,9 +744,9 @@ pydantic-settings==2.6.1 # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/_base.in # fastapi -pygments==2.18.0 +pygments==2.19.1 # via rich -pyinstrument==4.6.2 +pyinstrument==5.0.1 # via # -r requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in @@ -711,11 +756,11 @@ python-dotenv==1.0.1 # via # 
pydantic-settings # uvicorn -python-engineio==4.9.1 +python-engineio==4.11.2 # via python-socketio -python-multipart==0.0.9 +python-multipart==0.0.20 # via fastapi -python-socketio==5.11.2 +python-socketio==5.12.1 # via -r requirements/_base.in pyyaml==6.0.2 # via @@ -800,13 +845,47 @@ redis==5.2.1 # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/_base.in # aiocache -referencing==0.29.3 +referencing==0.35.1 # via + # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/dask-task-models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt # jsonschema # jsonschema-specifications requests==2.32.3 # via opentelemetry-exporter-otlp-proto-http -rich==13.7.1 +rich==13.9.4 # via # -r requirements/../../../packages/dask-task-models-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in @@ -814,26 +893,27 @@ rich==13.7.1 # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/_base.in + # rich-toolkit # typer -rpds-py==0.18.1 +rich-toolkit==0.13.2 + # via fastapi-cli +rpds-py==0.23.1 # via # jsonschema # referencing shellingham==1.5.4 # via typer -simple-websocket==1.0.0 +simple-websocket==1.1.0 # via python-engineio -six==1.16.0 +six==1.17.0 # via python-dateutil sniffio==1.3.1 - # via - # anyio - # httpx + # via anyio sortedcontainers==2.4.0 # via # -r 
requirements/../../../services/dask-sidecar/requirements/_dask-distributed.txt # distributed -sqlalchemy==1.4.52 +sqlalchemy==1.4.54 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -873,7 +953,7 @@ sqlalchemy==1.4.52 # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/_base.in # aiopg # alembic -starlette==0.41.2 +starlette==0.41.3 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -910,11 +990,16 @@ starlette==0.41.2 # -c requirements/../../../packages/simcore-sdk/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt # fastapi + # prometheus-fastapi-instrumentator +stream-zip==0.0.83 + # via + # -r requirements/../../../packages/service-library/requirements/_base.in + # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in tblib==3.0.0 # via # -r requirements/../../../services/dask-sidecar/requirements/_dask-distributed.txt # distributed -tenacity==8.5.0 +tenacity==9.0.0 # via # -r requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in @@ -932,12 +1017,12 @@ tornado==6.4.2 # via # -r requirements/../../../services/dask-sidecar/requirements/_dask-distributed.txt # distributed -tqdm==4.66.4 +tqdm==4.67.1 # via # -r requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/_base.in -typer==0.12.3 +typer==0.15.2 # via # -r requirements/../../../packages/dask-task-models-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in @@ -945,13 +1030,13 @@ typer==0.12.3 # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/_base.in # fastapi-cli -types-python-dateutil==2.9.0.20240316 +types-python-dateutil==2.9.0.20241206 # via arrow typing-extensions==4.12.2 # via # aiodebug - # aiodocker # alembic + # anyio # fastapi # faststream # flexcache @@ -961,6 +1046,7 @@ typing-extensions==4.12.2 # pydantic # pydantic-core # pydantic-extra-types + # rich-toolkit # typer ujson==5.10.0 # via @@ -1038,18 +1124,18 @@ urllib3==2.2.3 # -r requirements/../../../services/dask-sidecar/requirements/_dask-distributed.txt # distributed # requests -uvicorn==0.29.0 +uvicorn==0.34.0 # via # -r requirements/../../../packages/service-library/requirements/_fastapi.in # fastapi # fastapi-cli -uvloop==0.19.0 +uvloop==0.21.0 # via uvicorn -watchfiles==0.21.0 +watchfiles==1.0.4 # via uvicorn -websockets==12.0 +websockets==15.0.1 # via uvicorn -wrapt==1.16.0 
+wrapt==1.17.2 # via # deprecated # opentelemetry-instrumentation @@ -1059,7 +1145,7 @@ wrapt==1.16.0 # opentelemetry-instrumentation-redis wsproto==1.2.0 # via simple-websocket -yarl==1.9.4 +yarl==1.18.3 # via # -r requirements/../../../packages/postgres-database/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in diff --git a/services/director-v2/requirements/_test.txt b/services/director-v2/requirements/_test.txt index b04288c65d2..508bfac7e60 100644 --- a/services/director-v2/requirements/_test.txt +++ b/services/director-v2/requirements/_test.txt @@ -1,4 +1,4 @@ -aio-pika==9.4.1 +aio-pika==9.5.5 # via # -c requirements/_base.txt # -r requirements/_test.in @@ -6,30 +6,34 @@ aioboto3==13.3.0 # via -r requirements/_test.in aiobotocore==2.16.0 # via aioboto3 -aiofiles==23.2.1 +aiofiles==24.1.0 # via # -c requirements/_base.txt # aioboto3 -aiohttp==3.9.5 +aiohappyeyeballs==2.5.0 + # via + # -c requirements/_base.txt + # aiohttp +aiohttp==3.11.13 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt # aiobotocore aioitertools==0.12.0 # via aiobotocore -aiormq==6.8.0 +aiormq==6.8.1 # via # -c requirements/_base.txt # aio-pika -aiosignal==1.3.1 +aiosignal==1.3.2 # via # -c requirements/_base.txt # aiohttp -alembic==1.13.1 +alembic==1.15.1 # via # -c requirements/_base.txt # -r requirements/_test.in -anyio==4.3.0 +anyio==4.8.0 # via # -c requirements/_base.txt # httpx @@ -37,12 +41,12 @@ asgi-lifespan==2.1.0 # via -r requirements/_test.in async-asgi-testclient==1.4.11 # via -r requirements/_test.in -attrs==23.2.0 +attrs==25.1.0 # via # -c requirements/_base.txt # aiohttp # pytest-docker -bokeh==3.6.2 +bokeh==3.6.3 # via dask boto3==1.35.81 # via @@ -53,14 +57,14 @@ botocore==1.35.81 # aiobotocore # boto3 # s3transfer -certifi==2024.2.2 +certifi==2025.1.31 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt # httpcore # httpx # requests -charset-normalizer==3.3.2 +charset-normalizer==3.4.1 # via # -c requirements/_base.txt # requests @@ -76,7 +80,7 @@ cloudpickle==3.1.0 # distributed contourpy==1.3.1 # via bokeh -coverage==7.6.10 +coverage==7.6.12 # via pytest-cov dask==2024.12.0 # via @@ -89,13 +93,17 @@ distributed==2024.12.0 # dask docker==7.1.0 # via -r requirements/_test.in +exceptiongroup==1.2.2 + # via + # -c requirements/_base.txt + # aio-pika execnet==2.1.1 # via pytest-xdist -faker==35.0.0 +faker==36.2.2 # via -r requirements/_test.in flaky==3.8.1 # via -r requirements/_test.in -frozenlist==1.4.1 +frozenlist==1.5.0 # via # -c requirements/_base.txt # aiohttp @@ -104,7 +112,7 @@ fsspec==2024.10.0 # via # -c requirements/_base.txt # dask -greenlet==3.0.3 +greenlet==3.1.1 # via # -c requirements/_base.txt # sqlalchemy @@ -112,18 +120,18 @@ h11==0.14.0 # via # -c requirements/_base.txt # httpcore -httpcore==1.0.5 +httpcore==1.0.7 # via # -c requirements/_base.txt # httpx -httpx==0.27.0 +httpx==0.28.1 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt # respx icdiff==2.0.7 # via pytest-icdiff -idna==3.7 +idna==3.10 # via # -c requirements/_base.txt # anyio @@ -152,7 +160,7 @@ locket==1.0.0 # -c requirements/_base.txt # distributed # partd -mako==1.3.5 +mako==1.3.9 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt @@ -166,13 +174,13 @@ msgpack==1.1.0 # via # -c requirements/_base.txt # distributed -multidict==6.0.5 +multidict==6.1.0 # via # -c requirements/_base.txt # aiohttp # 
async-asgi-testclient # yarl -mypy==1.14.1 +mypy==1.15.0 # via sqlalchemy mypy-extensions==1.0.0 # via mypy @@ -206,11 +214,16 @@ pluggy==1.5.0 # via pytest pprintpp==0.4.0 # via pytest-icdiff +propcache==0.3.0 + # via + # -c requirements/_base.txt + # aiohttp + # yarl psutil==6.1.0 # via # -c requirements/_base.txt # distributed -pytest==8.3.4 +pytest==8.3.5 # via # -r requirements/_test.in # pytest-asyncio @@ -225,7 +238,7 @@ pytest-asyncio==0.21.2 # -r requirements/_test.in pytest-cov==6.0.0 # via -r requirements/_test.in -pytest-docker==3.1.1 +pytest-docker==3.2.0 # via -r requirements/_test.in pytest-icdiff==0.9 # via -r requirements/_test.in @@ -239,9 +252,8 @@ python-dateutil==2.9.0.post0 # via # -c requirements/_base.txt # botocore - # faker # pandas -pytz==2024.2 +pytz==2025.1 # via pandas pyyaml==6.0.2 # via @@ -259,7 +271,7 @@ respx==0.22.0 # via -r requirements/_test.in s3transfer==0.10.4 # via boto3 -six==1.16.0 +six==1.17.0 # via # -c requirements/_base.txt # python-dateutil @@ -268,12 +280,11 @@ sniffio==1.3.1 # -c requirements/_base.txt # anyio # asgi-lifespan - # httpx sortedcontainers==2.4.0 # via # -c requirements/_base.txt # distributed -sqlalchemy==1.4.52 +sqlalchemy==1.4.54 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt @@ -296,7 +307,7 @@ tornado==6.4.2 # -c requirements/_base.txt # bokeh # distributed -types-networkx==3.4.2.20241227 +types-networkx==3.4.2.20250304 # via -r requirements/_test.in types-psycopg2==2.9.21.20250121 # via -r requirements/_test.in @@ -306,11 +317,13 @@ typing-extensions==4.12.2 # via # -c requirements/_base.txt # alembic - # faker + # anyio # mypy # sqlalchemy2-stubs tzdata==2025.1 - # via pandas + # via + # faker + # pandas urllib3==2.2.3 # via # -c requirements/../../../requirements/constraints.txt @@ -319,13 +332,13 @@ urllib3==2.2.3 # distributed # docker # requests -wrapt==1.16.0 +wrapt==1.17.2 # via # -c requirements/_base.txt # aiobotocore xyzservices==2025.1.0 # via bokeh -yarl==1.9.4 +yarl==1.18.3 # via # -c requirements/_base.txt # aio-pika diff --git a/services/director-v2/requirements/_tools.txt b/services/director-v2/requirements/_tools.txt index 6d88cf90d05..322c3b95791 100644 --- a/services/director-v2/requirements/_tools.txt +++ b/services/director-v2/requirements/_tools.txt @@ -1,6 +1,6 @@ astroid==3.3.8 # via pylint -black==24.10.0 +black==25.1.0 # via -r requirements/../../../requirements/devenv.txt build==1.2.2.post1 # via pip-tools @@ -20,15 +20,15 @@ distlib==0.3.9 # via virtualenv filelock==3.17.0 # via virtualenv -identify==2.6.6 +identify==2.6.8 # via pre-commit -isort==5.13.2 +isort==6.0.1 # via # -r requirements/../../../requirements/devenv.txt # pylint mccabe==0.7.0 # via pylint -mypy==1.14.1 +mypy==1.15.0 # via # -c requirements/_test.txt # -r requirements/../../../requirements/devenv.txt @@ -47,18 +47,19 @@ packaging==24.2 # build pathspec==0.12.1 # via black -pip==25.0 +pip==25.0.1 # via pip-tools pip-tools==7.4.1 # via -r requirements/../../../requirements/devenv.txt platformdirs==4.3.6 # via + # -c requirements/_base.txt # black # pylint # virtualenv pre-commit==4.1.0 # via -r requirements/../../../requirements/devenv.txt -pylint==3.3.3 +pylint==3.3.4 # via -r requirements/../../../requirements/devenv.txt pyproject-hooks==1.2.0 # via @@ -71,9 +72,9 @@ pyyaml==6.0.2 # -c requirements/_test.txt # pre-commit # watchdog -ruff==0.9.3 +ruff==0.9.9 # via -r requirements/../../../requirements/devenv.txt -setuptools==75.8.0 +setuptools==75.8.2 # via pip-tools tomlkit==0.13.2 
# via pylint @@ -82,7 +83,7 @@ typing-extensions==4.12.2 # -c requirements/_base.txt # -c requirements/_test.txt # mypy -virtualenv==20.29.1 +virtualenv==20.29.3 # via pre-commit watchdog==6.0.0 # via -r requirements/_tools.in diff --git a/services/director-v2/src/simcore_service_director_v2/cli/__init__.py b/services/director-v2/src/simcore_service_director_v2/cli/__init__.py index eb4d050bd80..f33d5972260 100644 --- a/services/director-v2/src/simcore_service_director_v2/cli/__init__.py +++ b/services/director-v2/src/simcore_service_director_v2/cli/__init__.py @@ -26,7 +26,11 @@ DEFAULT_OUTPUTS_PUSH_ATTEMPTS: Final[int] = 3 DEFAULT_TASK_UPDATE_INTERVAL_S: Final[int] = 1 -main = typer.Typer(name=PROJECT_NAME) +main = typer.Typer( + name=PROJECT_NAME, + pretty_exceptions_enable=False, + pretty_exceptions_show_locals=False, +) _logger = logging.getLogger(__name__) diff --git a/services/director-v2/src/simcore_service_director_v2/core/application.py b/services/director-v2/src/simcore_service_director_v2/core/application.py index eb6fe5d64fd..c53c841183e 100644 --- a/services/director-v2/src/simcore_service_director_v2/core/application.py +++ b/services/director-v2/src/simcore_service_director_v2/core/application.py @@ -6,8 +6,8 @@ get_common_oas_options, override_fastapi_openapi_method, ) -from servicelib.fastapi.profiler_middleware import ProfilerMiddleware -from servicelib.fastapi.tracing import setup_tracing +from servicelib.fastapi.profiler import initialize_profiler +from servicelib.fastapi.tracing import initialize_tracing from servicelib.logging_utils import config_all_loggers from .._meta import API_VERSION, API_VTAG, APP_NAME, PROJECT_NAME, SUMMARY @@ -145,7 +145,7 @@ def init_app(settings: AppSettings | None = None) -> FastAPI: substitutions.setup(app) if settings.DIRECTOR_V2_TRACING: - setup_tracing(app, settings.DIRECTOR_V2_TRACING, APP_NAME) + initialize_tracing(app, settings.DIRECTOR_V2_TRACING, APP_NAME) if settings.DIRECTOR_V0.DIRECTOR_ENABLED: director_v0.setup( @@ -204,7 +204,7 @@ def init_app(settings: AppSettings | None = None) -> FastAPI: instrumentation.setup(app) if settings.DIRECTOR_V2_PROFILING: - app.add_middleware(ProfilerMiddleware) + initialize_profiler(app) # setup app -- app.add_event_handler("startup", on_startup) diff --git a/services/director-v2/src/simcore_service_director_v2/models/comp_runs.py b/services/director-v2/src/simcore_service_director_v2/models/comp_runs.py index 915b5b2f1d0..6f0f7bf7986 100644 --- a/services/director-v2/src/simcore_service_director_v2/models/comp_runs.py +++ b/services/director-v2/src/simcore_service_director_v2/models/comp_runs.py @@ -2,7 +2,6 @@ from contextlib import suppress from typing import TypeAlias -from models_library.clusters import ClusterID from models_library.projects import ProjectID from models_library.projects_nodes_io import NodeID from models_library.projects_state import RunningState @@ -45,7 +44,6 @@ class CompRunsAtDB(BaseModel): run_id: PositiveInt project_uuid: ProjectID user_id: UserID - cluster_id: ClusterID | None iteration: Iteration result: RunningState created: datetime.datetime @@ -93,7 +91,6 @@ def convert_null_to_empty_metadata(cls, v): "run_id": 432, "project_uuid": "65fee9d2-e030-452c-a29c-45d288577ca5", "user_id": 132, - "cluster_id": None, "iteration": 42, "result": "UNKNOWN", "started": None, @@ -109,7 +106,6 @@ def convert_null_to_empty_metadata(cls, v): "run_id": 432, "project_uuid": "65fee9d2-e030-452c-a29c-45d288577ca5", "user_id": 132, - "cluster_id": None, "iteration": 42, "result": 
"NOT_STARTED", "started": None, @@ -125,7 +121,6 @@ def convert_null_to_empty_metadata(cls, v): "run_id": 43243, "project_uuid": "65fee9d2-e030-452c-a29c-45d288577ca5", "user_id": 132, - "cluster_id": None, "iteration": 12, "result": "SUCCESS", "created": "2021-03-01T13:07:34.191610", @@ -148,7 +143,6 @@ def convert_null_to_empty_metadata(cls, v): "run_id": 43243, "project_uuid": "65fee9d2-e030-452c-a29c-45d288577ca5", "user_id": 132, - "cluster_id": None, "iteration": 12, "result": "SUCCESS", "created": "2021-03-01T13:07:34.191610", diff --git a/services/director-v2/src/simcore_service_director_v2/modules/catalog.py b/services/director-v2/src/simcore_service_director_v2/modules/catalog.py index ab94d681a11..d9d4c3e6144 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/catalog.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/catalog.py @@ -83,7 +83,7 @@ async def get_service( ) -> dict[str, Any]: resp = await self.request( "GET", - f"/services/{urllib.parse.quote( service_key, safe='')}/{service_version}", + f"/services/{urllib.parse.quote(service_key, safe='')}/{service_version}", params={"user_id": user_id}, headers={"X-Simcore-Products-Name": product_name}, ) @@ -98,7 +98,7 @@ async def get_service_resources( ) -> ServiceResourcesDict: resp = await self.request( "GET", - f"/services/{urllib.parse.quote( service_key, safe='')}/{service_version}/resources", + f"/services/{urllib.parse.quote(service_key, safe='')}/{service_version}/resources", params={"user_id": user_id}, ) resp.raise_for_status() @@ -114,7 +114,7 @@ async def get_service_labels( ) -> SimcoreServiceLabels: resp = await self.request( "GET", - f"/services/{urllib.parse.quote( service_key, safe='')}/{service_version}/labels", + f"/services/{urllib.parse.quote(service_key, safe='')}/{service_version}/labels", ) resp.raise_for_status() if resp.status_code == status.HTTP_200_OK: @@ -137,7 +137,7 @@ async def get_service_specifications( ) -> dict[str, Any]: resp = await self.request( "GET", - f"/services/{urllib.parse.quote( service_key, safe='')}/{service_version}/specifications", + f"/services/{urllib.parse.quote(service_key, safe='')}/{service_version}/specifications", params={"user_id": user_id}, ) resp.raise_for_status() diff --git a/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/comp_runs.py b/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/comp_runs.py index 46cc7669cde..a5dc296aa21 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/comp_runs.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/comp_runs.py @@ -10,13 +10,12 @@ from models_library.users import UserID from models_library.utils.fastapi_encoders import jsonable_encoder from pydantic import PositiveInt -from simcore_postgres_database.errors import ForeignKeyViolation +from simcore_postgres_database.aiopg_errors import ForeignKeyViolation from sqlalchemy.sql import or_ from sqlalchemy.sql.elements import literal_column from sqlalchemy.sql.expression import desc from ....core.errors import ( - ClusterNotFoundError, ComputationalRunNotFoundError, DirectorError, ProjectNotFoundError, @@ -37,10 +36,6 @@ ProjectNotFoundError, ("projects", "project_id"), ), - comp_runs.c.cluster_id: ( - ClusterNotFoundError, - ("clusters", "cluster_id"), - ), } @@ -172,7 +167,6 @@ async def create( .values( user_id=user_id, project_uuid=f"{project_id}", - cluster_id=None, iteration=iteration, 
result=RUNNING_STATE_TO_DB[RunningState.PUBLISHED], started=datetime.datetime.now(tz=datetime.UTC), diff --git a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/scheduler/_core/_observer.py b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/scheduler/_core/_observer.py index ce3decc2ca6..949ba98f4fe 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/scheduler/_core/_observer.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/scheduler/_core/_observer.py @@ -146,9 +146,9 @@ async def observing_single_service( # With unhandled errors, let's generate an ID and send it to the end-user # so that we can trace the logs and debug the issue. user_error_msg = ( - f"This service ({service_name}) unexpectedly failed." - " Our team has recorded the issue and is working to resolve it as quickly as possible." - " Thank you for your patience." + f"The service ({service_name}) experienced a problem. " + "Our team has recorded the issue. " + "If the issue persists, please report it." ) error_code = create_error_code(exc) diff --git a/services/director-v2/src/simcore_service_director_v2/modules/resource_usage_tracker_client.py b/services/director-v2/src/simcore_service_director_v2/modules/resource_usage_tracker_client.py index 3b75607989d..550b2eddfef 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/resource_usage_tracker_client.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/resource_usage_tracker_client.py @@ -12,8 +12,8 @@ WalletTotalCredits, ) from models_library.api_schemas_resource_usage_tracker.pricing_plans import ( - PricingPlanGet, - PricingUnitGet, + RutPricingPlanGet, + RutPricingUnitGet, ) from models_library.products import ProductName from models_library.resource_tracker import ( @@ -82,7 +82,7 @@ async def get_default_service_pricing_plan( product_name: ProductName, service_key: ServiceKey, service_version: ServiceVersion, - ) -> PricingPlanGet: + ) -> RutPricingPlanGet: response = await self.client.get( f"/services/{service_key}/{service_version}/pricing-plan", params={ @@ -94,7 +94,7 @@ async def get_default_service_pricing_plan( raise PricingPlanUnitNotFoundError(msg=msg) response.raise_for_status() - return PricingPlanGet.model_validate(response.json()) + return RutPricingPlanGet.model_validate(response.json()) async def get_default_pricing_and_hardware_info( self, @@ -124,7 +124,7 @@ async def get_pricing_unit( product_name: ProductName, pricing_plan_id: PricingPlanId, pricing_unit_id: PricingUnitId, - ) -> PricingUnitGet: + ) -> RutPricingUnitGet: response = await self.client.get( f"/pricing-plans/{pricing_plan_id}/pricing-units/{pricing_unit_id}", params={ @@ -132,7 +132,7 @@ }, ) response.raise_for_status() - return PricingUnitGet.model_validate(response.json()) + return RutPricingUnitGet.model_validate(response.json()) async def get_wallet_credits( self, diff --git a/services/director-v2/tests/integration/02/conftest.py b/services/director-v2/tests/integration/02/conftest.py index 1cc8e4fd64e..4e0b6a5b31f 100644 --- a/services/director-v2/tests/integration/02/conftest.py +++ b/services/director-v2/tests/integration/02/conftest.py @@ -5,16 +5,16 @@ from uuid import uuid4 import aiodocker -from pydantic import TypeAdapter import pytest from models_library.api_schemas_resource_usage_tracker.pricing_plans import ( - PricingPlanGet, + RutPricingPlanGet, ) from
models_library.projects_networks import ProjectsNetworks from models_library.services_resources import ( ServiceResourcesDict, ServiceResourcesDictHelpers, ) +from pydantic import TypeAdapter from pytest_mock.plugin import MockerFixture @@ -80,8 +80,8 @@ def service_resources() -> ServiceResourcesDict: @pytest.fixture def mock_resource_usage_tracker(mocker: MockerFixture) -> None: base_module = "simcore_service_director_v2.modules.resource_usage_tracker_client" - service_pricing_plan = PricingPlanGet.model_validate( - PricingPlanGet.model_config["json_schema_extra"]["examples"][1] + service_pricing_plan = RutPricingPlanGet.model_validate( + RutPricingPlanGet.model_config["json_schema_extra"]["examples"][1] ) for unit in service_pricing_plan.pricing_units: unit.specific_info.aws_ec2_instances.clear() diff --git a/services/director-v2/tests/integration/02/test_dynamic_sidecar_nodeports_integration.py b/services/director-v2/tests/integration/02/test_dynamic_sidecar_nodeports_integration.py index 8c587a2a08e..1d867be004a 100644 --- a/services/director-v2/tests/integration/02/test_dynamic_sidecar_nodeports_integration.py +++ b/services/director-v2/tests/integration/02/test_dynamic_sidecar_nodeports_integration.py @@ -369,7 +369,6 @@ def mock_env( "STORAGE_PORT": local_settings.STORAGE_PORT, } ), - "STORAGE_ENDPOINT": "storage:8080", "DYNAMIC_SIDECAR_IMAGE": image_name, "DYNAMIC_SIDECAR_PROMETHEUS_SERVICE_LABELS": "{}", "TRAEFIK_SIMCORE_ZONE": "test_traefik_zone", diff --git a/services/director-v2/tests/unit/test_models_clusters.py b/services/director-v2/tests/unit/test_models_clusters.py index ae0b17dd43e..a974d680246 100644 --- a/services/director-v2/tests/unit/test_models_clusters.py +++ b/services/director-v2/tests/unit/test_models_clusters.py @@ -6,9 +6,7 @@ Worker, WorkerMetrics, ) -from models_library.clusters import ClusterTypeInModel from pydantic import ByteSize, TypeAdapter -from simcore_postgres_database.models.clusters import ClusterType def test_scheduler_constructor_with_default_has_correct_dict(faker: Faker): @@ -41,23 +39,3 @@ def test_worker_constructor_corrects_negative_used_resources(faker: Faker): ) assert worker assert worker.used_resources["CPU"] == 0 - - -def test_cluster_type_in_model_includes_postgres_database_model(): - models_library_cluster_types_names: set[str] = { - t.name for t in set(ClusterTypeInModel) - } - postgres_library_cluster_types_names: set[str] = {t.name for t in set(ClusterType)} - assert postgres_library_cluster_types_names.issubset( - models_library_cluster_types_names - ) - - models_library_cluster_types_values: set[str] = { - t.value for t in set(ClusterTypeInModel) - } # type: ignore - postgres_library_cluster_types_values: set[str] = { - t.value for t in set(ClusterType) - } - assert postgres_library_cluster_types_values.issubset( - models_library_cluster_types_values - ) diff --git a/services/director-v2/tests/unit/test_modules_dask_clients_pool.py b/services/director-v2/tests/unit/test_modules_dask_clients_pool.py index 8c14c46bbb1..d3c6274fa7c 100644 --- a/services/director-v2/tests/unit/test_modules_dask_clients_pool.py +++ b/services/director-v2/tests/unit/test_modules_dask_clients_pool.py @@ -22,7 +22,6 @@ ) from pytest_mock.plugin import MockerFixture from pytest_simcore.helpers.typing_env import EnvVarsDict -from simcore_postgres_database.models.clusters import ClusterType from simcore_service_director_v2.core.application import init_app from simcore_service_director_v2.core.errors import ( ConfigurationError, @@ -86,7 +85,7 @@ def 
creator(num_clusters: int) -> list[BaseCluster]: { "id": faker.pyint(), "name": faker.name(), - "type": ClusterType.ON_PREMISE, + "type": ClusterTypeInModel.ON_PREMISE, "owner": faker.pyint(), "endpoint": faker.uri(), "authentication": choice( # noqa: S311 diff --git a/services/director-v2/tests/unit/test_modules_notifier.py b/services/director-v2/tests/unit/test_modules_notifier.py index 0056984847c..357edc68af8 100644 --- a/services/director-v2/tests/unit/test_modules_notifier.py +++ b/services/director-v2/tests/unit/test_modules_notifier.py @@ -179,4 +179,4 @@ async def test_notifier_publish_message( ) ) - await _assert_call_count(server_disconnect, call_count=number_of_clients) + await _assert_call_count(server_disconnect, call_count=number_of_clients * 2) diff --git a/services/director-v2/tests/unit/with_dbs/comp_scheduler/test_api_route_computations.py b/services/director-v2/tests/unit/with_dbs/comp_scheduler/test_api_route_computations.py index 028b33ad484..129ceed3634 100644 --- a/services/director-v2/tests/unit/with_dbs/comp_scheduler/test_api_route_computations.py +++ b/services/director-v2/tests/unit/with_dbs/comp_scheduler/test_api_route_computations.py @@ -31,8 +31,8 @@ ) from models_library.api_schemas_directorv2.services import ServiceExtras from models_library.api_schemas_resource_usage_tracker.pricing_plans import ( - PricingPlanGet, - PricingUnitGet, + RutPricingPlanGet, + RutPricingUnitGet, ) from models_library.projects import ProjectAtDB from models_library.projects_nodes import NodeID, NodeState @@ -169,14 +169,7 @@ def _mocked_service_resources(request) -> httpx.Response: def _mocked_services_details( request, service_key: str, service_version: str ) -> httpx.Response: - assert "json_schema_extra" in ServiceGet.model_config - assert isinstance(ServiceGet.model_config["json_schema_extra"], dict) - assert isinstance( - ServiceGet.model_config["json_schema_extra"]["examples"], list - ) - assert isinstance( - ServiceGet.model_config["json_schema_extra"]["examples"][0], dict - ) + data_published = fake_service_details.model_copy( update={ "key": urllib.parse.unquote(service_key), @@ -184,7 +177,7 @@ def _mocked_services_details( } ).model_dump(by_alias=True) data = { - **ServiceGet.model_config["json_schema_extra"]["examples"][0], + **ServiceGet.model_json_schema()["examples"][0], **data_published, } payload = ServiceGet.model_validate(data) @@ -287,22 +280,25 @@ def _mocked_services_details( yield respx_mock -assert "json_schema_extra" in PricingPlanGet.model_config -assert isinstance(PricingPlanGet.model_config["json_schema_extra"], dict) -assert isinstance(PricingPlanGet.model_config["json_schema_extra"]["examples"], list) +assert "json_schema_extra" in RutPricingPlanGet.model_config +assert isinstance(RutPricingPlanGet.model_config["json_schema_extra"], dict) +assert isinstance(RutPricingPlanGet.model_config["json_schema_extra"]["examples"], list) @pytest.fixture( - params=PricingPlanGet.model_config["json_schema_extra"]["examples"], + params=[ + RutPricingPlanGet.model_config["json_schema_extra"]["examples"][0], + RutPricingPlanGet.model_config["json_schema_extra"]["examples"][1], + ], ids=["with ec2 restriction", "without"], ) -def default_pricing_plan(request: pytest.FixtureRequest) -> PricingPlanGet: - return PricingPlanGet(**request.param) +def default_pricing_plan(request: pytest.FixtureRequest) -> RutPricingPlanGet: + return RutPricingPlanGet(**request.param) @pytest.fixture def default_pricing_plan_aws_ec2_type( - default_pricing_plan: PricingPlanGet, + 
default_pricing_plan: RutPricingPlanGet, ) -> str | None: assert default_pricing_plan.pricing_units for p in default_pricing_plan.pricing_units: @@ -317,7 +313,7 @@ def default_pricing_plan_aws_ec2_type( @pytest.fixture def mocked_resource_usage_tracker_service_fcts( - minimal_app: FastAPI, default_pricing_plan: PricingPlanGet + minimal_app: FastAPI, default_pricing_plan: RutPricingPlanGet ) -> Iterator[respx.MockRouter]: def _mocked_service_default_pricing_plan( request, service_key: str, service_version: str @@ -332,10 +328,10 @@ def _mocked_service_default_pricing_plan( ) def _mocked_get_pricing_unit(request, pricing_plan_id: int) -> httpx.Response: - assert "json_schema_extra" in PricingUnitGet.model_config - assert isinstance(PricingUnitGet.model_config["json_schema_extra"], dict) + assert "json_schema_extra" in RutPricingUnitGet.model_config + assert isinstance(RutPricingUnitGet.model_config["json_schema_extra"], dict) assert isinstance( - PricingUnitGet.model_config["json_schema_extra"]["examples"], list + RutPricingUnitGet.model_config["json_schema_extra"]["examples"], list ) return httpx.Response( 200, @@ -343,7 +339,9 @@ def _mocked_get_pricing_unit(request, pricing_plan_id: int) -> httpx.Response: ( default_pricing_plan.pricing_units[0] if default_pricing_plan.pricing_units - else PricingUnitGet.model_config["json_schema_extra"]["examples"][0] + else RutPricingUnitGet.model_config["json_schema_extra"][ + "examples" + ][0] ), by_alias=True, ), @@ -597,8 +595,8 @@ async def test_create_computation_with_wallet( @pytest.mark.parametrize( "default_pricing_plan", [ - PricingPlanGet.model_validate( - PricingPlanGet.model_config["json_schema_extra"]["examples"][0] + RutPricingPlanGet.model_validate( + RutPricingPlanGet.model_config["json_schema_extra"]["examples"][0] ) ], ) @@ -639,8 +637,8 @@ async def test_create_computation_with_wallet_with_invalid_pricing_unit_name_rai @pytest.mark.parametrize( "default_pricing_plan", [ - PricingPlanGet( - **PricingPlanGet.model_config["json_schema_extra"]["examples"][0] # type: ignore + RutPricingPlanGet( + **RutPricingPlanGet.model_config["json_schema_extra"]["examples"][0] # type: ignore ) ], ) diff --git a/services/director-v2/tests/unit/with_dbs/comp_scheduler/test_api_route_computations_tasks.py b/services/director-v2/tests/unit/with_dbs/comp_scheduler/test_api_route_computations_tasks.py index 73d59a740c5..3aabbf90d73 100644 --- a/services/director-v2/tests/unit/with_dbs/comp_scheduler/test_api_route_computations_tasks.py +++ b/services/director-v2/tests/unit/with_dbs/comp_scheduler/test_api_route_computations_tasks.py @@ -92,8 +92,8 @@ class Loc(NamedTuple): autospec=True, return_value=faker.url(), ), - "get_storage_locations": mocker.patch( - "simcore_sdk.node_ports_common.storage_client.get_storage_locations", + "list_storage_locations": mocker.patch( + "simcore_sdk.node_ports_common.storage_client.list_storage_locations", autospec=True, return_value=[ Loc(name="simcore.s3", id=0), @@ -159,7 +159,7 @@ async def test_get_all_tasks_log_files( ) # calls storage - mocked_nodeports_storage_client["get_storage_locations"].assert_not_called() + mocked_nodeports_storage_client["list_storage_locations"].assert_not_called() assert mocked_nodeports_storage_client["get_download_file_link"].called # test expected response according to OAS! 
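Aside on the pattern above: the PricingPlanGet → RutPricingPlanGet rename touches many spots that all use the same pydantic v2 idiom, namely reading a declared example back out of model_config["json_schema_extra"] and re-validating it through the model, so that schema drift fails the test instead of passing silently. A minimal, self-contained sketch of that idiom follows; the model here is a trimmed hypothetical stand-in, not the real schema from models_library.

from pydantic import BaseModel, ConfigDict

class RutPricingPlanGet(BaseModel):
    # Hypothetical stand-in: the real model carries many more fields.
    model_config = ConfigDict(
        json_schema_extra={
            "examples": [
                {"display_name": "with ec2 restriction"},
                {"display_name": "without"},
            ]
        }
    )
    display_name: str

# Examples declared on the model are pulled back out and re-validated,
# which is also how the fixtures above parametrize their test cases.
example = RutPricingPlanGet.model_config["json_schema_extra"]["examples"][1]
assert RutPricingPlanGet.model_validate(example).display_name == "without"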
@@ -207,7 +206,6 @@ async def test_get_tasks_outputs( async def test_get_tasks_outputs_not_found(node_id: NodeID, client: httpx.AsyncClient): - invalid_project = uuid4() resp = await client.post( f"/v2/computations/{invalid_project}/tasks/-/outputs:batchGet", diff --git a/services/director-v2/tests/unit/with_dbs/comp_scheduler/test_manager.py b/services/director-v2/tests/unit/with_dbs/comp_scheduler/test_manager.py index 47bdd35f8cd..f5bc1ca084d 100644 --- a/services/director-v2/tests/unit/with_dbs/comp_scheduler/test_manager.py +++ b/services/director-v2/tests/unit/with_dbs/comp_scheduler/test_manager.py @@ -172,7 +172,6 @@ async def test_schedule_all_pipelines( assert comp_run.user_id == published_project.project.prj_owner assert comp_run.iteration == 1 assert comp_run.cancelled is None - assert comp_run.cluster_id is None assert comp_run.metadata == run_metadata assert comp_run.result is RunningState.PUBLISHED assert comp_run.scheduled is not None @@ -275,7 +274,6 @@ async def test_schedule_all_pipelines_logs_error_if_it_find_old_pipelines( assert comp_run.user_id == published_project.project.prj_owner assert comp_run.iteration == 1 assert comp_run.cancelled is None - assert comp_run.cluster_id is None assert comp_run.metadata == run_metadata assert comp_run.result is RunningState.PUBLISHED assert comp_run.scheduled is not None diff --git a/services/director-v2/tests/unit/with_dbs/test_utils_dask.py b/services/director-v2/tests/unit/with_dbs/test_utils_dask.py index 31da3cae2a2..7c991d3390d 100644 --- a/services/director-v2/tests/unit/with_dbs/test_utils_dask.py +++ b/services/director-v2/tests/unit/with_dbs/test_utils_dask.py @@ -30,7 +30,10 @@ from faker import Faker from fastapi import FastAPI from models_library.api_schemas_directorv2.services import NodeRequirements -from models_library.api_schemas_storage import FileUploadLinks, FileUploadSchema +from models_library.api_schemas_storage.storage_schemas import ( FileUploadLinks, FileUploadSchema, ) from models_library.docker import to_simcore_runtime_docker_label_key from models_library.projects import ProjectID from models_library.projects_nodes_io import NodeID, SimCoreFileLink, SimcoreS3FileID diff --git a/services/director/docker/boot.sh b/services/director/docker/boot.sh index eba9085247c..35c79fc1a7a 100755 --- a/services/director/docker/boot.sh +++ b/services/director/docker/boot.sh @@ -24,14 +24,14 @@ if [ "${SC_BUILD_TARGET}" = "development" ]; then command -v python | sed 's/^/ /' cd services/director - uv pip --quiet --no-cache-dir sync requirements/dev.txt + uv pip --quiet sync requirements/dev.txt cd - uv pip list fi if [ "${SC_BOOT_MODE}" = "debug" ]; then # NOTE: production does NOT pre-install debugpy - uv pip install --no-cache-dir debugpy + uv pip install debugpy fi # diff --git a/services/director/requirements/_base.txt b/services/director/requirements/_base.txt index b151fdf4639..f0a9700476e 100644 --- a/services/director/requirements/_base.txt +++ b/services/director/requirements/_base.txt @@ -95,9 +95,12 @@ fastapi==0.115.5 # via # -r requirements/../../../packages/service-library/requirements/_fastapi.in # -r requirements/_base.in + # fastapi-lifespan-manager # prometheus-fastapi-instrumentator fastapi-cli==0.0.5 # via fastapi +fastapi-lifespan-manager==0.1.4 + # via -r requirements/../../../packages/service-library/requirements/_fastapi.in faststream==0.5.31 # via -r requirements/../../../packages/service-library/requirements/_base.in frozenlist==1.5.0 # via @@ -296,6 +299,8 @@ protobuf==5.28.3 #
opentelemetry-proto psutil==6.1.0 # via -r requirements/../../../packages/service-library/requirements/_base.in +pycryptodome==3.21.0 + # via stream-zip pydantic==2.10.2 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -341,6 +346,18 @@ pydantic-extra-types==2.10.0 # fastapi pydantic-settings==2.6.1 # via + # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt # -r requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in @@ -392,6 +409,18 @@ redis==5.2.1 # -r requirements/../../../packages/service-library/requirements/_base.in referencing==0.29.3 # via + # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt # jsonschema # jsonschema-specifications requests==2.32.3 @@ -428,6 +457,8 @@ starlette==0.41.3 # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt # fastapi +stream-zip==0.0.83 + # via -r requirements/../../../packages/service-library/requirements/_base.in tenacity==9.0.0 # via # -r requirements/../../../packages/service-library/requirements/_base.in diff --git a/services/director/requirements/_test.txt b/services/director/requirements/_test.txt index 46b7cec13da..76200593ee1 100644 --- a/services/director/requirements/_test.txt +++ b/services/director/requirements/_test.txt @@ -35,11 +35,11 @@ charset-normalizer==3.4.0 # via # -c requirements/_base.txt # requests -coverage==7.6.10 +coverage==7.6.12 # via pytest-cov docker==7.1.0 # via -r requirements/_test.in -faker==35.0.0 +faker==36.1.1 # via -r requirements/_test.in frozenlist==1.5.0 # via @@ -90,7 +90,7 @@ propcache==0.2.0 # yarl py-cpuinfo==9.0.0 # via pytest-benchmark -pytest==8.3.4 +pytest==8.3.5 # via # -r requirements/_test.in # pytest-asyncio @@ -108,7 +108,7 @@ pytest-benchmark==5.1.0 # via -r requirements/_test.in pytest-cov==6.0.0 # via -r requirements/_test.in -pytest-docker==3.1.1 +pytest-docker==3.2.0 # via -r requirements/_test.in pytest-instafail==0.5.0 # via -r requirements/_test.in @@ -118,20 +118,12 @@ pytest-runner==6.0.1 # via -r requirements/_test.in pytest-sugar==1.0.0 # via -r requirements/_test.in -python-dateutil==2.9.0.post0 - # via - # -c requirements/_base.txt - # faker requests==2.32.3 # via # -c requirements/_base.txt # docker respx==0.22.0 # via -r requirements/_test.in -six==1.16.0 - # via - # -c requirements/_base.txt - # python-dateutil sniffio==1.3.1 # via # -c requirements/_base.txt @@ -140,10 +132,8 @@ sniffio==1.3.1 # httpx termcolor==2.5.0 # via pytest-sugar -typing-extensions==4.12.2 - # via - # -c requirements/_base.txt - # faker +tzdata==2025.1 + # via faker urllib3==2.2.3 # via # -c requirements/../../../requirements/constraints.txt diff --git a/services/director/requirements/_tools.txt b/services/director/requirements/_tools.txt index f7afb651ac2..9bc3fb1323c 100644 --- a/services/director/requirements/_tools.txt +++ b/services/director/requirements/_tools.txt @@ -1,6 +1,6 @@ astroid==3.3.8 # via pylint -black==24.10.0 +black==25.1.0 # via -r requirements/../../../requirements/devenv.txt build==1.2.2.post1 # via pip-tools @@ -19,15 +19,15 @@ distlib==0.3.9 # via virtualenv filelock==3.17.0 # via virtualenv -identify==2.6.6 +identify==2.6.8 # via pre-commit -isort==5.13.2 +isort==6.0.1 # via # -r requirements/../../../requirements/devenv.txt # pylint mccabe==0.7.0 # via pylint -mypy==1.14.1 +mypy==1.15.0 # via -r requirements/../../../requirements/devenv.txt mypy-extensions==1.0.0 # via @@ -43,7 +43,7 @@ packaging==24.2 # build pathspec==0.12.1 # via black -pip==25.0 +pip==25.0.1 # via pip-tools pip-tools==7.4.1 # via -r 
requirements/../../../requirements/devenv.txt @@ -54,7 +54,7 @@ platformdirs==4.3.6 # virtualenv pre-commit==4.1.0 # via -r requirements/../../../requirements/devenv.txt -pylint==3.3.3 +pylint==3.3.4 # via -r requirements/../../../requirements/devenv.txt pyproject-hooks==1.2.0 # via @@ -66,18 +66,17 @@ pyyaml==6.0.2 # -c requirements/_base.txt # pre-commit # watchdog -ruff==0.9.3 +ruff==0.9.9 # via -r requirements/../../../requirements/devenv.txt -setuptools==75.8.0 +setuptools==75.8.2 # via pip-tools tomlkit==0.13.2 # via pylint typing-extensions==4.12.2 # via # -c requirements/_base.txt - # -c requirements/_test.txt # mypy -virtualenv==20.29.1 +virtualenv==20.29.2 # via pre-commit watchdog==6.0.0 # via -r requirements/_tools.in diff --git a/services/director/src/simcore_service_director/core/application.py b/services/director/src/simcore_service_director/core/application.py index 86031f0d935..c6df150dde3 100644 --- a/services/director/src/simcore_service_director/core/application.py +++ b/services/director/src/simcore_service_director/core/application.py @@ -3,7 +3,8 @@ from fastapi import FastAPI from servicelib.async_utils import cancel_sequential_workers -from servicelib.fastapi.tracing import setup_tracing +from servicelib.fastapi.client_session import setup_client_session +from servicelib.fastapi.tracing import initialize_tracing from .._meta import ( API_VERSION, @@ -13,7 +14,6 @@ APP_STARTED_BANNER_MSG, ) from ..api.rest.routes import setup_api_routes -from ..client_session import setup_client_session from ..instrumentation import setup as setup_instrumentation from ..registry_proxy import setup as setup_registry from .settings import ApplicationSettings @@ -51,10 +51,12 @@ def create_app(settings: ApplicationSettings) -> FastAPI: setup_api_routes(app) if app.state.settings.DIRECTOR_TRACING: - setup_tracing(app, app.state.settings.DIRECTOR_TRACING, APP_NAME) + initialize_tracing(app, app.state.settings.DIRECTOR_TRACING, APP_NAME) - # replace by httpx client - setup_client_session(app) + setup_client_session( + app, + max_keepalive_connections=settings.DIRECTOR_REGISTRY_CLIENT_MAX_KEEPALIVE_CONNECTIONS, + ) setup_registry(app) setup_instrumentation(app) diff --git a/services/director/src/simcore_service_director/core/settings.py b/services/director/src/simcore_service_director/core/settings.py index a17f9175bff..bf949cb5827 100644 --- a/services/director/src/simcore_service_director/core/settings.py +++ b/services/director/src/simcore_service_director/core/settings.py @@ -112,6 +112,7 @@ class ApplicationSettings(BaseApplicationSettings, MixinLoggingSettings): ), ) + DIRECTOR_REGISTRY_CLIENT_MAX_KEEPALIVE_CONNECTIONS: NonNegativeInt = 0 DIRECTOR_REGISTRY_CLIENT_MAX_CONCURRENT_CALLS: PositiveInt = 20 DIRECTOR_REGISTRY_CLIENT_MAX_NUMBER_OF_RETRIEVED_OBJECTS: PositiveInt = 30 diff --git a/services/director/src/simcore_service_director/producer.py b/services/director/src/simcore_service_director/producer.py index 81d1accf23d..78d218d3476 100644 --- a/services/director/src/simcore_service_director/producer.py +++ b/services/director/src/simcore_service_director/producer.py @@ -17,13 +17,13 @@ from packaging.version import Version from servicelib.async_utils import run_sequentially_in_context from servicelib.docker_utils import to_datetime +from servicelib.fastapi.client_session import get_client_session from settings_library.docker_registry import RegistrySettings from tenacity import retry, wait_random_exponential from tenacity.retry import retry_if_exception_type from tenacity.stop 
import stop_after_attempt from . import docker_utils, registry_proxy -from .client_session import get_client_session from .constants import ( CPU_RESOURCE_LIMIT_KEY, MEM_RESOURCE_LIMIT_KEY, @@ -465,7 +465,7 @@ async def _create_docker_service_params( def _get_service_entrypoint( - service_boot_parameters_labels: list[dict[str, Any]] + service_boot_parameters_labels: list[dict[str, Any]], ) -> str: _logger.debug("Getting service entrypoint") for param in service_boot_parameters_labels: @@ -1157,7 +1157,6 @@ async def _save_service_state( response.raise_for_status() except httpx.HTTPStatusError as err: - if err.response.status_code in ( status.HTTP_405_METHOD_NOT_ALLOWED, status.HTTP_404_NOT_FOUND, @@ -1237,7 +1236,6 @@ async def stop_service(app: FastAPI, *, node_uuid: str, save_state: bool) -> Non service_host_name, session=get_client_session(app) ) except httpx.HTTPStatusError as err: - raise ServiceStateSaveError( service_uuid=node_uuid, reason=f"service {service_host_name} rejected to save state, " diff --git a/services/director/src/simcore_service_director/registry_proxy.py b/services/director/src/simcore_service_director/registry_proxy.py index 619e68a0f44..f90373bb2f1 100644 --- a/services/director/src/simcore_service_director/registry_proxy.py +++ b/services/director/src/simcore_service_director/registry_proxy.py @@ -11,6 +11,7 @@ from fastapi import FastAPI, status from servicelib.async_utils import cancel_wait_task from servicelib.background_task import create_periodic_task +from servicelib.fastapi.client_session import get_client_session from servicelib.logging_utils import log_catch, log_context from servicelib.utils import limited_as_completed from tenacity import retry @@ -20,7 +21,6 @@ from tenacity.wait import wait_fixed, wait_random_exponential from yarl import URL -from .client_session import get_client_session from .constants import DIRECTOR_SIMCORE_SERVICES_PREFIX from .core.errors import ( DirectorRuntimeError, @@ -156,7 +156,9 @@ async def _auth_registry_request( # noqa: C901 return (resp_data, resp_headers) if auth_type == "Basic": # basic authentication should not be needed since we already tried it...
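Before the next hunk, one detail worth spelling out: the registry URLs in this module are yarl.URL objects, while httpx validates its request target as str | httpx.URL and rejects anything else with a TypeError; that is what the added str(url) cast below works around. A minimal offline sketch of the cast, assuming that httpx behavior and using a placeholder host:

import httpx
from yarl import URL

url = URL("https://registry.example.com/v2/")  # placeholder registry host

# httpx accepts str | httpx.URL as a target; a yarl.URL is neither,
# hence the explicit str(...) mirrored by the change below.
request = httpx.Request("GET", str(url))
assert request.url.host == "registry.example.com"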
- resp_wbasic = await getattr(session, method.lower())(url, auth=auth, **kwargs) + resp_wbasic = await getattr(session, method.lower())( + str(url), auth=auth, **kwargs + ) assert isinstance(resp_wbasic, httpx.Response) # nosec if resp_wbasic.status_code == status.HTTP_404_NOT_FOUND: raise ServiceNotAvailableError(service_name=f"{url}") diff --git a/services/director/tests/unit/test__model_examples.py b/services/director/tests/unit/test__model_examples.py index cb38a9a90f7..b304277c536 100644 --- a/services/director/tests/unit/test__model_examples.py +++ b/services/director/tests/unit/test__model_examples.py @@ -4,13 +4,15 @@ # pylint: disable=unused-argument # pylint: disable=unused-variable -import json from typing import Any import pytest import simcore_service_director.models -from pydantic import BaseModel, ValidationError -from pytest_simcore.pydantic_models import walk_model_examples_in_package +from pydantic import BaseModel +from pytest_simcore.pydantic_models import ( + assert_validation_model, + walk_model_examples_in_package, +) @pytest.mark.parametrize( @@ -18,11 +20,8 @@ walk_model_examples_in_package(simcore_service_director.models), ) def test_director_service_model_examples( - model_cls: type[BaseModel], example_name: int, example_data: Any + model_cls: type[BaseModel], example_name: str, example_data: Any ): - try: - assert model_cls.model_validate(example_data) is not None - except ValidationError as err: - pytest.fail( - f"\n{example_name}: {json.dumps(example_data, indent=1)}\nError: {err}" - ) + assert_validation_model( + model_cls, example_name=example_name, example_data=example_data + ) diff --git a/services/director/tests/unit/test_producer.py b/services/director/tests/unit/test_producer.py index 4b729c424bb..42e0a9b99a2 100644 --- a/services/director/tests/unit/test_producer.py +++ b/services/director/tests/unit/test_producer.py @@ -375,7 +375,7 @@ async def test_get_service_key_version_from_docker_service( @pytest.mark.parametrize( "fake_service_str", [ - "postgres:14.8-alpine@sha256:150dd39ccb7ae6c7ba6130c3582c39a30bb5d3d22cb08ad0ba37001e3f829abc", + "postgres:17.2-alpine3.21@sha256:17143ad87797f511036cf8f50ada164aeb371f0d8068a172510549fb5d2cd65f", "/simcore/postgres:14.8-alpine@sha256:150dd39ccb7ae6c7ba6130c3582c39a30bb5d3d22cb08ad0ba37001e3f829abc", "itisfoundation/postgres:14.8-alpine@sha256:150dd39ccb7ae6c7ba6130c3582c39a30bb5d3d22cb08ad0ba37001e3f829abc", "/simcore/services/stuff/postgres:10.11", diff --git a/services/docker-api-proxy/Dockerfile b/services/docker-api-proxy/Dockerfile new file mode 100644 index 00000000000..437d549e9fe --- /dev/null +++ b/services/docker-api-proxy/Dockerfile @@ -0,0 +1,44 @@ +FROM alpine:3.21 AS base + +LABEL maintainer=GitHK + +# simcore-user uid=8004(scu) gid=8004(scu) groups=8004(scu) +ENV SC_USER_ID=8004 \ + SC_USER_NAME=scu \ + SC_BUILD_TARGET=base \ + SC_BOOT_MODE=default + +RUN addgroup -g ${SC_USER_ID} ${SC_USER_NAME} && \ + adduser -u ${SC_USER_ID} -G ${SC_USER_NAME} \ + --disabled-password \ + --gecos "" \ + --shell /bin/sh \ + --home /home/${SC_USER_NAME} \ + ${SC_USER_NAME} + +RUN apk add --no-cache socat curl && \ + curl -L -o /usr/local/bin/gosu https://github.com/tianon/gosu/releases/download/1.16/gosu-amd64 && \ + chmod +x /usr/local/bin/gosu && \ + gosu --version + + +# Health check to ensure the proxy is running +HEALTHCHECK \ + --interval=10s \ + --timeout=5s \ + --start-period=30s \ + --start-interval=1s \ + --retries=5 \ + CMD curl http://localhost:8888/version || exit 1 + +COPY --chown=scu:scu 
services/docker-api-proxy/docker services/docker-api-proxy/docker +RUN chmod +x services/docker-api-proxy/docker/*.sh + +ENTRYPOINT [ "/bin/sh", "services/docker-api-proxy/docker/entrypoint.sh" ] +CMD ["/bin/sh", "services/docker-api-proxy/docker/boot.sh"] + +FROM base AS development +ENV SC_BUILD_TARGET=development + +FROM base AS production +ENV SC_BUILD_TARGET=production diff --git a/services/docker-api-proxy/Makefile b/services/docker-api-proxy/Makefile new file mode 100644 index 00000000000..82ebf1a73f3 --- /dev/null +++ b/services/docker-api-proxy/Makefile @@ -0,0 +1,2 @@ +include ../../scripts/common.Makefile +include ../../scripts/common-service.Makefile diff --git a/services/docker-api-proxy/docker/boot.sh b/services/docker-api-proxy/docker/boot.sh new file mode 100755 index 00000000000..8fa139339b9 --- /dev/null +++ b/services/docker-api-proxy/docker/boot.sh @@ -0,0 +1,15 @@ +#!/bin/sh +set -o errexit +set -o nounset + +IFS=$(printf '\n\t') + +INFO="INFO: [$(basename "$0")] " + +echo "$INFO" "Booting in ${SC_BOOT_MODE} mode ..." +echo "$INFO" "User :$(id "$(whoami)")" + +# +# RUNNING application +# +socat TCP-LISTEN:8888,fork,reuseaddr UNIX-CONNECT:/var/run/docker.sock diff --git a/services/docker-api-proxy/docker/entrypoint.sh b/services/docker-api-proxy/docker/entrypoint.sh new file mode 100755 index 00000000000..074dbfdf578 --- /dev/null +++ b/services/docker-api-proxy/docker/entrypoint.sh @@ -0,0 +1,34 @@ +#!/bin/sh +# +# - Executes *inside* of the container upon start as --user [default root] +# - Notice that the container *starts* as --user [default root] but +# *runs* as non-root user [scu] +# +set -o errexit +set -o nounset + +IFS=$(printf '\n\t') + +INFO="INFO: [$(basename "$0")] " + +echo "$INFO" "Entrypoint for stage ${SC_BUILD_TARGET} ..." +echo "$INFO" "User :$(id "$(whoami)")" + +# Appends docker group +DOCKER_MOUNT=/var/run/docker.sock +echo "INFO: adding user to group..." +GROUPID=$(stat -c %g $DOCKER_MOUNT) # Alpine uses `-c` instead of `--format` +GROUPNAME=scdocker + +# Check if a group with the specified GID exists +if ! addgroup -g "$GROUPID" $GROUPNAME >/dev/null 2>&1; then + echo "WARNING: docker group with GID $GROUPID already exists, getting group name..." + # Get the group name based on GID + GROUPNAME=$(getent group | awk -F: "\$3 == $GROUPID {print \$1}") + echo "WARNING: docker group with GID $GROUPID has name $GROUPNAME" +fi + +# Add the user to the group +adduser "$SC_USER_NAME" $GROUPNAME + +exec gosu "$SC_USER_NAME" "$@" diff --git a/services/docker-api-proxy/requirements/Makefile b/services/docker-api-proxy/requirements/Makefile new file mode 100644 index 00000000000..3f25442b790 --- /dev/null +++ b/services/docker-api-proxy/requirements/Makefile @@ -0,0 +1,6 @@ +# +# Targets to pip-compile requirements +# +include ../../../requirements/base.Makefile + +# Add here any extra explicit dependency: e.g. 
_migration.txt: _base.txt diff --git a/services/storage/tests/helpers/__init__.py b/services/docker-api-proxy/requirements/_base.in similarity index 100% rename from services/storage/tests/helpers/__init__.py rename to services/docker-api-proxy/requirements/_base.in diff --git a/services/web/server/src/simcore_service_webserver/meta_modeling/__init__.py b/services/docker-api-proxy/requirements/_base.txt similarity index 100% rename from services/web/server/src/simcore_service_webserver/meta_modeling/__init__.py rename to services/docker-api-proxy/requirements/_base.txt diff --git a/services/docker-api-proxy/requirements/_test.in b/services/docker-api-proxy/requirements/_test.in new file mode 100644 index 00000000000..321d2e72461 --- /dev/null +++ b/services/docker-api-proxy/requirements/_test.in @@ -0,0 +1,22 @@ +--constraint ../../../requirements/constraints.txt + +--requirement ../../../packages/common-library/requirements/_base.in +--requirement ../../../packages/models-library/requirements/_base.in +--requirement ../../../packages/settings-library/requirements/_base.in +--requirement ../../../packages/service-library/requirements/_base.in + +aiodocker +arrow +asgi_lifespan +docker +faker +fastapi +fastapi-lifespan-manager +flaky +pytest +pytest-asyncio +pytest-cov +pytest-mock +python-dotenv +PyYAML +tenacity diff --git a/services/docker-api-proxy/requirements/_test.txt b/services/docker-api-proxy/requirements/_test.txt new file mode 100644 index 00000000000..a30e184bfa9 --- /dev/null +++ b/services/docker-api-proxy/requirements/_test.txt @@ -0,0 +1,463 @@ +aio-pika==9.5.5 + # via -r requirements/../../../packages/service-library/requirements/_base.in +aiocache==0.12.3 + # via -r requirements/../../../packages/service-library/requirements/_base.in +aiodebug==2.3.0 + # via -r requirements/../../../packages/service-library/requirements/_base.in +aiodocker==0.24.0 + # via + # -r requirements/../../../packages/service-library/requirements/_base.in + # -r requirements/_test.in +aiofiles==24.1.0 + # via -r requirements/../../../packages/service-library/requirements/_base.in +aiohappyeyeballs==2.4.6 + # via aiohttp +aiohttp==3.11.13 + # via + # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt + # aiodocker +aiormq==6.8.1 + # via aio-pika +aiosignal==1.3.2 + # via aiohttp +annotated-types==0.7.0 + # via pydantic +anyio==4.8.0 + # via + # fast-depends + # faststream + # starlette +arrow==1.3.0 + # via + # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/_base.in + # -r requirements/_test.in +asgi-lifespan==2.1.0 + # via -r requirements/_test.in +attrs==25.1.0 + # via + # aiohttp + # jsonschema + # referencing +certifi==2025.1.31 + # via + # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt + # requests +charset-normalizer==3.4.1 + # via requests +click==8.1.8 + # via typer +coverage==7.6.12 + # via pytest-cov +deprecated==1.2.18 + # via + # opentelemetry-api + # opentelemetry-exporter-otlp-proto-grpc + # opentelemetry-exporter-otlp-proto-http + # opentelemetry-semantic-conventions +dnspython==2.7.0 + # via email-validator +docker==7.1.0 + # via -r requirements/_test.in +email-validator==2.2.0 + # via pydantic +exceptiongroup==1.2.2 + # via aio-pika +faker==36.1.1 + # via -r requirements/_test.in +fast-depends==2.4.12 + # via faststream +fastapi==0.115.11 + # via + # -r requirements/_test.in + # fastapi-lifespan-manager +fastapi-lifespan-manager==0.1.4 + # via -r requirements/_test.in +faststream==0.5.35 + # via -r requirements/../../../packages/service-library/requirements/_base.in +flaky==3.8.1 + # via -r requirements/_test.in +frozenlist==1.5.0 + # via + # aiohttp + # aiosignal +googleapis-common-protos==1.68.0 + # via + # 
opentelemetry-exporter-otlp-proto-grpc + # opentelemetry-exporter-otlp-proto-http +grpcio==1.70.0 + # via opentelemetry-exporter-otlp-proto-grpc +idna==3.10 + # via + # anyio + # email-validator + # requests + # yarl +importlib-metadata==8.5.0 + # via opentelemetry-api +iniconfig==2.0.0 + # via pytest +jsonschema==4.23.0 + # via + # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in +jsonschema-specifications==2024.10.1 + # via jsonschema +markdown-it-py==3.0.0 + # via rich +mdurl==0.1.2 + # via markdown-it-py +multidict==6.1.0 + # via + # aiohttp + # yarl +opentelemetry-api==1.30.0 + # via + # -r requirements/../../../packages/service-library/requirements/_base.in + # opentelemetry-exporter-otlp-proto-grpc + # opentelemetry-exporter-otlp-proto-http + # opentelemetry-instrumentation + # opentelemetry-instrumentation-logging + # opentelemetry-instrumentation-redis + # opentelemetry-instrumentation-requests + # opentelemetry-sdk + # opentelemetry-semantic-conventions +opentelemetry-exporter-otlp==1.30.0 + # via -r requirements/../../../packages/service-library/requirements/_base.in +opentelemetry-exporter-otlp-proto-common==1.30.0 + # via + # opentelemetry-exporter-otlp-proto-grpc + # opentelemetry-exporter-otlp-proto-http +opentelemetry-exporter-otlp-proto-grpc==1.30.0 + # via opentelemetry-exporter-otlp +opentelemetry-exporter-otlp-proto-http==1.30.0 + # via opentelemetry-exporter-otlp +opentelemetry-instrumentation==0.51b0 + # via + # opentelemetry-instrumentation-logging + # opentelemetry-instrumentation-redis + # opentelemetry-instrumentation-requests +opentelemetry-instrumentation-logging==0.51b0 + # via -r requirements/../../../packages/service-library/requirements/_base.in +opentelemetry-instrumentation-redis==0.51b0 + # via -r requirements/../../../packages/service-library/requirements/_base.in +opentelemetry-instrumentation-requests==0.51b0 + # via -r requirements/../../../packages/service-library/requirements/_base.in +opentelemetry-proto==1.30.0 + # via + # opentelemetry-exporter-otlp-proto-common + # opentelemetry-exporter-otlp-proto-grpc + # opentelemetry-exporter-otlp-proto-http +opentelemetry-sdk==1.30.0 + # via + # -r requirements/../../../packages/service-library/requirements/_base.in + # opentelemetry-exporter-otlp-proto-grpc + # opentelemetry-exporter-otlp-proto-http +opentelemetry-semantic-conventions==0.51b0 + # via + # opentelemetry-instrumentation + # opentelemetry-instrumentation-redis + # opentelemetry-instrumentation-requests + # opentelemetry-sdk +opentelemetry-util-http==0.51b0 + # via opentelemetry-instrumentation-requests +orjson==3.10.15 + # via + # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt + # -r requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in +packaging==24.2 + # via + # opentelemetry-instrumentation + # pytest +pamqp==3.3.0 + # via aiormq +pluggy==1.5.0 + # via pytest +propcache==0.3.0 + # via + # aiohttp + # yarl +protobuf==5.29.3 + # via + # googleapis-common-protos + # opentelemetry-proto +psutil==7.0.0 + # via -r requirements/../../../packages/service-library/requirements/_base.in +pycryptodome==3.21.0 + # via stream-zip +pydantic==2.10.6 + # via + # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt + # -r requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/_base.in + # -r requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/settings-library/requirements/_base.in + # fast-depends + # fastapi + # pydantic-extra-types + # pydantic-settings +pydantic-core==2.27.2 + # via pydantic +pydantic-extra-types==2.10.2 + # via + # -r requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in +pydantic-settings==2.7.0 + # via + # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt + # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/../../../packages/settings-library/requirements/_base.in +pygments==2.19.1 + # via rich +pyinstrument==5.0.1 + # via -r requirements/../../../packages/service-library/requirements/_base.in +pytest==8.3.5 + # via + # -r requirements/_test.in + # pytest-asyncio + # pytest-cov + # pytest-mock +pytest-asyncio==0.23.8 + # via + # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt + # -r requirements/_test.in +pytest-cov==6.0.0 + # via -r requirements/_test.in +pytest-mock==3.14.0 + # via -r requirements/_test.in +python-dateutil==2.9.0.post0 + # via arrow +python-dotenv==1.0.1 + # via + # -r requirements/_test.in + # pydantic-settings +pyyaml==6.0.2 + # via + # -c 
requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt + # -r requirements/../../../packages/service-library/requirements/_base.in + # -r requirements/_test.in +redis==5.2.1 + # via + # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt + # -r requirements/../../../packages/service-library/requirements/_base.in +referencing==0.35.1 + # via + # -c 
requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt + # jsonschema + # jsonschema-specifications +requests==2.32.3 + # via + # docker + # opentelemetry-exporter-otlp-proto-http +rich==13.9.4 + # via + # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/../../../packages/settings-library/requirements/_base.in + # typer +rpds-py==0.23.1 + # via + # jsonschema + # referencing +shellingham==1.5.4 + # via typer +six==1.17.0 + # via python-dateutil +sniffio==1.3.1 + # via + # anyio + # asgi-lifespan +starlette==0.46.0 + # via + # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt + # fastapi +stream-zip==0.0.83 + # via -r requirements/../../../packages/service-library/requirements/_base.in +tenacity==9.0.0 + # via + # -r requirements/../../../packages/service-library/requirements/_base.in + # -r requirements/_test.in +toolz==1.0.0 + # via -r requirements/../../../packages/service-library/requirements/_base.in +tqdm==4.67.1 + # via -r requirements/../../../packages/service-library/requirements/_base.in +typer==0.15.2 + # via + # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/../../../packages/settings-library/requirements/_base.in +types-python-dateutil==2.9.0.20241206 + # via arrow +typing-extensions==4.12.2 + # via + # aiodebug + # anyio + # fastapi + # faststream + # opentelemetry-sdk + # pydantic + # pydantic-core + # pydantic-extra-types + # typer +tzdata==2025.1 + # via faker +urllib3==2.3.0 + # via + # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt + # docker + # requests +wrapt==1.17.2 + # via + # deprecated + # opentelemetry-instrumentation + # opentelemetry-instrumentation-redis +yarl==1.18.3 + # via + # -r requirements/../../../packages/service-library/requirements/_base.in + # aio-pika + # aiohttp + # aiormq +zipp==3.21.0 + # via importlib-metadata diff --git a/services/docker-api-proxy/requirements/_tools.in b/services/docker-api-proxy/requirements/_tools.in new file mode 100644 index 00000000000..140b6ed2e30 --- /dev/null +++ b/services/docker-api-proxy/requirements/_tools.in @@ -0,0 +1,6 @@ +--constraint ../../../requirements/constraints.txt + +--constraint _base.txt +--constraint _test.txt + +--requirement 
../../../requirements/devenv.txt diff --git a/services/docker-api-proxy/requirements/_tools.txt b/services/docker-api-proxy/requirements/_tools.txt new file mode 100644 index 00000000000..24be1a5cfb8 --- /dev/null +++ b/services/docker-api-proxy/requirements/_tools.txt @@ -0,0 +1,80 @@ +astroid==3.3.8 + # via pylint +black==25.1.0 + # via -r requirements/../../../requirements/devenv.txt +build==1.2.2.post1 + # via pip-tools +bump2version==1.0.1 + # via -r requirements/../../../requirements/devenv.txt +cfgv==3.4.0 + # via pre-commit +click==8.1.8 + # via + # -c requirements/_test.txt + # black + # pip-tools +dill==0.3.9 + # via pylint +distlib==0.3.9 + # via virtualenv +filelock==3.17.0 + # via virtualenv +identify==2.6.8 + # via pre-commit +isort==6.0.1 + # via + # -r requirements/../../../requirements/devenv.txt + # pylint +mccabe==0.7.0 + # via pylint +mypy==1.15.0 + # via -r requirements/../../../requirements/devenv.txt +mypy-extensions==1.0.0 + # via + # black + # mypy +nodeenv==1.9.1 + # via pre-commit +packaging==24.2 + # via + # -c requirements/_test.txt + # black + # build +pathspec==0.12.1 + # via black +pip==25.0.1 + # via pip-tools +pip-tools==7.4.1 + # via -r requirements/../../../requirements/devenv.txt +platformdirs==4.3.6 + # via + # black + # pylint + # virtualenv +pre-commit==4.1.0 + # via -r requirements/../../../requirements/devenv.txt +pylint==3.3.4 + # via -r requirements/../../../requirements/devenv.txt +pyproject-hooks==1.2.0 + # via + # build + # pip-tools +pyyaml==6.0.2 + # via + # -c requirements/../../../requirements/constraints.txt + # -c requirements/_test.txt + # pre-commit +ruff==0.9.9 + # via -r requirements/../../../requirements/devenv.txt +setuptools==75.8.2 + # via pip-tools +tomlkit==0.13.2 + # via pylint +typing-extensions==4.12.2 + # via + # -c requirements/_test.txt + # mypy +virtualenv==20.29.2 + # via pre-commit +wheel==0.45.1 + # via pip-tools diff --git a/services/docker-api-proxy/requirements/ci.txt b/services/docker-api-proxy/requirements/ci.txt new file mode 100644 index 00000000000..419f091d4d0 --- /dev/null +++ b/services/docker-api-proxy/requirements/ci.txt @@ -0,0 +1,18 @@ +# Shortcut to install all packages for the continuous integration (CI) of 'services/docker-api-proxy' +# +# - As _base.txt but w/ tests +# +# Usage: +# pip install -r requirements/ci.txt +# + +# installs base + tests requirements +--requirement _base.txt +--requirement _test.txt + +# installs this repo's packages +simcore-common-library @ ../../packages/common-library/ +simcore-models-library @ ../../packages/models-library/ +pytest-simcore @ ../../packages/pytest-simcore/ +simcore-service-library @ ../../packages/service-library/ +simcore-settings-library @ ../../packages/settings-library/ diff --git a/services/docker-api-proxy/requirements/dev.txt b/services/docker-api-proxy/requirements/dev.txt new file mode 100644 index 00000000000..57a8239ab79 --- /dev/null +++ b/services/docker-api-proxy/requirements/dev.txt @@ -0,0 +1,19 @@ +# Shortcut to install all packages needed to develop 'services/docker-api-proxy' +# +# - As ci.txt but with current and repo packages in develop (edit) mode +# +# Usage: +# pip install -r requirements/dev.txt +# + +# installs this repo's packages +--editable ../../packages/common-library +--editable ../../packages/models-library +--editable ../../packages/pytest-simcore +--editable ../../packages/service-library +--editable ../../packages/settings-library + +# installs base + tests requirements +--requirement _base.txt +--requirement _test.txt
+--requirement _tools.txt diff --git a/services/docker-api-proxy/tests/integration/autentication-proxy-docker-compose.yaml b/services/docker-api-proxy/tests/integration/autentication-proxy-docker-compose.yaml new file mode 100644 index 00000000000..44d7e02d21f --- /dev/null +++ b/services/docker-api-proxy/tests/integration/autentication-proxy-docker-compose.yaml @@ -0,0 +1,14 @@ +version: '3.8' +services: + caddy: + image: caddy:2.9.1-alpine + ports: + - 9999:9999 + command: sh -c "echo '${CADDY_FILE}' > /etc/caddy/Caddyfile && cat /etc/caddy/Caddyfile && caddy run --adapter caddyfile --config /etc/caddy/Caddyfile" + networks: + - docker-api-network + +networks: + docker-api-network: + name: pytest-simcore_docker-api-network + external: true diff --git a/services/docker-api-proxy/tests/integration/conftest.py b/services/docker-api-proxy/tests/integration/conftest.py new file mode 100644 index 00000000000..0d02392f917 --- /dev/null +++ b/services/docker-api-proxy/tests/integration/conftest.py @@ -0,0 +1,67 @@ +# pylint:disable=unrecognized-options + +from collections.abc import AsyncIterator, Callable +from contextlib import AbstractAsyncContextManager, asynccontextmanager +from typing import Annotated + +import aiodocker +import pytest +from asgi_lifespan import LifespanManager +from fastapi import FastAPI +from pydantic import Field +from pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict, setenvs_from_dict +from servicelib.fastapi.docker import ( + get_lifespan_remote_docker_client, + get_remote_docker_client, +) +from servicelib.fastapi.lifespan_utils import combine_lifespans +from settings_library.application import BaseApplicationSettings +from settings_library.docker_api_proxy import DockerApiProxysettings + +pytest_plugins = [ + "pytest_simcore.docker_api_proxy", + "pytest_simcore.docker_compose", + "pytest_simcore.docker_swarm", + "pytest_simcore.repository_paths", + "pytest_simcore.simcore_services", +] + + +def pytest_configure(config): + # Set asyncio_mode to "auto" + config.option.asyncio_mode = "auto" + + +def _get_test_app() -> FastAPI: + class ApplicationSetting(BaseApplicationSettings): + DOCKER_API_PROXY: Annotated[ + DockerApiProxysettings, + Field(json_schema_extra={"auto_default_from_env": True}), + ] + + settings = ApplicationSetting.create_from_envs() + + app = FastAPI( + lifespan=combine_lifespans( + get_lifespan_remote_docker_client(settings.DOCKER_API_PROXY) + ) + ) + app.state.settings = settings + + return app + + +@pytest.fixture +async def setup_docker_client( + monkeypatch: pytest.MonkeyPatch, +) -> Callable[[EnvVarsDict], AbstractAsyncContextManager[aiodocker.Docker]]: + @asynccontextmanager + async def _(env_vars: EnvVarsDict) -> AsyncIterator[aiodocker.Docker]: + setenvs_from_dict(monkeypatch, env_vars) + + app = _get_test_app() + + async with LifespanManager(app, startup_timeout=30, shutdown_timeout=30): + yield get_remote_docker_client(app) + + return _ diff --git a/services/docker-api-proxy/tests/integration/test_docker_api_proxy.py b/services/docker-api-proxy/tests/integration/test_docker_api_proxy.py new file mode 100644 index 00000000000..1e3f4641d9e --- /dev/null +++ b/services/docker-api-proxy/tests/integration/test_docker_api_proxy.py @@ -0,0 +1,33 @@ +# pylint: disable=unused-argument + +import json +import sys +from collections.abc import Callable +from contextlib import AbstractAsyncContextManager +from pathlib import Path + +import aiodocker +from pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict +from 
settings_library.docker_api_proxy import DockerApiProxysettings + +HERE = Path(sys.argv[0] if __name__ == "__main__" else __file__).resolve().parent + +pytest_simcore_core_services_selection = [ + "docker-api-proxy", +] + + +async def test_unauthenticated_docker_client( + docker_swarm: None, + docker_api_proxy_settings: DockerApiProxysettings, + setup_docker_client: Callable[ + [EnvVarsDict], AbstractAsyncContextManager[aiodocker.Docker] + ], +): + envs = { + "DOCKER_API_PROXY_HOST": "127.0.0.1", + "DOCKER_API_PROXY_PORT": "8014", + } + async with setup_docker_client(envs) as working_docker: + info = await working_docker.system.info() + print(json.dumps(info, indent=2)) diff --git a/services/docker-api-proxy/tests/integration/test_docker_api_proxy_autenticated.py b/services/docker-api-proxy/tests/integration/test_docker_api_proxy_autenticated.py new file mode 100644 index 00000000000..a40437e3c78 --- /dev/null +++ b/services/docker-api-proxy/tests/integration/test_docker_api_proxy_autenticated.py @@ -0,0 +1,123 @@ +# pylint: disable=redefined-outer-name +# pylint: disable=unused-argument + +import json +import subprocess +import sys +from collections.abc import Callable, Iterator +from contextlib import AbstractAsyncContextManager +from pathlib import Path + +import aiodocker +import pytest +from pytest_mock import MockerFixture +from pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict +from settings_library.docker_api_proxy import DockerApiProxysettings +from tenacity import AsyncRetrying, stop_after_delay, wait_fixed + +pytest_simcore_core_services_selection = [ + "docker-api-proxy", +] + +_HERE = Path(sys.argv[0] if __name__ == "__main__" else __file__).resolve().parent + + +@pytest.fixture +def authentication_proxy_compose_path() -> Path: + compose_spec_path = _HERE / "autentication-proxy-docker-compose.yaml" + assert compose_spec_path.exists() + return compose_spec_path + + +@pytest.fixture +def caddy_file() -> str: + # NOTE: the basicauth credentials are `asd:asd`; the value below is the bcrypt hash of the password (it can be regenerated e.g. with `caddy hash-password`) + return """ +:9999 { + handle { + basicauth { + asd $2a$14$slb.GSUwFUX4jPOMoYTmKePjH.2UPJBkLmTPT5RmOfn38seYld1nu + } + reverse_proxy http://docker-api-proxy:8888 { + health_uri /version + } + } +} + """ + + +@pytest.fixture +def mock_wait_till_docker_api_proxy_is_responsive(mocker: MockerFixture) -> None: + mocker.patch("servicelib.fastapi.docker.wait_till_docker_api_proxy_is_responsive") + + +@pytest.fixture +def authentication_proxy( + mock_wait_till_docker_api_proxy_is_responsive: None, + docker_swarm: None, + docker_api_proxy_settings: DockerApiProxysettings, + caddy_file: str, + authentication_proxy_compose_path: Path, +) -> Iterator[None]: + + stack_name = "with-auth" + subprocess.run( # noqa: S603 + [ # noqa: S607 + "docker", + "stack", + "deploy", + "-c", + authentication_proxy_compose_path, + stack_name, + ], + check=True, + env={"CADDY_FILE": caddy_file}, + ) + + yield + + subprocess.run( # noqa: S603 + ["docker", "stack", "rm", stack_name], check=True # noqa: S607 + ) + + +async def test_with_correct_credentials( + authentication_proxy: None, + setup_docker_client: Callable[ + [EnvVarsDict], AbstractAsyncContextManager[aiodocker.Docker] + ], +): + envs = { + "DOCKER_API_PROXY_HOST": "127.0.0.1", + "DOCKER_API_PROXY_PORT": "9999", + "DOCKER_API_PROXY_USER": "asd", + "DOCKER_API_PROXY_PASSWORD": "asd", + } + async with setup_docker_client(envs) as working_docker: + async for attempt in AsyncRetrying( + wait=wait_fixed(0.1), stop=stop_after_delay(10), reraise=True + ): + with attempt: + info = 
await working_docker.system.info() + print(json.dumps(info, indent=2)) + + +async def test_wrong_credentials( + authentication_proxy: None, + setup_docker_client: Callable[ + [EnvVarsDict], AbstractAsyncContextManager[aiodocker.Docker] + ], +): + envs = { + "DOCKER_API_PROXY_HOST": "127.0.0.1", + "DOCKER_API_PROXY_PORT": "9999", + "DOCKER_API_PROXY_USER": "wrong", + "DOCKER_API_PROXY_PASSWORD": "wrong", + } + async with setup_docker_client(envs) as failing_docker_client: + async for attempt in AsyncRetrying( + wait=wait_fixed(0.1), stop=stop_after_delay(10), reraise=True + ): + with attempt: # noqa: SIM117 + with pytest.raises(aiodocker.exceptions.DockerError, match="401"): + await failing_docker_client.system.info() diff --git a/services/docker-compose-build.yml b/services/docker-compose-build.yml index becf5ce2a25..7fe0baf7024 100644 --- a/services/docker-compose-build.yml +++ b/services/docker-compose-build.yml @@ -316,6 +316,22 @@ services: org.opencontainers.image.revision: "${VCS_REF}" io.osparc.api-version: "${DYNAMIC_SCHEDULER_API_VERSION}" + docker-api-proxy: + image: local/docker-api-proxy:${BUILD_TARGET:?build_target_required} + build: + context: ../ + dockerfile: services/docker-api-proxy/Dockerfile + cache_from: + - local/docker-api-proxy:${BUILD_TARGET:?build_target_required} + - ${DOCKER_REGISTRY:-itisfoundation}/docker-api-proxy:master-github-latest + - ${DOCKER_REGISTRY:-itisfoundation}/docker-api-proxy:staging-github-latest + - ${DOCKER_REGISTRY:-itisfoundation}/docker-api-proxy:release-github-latest + target: ${BUILD_TARGET:?build_target_required} + labels: + org.opencontainers.image.created: "${BUILD_DATE}" + org.opencontainers.image.source: "${VCS_URL}" + org.opencontainers.image.revision: "${VCS_REF}" + datcore-adapter: image: local/datcore-adapter:${BUILD_TARGET:?build_target_required} build: diff --git a/services/docker-compose-deploy.yml b/services/docker-compose-deploy.yml index 1da5f7933de..f3997a0b11d 100644 --- a/services/docker-compose-deploy.yml +++ b/services/docker-compose-deploy.yml @@ -29,6 +29,8 @@ services: image: ${DOCKER_REGISTRY:-itisfoundation}/payments:${DOCKER_IMAGE_TAG:-latest} dynamic-scheduler: image: ${DOCKER_REGISTRY:-itisfoundation}/dynamic-scheduler:${DOCKER_IMAGE_TAG:-latest} + docker-api-proxy: + image: ${DOCKER_REGISTRY:-itisfoundation}/docker-api-proxy:${DOCKER_IMAGE_TAG:-latest} resource-usage-tracker: image: ${DOCKER_REGISTRY:-itisfoundation}/resource-usage-tracker:${DOCKER_IMAGE_TAG:-latest} service-integration: diff --git a/services/docker-compose-ops.yml b/services/docker-compose-ops.yml index c80befe2316..b244d224059 100644 --- a/services/docker-compose-ops.yml +++ b/services/docker-compose-ops.yml @@ -95,6 +95,7 @@ services: distributed_identifiers:${REDIS_HOST}:${REDIS_PORT}:6:${REDIS_PASSWORD}, deferred_tasks:${REDIS_HOST}:${REDIS_PORT}:7:${REDIS_PASSWORD}, dynamic_services:${REDIS_HOST}:${REDIS_PORT}:8:${REDIS_PASSWORD} + celery_tasks:${REDIS_HOST}:${REDIS_PORT}:9:${REDIS_PASSWORD} # If you add/remove a db, do not forget to update the --databases entry in the docker-compose.yml ports: - "18081:8081" diff --git a/services/docker-compose.devel.yml b/services/docker-compose.devel.yml index 94bcf0c1a8e..a9928bc4cda 100644 --- a/services/docker-compose.devel.yml +++ b/services/docker-compose.devel.yml @@ -9,6 +9,7 @@ x-common-environment: &common-environment # Forces the app to boot in debug mode (see docker/boot.sh). ONLY allowed in devel-mode!
SC_BOOT_MODE : debug + services: api-server: environment: @@ -18,6 +19,7 @@ services: volumes: - ./api-server:/devel/services/api-server - ../packages:/devel/packages + - ${HOST_UV_CACHE_DIR}:/home/scu/.cache/uv autoscaling: environment: @@ -27,6 +29,7 @@ services: volumes: - ./autoscaling:/devel/services/autoscaling - ../packages:/devel/packages + - ${HOST_UV_CACHE_DIR}:/home/scu/.cache/uv invitations: environment: @@ -35,6 +38,7 @@ services: volumes: - ./invitations:/devel/services/invitations - ../packages:/devel/packages + - ${HOST_UV_CACHE_DIR}:/home/scu/.cache/uv payments: environment: @@ -43,6 +47,7 @@ services: volumes: - ./payments:/devel/services/payments - ../packages:/devel/packages + - ${HOST_UV_CACHE_DIR}:/home/scu/.cache/uv dynamic-schdlr: environment: @@ -52,6 +57,7 @@ services: volumes: - ./dynamic-scheduler:/devel/services/dynamic-scheduler - ../packages:/devel/packages + - ${HOST_UV_CACHE_DIR}:/home/scu/.cache/uv deploy: replicas: 1 @@ -64,6 +70,7 @@ services: volumes: - ./catalog:/devel/services/catalog - ../packages:/devel/packages + - ${HOST_UV_CACHE_DIR}:/home/scu/.cache/uv clusters-keeper: environment: @@ -72,6 +79,7 @@ services: volumes: - ./clusters-keeper:/devel/services/clusters-keeper - ../packages:/devel/packages + - ${HOST_UV_CACHE_DIR}:/home/scu/.cache/uv datcore-adapter: environment: @@ -80,6 +88,7 @@ services: volumes: - ./datcore-adapter:/devel/services/datcore-adapter - ../packages:/devel/packages + - ${HOST_UV_CACHE_DIR}:/home/scu/.cache/uv director: environment: @@ -88,7 +97,7 @@ services: volumes: - ./director:/devel/services/director - ../packages:/devel/packages - - ../api:/devel/services/api + - ${HOST_UV_CACHE_DIR}:/home/scu/.cache/uv director-v2: environment: @@ -100,6 +109,7 @@ services: volumes: - ./director-v2:/devel/services/director-v2 - ../packages:/devel/packages + - ${HOST_UV_CACHE_DIR}:/home/scu/.cache/uv efs-guardian: environment: @@ -108,6 +118,7 @@ services: volumes: - ./efs-guardian:/devel/services/efs-guardian - ../packages:/devel/packages + - ${HOST_UV_CACHE_DIR}:/home/scu/.cache/uv deploy: replicas: 0 @@ -122,6 +133,7 @@ services: volumes: &webserver_volumes_devel - ./web/server:/devel/services/web/server - ../packages:/devel/packages + - ${HOST_UV_CACHE_DIR}:/home/scu/.cache/uv environment: &webserver_environment_devel <<: *common-environment DEBUG: 1 # NOTE: gunicorn expects an int not a boolean @@ -151,6 +163,7 @@ services: &dask-sidecar_volumes_devel - ./dask-sidecar:/devel/services/dask-sidecar - ../packages:/devel/packages + - ${HOST_UV_CACHE_DIR}:/home/scu/.cache/uv - ${ETC_HOSTNAME:-/etc/hostname}:/home/scu/hostname:ro environment: @@ -179,11 +192,23 @@ services: volumes: - ./resource-usage-tracker:/devel/services/resource-usage-tracker - ../packages:/devel/packages + - ${HOST_UV_CACHE_DIR}:/home/scu/.cache/uv storage: volumes: - ./storage:/devel/services/storage - ../packages:/devel/packages + - ${HOST_UV_CACHE_DIR}:/home/scu/.cache/uv + environment: + <<: *common-environment + STORAGE_PROFILING : ${STORAGE_PROFILING} + STORAGE_LOGLEVEL: DEBUG + + sto-worker: + volumes: + - ./storage:/devel/services/storage + - ../packages:/devel/packages + - ${HOST_UV_CACHE_DIR}:/home/scu/.cache/uv environment: <<: *common-environment STORAGE_PROFILING : ${STORAGE_PROFILING} @@ -196,3 +221,4 @@ services: volumes: - ./agent:/devel/services/agent - ../packages:/devel/packages + - ${HOST_UV_CACHE_DIR}:/home/scu/.cache/uv diff --git a/services/docker-compose.local.yml b/services/docker-compose.local.yml index 945dedc3a28..12a2abd44c6 100644 
--- a/services/docker-compose.local.yml +++ b/services/docker-compose.local.yml @@ -107,6 +107,10 @@ services: deploy: replicas: 2 + docker-api-proxy: + ports: + - "8014:8888" + resource-usage-tracker: environment: RESOURCE_USAGE_TRACKER_REMOTE_DEBUGGING_PORT : 3000 @@ -122,6 +126,13 @@ services: - "8080" - "3003:3000" + sto-worker: + environment: + <<: *common_environment + STORAGE_REMOTE_DEBUGGING_PORT : 3000 + ports: + - "8080" + - "3021:3000" webserver: environment: &webserver_environment_local <<: *common_environment @@ -252,3 +263,8 @@ services: - traefik.http.routers.${SWARM_STACK_NAME}_whoami.rule=PathPrefix(`/whoami`) - traefik.http.routers.${SWARM_STACK_NAME}_whoami.entrypoints=traefik_monitor - traefik.http.routers.${SWARM_STACK_NAME}_whoami.middlewares=${SWARM_STACK_NAME}_gzip@swarm + +networks: + docker-api-network: + driver_opts: + {} # override 'encrypted' locally, some WSL versions have issues with encrypted networks SEE https://github.com/microsoft/WSL/issues/10029 diff --git a/services/docker-compose.yml b/services/docker-compose.yml index 555f47c4c9d..cd26b7fbbf6 100644 --- a/services/docker-compose.yml +++ b/services/docker-compose.yml @@ -549,6 +549,7 @@ services: hostname: "{{.Node.Hostname}}-{{.Task.Slot}}" networks: - default + - docker-api-network environment: LOG_FORMAT_LOCAL_DEV_ENABLED: ${LOG_FORMAT_LOCAL_DEV_ENABLED} LOG_FILTER_MAPPING : ${LOG_FILTER_MAPPING} @@ -562,6 +563,8 @@ services: REDIS_SECURE: ${REDIS_SECURE} REDIS_USER: ${REDIS_USER} REDIS_PASSWORD: ${REDIS_PASSWORD} + CATALOG_HOST: ${CATALOG_HOST} + CATALOG_PORT: ${CATALOG_PORT} DIRECTOR_V2_HOST: ${DIRECTOR_V2_HOST} DIRECTOR_V2_PORT: ${DIRECTOR_V2_PORT} DYNAMIC_SCHEDULER_USE_INTERNAL_SCHEDULER: ${DYNAMIC_SCHEDULER_USE_INTERNAL_SCHEDULER} @@ -573,6 +576,24 @@ services: DYNAMIC_SIDECAR_API_SAVE_RESTORE_STATE_TIMEOUT: ${DYNAMIC_SIDECAR_API_SAVE_RESTORE_STATE_TIMEOUT} TRACING_OPENTELEMETRY_COLLECTOR_ENDPOINT: ${TRACING_OPENTELEMETRY_COLLECTOR_ENDPOINT} TRACING_OPENTELEMETRY_COLLECTOR_PORT: ${TRACING_OPENTELEMETRY_COLLECTOR_PORT} + DOCKER_API_PROXY_HOST: ${DOCKER_API_PROXY_HOST} + DOCKER_API_PROXY_PASSWORD: ${DOCKER_API_PROXY_PASSWORD} + DOCKER_API_PROXY_PORT: ${DOCKER_API_PROXY_PORT} + DOCKER_API_PROXY_SECURE: ${DOCKER_API_PROXY_SECURE} + DOCKER_API_PROXY_USER: ${DOCKER_API_PROXY_USER} + docker-api-proxy: + image: ${DOCKER_REGISTRY:-itisfoundation}/docker-api-proxy:${DOCKER_IMAGE_TAG:-latest} + init: true + deploy: + placement: + constraints: + - node.role == manager + mode: global + volumes: + - /var/run/docker.sock:/var/run/docker.sock + networks: + - docker-api-network + static-webserver: image: ${DOCKER_REGISTRY:-itisfoundation}/static-webserver:${DOCKER_IMAGE_TAG:-latest} init: true @@ -705,6 +726,13 @@ services: INVITATIONS_USERNAME: ${INVITATIONS_USERNAME} WEBSERVER_LICENSES: ${WEBSERVER_LICENSES} + LICENSES_ITIS_VIP_SYNCER_ENABLED : ${LICENSES_ITIS_VIP_SYNCER_ENABLED} + LICENSES_ITIS_VIP_SYNCER_PERIODICITY: ${LICENSES_ITIS_VIP_SYNCER_PERIODICITY} + LICENSES_ITIS_VIP_API_URL: ${LICENSES_ITIS_VIP_API_URL} + LICENSES_ITIS_VIP_CATEGORIES: ${LICENSES_ITIS_VIP_CATEGORIES} + LICENSES_SPEAG_PHANTOMS_API_URL: ${LICENSES_SPEAG_PHANTOMS_API_URL} + LICENSES_SPEAG_PHANTOMS_CATEGORIES: ${LICENSES_SPEAG_PHANTOMS_CATEGORIES} + WEBSERVER_LOGIN: ${WEBSERVER_LOGIN} LOGIN_ACCOUNT_DELETION_RETENTION_DAYS: ${LOGIN_ACCOUNT_DELETION_RETENTION_DAYS} @@ -805,13 +833,11 @@ services: WEBSERVER_NOTIFICATIONS: ${WEBSERVER_NOTIFICATIONS} WEBSERVER_CLUSTERS: ${WEBSERVER_CLUSTERS} WEBSERVER_GROUPS: ${WEBSERVER_GROUPS} - 
WEBSERVER_META_MODELING: ${WEBSERVER_META_MODELING} WEBSERVER_PRODUCTS: ${WEBSERVER_PRODUCTS} WEBSERVER_PUBLICATIONS: ${WEBSERVER_PUBLICATIONS} WEBSERVER_SOCKETIO: ${WEBSERVER_SOCKETIO} WEBSERVER_TAGS: ${WEBSERVER_TAGS} WEBSERVER_USERS: ${WEBSERVER_USERS} - WEBSERVER_VERSION_CONTROL: ${WEBSERVER_VERSION_CONTROL} WEBSERVER_FOLDERS: ${WEBSERVER_FOLDERS} deploy: @@ -899,10 +925,9 @@ services: WEBSERVER_GARBAGE_COLLECTOR: ${WB_DB_EL_GARBAGE_COLLECTOR} WEBSERVER_GROUPS: ${WB_DB_EL_GROUPS} WEBSERVER_INVITATIONS: ${WB_DB_EL_INVITATIONS} - WEBSERVER_LICENSES: 0 + WEBSERVER_LICENSES: null WEBSERVER_LOGIN: ${WB_DB_EL_LOGIN} WEBSERVER_PAYMENTS: ${WB_DB_EL_PAYMENTS} - WEBSERVER_META_MODELING: ${WB_DB_EL_META_MODELING} WEBSERVER_NOTIFICATIONS: ${WB_DB_EL_NOTIFICATIONS} WEBSERVER_PRODUCTS: ${WB_DB_EL_PRODUCTS} WEBSERVER_PROJECTS: ${WB_DB_EL_PROJECTS} @@ -915,7 +940,6 @@ services: WEBSERVER_TAGS: ${WB_DB_EL_TAGS} WEBSERVER_TRACING: ${WB_DB_EL_TRACING} WEBSERVER_USERS: ${WB_DB_EL_USERS} - WEBSERVER_VERSION_CONTROL: ${WB_DB_EL_VERSION_CONTROL} WEBSERVER_WALLETS: ${WB_DB_EL_WALLETS} # WEBSERVER_RABBITMQ @@ -947,16 +971,15 @@ services: init: true hostname: "gc-{{.Node.Hostname}}-{{.Task.Slot}}" # the hostname is used in conjunction with other services and must be unique, see https://github.com/ITISFoundation/osparc-simcore/pull/5931 environment: - WEBSERVER_LOGLEVEL: ${WB_GC_LOGLEVEL} - WEBSERVER_HOST: ${WEBSERVER_HOST} - WEBSERVER_PORT: ${WEBSERVER_PORT} + # WEBSERVER_DIRECTOR_V2 + DIRECTOR_V2_HOST: ${DIRECTOR_V2_HOST} + DIRECTOR_V2_PORT: ${DIRECTOR_V2_PORT} - # WEBSERVER_RESOURCE_USAGE_TRACKER - RESOURCE_USAGE_TRACKER_HOST: ${RESOURCE_USAGE_TRACKER_HOST} - RESOURCE_USAGE_TRACKER_PORT: ${RESOURCE_USAGE_TRACKER_EXTERNAL_PORT} + GUNICORN_CMD_ARGS: ${WEBSERVER_GUNICORN_CMD_ARGS} - REST_SWAGGER_API_DOC_ENABLED: ${WB_GC_REST_SWAGGER_API_DOC_ENABLED} + LOG_FILTER_MAPPING : ${LOG_FILTER_MAPPING} + LOG_FORMAT_LOCAL_DEV_ENABLED: ${LOG_FORMAT_LOCAL_DEV_ENABLED} # WEBSERVER_DB POSTGRES_DB: ${POSTGRES_DB} @@ -966,48 +989,61 @@ services: POSTGRES_PORT: ${POSTGRES_PORT} POSTGRES_USER: ${POSTGRES_USER} - DIRECTOR_V2_HOST: ${DIRECTOR_V2_HOST} - DIRECTOR_V2_PORT: ${DIRECTOR_V2_PORT} - - GUNICORN_CMD_ARGS: ${WEBSERVER_GUNICORN_CMD_ARGS} - - LOG_FORMAT_LOCAL_DEV_ENABLED: ${LOG_FORMAT_LOCAL_DEV_ENABLED} - LOG_FILTER_MAPPING : ${LOG_FILTER_MAPPING} - - STORAGE_HOST: ${STORAGE_HOST} - STORAGE_PORT: ${STORAGE_PORT} + # WEBSERVER_RABBITMQ + RABBIT_HOST: ${RABBIT_HOST} + RABBIT_PASSWORD: ${RABBIT_PASSWORD} + RABBIT_PORT: ${RABBIT_PORT} + RABBIT_SECURE: ${RABBIT_SECURE} + RABBIT_USER: ${RABBIT_USER} + # WEBSERVER_REDIS REDIS_HOST: ${REDIS_HOST} + REDIS_PASSWORD: ${REDIS_PASSWORD} REDIS_PORT: ${REDIS_PORT} REDIS_SECURE: ${REDIS_SECURE} REDIS_USER: ${REDIS_USER} - REDIS_PASSWORD: ${REDIS_PASSWORD} - SWARM_STACK_NAME: ${SWARM_STACK_NAME} - - WEBSERVER_DB_LISTENER: ${WB_GC_DB_LISTENER} + # WEBSERVER_RESOURCE_MANAGER + RESOURCE_MANAGER_RESOURCE_TTL_S: ${WB_GC_RESOURCE_MANAGER_RESOURCE_TTL_S} - WEBSERVER_GARBAGE_COLLECTOR: ${WB_GC_GARBAGE_COLLECTOR} + # WEBSERVER_RESOURCE_USAGE_TRACKER + RESOURCE_USAGE_TRACKER_HOST: ${RESOURCE_USAGE_TRACKER_HOST} + RESOURCE_USAGE_TRACKER_PORT: ${RESOURCE_USAGE_TRACKER_EXTERNAL_PORT} - RESOURCE_MANAGER_RESOURCE_TTL_S: ${WB_GC_RESOURCE_MANAGER_RESOURCE_TTL_S} + REST_SWAGGER_API_DOC_ENABLED: ${WB_GC_REST_SWAGGER_API_DOC_ENABLED} + # WEBSERVER_SESSION SESSION_SECRET_KEY: ${WEBSERVER_SESSION_SECRET_KEY} + + # WEBSERVER_STORAGE + STORAGE_HOST: ${STORAGE_HOST} + STORAGE_PORT: ${STORAGE_PORT} + + SWARM_STACK_NAME: 
${SWARM_STACK_NAME} + + # WEBSERVER_TRASH + TRASH_RETENTION_DAYS: ${TRASH_RETENTION_DAYS} + WEBSERVER_ACTIVITY: ${WB_GC_ACTIVITY} WEBSERVER_ANNOUNCEMENTS: ${WB_GC_ANNOUNCEMENTS} WEBSERVER_CATALOG: ${WB_GC_CATALOG} WEBSERVER_CLUSTERS: ${WB_GC_CLUSTERS} + WEBSERVER_DB_LISTENER: ${WB_GC_DB_LISTENER} WEBSERVER_DIAGNOSTICS: ${WB_GC_DIAGNOSTICS} WEBSERVER_EMAIL: ${WB_GC_EMAIL} WEBSERVER_EXPORTER: ${WB_GC_EXPORTER} WEBSERVER_FOLDERS: ${WB_GC_FOLDERS} WEBSERVER_FRONTEND: ${WB_GC_FRONTEND} + WEBSERVER_GARBAGE_COLLECTOR: ${WB_GC_GARBAGE_COLLECTOR} WEBSERVER_GROUPS: ${WB_GC_GROUPS} + WEBSERVER_HOST: ${WEBSERVER_HOST} WEBSERVER_INVITATIONS: ${WB_GC_INVITATIONS} - WEBSERVER_LICENSES: 0 + WEBSERVER_LICENSES: null WEBSERVER_LOGIN: ${WB_GC_LOGIN} - WEBSERVER_META_MODELING: ${WB_GC_META_MODELING} + WEBSERVER_LOGLEVEL: ${WB_GC_LOGLEVEL} WEBSERVER_NOTIFICATIONS: ${WB_GC_NOTIFICATIONS} WEBSERVER_PAYMENTS: ${WB_GC_PAYMENTS} + WEBSERVER_PORT: ${WEBSERVER_PORT} WEBSERVER_PRODUCTS: ${WB_GC_PRODUCTS} WEBSERVER_PROJECTS: ${WB_GC_PROJECTS} WEBSERVER_PUBLICATIONS: ${WB_GC_PUBLICATIONS} @@ -1018,15 +1054,9 @@ services: WEBSERVER_TAGS: ${WB_GC_TAGS} WEBSERVER_TRACING: ${WB_GC_TRACING} WEBSERVER_USERS: ${WB_GC_USERS} - WEBSERVER_VERSION_CONTROL: ${WB_GC_VERSION_CONTROL} WEBSERVER_WALLETS: ${WB_GC_WALLETS} - # WEBSERVER_RABBITMQ - RABBIT_HOST: ${RABBIT_HOST} - RABBIT_PASSWORD: ${RABBIT_PASSWORD} - RABBIT_PORT: ${RABBIT_PORT} - RABBIT_SECURE: ${RABBIT_SECURE} - RABBIT_USER: ${RABBIT_USER} + networks: - default - interactive_services_subnet @@ -1112,8 +1142,9 @@ services: networks: - storage_subnet environment: - DATCORE_ADAPTER_LOG_FORMAT_LOCAL_DEV_ENABLED: ${LOG_FORMAT_LOCAL_DEV_ENABLED} DATCORE_ADAPTER_LOG_FILTER_MAPPING : ${LOG_FILTER_MAPPING} + DATCORE_ADAPTER_LOG_FORMAT_LOCAL_DEV_ENABLED: ${LOG_FORMAT_LOCAL_DEV_ENABLED} + DATCORE_ADAPTER_TRACING: ${DATCORE_ADAPTER_TRACING} TRACING_OPENTELEMETRY_COLLECTOR_ENDPOINT: ${TRACING_OPENTELEMETRY_COLLECTOR_ENDPOINT} TRACING_OPENTELEMETRY_COLLECTOR_PORT: ${TRACING_OPENTELEMETRY_COLLECTOR_PORT} @@ -1121,9 +1152,7 @@ services: image: ${DOCKER_REGISTRY:-itisfoundation}/storage:${DOCKER_IMAGE_TAG:-latest} init: true hostname: "sto-{{.Node.Hostname}}-{{.Task.Slot}}" - environment: - BF_API_KEY: ${BF_API_KEY} - BF_API_SECRET: ${BF_API_SECRET} + environment: &storage_environment DATCORE_ADAPTER_HOST: ${DATCORE_ADAPTER_HOST:-datcore-adapter} LOG_FORMAT_LOCAL_DEV_ENABLED: ${LOG_FORMAT_LOCAL_DEV_ENABLED} LOG_FILTER_MAPPING : ${LOG_FILTER_MAPPING} @@ -1133,6 +1162,11 @@ services: POSTGRES_PASSWORD: ${POSTGRES_PASSWORD} POSTGRES_PORT: ${POSTGRES_PORT} POSTGRES_USER: ${POSTGRES_USER} + RABBIT_HOST: ${RABBIT_HOST} + RABBIT_PASSWORD: ${RABBIT_PASSWORD} + RABBIT_PORT: ${RABBIT_PORT} + RABBIT_SECURE: ${RABBIT_SECURE} + RABBIT_USER: ${RABBIT_USER} REDIS_HOST: ${REDIS_HOST} REDIS_PORT: ${REDIS_PORT} REDIS_SECURE: ${REDIS_SECURE} @@ -1143,18 +1177,30 @@ services: S3_ENDPOINT: ${S3_ENDPOINT} S3_REGION: ${S3_REGION} S3_SECRET_KEY: ${S3_SECRET_KEY} + STORAGE_WORKER_MODE: "false" STORAGE_LOGLEVEL: ${STORAGE_LOGLEVEL} STORAGE_MONITORING_ENABLED: 1 STORAGE_PROFILING: ${STORAGE_PROFILING} + STORAGE_PORT: ${STORAGE_PORT} TRACING_OPENTELEMETRY_COLLECTOR_PORT: ${TRACING_OPENTELEMETRY_COLLECTOR_PORT} TRACING_OPENTELEMETRY_COLLECTOR_ENDPOINT: ${TRACING_OPENTELEMETRY_COLLECTOR_ENDPOINT} - networks: + networks: &storage_networks - default - interactive_services_subnet - storage_subnet + sto-worker: + image: ${DOCKER_REGISTRY:-itisfoundation}/storage:${DOCKER_IMAGE_TAG:-master-github-latest} + init: true + 
hostname: "sto-worker-{{.Node.Hostname}}-{{.Task.Slot}}" + environment: + <<: *storage_environment + STORAGE_WORKER_MODE: "true" + CELERY_CONCURRENCY: 1 + networks: *storage_networks + rabbit: - image: itisfoundation/rabbitmq:3.11.2-management + image: itisfoundation/rabbitmq:3.13.7-management init: true hostname: "{{.Node.Hostname}}-{{.Task.Slot}}" environment: @@ -1189,7 +1235,7 @@ services: - default # actually needed for the postgres service only postgres: - image: "postgres:14.8-alpine@sha256:150dd39ccb7ae6c7ba6130c3582c39a30bb5d3d22cb08ad0ba37001e3f829abc" + image: "postgres:17.2-alpine3.21@sha256:17143ad87797f511036cf8f50ada164aeb371f0d8068a172510549fb5d2cd65f" init: true hostname: "{{.Node.Hostname}}-{{.Task.Slot}}" environment: @@ -1237,7 +1283,7 @@ services: "--loglevel", "verbose", "--databases", - "9", + "10", "--appendonly", "yes", "--requirepass", @@ -1352,6 +1398,15 @@ networks: internal: false labels: com.simcore.description: "computational services network" + docker-api-network: + name: ${SWARM_STACK_NAME}_docker-api-network + driver: overlay + attachable: true + internal: true + driver_opts: + encrypted: "true" + labels: + com.simcore.description: "used for internal access to the docker swarm api" secrets: dask_tls_key: diff --git a/services/dynamic-scheduler/docker/boot.sh b/services/dynamic-scheduler/docker/boot.sh index a5a3ea811d5..c6c3cd4c849 100755 --- a/services/dynamic-scheduler/docker/boot.sh +++ b/services/dynamic-scheduler/docker/boot.sh @@ -24,7 +24,7 @@ if [ "${SC_BUILD_TARGET}" = "development" ]; then command -v python | sed 's/^/ /' cd services/dynamic-scheduler - uv pip --quiet --no-cache-dir sync requirements/dev.txt + uv pip --quiet sync requirements/dev.txt cd - echo "$INFO" "PIP :" uv pip list @@ -32,7 +32,7 @@ fi if [ "${SC_BOOT_MODE}" = "debug" ]; then # NOTE: production does NOT pre-installs debugpy - uv pip install --no-cache-dir debugpy + uv pip install debugpy fi # diff --git a/services/dynamic-scheduler/openapi.json b/services/dynamic-scheduler/openapi.json index 0b593da90d1..1f05e29ea25 100644 --- a/services/dynamic-scheduler/openapi.json +++ b/services/dynamic-scheduler/openapi.json @@ -77,6 +77,9 @@ }, "docs_url": { "type": "string", + "maxLength": 2083, + "minLength": 1, + "format": "uri", "title": "Docs Url" } }, diff --git a/services/dynamic-scheduler/requirements/_base.txt b/services/dynamic-scheduler/requirements/_base.txt index b9827f8175e..cf0ac62a608 100644 --- a/services/dynamic-scheduler/requirements/_base.txt +++ b/services/dynamic-scheduler/requirements/_base.txt @@ -1,4 +1,4 @@ -aio-pika==9.5.0 +aio-pika==9.5.5 # via -r requirements/../../../packages/service-library/requirements/_base.in aiocache==0.12.3 # via -r requirements/../../../packages/service-library/requirements/_base.in @@ -10,9 +10,9 @@ aiofiles==24.1.0 # via # -r requirements/../../../packages/service-library/requirements/_base.in # nicegui -aiohappyeyeballs==2.4.3 +aiohappyeyeballs==2.5.0 # via aiohttp -aiohttp==3.11.7 +aiohttp==3.11.13 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -33,13 +33,13 @@ aiohttp==3.11.7 # python-socketio aiormq==6.8.1 # via aio-pika -aiosignal==1.3.1 +aiosignal==1.3.2 # via aiohttp -alembic==1.14.0 +alembic==1.15.1 # via -r requirements/../../../packages/postgres-database/requirements/_base.in annotated-types==0.7.0 # via 
pydantic -anyio==4.6.2.post1 +anyio==4.8.0 # via # fast-depends # faststream @@ -56,14 +56,14 @@ asgiref==3.8.1 # via opentelemetry-instrumentation-asgi asyncpg==0.30.0 # via sqlalchemy -attrs==24.2.0 +attrs==25.1.0 # via # aiohttp # jsonschema # referencing bidict==0.23.1 # via python-socketio -certifi==2024.8.30 +certifi==2025.1.31 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -83,13 +83,13 @@ certifi==2024.8.30 # httpx # nicegui # requests -charset-normalizer==3.4.0 +charset-normalizer==3.4.1 # via requests -click==8.1.7 +click==8.1.8 # via # typer # uvicorn -deprecated==1.2.15 +deprecated==1.2.18 # via # opentelemetry-api # opentelemetry-exporter-otlp-proto-grpc @@ -105,24 +105,27 @@ exceptiongroup==1.2.2 # via aio-pika fast-depends==2.4.12 # via faststream -fastapi==0.115.5 +fastapi==0.115.11 # via # -r requirements/../../../packages/service-library/requirements/_fastapi.in # -r requirements/_base.in + # fastapi-lifespan-manager # nicegui -faststream==0.5.31 +fastapi-lifespan-manager==0.1.4 + # via -r requirements/../../../packages/service-library/requirements/_fastapi.in +faststream==0.5.35 # via -r requirements/../../../packages/service-library/requirements/_base.in frozenlist==1.5.0 # via # aiohttp # aiosignal -googleapis-common-protos==1.66.0 +googleapis-common-protos==1.69.1 # via # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http greenlet==3.1.1 # via sqlalchemy -grpcio==1.68.0 +grpcio==1.70.0 # via opentelemetry-exporter-otlp-proto-grpc h11==0.14.0 # via @@ -133,7 +136,7 @@ httpcore==1.0.7 # via httpx httptools==0.6.4 # via uvicorn -httpx==0.27.2 +httpx==0.28.1 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -165,7 +168,7 @@ importlib-metadata==8.5.0 # via opentelemetry-api itsdangerous==2.2.0 # via nicegui -jinja2==3.1.4 +jinja2==3.1.6 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -188,7 +191,7 @@ jsonschema==4.23.0 # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in jsonschema-specifications==2024.10.1 # via jsonschema -mako==1.3.6 +mako==1.3.9 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -207,7 +210,7 @@ mako==1.3.6 # alembic markdown-it-py==3.0.0 # via rich -markdown2==2.5.1 +markdown2==2.5.3 # via nicegui markupsafe==3.0.2 # via @@ -219,9 +222,9 @@ multidict==6.1.0 # via # aiohttp # yarl -nicegui==2.7.0 +nicegui==2.12.1 # via -r requirements/_base.in -opentelemetry-api==1.28.2 +opentelemetry-api==1.30.0 # via # -r requirements/../../../packages/service-library/requirements/_base.in # opentelemetry-exporter-otlp-proto-grpc @@ -236,17 +239,17 @@ opentelemetry-api==1.28.2 # opentelemetry-instrumentation-requests # opentelemetry-sdk # 
opentelemetry-semantic-conventions -opentelemetry-exporter-otlp==1.28.2 +opentelemetry-exporter-otlp==1.30.0 # via -r requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-exporter-otlp-proto-common==1.28.2 +opentelemetry-exporter-otlp-proto-common==1.30.0 # via # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http -opentelemetry-exporter-otlp-proto-grpc==1.28.2 +opentelemetry-exporter-otlp-proto-grpc==1.30.0 # via opentelemetry-exporter-otlp -opentelemetry-exporter-otlp-proto-http==1.28.2 +opentelemetry-exporter-otlp-proto-http==1.30.0 # via opentelemetry-exporter-otlp -opentelemetry-instrumentation==0.49b2 +opentelemetry-instrumentation==0.51b0 # via # opentelemetry-instrumentation-asgi # opentelemetry-instrumentation-asyncpg @@ -255,31 +258,31 @@ opentelemetry-instrumentation==0.49b2 # opentelemetry-instrumentation-logging # opentelemetry-instrumentation-redis # opentelemetry-instrumentation-requests -opentelemetry-instrumentation-asgi==0.49b2 +opentelemetry-instrumentation-asgi==0.51b0 # via opentelemetry-instrumentation-fastapi -opentelemetry-instrumentation-asyncpg==0.49b2 +opentelemetry-instrumentation-asyncpg==0.51b0 # via -r requirements/../../../packages/postgres-database/requirements/_base.in -opentelemetry-instrumentation-fastapi==0.49b2 +opentelemetry-instrumentation-fastapi==0.51b0 # via -r requirements/../../../packages/service-library/requirements/_fastapi.in -opentelemetry-instrumentation-httpx==0.49b2 +opentelemetry-instrumentation-httpx==0.51b0 # via -r requirements/../../../packages/service-library/requirements/_fastapi.in -opentelemetry-instrumentation-logging==0.49b2 +opentelemetry-instrumentation-logging==0.51b0 # via -r requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-instrumentation-redis==0.49b2 +opentelemetry-instrumentation-redis==0.51b0 # via -r requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-instrumentation-requests==0.49b2 +opentelemetry-instrumentation-requests==0.51b0 # via -r requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-proto==1.28.2 +opentelemetry-proto==1.30.0 # via # opentelemetry-exporter-otlp-proto-common # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http -opentelemetry-sdk==1.28.2 +opentelemetry-sdk==1.30.0 # via # -r requirements/../../../packages/service-library/requirements/_base.in # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http -opentelemetry-semantic-conventions==0.49b2 +opentelemetry-semantic-conventions==0.51b0 # via # opentelemetry-instrumentation # opentelemetry-instrumentation-asgi @@ -289,13 +292,13 @@ opentelemetry-semantic-conventions==0.49b2 # opentelemetry-instrumentation-redis # opentelemetry-instrumentation-requests # opentelemetry-sdk -opentelemetry-util-http==0.49b2 +opentelemetry-util-http==0.51b0 # via # opentelemetry-instrumentation-asgi # opentelemetry-instrumentation-fastapi # opentelemetry-instrumentation-httpx # opentelemetry-instrumentation-requests -orjson==3.10.12 +orjson==3.10.15 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -327,27 +330,29 @@ packaging==24.2 # opentelemetry-instrumentation pamqp==3.3.0 # via aiormq -prometheus-client==0.21.0 +prometheus-client==0.21.1 # via # -r 
requirements/../../../packages/service-library/requirements/_fastapi.in # prometheus-fastapi-instrumentator -prometheus-fastapi-instrumentator==7.0.0 +prometheus-fastapi-instrumentator==7.0.2 # via -r requirements/../../../packages/service-library/requirements/_fastapi.in -propcache==0.2.0 +propcache==0.3.0 # via # aiohttp # yarl -protobuf==5.28.3 +protobuf==5.29.3 # via # googleapis-common-protos # opentelemetry-proto pscript==0.7.7 # via vbuild -psutil==6.1.0 +psutil==7.0.0 # via -r requirements/../../../packages/service-library/requirements/_base.in psycopg2-binary==2.9.10 # via sqlalchemy -pydantic==2.10.2 +pycryptodome==3.21.0 + # via stream-zip +pydantic==2.10.6 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -380,9 +385,9 @@ pydantic==2.10.2 # fastapi # pydantic-extra-types # pydantic-settings -pydantic-core==2.27.1 +pydantic-core==2.27.2 # via pydantic -pydantic-extra-types==2.10.0 +pydantic-extra-types==2.10.2 # via # -r requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in @@ -393,17 +398,31 @@ pydantic-extra-types==2.10.0 # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in -pydantic-settings==2.6.1 +pydantic-settings==2.7.0 # via + # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt # -r requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/settings-library/requirements/_base.in -pygments==2.18.0 +pygments==2.19.1 # via # nicegui # rich -pyinstrument==5.0.0 +pyinstrument==5.0.1 # via -r requirements/../../../packages/service-library/requirements/_base.in python-dateutil==2.9.0.post0 # via arrow @@ -411,11 +430,11 @@ python-dotenv==1.0.1 # via # pydantic-settings # uvicorn -python-engineio==4.10.1 +python-engineio==4.11.2 # via python-socketio -python-multipart==0.0.17 +python-multipart==0.0.20 # via nicegui -python-socketio==5.11.4 +python-socketio==5.12.1 # via # -r requirements/_base.in # nicegui @@ -456,6 +475,20 @@ redis==5.2.1 # -r requirements/../../../packages/service-library/requirements/_base.in referencing==0.35.1 # via + # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt # jsonschema # jsonschema-specifications requests==2.32.3 @@ -467,7 +500,7 @@ rich==13.9.4 # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/settings-library/requirements/_base.in # typer 
-rpds-py==0.21.0 +rpds-py==0.23.1 # via # jsonschema # referencing @@ -475,12 +508,10 @@ shellingham==1.5.4 # via typer simple-websocket==1.1.0 # via python-engineio -six==1.16.0 +six==1.17.0 # via python-dateutil sniffio==1.3.1 - # via - # anyio - # httpx + # via anyio sqlalchemy==1.4.54 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -499,7 +530,7 @@ sqlalchemy==1.4.54 # -c requirements/../../../requirements/constraints.txt # -r requirements/../../../packages/postgres-database/requirements/_base.in # alembic -starlette==0.41.3 +starlette==0.46.0 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -517,23 +548,26 @@ starlette==0.41.3 # -c requirements/../../../requirements/constraints.txt # fastapi # prometheus-fastapi-instrumentator +stream-zip==0.0.83 + # via -r requirements/../../../packages/service-library/requirements/_base.in tenacity==9.0.0 # via -r requirements/../../../packages/service-library/requirements/_base.in toolz==1.0.0 # via -r requirements/../../../packages/service-library/requirements/_base.in tqdm==4.67.1 # via -r requirements/../../../packages/service-library/requirements/_base.in -typer==0.13.1 +typer==0.15.2 # via # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/_base.in -types-python-dateutil==2.9.0.20241003 +types-python-dateutil==2.9.0.20241206 # via arrow typing-extensions==4.12.2 # via # aiodebug # alembic + # anyio # fastapi # faststream # nicegui @@ -544,7 +578,7 @@ typing-extensions==4.12.2 # typer u-msgpack-python==2.8.0 # via -r requirements/_base.in -urllib3==2.2.3 +urllib3==2.3.0 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -562,7 +596,7 @@ urllib3==2.2.3 # -c requirements/../../../requirements/constraints.txt # nicegui # requests -uvicorn==0.32.1 +uvicorn==0.34.0 # via # -r requirements/../../../packages/service-library/requirements/_fastapi.in # -r requirements/_base.in @@ -571,13 +605,13 @@ uvloop==0.21.0 # via uvicorn vbuild==0.8.2 # via nicegui -watchfiles==1.0.0 +watchfiles==1.0.4 # via # nicegui # uvicorn -websockets==14.1 +websockets==15.0.1 # via uvicorn -wrapt==1.17.0 +wrapt==1.17.2 # via # deprecated # opentelemetry-instrumentation @@ -585,7 +619,7 @@ wrapt==1.17.0 # opentelemetry-instrumentation-redis wsproto==1.2.0 # via simple-websocket -yarl==1.18.0 +yarl==1.18.3 # via # -r requirements/../../../packages/postgres-database/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in diff --git a/services/dynamic-scheduler/requirements/_test.txt b/services/dynamic-scheduler/requirements/_test.txt index 9be4c5e4f9f..f8ded032d0b 100644 --- a/services/dynamic-scheduler/requirements/_test.txt +++ b/services/dynamic-scheduler/requirements/_test.txt @@ -1,27 +1,27 @@ -anyio==4.6.2.post1 +anyio==4.8.0 # via # -c requirements/_base.txt # httpx asgi-lifespan==2.1.0 # via -r requirements/_test.in -certifi==2024.8.30 +certifi==2025.1.31 # via # -c 
requirements/../../../requirements/constraints.txt # -c requirements/_base.txt # httpcore # httpx # requests -charset-normalizer==3.4.0 +charset-normalizer==3.4.1 # via # -c requirements/_base.txt # requests -coverage==7.6.10 +coverage==7.6.12 # via # -r requirements/_test.in # pytest-cov docker==7.1.0 # via -r requirements/_test.in -faker==35.0.0 +faker==36.2.2 # via -r requirements/_test.in greenlet==3.1.1 # via @@ -33,7 +33,7 @@ h11==0.14.0 # httpcore # hypercorn # wsproto -h2==4.1.0 +h2==4.2.0 # via hypercorn hpack==4.1.0 # via h2 @@ -41,7 +41,7 @@ httpcore==1.0.7 # via # -c requirements/_base.txt # httpx -httpx==0.27.2 +httpx==0.28.1 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt @@ -65,7 +65,7 @@ packaging==24.2 # -c requirements/_base.txt # pytest # pytest-sugar -playwright==1.49.1 +playwright==1.50.0 # via -r requirements/_test.in pluggy==1.5.0 # via pytest @@ -73,9 +73,9 @@ pprintpp==0.4.0 # via pytest-icdiff priority==2.0.0 # via hypercorn -pyee==12.0.0 +pyee==12.1.1 # via playwright -pytest==8.3.4 +pytest==8.3.5 # via # -r requirements/_test.in # pytest-asyncio @@ -97,10 +97,6 @@ pytest-runner==6.0.1 # via -r requirements/_test.in pytest-sugar==1.0.0 # via -r requirements/_test.in -python-dateutil==2.9.0.post0 - # via - # -c requirements/_base.txt - # faker python-dotenv==1.0.1 # via # -c requirements/_base.txt @@ -111,24 +107,21 @@ requests==2.32.3 # docker respx==0.22.0 # via -r requirements/_test.in -six==1.16.0 - # via - # -c requirements/_base.txt - # python-dateutil sniffio==1.3.1 # via # -c requirements/_base.txt # anyio # asgi-lifespan - # httpx termcolor==2.5.0 # via pytest-sugar typing-extensions==4.12.2 # via # -c requirements/_base.txt - # faker + # anyio # pyee -urllib3==2.2.3 +tzdata==2025.1 + # via faker +urllib3==2.3.0 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt diff --git a/services/dynamic-scheduler/requirements/_tools.txt b/services/dynamic-scheduler/requirements/_tools.txt index 15433e37333..3c53f6540c2 100644 --- a/services/dynamic-scheduler/requirements/_tools.txt +++ b/services/dynamic-scheduler/requirements/_tools.txt @@ -1,6 +1,6 @@ astroid==3.3.8 # via pylint -black==24.10.0 +black==25.1.0 # via -r requirements/../../../requirements/devenv.txt build==1.2.2.post1 # via pip-tools @@ -8,7 +8,7 @@ bump2version==1.0.1 # via -r requirements/../../../requirements/devenv.txt cfgv==3.4.0 # via pre-commit -click==8.1.7 +click==8.1.8 # via # -c requirements/_base.txt # black @@ -19,15 +19,15 @@ distlib==0.3.9 # via virtualenv filelock==3.17.0 # via virtualenv -identify==2.6.6 +identify==2.6.8 # via pre-commit -isort==5.13.2 +isort==6.0.1 # via # -r requirements/../../../requirements/devenv.txt # pylint mccabe==0.7.0 # via pylint -mypy==1.14.1 +mypy==1.15.0 # via -r requirements/../../../requirements/devenv.txt mypy-extensions==1.0.0 # via @@ -43,7 +43,7 @@ packaging==24.2 # build pathspec==0.12.1 # via black -pip==25.0 +pip==25.0.1 # via pip-tools pip-tools==7.4.1 # via -r requirements/../../../requirements/devenv.txt @@ -54,7 +54,7 @@ platformdirs==4.3.6 # virtualenv pre-commit==4.1.0 # via -r requirements/../../../requirements/devenv.txt -pylint==3.3.3 +pylint==3.3.4 # via -r requirements/../../../requirements/devenv.txt pyproject-hooks==1.2.0 # via @@ -65,9 +65,9 @@ pyyaml==6.0.2 # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt # pre-commit -ruff==0.9.3 +ruff==0.9.9 # via -r requirements/../../../requirements/devenv.txt -setuptools==75.8.0 
+setuptools==75.8.2 # via pip-tools tomlkit==0.13.2 # via pylint @@ -76,7 +76,7 @@ typing-extensions==4.12.2 # -c requirements/_base.txt # -c requirements/_test.txt # mypy -virtualenv==20.29.1 +virtualenv==20.29.3 # via pre-commit wheel==0.45.1 # via pip-tools diff --git a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/api/frontend/__init__.py b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/api/frontend/__init__.py index 7f991346a4b..71c3aa2c862 100644 --- a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/api/frontend/__init__.py +++ b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/api/frontend/__init__.py @@ -1,3 +1,3 @@ -from ._setup import setup_frontend +from ._setup import initialize_frontend -__all__: tuple[str, ...] = ("setup_frontend",) +__all__: tuple[str, ...] = ("initialize_frontend",) diff --git a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/api/frontend/_setup.py b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/api/frontend/_setup.py index 50bb82fc0f3..d56da5f43f4 100644 --- a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/api/frontend/_setup.py +++ b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/api/frontend/_setup.py @@ -6,7 +6,7 @@ from .routes import router -def setup_frontend(app: FastAPI) -> None: +def initialize_frontend(app: FastAPI) -> None: settings: ApplicationSettings = app.state.settings nicegui.app.include_router(router) diff --git a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/api/rest/_health.py b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/api/rest/_health.py index ff5fe204132..e72f897ca74 100644 --- a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/api/rest/_health.py +++ b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/api/rest/_health.py @@ -1,17 +1,20 @@ from typing import Annotated import arrow -from fastapi import APIRouter, Depends +from fastapi import APIRouter, Depends, FastAPI from fastapi.responses import PlainTextResponse from models_library.errors import ( + DOCKER_API_PROXY_UNHEALTHY_MSG, RABBITMQ_CLIENT_UNHEALTHY_MSG, REDIS_CLIENT_UNHEALTHY_MSG, ) +from servicelib.fastapi.docker import is_docker_api_proxy_ready from servicelib.rabbitmq import RabbitMQClient, RabbitMQRPCClient from servicelib.redis import RedisClientSDK from settings_library.redis import RedisDatabase from ._dependencies import ( + get_app, get_rabbitmq_client_from_request, get_rabbitmq_rpc_server_from_request, get_redis_clients_from_request, @@ -26,6 +29,7 @@ class HealthCheckError(RuntimeError): @router.get("/health", response_class=PlainTextResponse) async def healthcheck( + app: Annotated[FastAPI, Depends(get_app)], rabbit_client: Annotated[RabbitMQClient, Depends(get_rabbitmq_client_from_request)], rabbit_rpc_server: Annotated[ RabbitMQRPCClient, Depends(get_rabbitmq_rpc_server_from_request) @@ -35,6 +39,9 @@ async def healthcheck( Depends(get_redis_clients_from_request), ], ): + if not await is_docker_api_proxy_ready(app, timeout=1): + raise HealthCheckError(DOCKER_API_PROXY_UNHEALTHY_MSG) + if not rabbit_client.healthy or not rabbit_rpc_server.healthy: raise HealthCheckError(RABBITMQ_CLIENT_UNHEALTHY_MSG) diff --git a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/api/rest/routes.py b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/api/rest/routes.py index 8c1d3e21ed8..9d0b45de981 100644 --- 
a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/api/rest/routes.py +++ b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/api/rest/routes.py @@ -8,7 +8,7 @@ from . import _health, _meta -def setup_rest_api(app: FastAPI): +def initialize_rest_api(app: FastAPI) -> None: app.include_router(_health.router) api_router = APIRouter(prefix=f"/{API_VTAG}") diff --git a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/api/rpc/routes.py b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/api/rpc/routes.py index 77c544e5354..2a2f4a3afd3 100644 --- a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/api/rpc/routes.py +++ b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/api/rpc/routes.py @@ -1,4 +1,7 @@ +from collections.abc import AsyncIterator + from fastapi import FastAPI +from fastapi_lifespan_manager import State from models_library.api_schemas_dynamic_scheduler import DYNAMIC_SCHEDULER_RPC_NAMESPACE from servicelib.rabbitmq import RPCRouter @@ -10,12 +13,9 @@ ] -def setup_rpc_api_routes(app: FastAPI) -> None: - async def startup() -> None: - rpc_server = get_rabbitmq_rpc_server(app) - for router in ROUTERS: - await rpc_server.register_router( - router, DYNAMIC_SCHEDULER_RPC_NAMESPACE, app - ) +async def lifespan_rpc_api_routes(app: FastAPI) -> AsyncIterator[State]: + rpc_server = get_rabbitmq_rpc_server(app) + for router in ROUTERS: + await rpc_server.register_router(router, DYNAMIC_SCHEDULER_RPC_NAMESPACE, app) - app.add_event_handler("startup", startup) + yield {} diff --git a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/cli.py b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/cli.py index 0b7d56fccda..ed05a7bb265 100644 --- a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/cli.py +++ b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/cli.py @@ -2,6 +2,7 @@ import os import typer +from settings_library.docker_api_proxy import DockerApiProxysettings from settings_library.rabbit import RabbitSettings from settings_library.utils_cli import ( create_settings_command, @@ -56,6 +57,14 @@ def echo_dotenv(ctx: typer.Context, *, minimal: bool = True): "DYNAMIC_SCHEDULER_UI_STORAGE_SECRET", "replace-with-ui-storage-secret", ), + DYNAMIC_SCHEDULER_DOCKER_API_PROXY=os.environ.get( + "DYNAMIC_SCHEDULER_DOCKER_API_PROXY", + DockerApiProxysettings.create_from_envs( + DOCKER_API_PROXY_HOST=os.environ.get( + "DOCKER_API_PROXY_HOST", "replace-with-proxy-host" + ) + ), + ), ) print_as_envfile( diff --git a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/core/application.py b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/core/application.py index 7cbcb7355a9..9502da022fa 100644 --- a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/core/application.py +++ b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/core/application.py @@ -1,10 +1,16 @@ +from collections.abc import AsyncIterator + from fastapi import FastAPI +from fastapi_lifespan_manager import State +from servicelib.fastapi.docker import get_lifespan_remote_docker_client +from servicelib.fastapi.lifespan_utils import LifespanGenerator, combine_lifespans from servicelib.fastapi.openapi import override_fastapi_openapi_method -from servicelib.fastapi.profiler_middleware import ProfilerMiddleware +from servicelib.fastapi.profiler import initialize_profiler from servicelib.fastapi.prometheus_instrumentation import ( - 
setup_prometheus_instrumentation, + initialize_prometheus_instrumentation, + lifespan_prometheus_instrumentation, ) -from servicelib.fastapi.tracing import setup_tracing +from servicelib.fastapi.tracing import initialize_tracing from .._meta import ( API_VERSION, @@ -15,23 +21,50 @@ PROJECT_NAME, SUMMARY, ) -from ..api.frontend import setup_frontend -from ..api.rest.routes import setup_rest_api -from ..api.rpc.routes import setup_rpc_api_routes -from ..services.deferred_manager import setup_deferred_manager -from ..services.director_v0 import setup_director_v0 -from ..services.director_v2 import setup_director_v2 -from ..services.notifier import setup_notifier -from ..services.rabbitmq import setup_rabbitmq -from ..services.redis import setup_redis -from ..services.service_tracker import setup_service_tracker -from ..services.status_monitor import setup_status_monitor +from ..api.frontend import initialize_frontend +from ..api.rest.routes import initialize_rest_api +from ..api.rpc.routes import lifespan_rpc_api_routes +from ..services.catalog import lifespan_catalog +from ..services.deferred_manager import lifespan_deferred_manager +from ..services.director_v0 import lifespan_director_v0 +from ..services.director_v2 import lifespan_director_v2 +from ..services.notifier import get_lifespans_notifier +from ..services.rabbitmq import lifespan_rabbitmq +from ..services.redis import lifespan_redis +from ..services.service_tracker import lifespan_service_tracker +from ..services.status_monitor import lifespan_status_monitor from .settings import ApplicationSettings +async def _lifespan_banner(app: FastAPI) -> AsyncIterator[State]: + _ = app + print(APP_STARTED_BANNER_MSG, flush=True) # noqa: T201 + yield {} + print(APP_FINISHED_BANNER_MSG, flush=True) # noqa: T201 + + def create_app(settings: ApplicationSettings | None = None) -> FastAPI: app_settings = settings or ApplicationSettings.create_from_envs() + lifespans: list[LifespanGenerator] = [ + lifespan_director_v2, + lifespan_director_v0, + lifespan_catalog, + lifespan_rabbitmq, + lifespan_rpc_api_routes, + lifespan_redis, + *get_lifespans_notifier(), + lifespan_service_tracker, + lifespan_deferred_manager, + lifespan_status_monitor, + get_lifespan_remote_docker_client( + app_settings.DYNAMIC_SCHEDULER_DOCKER_API_PROXY + ), + ] + + if app_settings.DYNAMIC_SCHEDULER_PROMETHEUS_INSTRUMENTATION_ENABLED: + lifespans.append(lifespan_prometheus_instrumentation) + app = FastAPI( title=f"{PROJECT_NAME} web API", description=SUMMARY, @@ -40,7 +73,8 @@ def create_app(settings: ApplicationSettings | None = None) -> FastAPI: docs_url=( "/doc" if app_settings.DYNAMIC_SCHEDULER_SWAGGER_API_DOC_ENABLED else None ), - redoc_url=None, # default disabled, see below + redoc_url=None, + lifespan=combine_lifespans(*lifespans, _lifespan_banner), ) override_fastapi_openapi_method(app) @@ -48,48 +82,16 @@ def create_app(settings: ApplicationSettings | None = None) -> FastAPI: app.state.settings = app_settings assert app.state.settings.API_VERSION == API_VERSION # nosec - if app.state.settings.DYNAMIC_SCHEDULER_PROMETHEUS_INSTRUMENTATION_ENABLED: - setup_prometheus_instrumentation(app) - - if app.state.settings.DYNAMIC_SCHEDULER_PROFILING: - app.add_middleware(ProfilerMiddleware) - if app.state.settings.DYNAMIC_SCHEDULER_TRACING: - setup_tracing( - app, - app.state.settings.DYNAMIC_SCHEDULER_TRACING, - APP_NAME, - ) - - # PLUGINS SETUP - - setup_director_v2(app) - setup_director_v0(app) - - setup_rabbitmq(app) - setup_rpc_api_routes(app) - - setup_redis(app) - - 
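create_app now collects all startup/shutdown logic as lifespan generators and hands them to FastAPI through combine_lifespans. A hypothetical re-implementation of such a combinator, purely to illustrate the enter-in-order / exit-in-reverse semantics (servicelib's actual helper may differ, and unwinding after a partially failed startup is omitted for brevity):

    from collections.abc import AsyncIterator, Callable
    from contextlib import asynccontextmanager, suppress

    from fastapi import FastAPI

    LifespanGenerator = Callable[[FastAPI], AsyncIterator[dict]]

    def combine_lifespans(*generators: LifespanGenerator):
        @asynccontextmanager
        async def _combined(app: FastAPI) -> AsyncIterator[dict]:
            state: dict = {}
            started = [generator(app) for generator in generators]
            for gen in started:
                # advance each generator to its `yield`, merging yielded state
                state.update(await anext(gen))
            yield state
            for gen in reversed(started):
                # resume past the `yield` so shutdown code runs, in reverse order
                with suppress(StopAsyncIteration):
                    await anext(gen)

        return _combined
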
setup_notifier(app) - - setup_service_tracker(app) - setup_deferred_manager(app) - setup_status_monitor(app) - - setup_rest_api(app) - setup_frontend(app) + initialize_rest_api(app) - # ERROR HANDLERS - # ... add here ... + initialize_prometheus_instrumentation(app) - # EVENTS - async def _on_startup() -> None: - print(APP_STARTED_BANNER_MSG, flush=True) # noqa: T201 + initialize_frontend(app) - async def _on_shutdown() -> None: - print(APP_FINISHED_BANNER_MSG, flush=True) # noqa: T201 + if app_settings.DYNAMIC_SCHEDULER_TRACING: + initialize_tracing(app, app_settings.DYNAMIC_SCHEDULER_TRACING, APP_NAME) - app.add_event_handler("startup", _on_startup) - app.add_event_handler("shutdown", _on_shutdown) + if app_settings.DYNAMIC_SCHEDULER_PROFILING: + initialize_profiler(app) return app diff --git a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/core/settings.py b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/core/settings.py index 9531641897f..2bec84f3690 100644 --- a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/core/settings.py +++ b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/core/settings.py @@ -5,8 +5,10 @@ from servicelib.logging_utils_filtering import LoggerName, MessageSubstring from settings_library.application import BaseApplicationSettings from settings_library.basic_types import LogLevel, VersionTag +from settings_library.catalog import CatalogSettings from settings_library.director_v0 import DirectorV0Settings from settings_library.director_v2 import DirectorV2Settings +from settings_library.docker_api_proxy import DockerApiProxysettings from settings_library.http_client_request import ClientRequestSettings from settings_library.rabbit import RabbitSettings from settings_library.redis import RedisSettings @@ -144,6 +146,11 @@ class ApplicationSettings(_BaseApplicationSettings): description="settings for director-v2 service", ) + DYNAMIC_SCHEDULER_CATALOG_SETTINGS: CatalogSettings = Field( + json_schema_extra={"auto_default_from_env": True}, + description="settings for catalog service", + ) + DYNAMIC_SCHEDULER_PROMETHEUS_INSTRUMENTATION_ENABLED: bool = True DYNAMIC_SCHEDULER_PROFILING: bool = False @@ -152,6 +159,11 @@ class ApplicationSettings(_BaseApplicationSettings): description="settings for opentelemetry tracing", ) + DYNAMIC_SCHEDULER_DOCKER_API_PROXY: Annotated[ + DockerApiProxysettings, + Field(json_schema_extra={"auto_default_from_env": True}), + ] + @field_validator("DYNAMIC_SCHEDULER_UI_MOUNT_PATH", mode="before") @classmethod def _ensure_ends_with_slash(cls, v: str) -> str: diff --git a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/catalog/__init__.py b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/catalog/__init__.py new file mode 100644 index 00000000000..86e004ee9b0 --- /dev/null +++ b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/catalog/__init__.py @@ -0,0 +1,7 @@ +from ._public_client import CatalogPublicClient +from ._setup import lifespan_catalog + +__all__: tuple[str, ...] 
= ( + "CatalogPublicClient", + "lifespan_catalog", +) diff --git a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/catalog/_public_client.py b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/catalog/_public_client.py new file mode 100644 index 00000000000..fbe160b261a --- /dev/null +++ b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/catalog/_public_client.py @@ -0,0 +1,34 @@ +from fastapi import FastAPI +from models_library.api_schemas_catalog.services_specifications import ( + ServiceSpecifications, +) +from models_library.service_settings_labels import SimcoreServiceLabels +from models_library.services import ServiceKey, ServiceVersion +from models_library.users import UserID +from pydantic import TypeAdapter +from servicelib.fastapi.app_state import SingletonInAppStateMixin + +from ._thin_client import CatalogThinClient + + +class CatalogPublicClient(SingletonInAppStateMixin): + app_state_name: str = "catalog_public_client" + + def __init__(self, app: FastAPI) -> None: + self.app = app + + async def get_services_labels( + self, service_key: ServiceKey, service_version: ServiceVersion + ) -> SimcoreServiceLabels: + response = await CatalogThinClient.get_from_app_state( + self.app + ).get_services_labels(service_key, service_version) + return TypeAdapter(SimcoreServiceLabels).validate_python(response.json()) + + async def get_services_specifications( + self, user_id: UserID, service_key: ServiceKey, service_version: ServiceVersion + ) -> ServiceSpecifications: + response = await CatalogThinClient.get_from_app_state( + self.app + ).get_services_specifications(user_id, service_key, service_version) + return TypeAdapter(ServiceSpecifications).validate_python(response.json()) diff --git a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/catalog/_setup.py b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/catalog/_setup.py new file mode 100644 index 00000000000..92d7b7617eb --- /dev/null +++ b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/catalog/_setup.py @@ -0,0 +1,21 @@ +from collections.abc import AsyncIterator + +from fastapi import FastAPI +from fastapi_lifespan_manager import State + +from ._public_client import CatalogPublicClient +from ._thin_client import CatalogThinClient + + +async def lifespan_catalog(app: FastAPI) -> AsyncIterator[State]: + thin_client = CatalogThinClient(app) + thin_client.set_to_app_state(app) + thin_client.attach_lifespan_to(app) + + public_client = CatalogPublicClient(app) + public_client.set_to_app_state(app) + + yield {} + + CatalogPublicClient.pop_from_app_state(app) + CatalogThinClient.pop_from_app_state(app) diff --git a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/catalog/_thin_client.py b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/catalog/_thin_client.py new file mode 100644 index 00000000000..98cf8b7e0ae --- /dev/null +++ b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/catalog/_thin_client.py @@ -0,0 +1,57 @@ +import urllib.parse + +from fastapi import FastAPI, status +from httpx import Response +from models_library.services import ServiceKey, ServiceVersion +from models_library.users import UserID +from servicelib.fastapi.app_state import SingletonInAppStateMixin +from servicelib.fastapi.http_client import AttachLifespanMixin +from servicelib.fastapi.http_client_thin import ( + BaseThinClient, + 
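CatalogPublicClient and CatalogThinClient both rely on SingletonInAppStateMixin to keep exactly one instance per application on app.state. A hypothetical sketch of what such a mixin boils down to (the servicelib version may add validation this one omits):

    from typing import Any

    from fastapi import FastAPI

    class SingletonInAppStateMixin:
        app_state_name: str  # each subclass picks a unique attribute name

        def set_to_app_state(self, app: FastAPI) -> None:
            setattr(app.state, self.app_state_name, self)

        @classmethod
        def get_from_app_state(cls, app: FastAPI) -> Any:
            return getattr(app.state, cls.app_state_name)

        @classmethod
        def pop_from_app_state(cls, app: FastAPI) -> Any:
            instance = getattr(app.state, cls.app_state_name)
            delattr(app.state, cls.app_state_name)
            return instance
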
expect_status, + retry_on_errors, +) +from yarl import URL + +from ...core.settings import ApplicationSettings + + +class CatalogThinClient(SingletonInAppStateMixin, BaseThinClient, AttachLifespanMixin): + app_state_name: str = "catalog_thin_client" + + def __init__(self, app: FastAPI) -> None: + settings: ApplicationSettings = app.state.settings + assert settings.CLIENT_REQUEST.HTTP_CLIENT_REQUEST_TOTAL_TIMEOUT # nosec + + super().__init__( + total_retry_interval=int( + settings.CLIENT_REQUEST.HTTP_CLIENT_REQUEST_TOTAL_TIMEOUT + ), + extra_allowed_method_names={ + "attach_lifespan_to", + "get_from_app_state", + "pop_from_app_state", + "set_to_app_state", + }, + base_url=settings.DYNAMIC_SCHEDULER_CATALOG_SETTINGS.api_base_url, + tracing_settings=settings.DYNAMIC_SCHEDULER_TRACING, + ) + + @retry_on_errors() + @expect_status(status.HTTP_200_OK) + async def get_services_labels( + self, service_key: ServiceKey, service_version: ServiceVersion + ) -> Response: + return await self.client.get( + f"/services/{urllib.parse.quote(service_key, safe='')}/{service_version}/labels" + ) + + @retry_on_errors() + @expect_status(status.HTTP_200_OK) + async def get_services_specifications( + self, user_id: UserID, service_key: ServiceKey, service_version: ServiceVersion + ) -> Response: + request_url = URL( + f"/services/{urllib.parse.quote(service_key, safe='')}/{service_version}/specifications", + ).with_query(user_id=user_id) + return await self.client.get(f"{request_url}") diff --git a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/deferred_manager.py b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/deferred_manager.py index 8544c0f38e6..65cf20bd20d 100644 --- a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/deferred_manager.py +++ b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/deferred_manager.py @@ -1,4 +1,7 @@ +from collections.abc import AsyncIterator + from fastapi import FastAPI +from fastapi_lifespan_manager import State from servicelib.deferred_tasks import DeferredManager from settings_library.rabbit import RabbitSettings from settings_library.redis import RedisDatabase @@ -6,19 +9,15 @@ from .redis import get_redis_client -def setup_deferred_manager(app: FastAPI) -> None: - async def on_startup() -> None: - rabbit_settings: RabbitSettings = app.state.settings.DYNAMIC_SCHEDULER_RABBITMQ +async def lifespan_deferred_manager(app: FastAPI) -> AsyncIterator[State]: + rabbit_settings: RabbitSettings = app.state.settings.DYNAMIC_SCHEDULER_RABBITMQ - redis_client_sdk = get_redis_client(app, RedisDatabase.DEFERRED_TASKS) - app.state.deferred_manager = manager = DeferredManager( - rabbit_settings, redis_client_sdk, globals_context={"app": app} - ) - await manager.setup() + redis_client_sdk = get_redis_client(app, RedisDatabase.DEFERRED_TASKS) + app.state.deferred_manager = manager = DeferredManager( + rabbit_settings, redis_client_sdk, globals_context={"app": app} + ) + await manager.setup() - async def on_shutdown() -> None: - manager: DeferredManager = app.state.deferred_manager - await manager.shutdown() + yield {} - app.add_event_handler("startup", on_startup) - app.add_event_handler("shutdown", on_shutdown) + await manager.shutdown() diff --git a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/director_v0/__init__.py b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/director_v0/__init__.py index 46b11bfba38..85beb707352 100644 --- 
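The specifications call above percent-encodes the service key with safe="" so that its slashes travel as a single path segment, then lets yarl append the query string. A small standalone sketch with made-up values (not from the PR):

    import urllib.parse

    from yarl import URL

    service_key = "simcore/services/dynamic/test"  # example value
    service_version = "1.0.0"                      # example value

    quoted_key = urllib.parse.quote(service_key, safe="")  # "/" becomes "%2F"
    request_url = URL(
        f"/services/{quoted_key}/{service_version}/specifications"
    ).with_query(user_id=42)
    # /services/simcore%2Fservices%2Fdynamic%2Ftest/1.0.0/specifications?user_id=42
    print(request_url)
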
a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/director_v0/__init__.py +++ b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/director_v0/__init__.py @@ -1,7 +1,7 @@ from ._public_client import DirectorV0PublicClient -from ._setup import setup_director_v0 +from ._setup import lifespan_director_v0 __all__: tuple[str, ...] = ( "DirectorV0PublicClient", - "setup_director_v0", + "lifespan_director_v0", ) diff --git a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/director_v0/_setup.py b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/director_v0/_setup.py index b58b272a221..90e48e9b3f5 100644 --- a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/director_v0/_setup.py +++ b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/director_v0/_setup.py @@ -1,21 +1,22 @@ +from collections.abc import AsyncIterator + from fastapi import FastAPI +from fastapi_lifespan_manager import State from ._public_client import DirectorV0PublicClient from ._thin_client import DirectorV0ThinClient -def setup_director_v0(app: FastAPI) -> None: - async def _on_startup() -> None: - thin_client = DirectorV0ThinClient(app) - thin_client.set_to_app_state(app) - thin_client.attach_lifespan_to(app) +async def lifespan_director_v0(app: FastAPI) -> AsyncIterator[State]: + + thin_client = DirectorV0ThinClient(app) + thin_client.set_to_app_state(app) + thin_client.attach_lifespan_to(app) - public_client = DirectorV0PublicClient(app) - public_client.set_to_app_state(app) + public_client = DirectorV0PublicClient(app) + public_client.set_to_app_state(app) - async def _on_shutdown() -> None: - DirectorV0PublicClient.pop_from_app_state(app) - DirectorV0ThinClient.pop_from_app_state(app) + yield {} - app.add_event_handler("startup", _on_startup) - app.add_event_handler("shutdown", _on_shutdown) + DirectorV0PublicClient.pop_from_app_state(app) + DirectorV0ThinClient.pop_from_app_state(app) diff --git a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/director_v2/__init__.py b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/director_v2/__init__.py index 6d76547f3f2..25216a03f2f 100644 --- a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/director_v2/__init__.py +++ b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/director_v2/__init__.py @@ -1,6 +1,6 @@ -from ._public_client import DirectorV2Client, setup_director_v2 +from ._public_client import DirectorV2Client, lifespan_director_v2 __all__: tuple[str, ...] 
= ( "DirectorV2Client", - "setup_director_v2", + "lifespan_director_v2", ) diff --git a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/director_v2/_public_client.py b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/director_v2/_public_client.py index 57481f04a11..c771923cb05 100644 --- a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/director_v2/_public_client.py +++ b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/director_v2/_public_client.py @@ -1,7 +1,9 @@ import datetime +from collections.abc import AsyncIterator from typing import Any from fastapi import FastAPI, status +from fastapi_lifespan_manager import State from models_library.api_schemas_directorv2.dynamic_services import ( DynamicServiceGet, GetProjectInactivityResponse, @@ -141,6 +143,8 @@ async def update_projects_networks(self, *, project_id: ProjectID) -> None: await self.thin_client.patch_projects_networks(project_id=project_id) -def setup_director_v2(app: FastAPI) -> None: +async def lifespan_director_v2(app: FastAPI) -> AsyncIterator[State]: public_client = DirectorV2Client(app) public_client.set_to_app_state(app) + + yield {} diff --git a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/notifier/__init__.py b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/notifier/__init__.py index 8cd33e12808..7daeeb7e2fc 100644 --- a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/notifier/__init__.py +++ b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/notifier/__init__.py @@ -1,7 +1,7 @@ from ._notifier import notify_service_status_change -from ._setup import setup_notifier +from ._setup import get_lifespans_notifier __all__: tuple[str, ...] 
= ( - "setup_notifier", + "get_lifespans_notifier", "notify_service_status_change", ) diff --git a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/notifier/_notifier.py b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/notifier/_notifier.py index 0b8690a9676..c869a368ab2 100644 --- a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/notifier/_notifier.py +++ b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/notifier/_notifier.py @@ -1,8 +1,10 @@ import contextlib +from collections.abc import AsyncIterator import socketio # type: ignore[import-untyped] from fastapi import FastAPI from fastapi.encoders import jsonable_encoder +from fastapi_lifespan_manager import State from models_library.api_schemas_directorv2.dynamic_services import DynamicServiceGet from models_library.api_schemas_dynamic_scheduler.socketio import ( SOCKET_IO_SERVICE_STATUS_EVENT, @@ -37,19 +39,17 @@ async def notify_service_status_change( await notifier.notify_service_status(user_id=user_id, status=status) -def setup(app: FastAPI): - async def _on_startup() -> None: - assert app.state.external_socketio # nosec +async def lifespan(app: FastAPI) -> AsyncIterator[State]: - notifier = Notifier( - sio_manager=app.state.external_socketio, - ) - notifier.set_to_app_state(app) - assert Notifier.get_from_app_state(app) == notifier # nosec + assert app.state.external_socketio # nosec + + notifier = Notifier( + sio_manager=app.state.external_socketio, + ) + notifier.set_to_app_state(app) + assert Notifier.get_from_app_state(app) == notifier # nosec - async def _on_shutdown() -> None: - with contextlib.suppress(AttributeError): - Notifier.pop_from_app_state(app) + yield {} - app.add_event_handler("startup", _on_startup) - app.add_event_handler("shutdown", _on_shutdown) + with contextlib.suppress(AttributeError): + Notifier.pop_from_app_state(app) diff --git a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/notifier/_setup.py b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/notifier/_setup.py index 1542afa8a87..5bd140959b2 100644 --- a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/notifier/_setup.py +++ b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/notifier/_setup.py @@ -1,8 +1,7 @@ -from fastapi import FastAPI +from servicelib.fastapi.lifespan_utils import LifespanGenerator from . 
import _notifier, _socketio -def setup_notifier(app: FastAPI): - _socketio.setup(app) - _notifier.setup(app) +def get_lifespans_notifier() -> list[LifespanGenerator]: + return [_socketio.lifespan, _notifier.lifespan] diff --git a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/notifier/_socketio.py b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/notifier/_socketio.py index 2f0abfbd3af..f34f6b87f09 100644 --- a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/notifier/_socketio.py +++ b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/notifier/_socketio.py @@ -1,7 +1,9 @@ import logging +from collections.abc import AsyncIterator import socketio # type: ignore[import-untyped] from fastapi import FastAPI +from fastapi_lifespan_manager import State from servicelib.socketio_utils import cleanup_socketio_async_pubsub_manager from ...core.settings import ApplicationSettings @@ -9,24 +11,19 @@ _logger = logging.getLogger(__name__) -def setup(app: FastAPI): +async def lifespan(app: FastAPI) -> AsyncIterator[State]: settings: ApplicationSettings = app.state.settings - async def _on_startup() -> None: - assert app.state.rabbitmq_client # nosec + assert app.state.rabbitmq_client # nosec - # Connect to the as an external process in write-only mode - # SEE https://python-socketio.readthedocs.io/en/stable/server.html#emitting-from-external-processes - assert settings.DYNAMIC_SCHEDULER_RABBITMQ # nosec - app.state.external_socketio = socketio.AsyncAioPikaManager( - url=settings.DYNAMIC_SCHEDULER_RABBITMQ.dsn, logger=_logger, write_only=True - ) + # Connect to the server as an external process in write-only mode + # SEE https://python-socketio.readthedocs.io/en/stable/server.html#emitting-from-external-processes + assert settings.DYNAMIC_SCHEDULER_RABBITMQ # nosec + app.state.external_socketio = socketio.AsyncAioPikaManager( + url=settings.DYNAMIC_SCHEDULER_RABBITMQ.dsn, logger=_logger, write_only=True + ) - async def _on_shutdown() -> None: - if external_socketio := getattr(app.state, "external_socketio"): # noqa: B009 - await cleanup_socketio_async_pubsub_manager( - server_manager=external_socketio - ) + yield {} - app.add_event_handler("startup", _on_startup) - app.add_event_handler("shutdown", _on_shutdown) + if external_socketio := getattr(app.state, "external_socketio"): # noqa: B009 + await cleanup_socketio_async_pubsub_manager(server_manager=external_socketio) diff --git a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/rabbitmq.py b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/rabbitmq.py index b7b3d30425c..4f555b8e5f5 100644 --- a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/rabbitmq.py +++ b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/rabbitmq.py @@ -1,6 +1,8 @@ +from collections.abc import AsyncIterator from typing import cast from fastapi import FastAPI +from fastapi_lifespan_manager import State from models_library.rabbitmq_messages import RabbitMessageBase from servicelib.rabbitmq import ( RabbitMQClient, @@ -10,34 +12,26 @@ from settings_library.rabbit import RabbitSettings -def setup_rabbitmq(app: FastAPI) -> None: +async def lifespan_rabbitmq(app: FastAPI) -> AsyncIterator[State]: settings: RabbitSettings = app.state.settings.DYNAMIC_SCHEDULER_RABBITMQ - app.state.rabbitmq_client = None - app.state.rabbitmq_rpc_server = None - - async def _on_startup() -> None: - await
wait_till_rabbitmq_responsive(settings.dsn) - - app.state.rabbitmq_client = RabbitMQClient( - client_name="dynamic_scheduler", settings=settings - ) - app.state.rabbitmq_rpc_client = await RabbitMQRPCClient.create( - client_name="dynamic_scheduler_rpc_client", settings=settings - ) - app.state.rabbitmq_rpc_server = await RabbitMQRPCClient.create( - client_name="dynamic_scheduler_rpc_server", settings=settings - ) - - async def _on_shutdown() -> None: - if app.state.rabbitmq_client: - await app.state.rabbitmq_client.close() - if app.state.rabbitmq_rpc_client: - await app.state.rabbitmq_rpc_client.close() - if app.state.rabbitmq_rpc_server: - await app.state.rabbitmq_rpc_server.close() - - app.add_event_handler("startup", _on_startup) - app.add_event_handler("shutdown", _on_shutdown) + + await wait_till_rabbitmq_responsive(settings.dsn) + + app.state.rabbitmq_client = RabbitMQClient( + client_name="dynamic_scheduler", settings=settings + ) + app.state.rabbitmq_rpc_client = await RabbitMQRPCClient.create( + client_name="dynamic_scheduler_rpc_client", settings=settings + ) + app.state.rabbitmq_rpc_server = await RabbitMQRPCClient.create( + client_name="dynamic_scheduler_rpc_server", settings=settings + ) + + yield {} + + await app.state.rabbitmq_client.close() + await app.state.rabbitmq_rpc_client.close() + await app.state.rabbitmq_rpc_server.close() def get_rabbitmq_client(app: FastAPI) -> RabbitMQClient: diff --git a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/redis.py b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/redis.py index c6f98d9e49e..2640218bee7 100644 --- a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/redis.py +++ b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/redis.py @@ -1,6 +1,8 @@ +from collections.abc import AsyncIterator from typing import Final from fastapi import FastAPI +from fastapi_lifespan_manager import State from servicelib.redis import RedisClientSDK, RedisClientsManager, RedisManagerDBConfig from settings_library.redis import RedisDatabase, RedisSettings @@ -18,30 +20,22 @@ _ALL_REDIS_DATABASES: Final[set[RedisDatabase]] = _DECODE_DBS | _BINARY_DBS -def setup_redis(app: FastAPI) -> None: +async def lifespan_redis(app: FastAPI) -> AsyncIterator[State]: settings: RedisSettings = app.state.settings.DYNAMIC_SCHEDULER_REDIS - async def on_startup() -> None: - app.state.redis_clients_manager = manager = RedisClientsManager( - { - RedisManagerDBConfig(database=x, decode_responses=False) - for x in _BINARY_DBS - } - | { - RedisManagerDBConfig(database=x, decode_responses=True) - for x in _DECODE_DBS - }, - settings, - client_name=APP_NAME, - ) - await manager.setup() - - async def on_shutdown() -> None: - manager: RedisClientsManager = app.state.redis_clients_manager - await manager.shutdown() - - app.add_event_handler("startup", on_startup) - app.add_event_handler("shutdown", on_shutdown) + app.state.redis_clients_manager = manager = RedisClientsManager( + {RedisManagerDBConfig(database=x, decode_responses=False) for x in _BINARY_DBS} + | { + RedisManagerDBConfig(database=x, decode_responses=True) for x in _DECODE_DBS + }, + settings, + client_name=APP_NAME, + ) + await manager.setup() + + yield {} + + await manager.shutdown() def get_redis_client(app: FastAPI, database: RedisDatabase) -> RedisClientSDK: diff --git a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/service_tracker/__init__.py 
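Each generator above ends with `yield {}` because fastapi-lifespan-manager merges the yielded State mappings; plain FastAPI exposes the same merged mapping on every request. A minimal stock-FastAPI sketch of that mechanism (names are illustrative only):

    from collections.abc import AsyncIterator
    from contextlib import asynccontextmanager

    from fastapi import FastAPI, Request

    @asynccontextmanager
    async def lifespan(app: FastAPI) -> AsyncIterator[dict]:
        yield {"started": True}  # merged into per-request state

    app = FastAPI(lifespan=lifespan)

    @app.get("/ping")
    async def ping(request: Request) -> dict:
        # Starlette copies the yielded mapping onto request.state
        return {"started": request.state.started}
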
b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/service_tracker/__init__.py index e4cf7e50705..58141505a6d 100644 --- a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/service_tracker/__init__.py +++ b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/service_tracker/__init__.py @@ -13,12 +13,13 @@ should_notify_frontend_for_service, ) from ._models import TrackedServiceModel -from ._setup import setup_service_tracker +from ._setup import lifespan_service_tracker __all__: tuple[str, ...] = ( "get_all_tracked_services", "get_tracked_service", "get_user_id_for_service", + "lifespan_service_tracker", "NORMAL_RATE_POLL_INTERVAL", "remove_tracked_service", "set_frontend_notified_for_service", @@ -27,7 +28,6 @@ "set_request_as_stopped", "set_service_scheduled_to_run", "set_service_status_task_uid", - "setup_service_tracker", "should_notify_frontend_for_service", "TrackedServiceModel", ) diff --git a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/service_tracker/_setup.py b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/service_tracker/_setup.py index 40a47bb8bec..45da842e985 100644 --- a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/service_tracker/_setup.py +++ b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/service_tracker/_setup.py @@ -1,17 +1,18 @@ +from collections.abc import AsyncIterator + from fastapi import FastAPI +from fastapi_lifespan_manager import State from settings_library.redis import RedisDatabase from ..redis import get_redis_client from ._tracker import Tracker -def setup_service_tracker(app: FastAPI) -> None: - async def on_startup() -> None: - app.state.service_tracker = Tracker( - get_redis_client(app, RedisDatabase.DYNAMIC_SERVICES) - ) - - app.add_event_handler("startup", on_startup) +async def lifespan_service_tracker(app: FastAPI) -> AsyncIterator[State]: + app.state.service_tracker = Tracker( + get_redis_client(app, RedisDatabase.DYNAMIC_SERVICES) + ) + yield {} def get_tracker(app: FastAPI) -> Tracker: diff --git a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/status_monitor/__init__.py b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/status_monitor/__init__.py index 26345124325..86c116f704d 100644 --- a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/status_monitor/__init__.py +++ b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/status_monitor/__init__.py @@ -1,3 +1,3 @@ -from ._setup import setup_status_monitor +from ._setup import lifespan_status_monitor -__all__: tuple[str, ...] = ("setup_status_monitor",) +__all__: tuple[str, ...] 
= ("lifespan_status_monitor",) diff --git a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/status_monitor/_setup.py b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/status_monitor/_setup.py index 8f9601464bc..177300d1b83 100644 --- a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/status_monitor/_setup.py +++ b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/status_monitor/_setup.py @@ -1,26 +1,24 @@ +from collections.abc import AsyncIterator from datetime import timedelta from typing import Final from fastapi import FastAPI +from fastapi_lifespan_manager import State from ._monitor import Monitor _STATUS_WORKER_DEFAULT_INTERVAL: Final[timedelta] = timedelta(seconds=1) -def setup_status_monitor(app: FastAPI) -> None: - async def on_startup() -> None: - app.state.status_monitor = monitor = Monitor( - app, status_worker_interval=_STATUS_WORKER_DEFAULT_INTERVAL - ) - await monitor.setup() +async def lifespan_status_monitor(app: FastAPI) -> AsyncIterator[State]: + app.state.status_monitor = monitor = Monitor( + app, status_worker_interval=_STATUS_WORKER_DEFAULT_INTERVAL + ) + await monitor.setup() - async def on_shutdown() -> None: - monitor: Monitor = app.state.status_monitor - await monitor.shutdown() + yield {} - app.add_event_handler("startup", on_startup) - app.add_event_handler("shutdown", on_shutdown) + await monitor.shutdown() def get_monitor(app: FastAPI) -> Monitor: diff --git a/services/dynamic-scheduler/tests/conftest.py b/services/dynamic-scheduler/tests/conftest.py index 1c4760a1659..5b2ed7db361 100644 --- a/services/dynamic-scheduler/tests/conftest.py +++ b/services/dynamic-scheduler/tests/conftest.py @@ -6,6 +6,7 @@ from pathlib import Path from typing import Final +import nicegui import pytest import simcore_service_dynamic_scheduler import yaml @@ -25,6 +26,7 @@ "pytest_simcore.docker_swarm", "pytest_simcore.environment_configs", "pytest_simcore.faker_projects_data", + "pytest_simcore.faker_users_data", "pytest_simcore.rabbit_service", "pytest_simcore.redis_service", "pytest_simcore.repository_paths", @@ -85,34 +87,34 @@ def app_environment( @pytest.fixture -def disable_rabbitmq_setup(mocker: MockerFixture) -> None: - mocker.patch(f"{_PATH_APPLICATION}.setup_rabbitmq") - mocker.patch(f"{_PATH_APPLICATION}.setup_rpc_api_routes") +def disable_rabbitmq_lifespan(mocker: MockerFixture) -> None: + mocker.patch(f"{_PATH_APPLICATION}.lifespan_rabbitmq") + mocker.patch(f"{_PATH_APPLICATION}.lifespan_rpc_api_routes") @pytest.fixture -def disable_redis_setup(mocker: MockerFixture) -> None: - mocker.patch(f"{_PATH_APPLICATION}.setup_redis") +def disable_redis_lifespan(mocker: MockerFixture) -> None: + mocker.patch(f"{_PATH_APPLICATION}.lifespan_redis") @pytest.fixture -def disable_service_tracker_setup(mocker: MockerFixture) -> None: - mocker.patch(f"{_PATH_APPLICATION}.setup_service_tracker") +def disable_service_tracker_lifespan(mocker: MockerFixture) -> None: + mocker.patch(f"{_PATH_APPLICATION}.lifespan_service_tracker") @pytest.fixture -def disable_deferred_manager_setup(mocker: MockerFixture) -> None: - mocker.patch(f"{_PATH_APPLICATION}.setup_deferred_manager") +def disable_deferred_manager_lifespan(mocker: MockerFixture) -> None: + mocker.patch(f"{_PATH_APPLICATION}.lifespan_deferred_manager") @pytest.fixture -def disable_notifier_setup(mocker: MockerFixture) -> None: - mocker.patch(f"{_PATH_APPLICATION}.setup_notifier") +def disable_notifier_lifespan(mocker: MockerFixture) 
-> None: + mocker.patch(f"{_PATH_APPLICATION}.get_lifespans_notifier") @pytest.fixture -def disable_status_monitor_setup(mocker: MockerFixture) -> None: - mocker.patch(f"{_PATH_APPLICATION}.setup_status_monitor") +def disable_status_monitor_lifespan(mocker: MockerFixture) -> None: + mocker.patch(f"{_PATH_APPLICATION}.lifespan_status_monitor") MAX_TIME_FOR_APP_TO_STARTUP: Final[float] = 10 @@ -123,6 +125,9 @@ def disable_status_monitor_setup(mocker: MockerFixture) -> None: async def app( app_environment: EnvVarsDict, is_pdb_enabled: bool ) -> AsyncIterator[FastAPI]: + # forces rebuild of middleware stack on next test + nicegui.app.user_middleware.clear() + nicegui.app.middleware_stack = None test_app = create_app() async with LifespanManager( test_app, diff --git a/services/dynamic-scheduler/tests/unit/api_frontend/conftest.py b/services/dynamic-scheduler/tests/unit/api_frontend/conftest.py index be92830ee54..62f0ea0a2f3 100644 --- a/services/dynamic-scheduler/tests/unit/api_frontend/conftest.py +++ b/services/dynamic-scheduler/tests/unit/api_frontend/conftest.py @@ -8,6 +8,7 @@ from typing import Final from unittest.mock import AsyncMock +import nicegui import pytest from fastapi import FastAPI, status from httpx import AsyncClient @@ -19,8 +20,8 @@ from settings_library.rabbit import RabbitSettings from settings_library.redis import RedisSettings from settings_library.utils_service import DEFAULT_FASTAPI_PORT -from simcore_service_dynamic_scheduler.api.frontend._utils import get_settings from simcore_service_dynamic_scheduler.core.application import create_app +from simcore_service_dynamic_scheduler.core.settings import ApplicationSettings from tenacity import AsyncRetrying, stop_after_delay, wait_fixed _MODULE: Final["str"] = "simcore_service_dynamic_scheduler" @@ -69,6 +70,9 @@ def server_host_port() -> str: @pytest.fixture def not_initialized_app(app_environment: EnvVarsDict) -> FastAPI: + # forces rebuild of middleware stack on next test + nicegui.app.user_middleware.clear() + nicegui.app.middleware_stack = None return create_app() @@ -93,8 +97,10 @@ async def _run_server() -> None: server_task = asyncio.create_task(_run_server()) + settings: ApplicationSettings = not_initialized_app.state.settings + home_page_url = ( - f"http://{server_host_port}{get_settings().DYNAMIC_SCHEDULER_UI_MOUNT_PATH}" + f"http://{server_host_port}{settings.DYNAMIC_SCHEDULER_UI_MOUNT_PATH}" ) async for attempt in AsyncRetrying( reraise=True, wait=wait_fixed(0.1), stop=stop_after_delay(2) diff --git a/services/dynamic-scheduler/tests/unit/api_rest/conftest.py b/services/dynamic-scheduler/tests/unit/api_rest/conftest.py index c3d585b00af..d7fbda477ff 100644 --- a/services/dynamic-scheduler/tests/unit/api_rest/conftest.py +++ b/services/dynamic-scheduler/tests/unit/api_rest/conftest.py @@ -11,12 +11,12 @@ @pytest.fixture def app_environment( - disable_rabbitmq_setup: None, - disable_redis_setup: None, - disable_service_tracker_setup: None, - disable_deferred_manager_setup: None, - disable_notifier_setup: None, - disable_status_monitor_setup: None, + disable_rabbitmq_lifespan: None, + disable_redis_lifespan: None, + disable_service_tracker_lifespan: None, + disable_deferred_manager_lifespan: None, + disable_notifier_lifespan: None, + disable_status_monitor_lifespan: None, app_environment: EnvVarsDict, ) -> EnvVarsDict: return app_environment diff --git a/services/dynamic-scheduler/tests/unit/api_rest/test_api_rest__health.py b/services/dynamic-scheduler/tests/unit/api_rest/test_api_rest__health.py index 
cb7939c5824..42bc7396c9c 100644 --- a/services/dynamic-scheduler/tests/unit/api_rest/test_api_rest__health.py +++ b/services/dynamic-scheduler/tests/unit/api_rest/test_api_rest__health.py @@ -48,8 +48,17 @@ def mock_redis_client( ) +@pytest.fixture +def mock_docker_api_proxy(mocker: MockerFixture, docker_api_proxy_ok: bool) -> None: + base_path = "simcore_service_dynamic_scheduler.api.rest._health" + mocker.patch( + f"{base_path}.is_docker_api_proxy_ready", return_value=docker_api_proxy_ok + ) + + @pytest.fixture def app_environment( + mock_docker_api_proxy: None, mock_rabbitmq_clients: None, mock_redis_client: None, app_environment: EnvVarsDict, @@ -58,12 +67,13 @@ def app_environment( @pytest.mark.parametrize( - "rabbit_client_ok, rabbit_rpc_server_ok, redis_client_ok, is_ok", + "rabbit_client_ok, rabbit_rpc_server_ok, redis_client_ok, docker_api_proxy_ok, is_ok", [ - pytest.param(True, True, True, True, id="ok"), - pytest.param(False, True, True, False, id="rabbit_client_bad"), - pytest.param(True, False, True, False, id="rabbit_rpc_server_bad"), - pytest.param(True, True, False, False, id="redis_client_bad"), + pytest.param(True, True, True, True, True, id="ok"), + pytest.param(False, True, True, True, False, id="rabbit_client_bad"), + pytest.param(True, False, True, True, False, id="rabbit_rpc_server_bad"), + pytest.param(True, True, False, True, False, id="redis_client_bad"), + pytest.param(True, True, True, False, False, id="docker_api_proxy_bad"), ], ) async def test_health(client: AsyncClient, is_ok: bool): diff --git a/services/dynamic-scheduler/tests/unit/service_tracker/test__api.py b/services/dynamic-scheduler/tests/unit/service_tracker/test__api.py index ab89f54e861..b8d385089f3 100644 --- a/services/dynamic-scheduler/tests/unit/service_tracker/test__api.py +++ b/services/dynamic-scheduler/tests/unit/service_tracker/test__api.py @@ -52,10 +52,10 @@ @pytest.fixture def app_environment( - disable_rabbitmq_setup: None, - disable_deferred_manager_setup: None, - disable_notifier_setup: None, - disable_status_monitor_setup: None, + disable_rabbitmq_lifespan: None, + disable_deferred_manager_lifespan: None, + disable_notifier_lifespan: None, + disable_status_monitor_lifespan: None, app_environment: EnvVarsDict, redis_service: RedisSettings, remove_redis_data: None, diff --git a/services/dynamic-scheduler/tests/unit/service_tracker/test__tracker.py b/services/dynamic-scheduler/tests/unit/service_tracker/test__tracker.py index f1c29a3d3f7..8ad52fd1f9c 100644 --- a/services/dynamic-scheduler/tests/unit/service_tracker/test__tracker.py +++ b/services/dynamic-scheduler/tests/unit/service_tracker/test__tracker.py @@ -36,9 +36,9 @@ def disable_monitor_task(mocker: MockerFixture) -> None: @pytest.fixture def app_environment( disable_monitor_task: None, - disable_rabbitmq_setup: None, - disable_deferred_manager_setup: None, - disable_notifier_setup: None, + disable_rabbitmq_lifespan: None, + disable_deferred_manager_lifespan: None, + disable_notifier_lifespan: None, app_environment: EnvVarsDict, redis_service: RedisSettings, remove_redis_data: None, diff --git a/services/dynamic-scheduler/tests/unit/test__model_examples.py b/services/dynamic-scheduler/tests/unit/test__model_examples.py index e768927cfe4..98b04cc2996 100644 --- a/services/dynamic-scheduler/tests/unit/test__model_examples.py +++ b/services/dynamic-scheduler/tests/unit/test__model_examples.py @@ -1,10 +1,12 @@ -import json from typing import Any import pytest import simcore_service_dynamic_scheduler.models -from pydantic
import BaseModel, TypeAdapter, ValidationError -from pytest_simcore.pydantic_models import walk_model_examples_in_package +from pydantic import BaseModel +from pytest_simcore.pydantic_models import ( + assert_validation_model, + walk_model_examples_in_package, +) @pytest.mark.parametrize( @@ -12,11 +14,8 @@ walk_model_examples_in_package(simcore_service_dynamic_scheduler.models), ) def test_api_server_model_examples( - model_cls: type[BaseModel], example_name: int, example_data: Any + model_cls: type[BaseModel], example_name: str, example_data: Any ): - try: - assert TypeAdapter(model_cls).validate_python(example_data) is not None - except ValidationError as err: - pytest.fail( - f"\n{example_name}: {json.dumps(example_data, indent=1)}\nError: {err}" - ) + assert_validation_model( + model_cls, example_name=example_name, example_data=example_data + ) diff --git a/services/dynamic-scheduler/tests/unit/test_services_catalog.py b/services/dynamic-scheduler/tests/unit/test_services_catalog.py new file mode 100644 index 00000000000..c618766cccd --- /dev/null +++ b/services/dynamic-scheduler/tests/unit/test_services_catalog.py @@ -0,0 +1,118 @@ +# pylint:disable=redefined-outer-name +# pylint:disable=unused-argument + + +import urllib.parse +from collections.abc import Iterator + +import pytest +import respx +from fastapi import FastAPI +from models_library.api_schemas_catalog.services_specifications import ( + ServiceSpecifications, +) +from models_library.service_settings_labels import SimcoreServiceLabels +from models_library.services import ServiceKey, ServiceVersion +from models_library.users import UserID +from pydantic import TypeAdapter +from pytest_simcore.helpers.typing_env import EnvVarsDict +from simcore_service_dynamic_scheduler.services.catalog import CatalogPublicClient + + +@pytest.fixture +def app_environment( + disable_redis_lifespan: None, + disable_rabbitmq_lifespan: None, + disable_service_tracker_lifespan: None, + disable_deferred_manager_lifespan: None, + disable_notifier_lifespan: None, + disable_status_monitor_lifespan: None, + app_environment: EnvVarsDict, +) -> EnvVarsDict: + return app_environment + + +@pytest.fixture +def simcore_service_labels() -> SimcoreServiceLabels: + return TypeAdapter(SimcoreServiceLabels).validate_python( + SimcoreServiceLabels.model_json_schema()["examples"][1] + ) + + +@pytest.fixture +def service_specifications() -> ServiceSpecifications: + return TypeAdapter(ServiceSpecifications).validate_python({}) + + +@pytest.fixture +def service_version() -> ServiceVersion: + return "1.0.0" + + +@pytest.fixture +def service_key() -> ServiceKey: + return "simcore/services/dynamic/test" + + +@pytest.fixture +def mock_catalog( + app: FastAPI, + user_id: UserID, + service_key: ServiceKey, + service_version: ServiceVersion, + simcore_service_labels: SimcoreServiceLabels, + service_specifications: ServiceSpecifications, +) -> Iterator[None]: + with respx.mock( + base_url=app.state.settings.DYNAMIC_SCHEDULER_CATALOG_SETTINGS.api_base_url, + assert_all_called=False, + assert_all_mocked=True, # IMPORTANT: KEEP always True! 
+ ) as respx_mock: + respx_mock.get( + f"/services/{urllib.parse.quote_plus(service_key)}/{service_version}/labels", + name="service labels", + ).respond( + status_code=200, + json=simcore_service_labels.model_dump(mode="json"), + ) + + respx_mock.get( + f"/services/{urllib.parse.quote_plus(service_key)}/{service_version}/specifications?user_id={user_id}", + name="service specifications", + ).respond( + status_code=200, + json=service_specifications.model_dump(mode="json"), + ) + + yield + + +async def test_get_services_labels( + mock_catalog: None, + app: FastAPI, + service_key: ServiceKey, + service_version: ServiceVersion, + simcore_service_labels: SimcoreServiceLabels, +): + client = CatalogPublicClient.get_from_app_state(app) + result = await client.get_services_labels(service_key, service_version) + assert result.model_dump(mode="json") == simcore_service_labels.model_dump( + mode="json" + ) + + +async def test_get_services_specifications( + mock_catalog: None, + app: FastAPI, + user_id: UserID, + service_key: ServiceKey, + service_version: ServiceVersion, + service_specifications: ServiceSpecifications, +): + client = CatalogPublicClient.get_from_app_state(app) + result = await client.get_services_specifications( + user_id, service_key, service_version + ) + assert result.model_dump(mode="json") == service_specifications.model_dump( + mode="json" + ) diff --git a/services/dynamic-scheduler/tests/unit/test_services_director_v0.py b/services/dynamic-scheduler/tests/unit/test_services_director_v0.py index 0c2db384c25..0900ed3622a 100644 --- a/services/dynamic-scheduler/tests/unit/test_services_director_v0.py +++ b/services/dynamic-scheduler/tests/unit/test_services_director_v0.py @@ -10,7 +10,6 @@ from models_library.api_schemas_directorv2.dynamic_services_service import ( RunningDynamicServiceDetails, ) -from models_library.projects import ProjectID from models_library.projects_nodes_io import NodeID from pydantic import TypeAdapter from pytest_simcore.helpers.typing_env import EnvVarsDict @@ -21,21 +20,19 @@ @pytest.fixture def app_environment( - disable_redis_setup: None, - disable_rabbitmq_setup: None, - disable_service_tracker_setup: None, - disable_deferred_manager_setup: None, - disable_notifier_setup: None, - disable_status_monitor_setup: None, + disable_redis_lifespan: None, + disable_rabbitmq_lifespan: None, + disable_service_tracker_lifespan: None, + disable_deferred_manager_lifespan: None, + disable_notifier_lifespan: None, + disable_status_monitor_lifespan: None, app_environment: EnvVarsDict, ) -> EnvVarsDict: return app_environment @pytest.fixture -def legacy_service_details( - node_id: NodeID, project_id: ProjectID -) -> RunningDynamicServiceDetails: +def legacy_service_details() -> RunningDynamicServiceDetails: return TypeAdapter(RunningDynamicServiceDetails).validate_python( RunningDynamicServiceDetails.model_json_schema()["examples"][0] ) diff --git a/services/dynamic-scheduler/tests/unit/test_services_rabbitmq.py b/services/dynamic-scheduler/tests/unit/test_services_rabbitmq.py index eadb7c9ee03..12c355162c0 100644 --- a/services/dynamic-scheduler/tests/unit/test_services_rabbitmq.py +++ b/services/dynamic-scheduler/tests/unit/test_services_rabbitmq.py @@ -20,11 +20,11 @@ @pytest.fixture def app_environment( - disable_redis_setup: None, - disable_service_tracker_setup: None, - disable_deferred_manager_setup: None, - disable_notifier_setup: None, - disable_status_monitor_setup: None, + disable_redis_lifespan: None, + disable_service_tracker_lifespan: None, + 
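The mock_catalog fixture above is built on respx. For readers new to the library, a minimal self-contained sketch of the same mock-and-assert pattern against a made-up URL:

    import asyncio

    import httpx
    import respx

    @respx.mock
    async def fetch_labels() -> dict:
        # made-up endpoint, mirroring the catalog routes mocked above
        route = respx.get("http://catalog:8000/services/foo/1.0.0/labels").respond(
            status_code=200, json={"simcore.service.settings": "[]"}
        )
        async with httpx.AsyncClient() as client:
            response = await client.get(
                "http://catalog:8000/services/foo/1.0.0/labels"
            )
        assert route.called
        return response.json()

    print(asyncio.run(fetch_labels()))
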
disable_deferred_manager_lifespan: None, + disable_notifier_lifespan: None, + disable_status_monitor_lifespan: None, app_environment: EnvVarsDict, rabbit_service: RabbitSettings, ) -> EnvVarsDict: diff --git a/services/dynamic-scheduler/tests/unit/test_services_redis.py b/services/dynamic-scheduler/tests/unit/test_services_redis.py index 059a17aeb0f..be4952fbea6 100644 --- a/services/dynamic-scheduler/tests/unit/test_services_redis.py +++ b/services/dynamic-scheduler/tests/unit/test_services_redis.py @@ -15,10 +15,10 @@ @pytest.fixture def app_environment( - disable_rabbitmq_setup: None, - disable_deferred_manager_setup: None, - disable_notifier_setup: None, - disable_status_monitor_setup: None, + disable_rabbitmq_lifespan: None, + disable_deferred_manager_lifespan: None, + disable_notifier_lifespan: None, + disable_status_monitor_lifespan: None, app_environment: EnvVarsDict, redis_service: RedisSettings, ) -> EnvVarsDict: diff --git a/services/dynamic-sidecar/docker/boot.sh b/services/dynamic-sidecar/docker/boot.sh index d21e87cb070..b68f0ae22ba 100755 --- a/services/dynamic-sidecar/docker/boot.sh +++ b/services/dynamic-sidecar/docker/boot.sh @@ -25,7 +25,7 @@ if [ "${SC_BUILD_TARGET}" = "development" ]; then # NOTE: uv does not like this requirement file... cd /devel/services/dynamic-sidecar - uv pip --quiet --no-cache-dir sync requirements/dev.txt + uv pip --quiet sync requirements/dev.txt cd - echo "$INFO" "PIP :" pip list | sed 's/^/ /' @@ -33,7 +33,7 @@ fi if [ "${SC_BOOT_MODE}" = "debug" ]; then # NOTE: production does NOT pre-installs debugpy - uv pip install --no-cache-dir debugpy + uv pip install debugpy fi # diff --git a/services/dynamic-sidecar/openapi.json b/services/dynamic-sidecar/openapi.json index cccb9924cdc..442005a03c1 100644 --- a/services/dynamic-sidecar/openapi.json +++ b/services/dynamic-sidecar/openapi.json @@ -229,95 +229,6 @@ } } }, - "/v1/containers/{id}/logs": { - "get": { - "tags": [ - "containers" - ], - "summary": "Get Container Logs", - "description": "Returns the logs of a given container if found", - "operationId": "get_container_logs_v1_containers__id__logs_get", - "parameters": [ - { - "name": "id", - "in": "path", - "required": true, - "schema": { - "type": "string", - "title": "Id" - } - }, - { - "name": "since", - "in": "query", - "required": false, - "schema": { - "type": "integer", - "title": "Timestamp", - "description": "Only return logs since this time, as a UNIX timestamp", - "default": 0 - }, - "description": "Only return logs since this time, as a UNIX timestamp" - }, - { - "name": "until", - "in": "query", - "required": false, - "schema": { - "type": "integer", - "title": "Timestamp", - "description": "Only return logs before this time, as a UNIX timestamp", - "default": 0 - }, - "description": "Only return logs before this time, as a UNIX timestamp" - }, - { - "name": "timestamps", - "in": "query", - "required": false, - "schema": { - "type": "boolean", - "title": "Display timestamps", - "description": "Enabling this parameter will include timestamps in logs", - "default": false - }, - "description": "Enabling this parameter will include timestamps in logs" - } - ], - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { - "type": "array", - "items": { - "type": "string" - }, - "title": "Response Get Container Logs V1 Containers Id Logs Get" - } - } - } - }, - "404": { - "description": "Container does not exists" - }, - "500": { - "description": "Errors in container" - }, - 
"422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/HTTPValidationError" - } - } - } - } - } - } - }, "/v1/containers/name": { "get": { "tags": [ diff --git a/services/dynamic-sidecar/requirements/_base.txt b/services/dynamic-sidecar/requirements/_base.txt index f1ff8805767..493485a9d6f 100644 --- a/services/dynamic-sidecar/requirements/_base.txt +++ b/services/dynamic-sidecar/requirements/_base.txt @@ -1,4 +1,4 @@ -aio-pika==9.5.0 +aio-pika==9.5.5 # via # -r requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in @@ -23,9 +23,9 @@ aiofiles==24.1.0 # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/_base.in # -r requirements/_base.in -aiohappyeyeballs==2.4.3 +aiohappyeyeballs==2.5.0 # via aiohttp -aiohttp==3.11.7 +aiohttp==3.11.13 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -63,15 +63,15 @@ aioprocessing==2.0.1 # via -r requirements/_base.in aiormq==6.8.1 # via aio-pika -aiosignal==1.3.1 +aiosignal==1.3.2 # via aiohttp -alembic==1.14.0 +alembic==1.15.1 # via # -r requirements/../../../packages/postgres-database/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/_base.in annotated-types==0.7.0 # via pydantic -anyio==4.6.2.post1 +anyio==4.8.0 # via # fast-depends # faststream @@ -92,14 +92,14 @@ async-timeout==4.0.3 # via aiopg asyncpg==0.30.0 # via sqlalchemy -attrs==24.2.0 +attrs==25.1.0 # via # aiohttp # jsonschema # referencing bidict==0.23.1 # via python-socketio -certifi==2024.8.30 +certifi==2025.1.31 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -132,13 +132,13 @@ certifi==2024.8.30 # httpcore # httpx # requests -charset-normalizer==3.4.0 +charset-normalizer==3.4.1 # via requests -click==8.1.7 +click==8.1.8 # via # typer # uvicorn -deprecated==1.2.15 +deprecated==1.2.18 # via # opentelemetry-api # opentelemetry-exporter-otlp-proto-grpc @@ -152,11 +152,14 @@ exceptiongroup==1.2.2 # via aio-pika fast-depends==2.4.12 # via faststream -fastapi==0.115.5 +fastapi==0.115.11 # via # -r requirements/../../../packages/service-library/requirements/_fastapi.in # -r requirements/_base.in -faststream==0.5.31 + # fastapi-lifespan-manager +fastapi-lifespan-manager==0.1.4 + # via -r requirements/../../../packages/service-library/requirements/_fastapi.in +faststream==0.5.35 # via # -r requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in @@ -168,13 +171,13 @@ frozenlist==1.5.0 # via # aiohttp # aiosignal -googleapis-common-protos==1.66.0 +googleapis-common-protos==1.69.1 # via # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http greenlet==3.1.1 # via sqlalchemy -grpcio==1.68.0 +grpcio==1.70.0 # via 
opentelemetry-exporter-otlp-proto-grpc h11==0.14.0 # via @@ -183,7 +186,7 @@ h11==0.14.0 # wsproto httpcore==1.0.7 # via httpx -httpx==0.27.2 +httpx==0.28.1 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -232,7 +235,7 @@ jsonschema==4.23.0 # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in jsonschema-specifications==2024.10.1 # via jsonschema -mako==1.3.6 +mako==1.3.9 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -273,7 +276,7 @@ multidict==6.1.0 # via # aiohttp # yarl -opentelemetry-api==1.28.2 +opentelemetry-api==1.30.0 # via # -r requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in @@ -291,19 +294,19 @@ opentelemetry-api==1.28.2 # opentelemetry-instrumentation-requests # opentelemetry-sdk # opentelemetry-semantic-conventions -opentelemetry-exporter-otlp==1.28.2 +opentelemetry-exporter-otlp==1.30.0 # via # -r requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-exporter-otlp-proto-common==1.28.2 +opentelemetry-exporter-otlp-proto-common==1.30.0 # via # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http -opentelemetry-exporter-otlp-proto-grpc==1.28.2 +opentelemetry-exporter-otlp-proto-grpc==1.30.0 # via opentelemetry-exporter-otlp -opentelemetry-exporter-otlp-proto-http==1.28.2 +opentelemetry-exporter-otlp-proto-http==1.30.0 # via opentelemetry-exporter-otlp -opentelemetry-instrumentation==0.49b2 +opentelemetry-instrumentation==0.51b0 # via # opentelemetry-instrumentation-aiopg # opentelemetry-instrumentation-asgi @@ -314,44 +317,44 @@ opentelemetry-instrumentation==0.49b2 # opentelemetry-instrumentation-logging # opentelemetry-instrumentation-redis # opentelemetry-instrumentation-requests -opentelemetry-instrumentation-aiopg==0.49b2 +opentelemetry-instrumentation-aiopg==0.51b0 # via -r requirements/../../../packages/simcore-sdk/requirements/_base.in -opentelemetry-instrumentation-asgi==0.49b2 +opentelemetry-instrumentation-asgi==0.51b0 # via opentelemetry-instrumentation-fastapi -opentelemetry-instrumentation-asyncpg==0.49b2 +opentelemetry-instrumentation-asyncpg==0.51b0 # via # -r requirements/../../../packages/postgres-database/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/_base.in -opentelemetry-instrumentation-dbapi==0.49b2 +opentelemetry-instrumentation-dbapi==0.51b0 # via opentelemetry-instrumentation-aiopg -opentelemetry-instrumentation-fastapi==0.49b2 +opentelemetry-instrumentation-fastapi==0.51b0 # via -r requirements/../../../packages/service-library/requirements/_fastapi.in -opentelemetry-instrumentation-httpx==0.49b2 +opentelemetry-instrumentation-httpx==0.51b0 # via -r requirements/../../../packages/service-library/requirements/_fastapi.in 
-opentelemetry-instrumentation-logging==0.49b2 +opentelemetry-instrumentation-logging==0.51b0 # via # -r requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-instrumentation-redis==0.49b2 +opentelemetry-instrumentation-redis==0.51b0 # via # -r requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-instrumentation-requests==0.49b2 +opentelemetry-instrumentation-requests==0.51b0 # via # -r requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-proto==1.28.2 +opentelemetry-proto==1.30.0 # via # opentelemetry-exporter-otlp-proto-common # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http -opentelemetry-sdk==1.28.2 +opentelemetry-sdk==1.30.0 # via # -r requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http -opentelemetry-semantic-conventions==0.49b2 +opentelemetry-semantic-conventions==0.51b0 # via # opentelemetry-instrumentation # opentelemetry-instrumentation-asgi @@ -362,13 +365,13 @@ opentelemetry-semantic-conventions==0.49b2 # opentelemetry-instrumentation-redis # opentelemetry-instrumentation-requests # opentelemetry-sdk -opentelemetry-util-http==0.49b2 +opentelemetry-util-http==0.51b0 # via # opentelemetry-instrumentation-asgi # opentelemetry-instrumentation-fastapi # opentelemetry-instrumentation-httpx # opentelemetry-instrumentation-requests -orjson==3.10.12 +orjson==3.10.15 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -426,21 +429,21 @@ pint==0.24.4 # via -r requirements/../../../packages/simcore-sdk/requirements/_base.in platformdirs==4.3.6 # via pint -prometheus-client==0.21.0 +prometheus-client==0.21.1 # via # -r requirements/../../../packages/service-library/requirements/_fastapi.in # prometheus-fastapi-instrumentator -prometheus-fastapi-instrumentator==7.0.0 +prometheus-fastapi-instrumentator==7.0.2 # via -r requirements/../../../packages/service-library/requirements/_fastapi.in -propcache==0.2.0 +propcache==0.3.0 # via # aiohttp # yarl -protobuf==5.28.3 +protobuf==5.29.3 # via # googleapis-common-protos # opentelemetry-proto -psutil==6.1.0 +psutil==7.0.0 # via # -r requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in @@ -449,7 +452,9 @@ psycopg2-binary==2.9.10 # via # aiopg # sqlalchemy -pydantic==2.10.2 +pycryptodome==3.21.0 + # via stream-zip +pydantic==2.10.6 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -511,9 +516,9 @@ pydantic==2.10.2 # fastapi # 
pydantic-extra-types # pydantic-settings -pydantic-core==2.27.1 +pydantic-core==2.27.2 # via pydantic -pydantic-extra-types==2.10.0 +pydantic-extra-types==2.10.2 # via # -r requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in @@ -533,8 +538,36 @@ pydantic-extra-types==2.10.0 # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in -pydantic-settings==2.6.1 +pydantic-settings==2.7.0 # via + # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt # -r requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in @@ -543,9 +576,9 @@ pydantic-settings==2.6.1 # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/_base.in -pygments==2.18.0 +pygments==2.19.1 # via rich -pyinstrument==5.0.0 +pyinstrument==5.0.1 # via # -r requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in @@ -553,11 +586,11 @@ python-dateutil==2.9.0.post0 # via arrow python-dotenv==1.0.1 # via pydantic-settings -python-engineio==4.10.1 +python-engineio==4.11.2 # via python-socketio python-magic==0.4.27 # via -r requirements/_base.in -python-socketio==5.11.4 +python-socketio==5.12.1 # via -r requirements/_base.in pyyaml==6.0.2 # via @@ -626,6 +659,34 @@ redis==5.2.1 # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in referencing==0.35.1 # via + # -c 
requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt # jsonschema # jsonschema-specifications requests==2.32.3 @@ -637,7 +698,7 @@ rich==13.9.4 # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/_base.in # typer -rpds-py==0.21.0 +rpds-py==0.23.1 # via # jsonschema # referencing @@ -645,12 +706,10 @@ shellingham==1.5.4 # via typer simple-websocket==1.1.0 # via python-engineio -six==1.16.0 +six==1.17.0 # via python-dateutil sniffio==1.3.1 - # via - # anyio - # httpx + # via anyio sqlalchemy==1.4.54 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -685,7 +744,7 @@ sqlalchemy==1.4.54 # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/_base.in # aiopg # alembic -starlette==0.41.3 +starlette==0.46.0 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -717,6 +776,10 @@ starlette==0.41.3 # -c requirements/../../../requirements/constraints.txt # fastapi # prometheus-fastapi-instrumentator +stream-zip==0.0.83 + # via + # -r requirements/../../../packages/service-library/requirements/_base.in + # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in tenacity==9.0.0 # via # -r requirements/../../../packages/service-library/requirements/_base.in @@ -731,18 +794,19 @@ tqdm==4.67.1 # -r requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/_base.in -typer==0.13.1 +typer==0.15.2 # via # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/_base.in -types-python-dateutil==2.9.0.20241003 +types-python-dateutil==2.9.0.20241206 # via arrow typing-extensions==4.12.2 # via # aiodebug # alembic + # anyio # fastapi # faststream # flexcache @@ 
-755,7 +819,7 @@ typing-extensions==4.12.2 # typer u-msgpack-python==2.8.0 # via -r requirements/_base.in -urllib3==2.2.3 +urllib3==2.3.0 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -786,13 +850,13 @@ urllib3==2.2.3 # -c requirements/../../../packages/simcore-sdk/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt # requests -uvicorn==0.32.1 +uvicorn==0.34.0 # via # -r requirements/../../../packages/service-library/requirements/_fastapi.in # -r requirements/_base.in watchdog==6.0.0 # via -r requirements/_base.in -wrapt==1.17.0 +wrapt==1.17.2 # via # deprecated # opentelemetry-instrumentation @@ -802,7 +866,7 @@ wrapt==1.17.0 # opentelemetry-instrumentation-redis wsproto==1.2.0 # via simple-websocket -yarl==1.18.0 +yarl==1.18.3 # via # -r requirements/../../../packages/postgres-database/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in diff --git a/services/dynamic-sidecar/requirements/_test.txt b/services/dynamic-sidecar/requirements/_test.txt index c99609e166a..cdf3c443940 100644 --- a/services/dynamic-sidecar/requirements/_test.txt +++ b/services/dynamic-sidecar/requirements/_test.txt @@ -6,18 +6,18 @@ aiofiles==24.1.0 # via # -c requirements/_base.txt # aioboto3 -aiohappyeyeballs==2.4.3 +aiohappyeyeballs==2.5.0 # via # -c requirements/_base.txt # aiohttp -aiohttp==3.11.7 +aiohttp==3.11.13 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt # aiobotocore aioitertools==0.12.0 # via aiobotocore -aiosignal==1.3.1 +aiosignal==1.3.2 # via # -c requirements/_base.txt # aiohttp @@ -25,7 +25,7 @@ asgi-lifespan==2.1.0 # via -r requirements/_test.in async-asgi-testclient==1.4.11 # via -r requirements/_test.in -attrs==24.2.0 +attrs==25.1.0 # via # -c requirements/_base.txt # aiohttp @@ -38,20 +38,20 @@ botocore==1.35.81 # aiobotocore # boto3 # s3transfer -certifi==2024.8.30 +certifi==2025.1.31 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt # requests -charset-normalizer==3.4.0 +charset-normalizer==3.4.1 # via # -c requirements/_base.txt # requests -coverage==7.6.10 +coverage==7.6.12 # via pytest-cov docker==7.1.0 # via -r requirements/_test.in -faker==35.0.0 +faker==36.2.2 # via -r requirements/_test.in flaky==3.8.1 # via -r requirements/_test.in @@ -81,7 +81,7 @@ multidict==6.1.0 # aiohttp # async-asgi-testclient # yarl -mypy==1.14.1 +mypy==1.15.0 # via sqlalchemy mypy-extensions==1.0.0 # via mypy @@ -91,12 +91,12 @@ packaging==24.2 # pytest pluggy==1.5.0 # via pytest -propcache==0.2.0 +propcache==0.3.0 # via # -c requirements/_base.txt # aiohttp # yarl -pytest==8.3.4 +pytest==8.3.5 # via # -r requirements/_test.in # pytest-asyncio @@ -114,7 +114,6 @@ python-dateutil==2.9.0.post0 # via # -c requirements/_base.txt # botocore - # faker python-dotenv==1.0.1 # via # -c requirements/_base.txt @@ -126,7 +125,7 @@ requests==2.32.3 # docker s3transfer==0.10.4 # via boto3 -six==1.16.0 +six==1.17.0 # via # -c requirements/_base.txt # python-dateutil @@ -141,33 +140,34 @@ sqlalchemy==1.4.54 # -r requirements/_test.in sqlalchemy2-stubs==0.0.2a38 # via sqlalchemy -types-aiobotocore-s3==2.19.0 +types-aiobotocore-s3==2.21.1 # via -r requirements/_test.in types-aiofiles==24.1.0.20241221 # via -r requirements/_test.in 
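The types-aiobotocore-s3 bump above affects static type checking only; the stubs ship no runtime behavior. A short sketch of the usual pattern for typing an aiobotocore S3 client with these stubs (illustrative, not taken from this test suite):

    from aiobotocore.session import get_session
    from types_aiobotocore_s3 import S3Client

    async def list_bucket_keys(bucket: str) -> list[str]:
        session = get_session()
        async with session.create_client("s3") as client:
            s3: S3Client = client  # the stub type gives mypy precise method signatures
            response = await s3.list_objects_v2(Bucket=bucket)
            return [item["Key"] for item in response.get("Contents", [])]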
-types-psutil==6.1.0.20241221 +types-psutil==7.0.0.20250218 # via -r requirements/_test.in types-pyyaml==6.0.12.20241230 # via -r requirements/_test.in typing-extensions==4.12.2 # via # -c requirements/_base.txt - # faker # mypy # sqlalchemy2-stubs # types-aiobotocore-s3 -urllib3==2.2.3 +tzdata==2025.1 + # via faker +urllib3==2.3.0 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt # botocore # docker # requests -wrapt==1.17.0 +wrapt==1.17.2 # via # -c requirements/_base.txt # aiobotocore -yarl==1.18.0 +yarl==1.18.3 # via # -c requirements/_base.txt # aiohttp diff --git a/services/dynamic-sidecar/requirements/_tools.txt b/services/dynamic-sidecar/requirements/_tools.txt index 57396df5036..404d7858eca 100644 --- a/services/dynamic-sidecar/requirements/_tools.txt +++ b/services/dynamic-sidecar/requirements/_tools.txt @@ -1,6 +1,6 @@ astroid==3.3.8 # via pylint -black==24.10.0 +black==25.1.0 # via -r requirements/../../../requirements/devenv.txt build==1.2.2.post1 # via pip-tools @@ -8,7 +8,7 @@ bump2version==1.0.1 # via -r requirements/../../../requirements/devenv.txt cfgv==3.4.0 # via pre-commit -click==8.1.7 +click==8.1.8 # via # -c requirements/_base.txt # black @@ -19,15 +19,15 @@ distlib==0.3.9 # via virtualenv filelock==3.17.0 # via virtualenv -identify==2.6.6 +identify==2.6.8 # via pre-commit -isort==5.13.2 +isort==6.0.1 # via # -r requirements/../../../requirements/devenv.txt # pylint mccabe==0.7.0 # via pylint -mypy==1.14.1 +mypy==1.15.0 # via # -c requirements/_test.txt # -r requirements/../../../requirements/devenv.txt @@ -46,7 +46,7 @@ packaging==24.2 # build pathspec==0.12.1 # via black -pip==25.0 +pip==25.0.1 # via pip-tools pip-tools==7.4.1 # via -r requirements/../../../requirements/devenv.txt @@ -58,7 +58,7 @@ platformdirs==4.3.6 # virtualenv pre-commit==4.1.0 # via -r requirements/../../../requirements/devenv.txt -pylint==3.3.3 +pylint==3.3.4 # via -r requirements/../../../requirements/devenv.txt pyproject-hooks==1.2.0 # via @@ -69,9 +69,9 @@ pyyaml==6.0.2 # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt # pre-commit -ruff==0.9.3 +ruff==0.9.9 # via -r requirements/../../../requirements/devenv.txt -setuptools==75.8.0 +setuptools==75.8.2 # via pip-tools tomlkit==0.13.2 # via pylint @@ -80,7 +80,7 @@ typing-extensions==4.12.2 # -c requirements/_base.txt # -c requirements/_test.txt # mypy -virtualenv==20.29.1 +virtualenv==20.29.3 # via pre-commit wheel==0.45.1 # via pip-tools diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/api/rest/containers.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/api/rest/containers.py index 2a6ffae06ee..6ada9de83de 100644 --- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/api/rest/containers.py +++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/api/rest/containers.py @@ -199,54 +199,6 @@ async def get_containers_activity( # -@router.get( - "/containers/{id}/logs", - responses={ - status.HTTP_404_NOT_FOUND: { - "description": "Container does not exists", - }, - status.HTTP_500_INTERNAL_SERVER_ERROR: {"description": "Errors in container"}, - }, -) -@cancel_on_disconnect -async def get_container_logs( - request: Request, - shared_store: Annotated[SharedStore, Depends(get_shared_store)], - container_id: str = PathParam(..., alias="id"), - since: int = Query( - default=0, - title="Timestamp", - description="Only return logs since this time, as a UNIX timestamp", - ), - until: int = Query( - default=0, - 
title="Timestamp", - description="Only return logs before this time, as a UNIX timestamp", - ), - timestamps: bool = Query( # noqa: FBT001 - default=False, - title="Display timestamps", - description="Enabling this parameter will include timestamps in logs", - ), -) -> list[str]: - """Returns the logs of a given container if found""" - _ = request - - _raise_if_container_is_missing(container_id, shared_store.container_names) - - async with docker_client() as docker: - container_instance = await docker.containers.get(container_id) - - args = {"stdout": True, "stderr": True, "since": since, "until": until} - if timestamps: - args["timestamps"] = True - - container_logs: list[str] = await container_instance.log( - **args - ) # type:ignore[call-overload] - return container_logs - - @router.get( "/containers/name", responses={ diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/api/rest/containers_long_running_tasks.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/api/rest/containers_long_running_tasks.py index 981edd42f7a..af857013a82 100644 --- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/api/rest/containers_long_running_tasks.py +++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/api/rest/containers_long_running_tasks.py @@ -124,6 +124,7 @@ async def runs_docker_compose_down_task( settings: Annotated[ApplicationSettings, Depends(get_settings)], shared_store: Annotated[SharedStore, Depends(get_shared_store)], app: Annotated[FastAPI, Depends(get_application)], + mounted_volumes: Annotated[MountedVolumes, Depends(get_mounted_volumes)], ) -> TaskId: assert request # nosec @@ -135,6 +136,7 @@ async def runs_docker_compose_down_task( app=app, shared_store=shared_store, settings=settings, + mounted_volumes=mounted_volumes, ) except TaskAlreadyRunningError as e: return cast(str, e.managed_task.task_id) # type: ignore[attr-defined] # pylint:disable=no-member diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/cli.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/cli.py index 76d663383f6..e895c3db122 100644 --- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/cli.py +++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/cli.py @@ -1,8 +1,8 @@ import asyncio import json import logging +from collections.abc import AsyncIterator from contextlib import asynccontextmanager -from typing import AsyncIterator import typer from fastapi import FastAPI @@ -17,7 +17,11 @@ from .modules.outputs import OutputsManager, setup_outputs log = logging.getLogger(__name__) -main = typer.Typer(name=PROJECT_NAME) +main = typer.Typer( + name=PROJECT_NAME, + pretty_exceptions_enable=False, + pretty_exceptions_show_locals=False, +) main.command()(create_settings_command(settings_cls=ApplicationSettings, logger=log)) diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/application.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/application.py index 58c5495563f..22e8c4729ed 100644 --- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/application.py +++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/application.py @@ -9,7 +9,7 @@ get_common_oas_options, override_fastapi_openapi_method, ) -from servicelib.fastapi.tracing import setup_tracing +from servicelib.fastapi.tracing import initialize_tracing from servicelib.logging_utils import config_all_loggers from simcore_sdk.node_ports_common.exceptions import 
NodeNotFound @@ -193,7 +199,9 @@ def create_app(): setup_prometheus_metrics(app) if application_settings.DYNAMIC_SIDECAR_TRACING: - setup_tracing(app, application_settings.DYNAMIC_SIDECAR_TRACING, PROJECT_NAME) + initialize_tracing( + app, application_settings.DYNAMIC_SIDECAR_TRACING, PROJECT_NAME + ) # ERROR HANDLERS ------------ app.add_exception_handler( diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/docker_logs.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/docker_logs.py index 1033ec53f37..668ba0db91f 100644 --- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/docker_logs.py +++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/docker_logs.py @@ -1,15 +1,15 @@ """ - BackgroundLogFetcher: - Creates background task that - reads every line of a container's log and - posts it as a message to rabbit's log channel (logger) +BackgroundLogFetcher: + Creates background task that + reads every line of a container's log and + posts it as a message to rabbit's log channel (logger) """ - import logging from asyncio import CancelledError, Task, create_task +from collections.abc import AsyncGenerator, Callable, Coroutine from contextlib import suppress -from typing import Any, AsyncGenerator, Callable, Coroutine, cast +from typing import Any, cast from aiodocker import DockerError from fastapi import FastAPI @@ -85,7 +85,9 @@ async def stop_log_fetching(self, container_name: str) -> None: return task.cancel() - with suppress(CancelledError): + + # NOTE: sometimes the docker engine causes a TimeoutError after the task is cancelled + with suppress(CancelledError, TimeoutError): await task logger.debug("Logs fetching stopped for container '%s'", container_name) diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/long_running_tasks.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/long_running_tasks.py index 6e7a7a19009..83458fd9d1e 100644 --- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/long_running_tasks.py +++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/long_running_tasks.py @@ -10,7 +10,6 @@ ProgressPercent, TaskProgress, ) -from models_library.basic_types import IDStr from models_library.generated_models.docker_rest_api import ContainerState from models_library.rabbitmq_messages import ProgressType, SimcorePlatformStatus from pydantic import PositiveInt @@ -54,7 +53,10 @@ from ..modules.mounted_fs import MountedVolumes from ..modules.notifications._notifications_ports import PortNotifier from ..modules.outputs import OutputsManager, event_propagation_disabled -from .long_running_tasksutils import run_before_shutdown_actions +from .long_running_tasks_utils import ( + ensure_read_permissions_on_user_service_data, + run_before_shutdown_actions, +) from .resource_tracking import send_service_started, send_service_stopped _logger = logging.getLogger(__name__) @@ -178,7 +180,7 @@ async def task_create_service_containers( app, ProgressType.SERVICE_CONTAINERS_STARTING, ), - description=IDStr("starting software"), + description="starting software", ) as progress_bar: with log_context(_logger, logging.INFO, "load user services preferences"): if user_services_preferences.is_feature_enabled(app): @@ -238,6 +240,7 @@ async def task_runs_docker_compose_down( app: FastAPI, shared_store: SharedStore, settings: ApplicationSettings, + mounted_volumes: MountedVolumes, ) -> None: if shared_store.compose_spec is None:
_logger.warning("No compose-spec was found") @@ -312,6 +315,9 @@ async def _send_resource_tracking_stop(platform_status: SimcorePlatformStatus): # NOTE: https://github.com/ITISFoundation/osparc-simcore/issues/4952 await _send_resource_tracking_stop(SimcorePlatformStatus.OK) raise + finally: + with log_context(_logger, logging.INFO, "ensure read permissions"): + await ensure_read_permissions_on_user_service_data(mounted_volumes) await _send_resource_tracking_stop(SimcorePlatformStatus.OK) @@ -382,7 +388,7 @@ async def task_restore_state( app, ProgressType.SERVICE_STATE_PULLING, ), - description=IDStr("pulling states"), + description="pulling states", ) as root_progress: await logged_gather( *( @@ -446,7 +452,7 @@ async def task_save_state( app, ProgressType.SERVICE_STATE_PUSHING, ), - description=IDStr("pushing state"), + description="pushing state", ) as root_progress: await logged_gather( *[ @@ -494,7 +500,7 @@ async def task_ports_inputs_pull( app, ProgressType.SERVICE_INPUTS_PULLING, ), - description=IDStr("pulling inputs"), + description="pulling inputs", ) as root_progress: with log_directory_changes( mounted_volumes.disk_inputs_path, _logger, logging.INFO @@ -539,7 +545,7 @@ async def task_ports_outputs_pull( app, ProgressType.SERVICE_OUTPUTS_PULLING, ), - description=IDStr("pulling outputs"), + description="pulling outputs", ) as root_progress: transferred_bytes = await nodeports.download_target_ports( nodeports.PortTypeName.OUTPUTS, diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/long_running_tasksutils.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/long_running_tasks_utils.py similarity index 57% rename from services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/long_running_tasksutils.py rename to services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/long_running_tasks_utils.py index d533ebc793d..21d9adaebbb 100644 --- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/long_running_tasksutils.py +++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/long_running_tasks_utils.py @@ -1,4 +1,7 @@ import logging +import os +from datetime import timedelta +from typing import Final from models_library.callbacks_mapping import UserServiceCommand from servicelib.logging_utils import log_context @@ -9,10 +12,13 @@ ContainerExecTimeoutError, ) from ..models.shared_store import SharedStore -from ..modules.container_utils import run_command_in_container +from ..modules.mounted_fs import MountedVolumes +from .container_utils import run_command_in_container _logger = logging.getLogger(__name__) +_TIMEOUT_PERMISSION_CHANGES: Final[timedelta] = timedelta(minutes=5) + async def run_before_shutdown_actions( shared_store: SharedStore, before_shutdown: list[UserServiceCommand] @@ -40,3 +46,22 @@ container_name, exc_info=True, ) + + +async def ensure_read_permissions_on_user_service_data( + mounted_volumes: MountedVolumes, +) -> None: + # Makes sure the sidecar has access to all files in the user services. + # The user could have removed read permissions from a file, which will cause an error.
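As context for the ensure_read_permissions_on_user_service_data helper introduced here: `chmod -R o+rX` grants world-read everywhere but world-execute only where it is meaningful (directories, or files already executable by someone). A rough pure-Python illustration of that `X` semantics, assuming direct filesystem access (the function body that follows instead shells out inside the sidecar's own container, since the user service container might not be running):

    import os
    import stat
    from pathlib import Path

    def ensure_world_readable(root: Path) -> None:
        # approximates `chmod -R o+rX root`
        for path in (root, *root.rglob("*")):
            mode = path.stat().st_mode
            extra = stat.S_IROTH  # 'r': always grant world-read
            # 'X': grant world-execute only to directories or to files that are
            # already executable by user, group, or others
            if path.is_dir() or mode & (stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH):
                extra |= stat.S_IXOTH
            os.chmod(path, mode | extra)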
+ + # NOTE: command runs inside self container since the user service container might not always be running + self_container = os.environ["HOSTNAME"] + for path_to_store in ( # apply changes to outputs and all state folders + *mounted_volumes.disk_state_paths_iter(), + mounted_volumes.disk_outputs_path, + ): + await run_command_in_container( + self_container, + command=f"chmod -R o+rX '{path_to_store}'", + timeout=_TIMEOUT_PERMISSION_CHANGES.total_seconds(), + ) diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/nodeports.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/nodeports.py index 39ab1519dc8..0657ffe237e 100644 --- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/nodeports.py +++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/nodeports.py @@ -14,7 +14,6 @@ import magic from aiofiles.os import remove from aiofiles.tempfile import TemporaryDirectory as AioTemporaryDirectory -from models_library.basic_types import IDStr from models_library.projects import ProjectIDStr from models_library.projects_nodes_io import NodeIDStr from models_library.services_types import ServicePortKey @@ -132,7 +131,7 @@ async def upload_outputs( # pylint:disable=too-many-statements # noqa: PLR0915 2 if is_file_type(port.property_type) else 1 for port in ports_to_set ), - description=IDStr("uploading outputs"), + description="uploading outputs", ) ) for port in ports_to_set: @@ -245,7 +244,7 @@ async def _get_data_from_port( ) -> tuple[Port, ItemConcreteValue | None, ByteSize]: async with progress_bar.sub_progress( steps=2 if is_file_type(port.property_type) else 1, - description=IDStr("getting data"), + description="getting data", ) as sub_progress: with log_context(_logger, logging.DEBUG, f"getting {port.key=}"): port_data = await port.get(sub_progress) @@ -363,7 +362,7 @@ async def _get_date_from_port_notified( raise async with progress_bar.sub_progress( - steps=len(ports_to_get), description=IDStr("downloading") + steps=len(ports_to_get), description="downloading" ) as sub_progress: results = await limited_gather( *[ diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/outputs/_event_handler.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/outputs/_event_handler.py index 8d347d75e10..a61ea375286 100644 --- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/outputs/_event_handler.py +++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/outputs/_event_handler.py @@ -25,6 +25,10 @@ _logger = logging.getLogger(__name__) +def _get_first_entry_or_none(data: list[str]) -> str | None: + return next(iter(data), None) + + class _PortKeysEventHandler(SafeFileSystemEventHandler): # NOTE: runs in the created process @@ -42,6 +46,15 @@ def handle_set_outputs_port_keys(self, *, outputs_port_keys: set[str]) -> None: def handle_toggle_event_propagation(self, *, is_enabled: bool) -> None: self._is_event_propagation_enabled = is_enabled + def _get_relative_path_parents(self, path: bytes | str) -> list[str]: + try: + spath_relative_to_outputs = Path( + path.decode() if isinstance(path, bytes) else path + ).relative_to(self.outputs_path) + except ValueError: + return [] + return [f"{x}" for x in spath_relative_to_outputs.parents] + def event_handler(self, event: FileSystemEvent) -> None: if not self._is_event_propagation_enabled: return @@ -49,25 +62,29 @@ def event_handler(self, event: FileSystemEvent) -> None: # NOTE: ignoring
all events which are not relative to modifying # the contents of the `port_key` folders from the outputs directory - path_relative_to_outputs = Path( - event.src_path.decode() - if isinstance(event.src_path, bytes) - else event.src_path - ).relative_to(self.outputs_path) + # NOTE: the `port_key` will be present in either the src_path or the dest_path + # depending on the type of event + + src_relative_path_parents = self._get_relative_path_parents(event.src_path) + dst_relative_path_parents = self._get_relative_path_parents(event.dest_path) # discard event if not part of a subfolder - relative_path_parents = path_relative_to_outputs.parents - event_in_subdirs = len(relative_path_parents) > 0 + event_in_subdirs = ( + len(src_relative_path_parents) > 0 or len(dst_relative_path_parents) > 0 + ) if not event_in_subdirs: return # only accept events generated inside `port_key` subfolder - port_key_candidate = f"{relative_path_parents[0]}" - - if port_key_candidate in self._outputs_port_keys: - # messages in this queue (part of the process), - # will be consumed by the asyncio thread - self.port_key_events_queue.put(port_key_candidate) + src_port_key_candidate = _get_first_entry_or_none(src_relative_path_parents) + dst_port_key_candidate = _get_first_entry_or_none(dst_relative_path_parents) + + for port_key_candidate in (src_port_key_candidate, dst_port_key_candidate): + if port_key_candidate in self._outputs_port_keys: + # messages in this queue (part of the process), + # will be consumed by the asyncio thread + self.port_key_events_queue.put(port_key_candidate) + break class _EventHandlerProcess: @@ -137,9 +154,7 @@ def _thread_worker_update_outputs_port_keys(self) -> None: # Propagate `outputs_port_keys` changes to the `_PortKeysEventHandler`. while True: - message: dict[ - str, Any - ] | None = ( + message: dict[str, Any] | None = ( self.outputs_context.file_system_event_handler_queue.get() # pylint:disable=no-member ) _logger.debug("received message %s", message) diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/outputs/_manager.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/outputs/_manager.py index 9a669aacc6b..f29f26358e2 100644 --- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/outputs/_manager.py +++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/outputs/_manager.py @@ -8,7 +8,6 @@ from common_library.errors_classes import OsparcErrorMixin from fastapi import FastAPI -from models_library.basic_types import IDStr from models_library.rabbitmq_messages import ProgressType from pydantic import PositiveFloat from servicelib import progress_bar @@ -136,7 +135,7 @@ async def _upload_ports() -> None: async with progress_bar.ProgressBarData( num_steps=1, progress_report_cb=self.task_progress_cb, - description=IDStr("uploading ports"), + description="uploading ports", ) as root_progress: await upload_outputs( outputs_path=self.outputs_context.outputs_path, diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/outputs/_watchdog_extensions.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/outputs/_watchdog_extensions.py index 0d4d4c08be2..2b77249e355 100644 --- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/outputs/_watchdog_extensions.py +++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/outputs/_watchdog_extensions.py @@ -27,7 +27,7 @@ ], ) -logger = logging.getLogger(__name__) +_logger = 
logging.getLogger(__name__) class _ExtendedInotifyBuffer(InotifyBuffer): @@ -89,5 +89,5 @@ def on_any_event(self, event: FileSystemEvent) -> None: # which is running in the context of the # ExtendedInotifyObserver will cause the # observer to stop working. - with log_catch(logger, reraise=False): + with log_catch(_logger, reraise=False): self.event_handler(event) diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/prometheus_metrics.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/prometheus_metrics.py index df5ae853d24..eb7ad93ed9e 100644 --- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/prometheus_metrics.py +++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/prometheus_metrics.py @@ -107,13 +107,13 @@ async def _update_metrics(self): ) self._metrics_response = MetricsResponse.from_reply(metrics_fetch_result) except ContainerExecContainerNotFoundError as e: - _logger.info( - "Container %s was not found could nto recover metrics", + _logger.debug( + "Container %s was not found could not recover metrics", container_name, ) self._metrics_response = MetricsResponse.from_error(e) except Exception as e: # pylint: disable=broad-exception-caught - _logger.info("Unexpected exception", exc_info=True) + _logger.debug("Could not recover metrics", exc_info=True) self._metrics_response = MetricsResponse.from_error(e) async def _task_metrics_recovery(self) -> None: diff --git a/services/dynamic-sidecar/tests/integration/test_modules_long_running_tasks.py b/services/dynamic-sidecar/tests/integration/test_modules_long_running_tasks.py index 6fffd8fabc1..e205946c90d 100644 --- a/services/dynamic-sidecar/tests/integration/test_modules_long_running_tasks.py +++ b/services/dynamic-sidecar/tests/integration/test_modules_long_running_tasks.py @@ -19,7 +19,7 @@ from botocore.client import Config from botocore.exceptions import ClientError from fastapi import FastAPI -from models_library.api_schemas_storage import S3BucketName +from models_library.api_schemas_storage.storage_schemas import S3BucketName from models_library.projects import ProjectID from models_library.projects_nodes_io import NodeID, SimcoreS3FileID from models_library.users import UserID @@ -47,8 +47,9 @@ pytest_simcore_core_services_selection = [ "migration", "postgres", - "storage", + "rabbit", "redis", + "storage", ] pytest_simcore_ops_services_selection = [ @@ -87,6 +88,7 @@ def project_id(user_id: int, postgres_db: sa.engine.Engine) -> Iterable[ProjectI def mock_environment( mock_storage_check: None, mock_rabbit_check: None, + rabbit_service, postgres_host_config: PostgresTestConfig, storage_endpoint: URL, minio_s3_settings_envs: EnvVarsDict, @@ -104,10 +106,7 @@ def mock_environment( "DY_SIDECAR_PROJECT_ID": f"{project_id}", "R_CLONE_PROVIDER": "MINIO", "DY_SIDECAR_CALLBACKS_MAPPING": "{}", - "RABBIT_HOST": "test", - "RABBIT_PASSWORD": "test", - "RABBIT_SECURE": "0", - "RABBIT_USER": "test", + **{k: f"{v}" for k, v in rabbit_service.dict().items()}, **base_mock_envs, } @@ -167,7 +166,7 @@ async def simcore_storage_service(mocker: MockerFixture, app: FastAPI) -> None: # NOTE: Mock to ensure container IP agrees with host IP when testing mocker.patch( - "simcore_sdk.node_ports_common._filemanager._get_https_link_if_storage_secure", + "simcore_sdk.node_ports_common._filemanager_utils._get_https_link_if_storage_secure", replace_storage_endpoint(storage_host, int(storage_port)), ) diff --git a/services/dynamic-sidecar/tests/unit/conftest.py 
b/services/dynamic-sidecar/tests/unit/conftest.py index ee2c106bb69..47488a06e48 100644 --- a/services/dynamic-sidecar/tests/unit/conftest.py +++ b/services/dynamic-sidecar/tests/unit/conftest.py @@ -11,6 +11,7 @@ from aiodocker.volumes import DockerVolume from async_asgi_testclient import TestClient from fastapi import FastAPI +from pytest_mock.plugin import MockerFixture from pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict, setenvs_from_dict from simcore_service_dynamic_sidecar.core.application import AppState, create_app from simcore_service_dynamic_sidecar.core.docker_compose_utils import ( @@ -157,3 +158,10 @@ def port_notifier(app: FastAPI) -> PortNotifier: settings.DY_SIDECAR_PROJECT_ID, settings.DY_SIDECAR_NODE_ID, ) + + +@pytest.fixture +def mock_ensure_read_permissions_on_user_service_data(mocker: MockerFixture) -> None: + mocker.patch( + "simcore_service_dynamic_sidecar.modules.long_running_tasks.ensure_read_permissions_on_user_service_data", + ) diff --git a/services/dynamic-sidecar/tests/unit/test_api_rest_containers.py b/services/dynamic-sidecar/tests/unit/test_api_rest_containers.py index 7ce7027c6e0..27ec615b631 100644 --- a/services/dynamic-sidecar/tests/unit/test_api_rest_containers.py +++ b/services/dynamic-sidecar/tests/unit/test_api_rest_containers.py @@ -423,35 +423,6 @@ async def test_containers_docker_status_docker_error( assert response.status_code == mock_aiodocker_containers_get, response.text -async def test_container_inspect_logs_remove( - test_client: TestClient, started_containers: list[str] -): - for container in started_containers: - # get container logs - # FIXME: slow call? - response = await test_client.get(f"/{API_VTAG}/containers/{container}/logs") - assert response.status_code == status.HTTP_200_OK, response.text - - # inspect container - response = await test_client.get(f"/{API_VTAG}/containers/{container}") - assert response.status_code == status.HTTP_200_OK, response.text - parsed_response = response.json() - assert parsed_response["Name"] == f"/{container}" - - -async def test_container_logs_with_timestamps( - test_client: TestClient, started_containers: list[str] -): - for container in started_containers: - print("getting logs of container", container, "...") - response = await test_client.get( - f"/{API_VTAG}/containers/{container}/logs", - query_string={"timestamps": True}, - ) - assert response.status_code == status.HTTP_200_OK, response.text - assert response.json() == [] - - async def test_container_missing_container( test_client: TestClient, not_started_containers: list[str] ): @@ -461,11 +432,6 @@ def _expected_error_string(container: str) -> dict[str, str]: } for container in not_started_containers: - # get container logs - response = await test_client.get(f"/{API_VTAG}/containers/{container}/logs") - assert response.status_code == status.HTTP_404_NOT_FOUND, response.text - assert response.json() == _expected_error_string(container) - # inspect container response = await test_client.get(f"/{API_VTAG}/containers/{container}") assert response.status_code == status.HTTP_404_NOT_FOUND, response.text @@ -485,10 +451,6 @@ def _expected_error_string(status_code: int) -> dict[str, Any]: } for container in started_containers: - # get container logs - response = await test_client.get(f"/{API_VTAG}/containers/{container}/logs") - assert response.status_code == mock_aiodocker_containers_get, response.text - assert response.json() == _expected_error_string(mock_aiodocker_containers_get) # inspect container response = await 
test_client.get(f"/{API_VTAG}/containers/{container}") diff --git a/services/dynamic-sidecar/tests/unit/test_api_rest_containers_long_running_tasks.py b/services/dynamic-sidecar/tests/unit/test_api_rest_containers_long_running_tasks.py index 9c050ae8a0e..75bc03dab74 100644 --- a/services/dynamic-sidecar/tests/unit/test_api_rest_containers_long_running_tasks.py +++ b/services/dynamic-sidecar/tests/unit/test_api_rest_containers_long_running_tasks.py @@ -520,6 +520,7 @@ def _get_awaitable() -> Awaitable: async def test_containers_down_after_starting( + mock_ensure_read_permissions_on_user_service_data: None, httpx_async_client: AsyncClient, client: Client, compose_spec: str, diff --git a/services/dynamic-sidecar/tests/unit/test_api_rest_workflow_service_metrics.py b/services/dynamic-sidecar/tests/unit/test_api_rest_workflow_service_metrics.py index 662b7033b88..baa77632eb6 100644 --- a/services/dynamic-sidecar/tests/unit/test_api_rest_workflow_service_metrics.py +++ b/services/dynamic-sidecar/tests/unit/test_api_rest_workflow_service_metrics.py @@ -172,7 +172,7 @@ async def _get_task_id_docker_compose_down(httpx_async_client: AsyncClient) -> T def _get_resource_tracking_messages( - mock_core_rabbitmq: dict[str, AsyncMock] + mock_core_rabbitmq: dict[str, AsyncMock], ) -> list[RabbitResourceTrackingMessages]: return [ x[0][1] @@ -200,6 +200,7 @@ async def _wait_for_containers_to_be_running(app: FastAPI) -> None: async def test_service_starts_and_closes_as_expected( + mock_ensure_read_permissions_on_user_service_data: None, mock_core_rabbitmq: dict[str, AsyncMock], app: FastAPI, httpx_async_client: AsyncClient, @@ -383,6 +384,7 @@ async def _mocked_get_container_states( @pytest.mark.parametrize("expected_platform_state", SimcorePlatformStatus) async def test_user_services_crash_when_running( + mock_ensure_read_permissions_on_user_service_data: None, mock_core_rabbitmq: dict[str, AsyncMock], app: FastAPI, httpx_async_client: AsyncClient, diff --git a/services/dynamic-sidecar/tests/unit/test_modules_attribute_monitor.py b/services/dynamic-sidecar/tests/unit/test_modules_attribute_monitor.py index 93ea3d6b972..5266730830a 100644 --- a/services/dynamic-sidecar/tests/unit/test_modules_attribute_monitor.py +++ b/services/dynamic-sidecar/tests/unit/test_modules_attribute_monitor.py @@ -125,6 +125,7 @@ async def logging_event_handler_observer( ], ) async def test_chown_triggers_event( + mock_ensure_read_permissions_on_user_service_data: None, logging_event_handler_observer: None, fake_dy_volumes_mount_dir: Path, command_template: str, diff --git a/services/dynamic-sidecar/tests/unit/test_modules_notifier.py b/services/dynamic-sidecar/tests/unit/test_modules_notifier.py index 380e6fa639c..e8e00ac9c31 100644 --- a/services/dynamic-sidecar/tests/unit/test_modules_notifier.py +++ b/services/dynamic-sidecar/tests/unit/test_modules_notifier.py @@ -217,7 +217,7 @@ async def test_notifier_publish_disk_usage( jsonable_encoder(ServiceDiskUsage(node_id=node_id, usage=usage)) ) - await _assert_call_count(server_disconnect, call_count=_NUMBER_OF_CLIENTS) + await _assert_call_count(server_disconnect, call_count=_NUMBER_OF_CLIENTS * 2) @pytest.fixture @@ -311,7 +311,7 @@ async def test_notifier_send_input_port_status( ) ) - await _assert_call_count(server_disconnect, call_count=_NUMBER_OF_CLIENTS) + await _assert_call_count(server_disconnect, call_count=_NUMBER_OF_CLIENTS * 2) def _get_on_output_port_spy( @@ -400,4 +400,4 @@ async def test_notifier_send_output_port_status( ) ) - await 
_assert_call_count(server_disconnect, call_count=_NUMBER_OF_CLIENTS) + await _assert_call_count(server_disconnect, call_count=_NUMBER_OF_CLIENTS * 2) diff --git a/services/dynamic-sidecar/tests/unit/test_modules_outputs_event_handler.py b/services/dynamic-sidecar/tests/unit/test_modules_outputs_event_handler.py index 35ccc7d72df..49d38d946ea 100644 --- a/services/dynamic-sidecar/tests/unit/test_modules_outputs_event_handler.py +++ b/services/dynamic-sidecar/tests/unit/test_modules_outputs_event_handler.py @@ -2,8 +2,9 @@ # pylint: disable=protected-access import asyncio +from collections.abc import AsyncIterable from pathlib import Path -from typing import AsyncIterable +from typing import Any, Final from unittest.mock import Mock import aioprocessing @@ -17,8 +18,16 @@ from simcore_service_dynamic_sidecar.modules.outputs._event_handler import ( EventHandlerObserver, _EventHandlerProcess, + _PortKeysEventHandler, ) from simcore_service_dynamic_sidecar.modules.outputs._manager import OutputsManager +from watchdog.events import ( + DirModifiedEvent, + FileClosedEvent, + FileCreatedEvent, + FileMovedEvent, + FileSystemEvent, +) @pytest.fixture @@ -124,3 +133,111 @@ async def test_event_handler_observer_health_degraded( await asyncio.sleep(observer_monitor.wait_for_heart_beat_interval_s * 3) await observer_monitor.stop() assert outputs_manager.set_all_ports_for_upload.call_count >= 1 + + +_STATE_PATH: Final[Path] = Path("/some/random/fake/path/for/state/") + + +@pytest.fixture +def mock_state_path() -> Path: + return _STATE_PATH + + +class _MockAioQueue: + def __init__(self) -> None: + self.items: list[Any] = [] + + def put(self, item: Any) -> None: + self.items.append(item) + + def get(self) -> Any | None: + try: + return self.items.pop() + except IndexError: + return None + + +@pytest.mark.parametrize( + "event, expected_port_key", + [ + pytest.param( + FileCreatedEvent(src_path=f"{_STATE_PATH}/untitled.txt", dest_path=""), + None, + id="file_create_outside", + ), + pytest.param( + FileCreatedEvent( + src_path=f"{_STATE_PATH}/output_1/untitled1.txt", + dest_path="", + ), + "output_1", + id="file_create_inside_monitored_port", + ), + pytest.param( + FileCreatedEvent( + src_path=f"{_STATE_PATH}/output_9/untitled1.txt", + dest_path="", + ), + None, + id="file_create_inside_not_monitored_port", + ), + pytest.param( + FileMovedEvent( + src_path=f"{_STATE_PATH}/untitled.txt", + dest_path=f"{_STATE_PATH}/asdsadsasad.txt", + ), + None, + id="move_outside_any_port", + ), + pytest.param( + FileMovedEvent( + src_path=f"{_STATE_PATH}/asdsadsasad.txt", + dest_path=f"{_STATE_PATH}/output_1/asdsadsasad.txt", + ), + "output_1", + id="move_to_monitored_port", + ), + pytest.param( + FileMovedEvent( + src_path=f"{_STATE_PATH}/asdsadsasad.txt", + dest_path=f"{_STATE_PATH}/output_9/asdsadsasad.txt", + ), + None, + id="move_outside_monitored_port", + ), + pytest.param( + DirModifiedEvent(src_path=f"{_STATE_PATH}/output_1", dest_path=""), + None, + id="modified_port_dir_does_nothing", + ), + pytest.param( + DirModifiedEvent(src_path=f"{_STATE_PATH}", dest_path=""), + None, + id="modified_outer_dir_does_nothing", + ), + pytest.param( + FileClosedEvent(src_path=f"{_STATE_PATH}/untitled.txt", dest_path=""), + None, + id="close_file_outside_does_nothing", + ), + pytest.param( + FileClosedEvent( + src_path=f"{_STATE_PATH}/output_1/asdsadsasad.txt", dest_path="" + ), + "output_1", + id="close_file_inside_triggers_event", + ), + ], +) +def test_port_keys_event_handler_triggers_for_events( + mock_state_path: Path, event: 
FileSystemEvent, expected_port_key: str | None +) -> None: + + queue = _MockAioQueue() + + event_handler = _PortKeysEventHandler(mock_state_path, queue) + event_handler.handle_set_outputs_port_keys(outputs_port_keys={"output_1"}) + event_handler.handle_toggle_event_propagation(is_enabled=True) + + event_handler.event_handler(event) + assert queue.get() == expected_port_key diff --git a/services/efs-guardian/docker/boot.sh b/services/efs-guardian/docker/boot.sh index ad8de46bfda..e2452e13a6e 100755 --- a/services/efs-guardian/docker/boot.sh +++ b/services/efs-guardian/docker/boot.sh @@ -24,7 +24,7 @@ if [ "${SC_BUILD_TARGET}" = "development" ]; then command -v python | sed 's/^/ /' cd services/efs-guardian - uv pip --quiet --no-cache-dir sync requirements/dev.txt + uv pip --quiet sync requirements/dev.txt cd - echo "$INFO" "PIP :" uv pip list @@ -32,7 +32,7 @@ fi if [ "${SC_BOOT_MODE}" = "debug" ]; then # NOTE: production does NOT pre-installs debugpy - uv pip install --no-cache-dir debugpy + uv pip install debugpy fi # diff --git a/services/efs-guardian/requirements/_base.txt b/services/efs-guardian/requirements/_base.txt index 4d9bd5f7fc3..b53278448b3 100644 --- a/services/efs-guardian/requirements/_base.txt +++ b/services/efs-guardian/requirements/_base.txt @@ -93,7 +93,34 @@ attrs==24.2.0 # jsonschema # referencing boto3==1.35.36 - # via aiobotocore + # via + # -c requirements/../../../packages/aws-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt + # aiobotocore botocore==1.35.36 # via # aiobotocore @@ -154,7 +181,10 @@ fastapi==0.115.5 # via # -r requirements/../../../packages/service-library/requirements/_fastapi.in # -r requirements/_base.in + # fastapi-lifespan-manager # prometheus-fastapi-instrumentator +fastapi-lifespan-manager==0.1.4 + # via -r requirements/../../../packages/service-library/requirements/_fastapi.in faststream==0.5.31 # via # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in @@ -424,6 +454,8 @@ psutil==6.1.0 # -r requirements/../../../packages/service-library/requirements/_base.in psycopg2-binary==2.9.10 # via sqlalchemy +pycryptodome==3.21.0 + # via stream-zip pydantic==2.10.2 # via # -c requirements/../../../packages/aws-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -504,6 +536,32 @@ pydantic-extra-types==2.9.0 # -r requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in pydantic-settings==2.6.1 # via + # -c requirements/../../../packages/aws-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt # -r 
requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in @@ -586,6 +644,32 @@ redis==5.2.1 # -r requirements/../../../packages/service-library/requirements/_base.in referencing==0.29.3 # via + # -c requirements/../../../packages/aws-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt # jsonschema # jsonschema-specifications requests==2.32.3 @@ -674,6 +758,10 @@ starlette==0.41.2 # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt # fastapi +stream-zip==0.0.83 + # via + # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/_base.in tenacity==9.0.0 # via # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in @@ -692,13 +780,13 @@ typer==0.12.5 # -r requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/settings-library/requirements/_base.in -types-aiobotocore==2.15.2 +types-aiobotocore==2.19.0 # via -r requirements/../../../packages/aws-library/requirements/_base.in -types-aiobotocore-ec2==2.15.2 +types-aiobotocore-ec2==2.19.0 # via types-aiobotocore -types-aiobotocore-s3==2.15.2 +types-aiobotocore-s3==2.19.0 # via types-aiobotocore -types-aiobotocore-ssm==2.15.2 +types-aiobotocore-ssm==2.19.0 # via types-aiobotocore types-awscrt==0.22.0 # via botocore-stubs diff --git a/services/efs-guardian/requirements/_test.txt b/services/efs-guardian/requirements/_test.txt index f127390ccb8..30d77f1951e 100644 --- a/services/efs-guardian/requirements/_test.txt +++ b/services/efs-guardian/requirements/_test.txt @@ -33,7 +33,7 @@ attrs==24.2.0 # aiohttp # jsonschema # referencing -aws-sam-translator==1.94.0 +aws-sam-translator==1.95.0 # via cfn-lint aws-xray-sdk==2.14.0 # via moto @@ -61,7 +61,7 @@ certifi==2024.8.30 # requests cffi==1.17.1 # via cryptography -cfn-lint==1.22.7 +cfn-lint==1.27.0 # via moto charset-normalizer==3.4.0 # via @@ -71,32 +71,32 @@ click==8.1.7 # via # -c requirements/_base.txt # flask -coverage==7.6.10 +coverage==7.6.12 # via # -r requirements/_test.in # pytest-cov -cryptography==44.0.0 +cryptography==44.0.2 # via # -c requirements/../../../requirements/constraints.txt # joserfc # moto debugpy==1.8.12 # via -r requirements/_test.in -deepdiff==8.1.1 +deepdiff==8.2.0 # via -r requirements/_test.in docker==7.1.0 # via # -r requirements/_test.in # moto 
-faker==35.0.0 +faker==36.1.1 # via -r requirements/_test.in -fakeredis==2.26.2 +fakeredis==2.27.0 # via -r requirements/_test.in flask==3.1.0 # via # flask-cors # moto -flask-cors==5.0.0 +flask-cors==5.0.1 # via moto frozenlist==1.4.1 # via @@ -140,9 +140,7 @@ jmespath==1.0.1 # -c requirements/_base.txt # boto3 # botocore -joserfc==1.0.2 - # via moto -jsondiff==2.2.1 +joserfc==1.0.4 # via moto jsonpatch==1.33 # via cfn-lint @@ -172,10 +170,8 @@ markupsafe==3.0.1 # -c requirements/_base.txt # jinja2 # werkzeug -moto==5.0.20 - # via - # -c requirements/../../../requirements/constraints.txt - # -r requirements/_test.in +moto==5.1.1 + # via -r requirements/_test.in mpmath==1.3.0 # via sympy multidict==6.1.0 @@ -189,7 +185,7 @@ openapi-schema-validator==0.6.3 # via openapi-spec-validator openapi-spec-validator==0.7.1 # via moto -orderly-set==5.2.3 +orderly-set==5.3.0 # via deepdiff packaging==24.1 # via @@ -211,7 +207,7 @@ psutil==6.1.0 # via # -c requirements/_base.txt # -r requirements/_test.in -py-partiql-parser==0.5.6 +py-partiql-parser==0.6.1 # via moto pycparser==2.22 # via cffi @@ -226,7 +222,7 @@ pydantic-core==2.27.1 # pydantic pyparsing==3.2.1 # via moto -pytest==8.3.4 +pytest==8.3.5 # via # -r requirements/_test.in # pytest-asyncio @@ -246,7 +242,6 @@ python-dateutil==2.9.0.post0 # via # -c requirements/_base.txt # botocore - # faker # moto python-dotenv==1.0.1 # via @@ -257,7 +252,6 @@ pyyaml==6.0.2 # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt # cfn-lint - # jsondiff # jsonschema-path # moto # responses @@ -321,9 +315,10 @@ typing-extensions==4.12.2 # -c requirements/_base.txt # aws-sam-translator # cfn-lint - # faker # pydantic # pydantic-core +tzdata==2025.1 + # via faker urllib3==2.2.3 # via # -c requirements/../../../requirements/constraints.txt @@ -335,6 +330,7 @@ urllib3==2.2.3 werkzeug==3.1.3 # via # flask + # flask-cors # moto wrapt==1.16.0 # via diff --git a/services/efs-guardian/requirements/_tools.txt b/services/efs-guardian/requirements/_tools.txt index 0c984e01581..bd233d72641 100644 --- a/services/efs-guardian/requirements/_tools.txt +++ b/services/efs-guardian/requirements/_tools.txt @@ -1,6 +1,6 @@ astroid==3.3.8 # via pylint -black==24.10.0 +black==25.1.0 # via -r requirements/../../../requirements/devenv.txt build==1.2.2.post1 # via pip-tools @@ -20,15 +20,15 @@ distlib==0.3.9 # via virtualenv filelock==3.17.0 # via virtualenv -identify==2.6.6 +identify==2.6.8 # via pre-commit -isort==5.13.2 +isort==6.0.1 # via # -r requirements/../../../requirements/devenv.txt # pylint mccabe==0.7.0 # via pylint -mypy==1.14.1 +mypy==1.15.0 # via -r requirements/../../../requirements/devenv.txt mypy-extensions==1.0.0 # via @@ -44,7 +44,7 @@ packaging==24.1 # build pathspec==0.12.1 # via black -pip==25.0 +pip==25.0.1 # via pip-tools pip-tools==7.4.1 # via -r requirements/../../../requirements/devenv.txt @@ -55,7 +55,7 @@ platformdirs==4.3.6 # virtualenv pre-commit==4.1.0 # via -r requirements/../../../requirements/devenv.txt -pylint==3.3.3 +pylint==3.3.4 # via -r requirements/../../../requirements/devenv.txt pyproject-hooks==1.2.0 # via @@ -68,7 +68,7 @@ pyyaml==6.0.2 # -c requirements/_test.txt # pre-commit # watchdog -ruff==0.9.3 +ruff==0.9.9 # via -r requirements/../../../requirements/devenv.txt setuptools==75.2.0 # via @@ -82,7 +82,7 @@ typing-extensions==4.12.2 # -c requirements/_base.txt # -c requirements/_test.txt # mypy -virtualenv==20.29.1 +virtualenv==20.29.2 # via pre-commit watchdog==6.0.0 # via -r requirements/_tools.in diff --git 
a/services/efs-guardian/src/simcore_service_efs_guardian/core/application.py b/services/efs-guardian/src/simcore_service_efs_guardian/core/application.py index 217a3d0a1bd..956a3c891de 100644 --- a/services/efs-guardian/src/simcore_service_efs_guardian/core/application.py +++ b/services/efs-guardian/src/simcore_service_efs_guardian/core/application.py @@ -1,7 +1,7 @@ import logging from fastapi import FastAPI -from servicelib.fastapi.tracing import setup_tracing +from servicelib.fastapi.tracing import initialize_tracing from .._meta import ( API_VERSION, @@ -43,7 +43,7 @@ def create_app(settings: ApplicationSettings | None = None) -> FastAPI: app.state.settings = app_settings assert app.state.settings.API_VERSION == API_VERSION # nosec if app.state.settings.EFS_GUARDIAN_TRACING: - setup_tracing(app, app.state.settings.EFS_GUARDIAN_TRACING, APP_NAME) + initialize_tracing(app, app.state.settings.EFS_GUARDIAN_TRACING, APP_NAME) # PLUGINS SETUP setup_rabbitmq(app) diff --git a/services/efs-guardian/src/simcore_service_efs_guardian/services/background_tasks_setup.py b/services/efs-guardian/src/simcore_service_efs_guardian/services/background_tasks_setup.py index 2f42bc5d870..e1480f84b20 100644 --- a/services/efs-guardian/src/simcore_service_efs_guardian/services/background_tasks_setup.py +++ b/services/efs-guardian/src/simcore_service_efs_guardian/services/background_tasks_setup.py @@ -2,7 +2,6 @@ import logging from collections.abc import Awaitable, Callable from datetime import timedelta -from typing import TypedDict from fastapi import FastAPI from servicelib.async_utils import cancel_wait_task @@ -12,44 +11,31 @@ from .background_tasks import removal_policy_task from .modules.redis import get_redis_lock_client - -@exclusive_periodic( - get_redis_lock_client, - task_interval=timedelta(hours=1), - retry_after=timedelta(minutes=5), -) -async def periodic_removal_policy_task(app: FastAPI) -> None: - await removal_policy_task(app) - - _logger = logging.getLogger(__name__) -class EfsGuardianBackgroundTask(TypedDict): - name: str - task_func: Callable - - -_EFS_GUARDIAN_BACKGROUND_TASKS = [ - EfsGuardianBackgroundTask( - name="efs_removal_policy_task", task_func=periodic_removal_policy_task - ) -] - - def _on_app_startup(app: FastAPI) -> Callable[[], Awaitable[None]]: async def _startup() -> None: with ( - log_context(_logger, logging.INFO, msg="Efs Guardian startup.."), + log_context(_logger, logging.INFO, msg="Efs Guardian background task "), log_catch(_logger, reraise=False), ): - app.state.efs_guardian_background_tasks = [] + app.state.efs_guardian_removal_policy_background_task = None - # Setup periodic tasks - for task in _EFS_GUARDIAN_BACKGROUND_TASKS: - app.state.efs_guardian_background_tasks.append( - asyncio.create_task(task["task_func"](), name=task["name"]) - ) + _logger.info("starting efs guardian removal policy task") + + @exclusive_periodic( + get_redis_lock_client(app), + task_interval=timedelta(hours=1), + retry_after=timedelta(minutes=5), + ) + async def _periodic_removal_policy_task() -> None: + await removal_policy_task(app) + + app.state.efs_guardian_removal_policy_background_task = asyncio.create_task( + _periodic_removal_policy_task(), + name=_periodic_removal_policy_task.__name__, + ) return _startup @@ -63,12 +49,9 @@ async def _stop() -> None: log_catch(_logger, reraise=False), ): assert _app # nosec - if _app.state.efs_guardian_background_tasks: - await asyncio.gather( - *[ - cancel_wait_task(task) - for task in _app.state.efs_guardian_background_tasks - ] + if 
_app.state.efs_guardian_removal_policy_background_task: + await cancel_wait_task( + _app.state.efs_guardian_removal_policy_background_task ) return _stop diff --git a/services/invitations/docker/boot.sh b/services/invitations/docker/boot.sh index 7cbf4401013..cd8d839d053 100755 --- a/services/invitations/docker/boot.sh +++ b/services/invitations/docker/boot.sh @@ -24,7 +24,7 @@ if [ "${SC_BUILD_TARGET}" = "development" ]; then command -v python | sed 's/^/ /' cd services/invitations - uv pip --quiet --no-cache-dir sync requirements/dev.txt + uv pip --quiet sync requirements/dev.txt cd - echo "$INFO" "PIP :" uv pip list @@ -32,7 +32,7 @@ fi if [ "${SC_BOOT_MODE}" = "debug" ]; then # NOTE: production does NOT pre-installs debugpy - uv pip install --no-cache-dir debugpy + uv pip install debugpy fi # diff --git a/services/invitations/openapi.json b/services/invitations/openapi.json index 6fe64f4702d..508191b0419 100644 --- a/services/invitations/openapi.json +++ b/services/invitations/openapi.json @@ -286,6 +286,9 @@ }, "invitation_url": { "type": "string", + "maxLength": 2083, + "minLength": 1, + "format": "uri", "title": "Invitation Url", "description": "Invitation link" } @@ -434,6 +437,9 @@ }, "docs_url": { "type": "string", + "maxLength": 2083, + "minLength": 1, + "format": "uri", "title": "Docs Url" } }, diff --git a/services/invitations/requirements/_base.txt b/services/invitations/requirements/_base.txt index 515fe9c9567..5940608f49a 100644 --- a/services/invitations/requirements/_base.txt +++ b/services/invitations/requirements/_base.txt @@ -1,4 +1,4 @@ -aio-pika==9.5.0 +aio-pika==9.5.5 # via -r requirements/../../../packages/service-library/requirements/_base.in aiocache==0.12.3 # via -r requirements/../../../packages/service-library/requirements/_base.in @@ -8,9 +8,9 @@ aiodocker==0.24.0 # via -r requirements/../../../packages/service-library/requirements/_base.in aiofiles==24.1.0 # via -r requirements/../../../packages/service-library/requirements/_base.in -aiohappyeyeballs==2.4.3 +aiohappyeyeballs==2.6.1 # via aiohttp -aiohttp==3.11.7 +aiohttp==3.11.13 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -27,11 +27,11 @@ aiohttp==3.11.7 # aiodocker aiormq==6.8.1 # via aio-pika -aiosignal==1.3.1 +aiosignal==1.3.2 # via aiohttp annotated-types==0.7.0 # via pydantic -anyio==4.6.2.post1 +anyio==4.8.0 # via # fast-depends # faststream @@ -45,12 +45,12 @@ arrow==1.3.0 # -r requirements/../../../packages/service-library/requirements/_base.in asgiref==3.8.1 # via opentelemetry-instrumentation-asgi -attrs==24.2.0 +attrs==25.2.0 # via # aiohttp # jsonschema # referencing -certifi==2024.8.30 +certifi==2025.1.31 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -69,13 +69,13 @@ certifi==2024.8.30 # requests cffi==1.17.1 # via cryptography -charset-normalizer==3.4.0 +charset-normalizer==3.4.1 # via requests -click==8.1.7 +click==8.1.8 # via # typer # uvicorn -cryptography==43.0.3 +cryptography==44.0.2 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c 
requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -90,7 +90,7 @@ cryptography==43.0.3 # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt # -r requirements/_base.in -deprecated==1.2.15 +deprecated==1.2.18 # via # opentelemetry-api # opentelemetry-exporter-otlp-proto-grpc @@ -104,21 +104,24 @@ exceptiongroup==1.2.2 # via aio-pika fast-depends==2.4.12 # via faststream -fastapi==0.115.5 +fastapi==0.115.11 # via # -r requirements/../../../packages/service-library/requirements/_fastapi.in # -r requirements/_base.in -faststream==0.5.31 + # fastapi-lifespan-manager +fastapi-lifespan-manager==0.1.4 + # via -r requirements/../../../packages/service-library/requirements/_fastapi.in +faststream==0.5.35 # via -r requirements/../../../packages/service-library/requirements/_base.in frozenlist==1.5.0 # via # aiohttp # aiosignal -googleapis-common-protos==1.66.0 +googleapis-common-protos==1.69.1 # via # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http -grpcio==1.68.0 +grpcio==1.71.0 # via opentelemetry-exporter-otlp-proto-grpc h11==0.14.0 # via @@ -128,7 +131,7 @@ httpcore==1.0.7 # via httpx httptools==0.6.4 # via uvicorn -httpx==0.27.2 +httpx==0.28.1 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -150,7 +153,7 @@ idna==3.10 # httpx # requests # yarl -importlib-metadata==8.5.0 +importlib-metadata==8.6.1 # via opentelemetry-api jsonschema==4.23.0 # via @@ -166,7 +169,7 @@ multidict==6.1.0 # via # aiohttp # yarl -opentelemetry-api==1.28.2 +opentelemetry-api==1.31.0 # via # -r requirements/../../../packages/service-library/requirements/_base.in # opentelemetry-exporter-otlp-proto-grpc @@ -180,17 +183,17 @@ opentelemetry-api==1.28.2 # opentelemetry-instrumentation-requests # opentelemetry-sdk # opentelemetry-semantic-conventions -opentelemetry-exporter-otlp==1.28.2 +opentelemetry-exporter-otlp==1.31.0 # via -r requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-exporter-otlp-proto-common==1.28.2 +opentelemetry-exporter-otlp-proto-common==1.31.0 # via # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http -opentelemetry-exporter-otlp-proto-grpc==1.28.2 +opentelemetry-exporter-otlp-proto-grpc==1.31.0 # via opentelemetry-exporter-otlp -opentelemetry-exporter-otlp-proto-http==1.28.2 +opentelemetry-exporter-otlp-proto-http==1.31.0 # via opentelemetry-exporter-otlp -opentelemetry-instrumentation==0.49b2 +opentelemetry-instrumentation==0.52b0 # via # opentelemetry-instrumentation-asgi # opentelemetry-instrumentation-fastapi @@ -198,29 +201,29 @@ opentelemetry-instrumentation==0.49b2 # opentelemetry-instrumentation-logging # opentelemetry-instrumentation-redis # opentelemetry-instrumentation-requests -opentelemetry-instrumentation-asgi==0.49b2 +opentelemetry-instrumentation-asgi==0.52b0 # via opentelemetry-instrumentation-fastapi -opentelemetry-instrumentation-fastapi==0.49b2 +opentelemetry-instrumentation-fastapi==0.52b0 # via -r requirements/../../../packages/service-library/requirements/_fastapi.in -opentelemetry-instrumentation-httpx==0.49b2 +opentelemetry-instrumentation-httpx==0.52b0 # via -r 
requirements/../../../packages/service-library/requirements/_fastapi.in -opentelemetry-instrumentation-logging==0.49b2 +opentelemetry-instrumentation-logging==0.52b0 # via -r requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-instrumentation-redis==0.49b2 +opentelemetry-instrumentation-redis==0.52b0 # via -r requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-instrumentation-requests==0.49b2 +opentelemetry-instrumentation-requests==0.52b0 # via -r requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-proto==1.28.2 +opentelemetry-proto==1.31.0 # via # opentelemetry-exporter-otlp-proto-common # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http -opentelemetry-sdk==1.28.2 +opentelemetry-sdk==1.31.0 # via # -r requirements/../../../packages/service-library/requirements/_base.in # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http -opentelemetry-semantic-conventions==0.49b2 +opentelemetry-semantic-conventions==0.52b0 # via # opentelemetry-instrumentation # opentelemetry-instrumentation-asgi @@ -229,13 +232,13 @@ opentelemetry-semantic-conventions==0.49b2 # opentelemetry-instrumentation-redis # opentelemetry-instrumentation-requests # opentelemetry-sdk -opentelemetry-util-http==0.49b2 +opentelemetry-util-http==0.52b0 # via # opentelemetry-instrumentation-asgi # opentelemetry-instrumentation-fastapi # opentelemetry-instrumentation-httpx # opentelemetry-instrumentation-requests -orjson==3.10.12 +orjson==3.10.15 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -263,25 +266,27 @@ packaging==24.2 # opentelemetry-instrumentation pamqp==3.3.0 # via aiormq -prometheus-client==0.21.0 +prometheus-client==0.21.1 # via # -r requirements/../../../packages/service-library/requirements/_fastapi.in # prometheus-fastapi-instrumentator -prometheus-fastapi-instrumentator==7.0.0 +prometheus-fastapi-instrumentator==7.0.2 # via -r requirements/../../../packages/service-library/requirements/_fastapi.in -propcache==0.2.0 +propcache==0.3.0 # via # aiohttp # yarl -protobuf==5.28.3 +protobuf==5.29.3 # via # googleapis-common-protos # opentelemetry-proto -psutil==6.1.0 +psutil==7.0.0 # via -r requirements/../../../packages/service-library/requirements/_base.in pycparser==2.22 # via cffi -pydantic==2.10.2 +pycryptodome==3.21.0 + # via stream-zip +pydantic==2.10.6 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -310,9 +315,9 @@ pydantic==2.10.2 # fastapi # pydantic-extra-types # pydantic-settings -pydantic-core==2.27.1 +pydantic-core==2.27.2 # via pydantic -pydantic-extra-types==2.10.0 +pydantic-extra-types==2.10.3 # via # -r requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in @@ -322,15 +327,27 @@ pydantic-extra-types==2.10.0 # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in # -r 
requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in -pydantic-settings==2.6.1 +pydantic-settings==2.7.0 # via + # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt # -r requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/settings-library/requirements/_base.in -pygments==2.18.0 +pygments==2.19.1 # via rich -pyinstrument==5.0.0 +pyinstrument==5.0.1 # via -r requirements/../../../packages/service-library/requirements/_base.in python-dateutil==2.9.0.post0 # via arrow @@ -371,6 +388,18 @@ redis==5.2.1 # -r requirements/../../../packages/service-library/requirements/_base.in referencing==0.35.1 # via + # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt # jsonschema # jsonschema-specifications requests==2.32.3 @@ -380,19 +409,17 @@ rich==13.9.4 # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/settings-library/requirements/_base.in # typer -rpds-py==0.21.0 +rpds-py==0.23.1 # via # jsonschema # referencing shellingham==1.5.4 # via typer -six==1.16.0 +six==1.17.0 # via python-dateutil sniffio==1.3.1 - # via - # anyio - # httpx -starlette==0.41.3 + # via anyio +starlette==0.46.1 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -408,22 +435,25 @@ starlette==0.41.3 # -c requirements/../../../requirements/constraints.txt # fastapi # prometheus-fastapi-instrumentator +stream-zip==0.0.83 + # via -r requirements/../../../packages/service-library/requirements/_base.in tenacity==9.0.0 # via -r requirements/../../../packages/service-library/requirements/_base.in toolz==1.0.0 # via -r requirements/../../../packages/service-library/requirements/_base.in tqdm==4.67.1 # via -r requirements/../../../packages/service-library/requirements/_base.in -typer==0.13.1 +typer==0.15.2 # via # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/_base.in -types-python-dateutil==2.9.0.20241003 +types-python-dateutil==2.9.0.20241206 # via arrow typing-extensions==4.12.2 # via # aiodebug + # anyio # fastapi # faststream # opentelemetry-sdk @@ -431,7 +461,7 @@ typing-extensions==4.12.2 # pydantic-core # pydantic-extra-types # typer -urllib3==2.2.3 +urllib3==2.3.0 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -446,23 +476,23 @@ urllib3==2.2.3 # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt # requests -uvicorn==0.32.1 +uvicorn==0.34.0 # via # -r requirements/../../../packages/service-library/requirements/_fastapi.in # -r requirements/_base.in uvloop==0.21.0 # via uvicorn -watchfiles==1.0.0 +watchfiles==1.0.4 # via uvicorn -websockets==14.1 +websockets==15.0.1 # via uvicorn 
-wrapt==1.17.0 +wrapt==1.17.2 # via # deprecated # opentelemetry-instrumentation # opentelemetry-instrumentation-httpx # opentelemetry-instrumentation-redis -yarl==1.18.0 +yarl==1.18.3 # via # -r requirements/../../../packages/service-library/requirements/_base.in # aio-pika diff --git a/services/invitations/requirements/_test.txt b/services/invitations/requirements/_test.txt index 97e071db1a5..8db5683f40d 100644 --- a/services/invitations/requirements/_test.txt +++ b/services/invitations/requirements/_test.txt @@ -1,22 +1,22 @@ -anyio==4.6.2.post1 +anyio==4.8.0 # via # -c requirements/_base.txt # httpx -attrs==24.2.0 +attrs==25.2.0 # via # -c requirements/_base.txt # hypothesis -certifi==2024.8.30 +certifi==2025.1.31 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt # httpcore # httpx -coverage==7.6.10 +coverage==7.6.12 # via # -r requirements/_test.in # pytest-cov -faker==35.0.0 +faker==37.0.0 # via -r requirements/_test.in h11==0.14.0 # via @@ -26,12 +26,12 @@ httpcore==1.0.7 # via # -c requirements/_base.txt # httpx -httpx==0.27.2 +httpx==0.28.1 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt # -r requirements/_test.in -hypothesis==6.124.7 +hypothesis==6.129.0 # via -r requirements/_test.in idna==3.10 # via @@ -47,7 +47,7 @@ packaging==24.2 # pytest-sugar pluggy==1.5.0 # via pytest -pytest==8.3.4 +pytest==8.3.5 # via # -r requirements/_test.in # pytest-asyncio @@ -63,23 +63,14 @@ pytest-runner==6.0.1 # via -r requirements/_test.in pytest-sugar==1.0.0 # via -r requirements/_test.in -python-dateutil==2.9.0.post0 - # via - # -c requirements/_base.txt - # faker python-dotenv==1.0.1 # via # -c requirements/_base.txt # -r requirements/_test.in -six==1.16.0 - # via - # -c requirements/_base.txt - # python-dateutil sniffio==1.3.1 # via # -c requirements/_base.txt # anyio - # httpx sortedcontainers==2.4.0 # via hypothesis termcolor==2.5.0 @@ -87,4 +78,6 @@ termcolor==2.5.0 typing-extensions==4.12.2 # via # -c requirements/_base.txt - # faker + # anyio +tzdata==2025.1 + # via faker diff --git a/services/invitations/requirements/_tools.txt b/services/invitations/requirements/_tools.txt index f7afb651ac2..a2cf1e41924 100644 --- a/services/invitations/requirements/_tools.txt +++ b/services/invitations/requirements/_tools.txt @@ -1,6 +1,6 @@ -astroid==3.3.8 +astroid==3.3.9 # via pylint -black==24.10.0 +black==25.1.0 # via -r requirements/../../../requirements/devenv.txt build==1.2.2.post1 # via pip-tools @@ -8,7 +8,7 @@ bump2version==1.0.1 # via -r requirements/../../../requirements/devenv.txt cfgv==3.4.0 # via pre-commit -click==8.1.7 +click==8.1.8 # via # -c requirements/_base.txt # black @@ -19,15 +19,15 @@ distlib==0.3.9 # via virtualenv filelock==3.17.0 # via virtualenv -identify==2.6.6 +identify==2.6.9 # via pre-commit -isort==5.13.2 +isort==6.0.1 # via # -r requirements/../../../requirements/devenv.txt # pylint mccabe==0.7.0 # via pylint -mypy==1.14.1 +mypy==1.15.0 # via -r requirements/../../../requirements/devenv.txt mypy-extensions==1.0.0 # via @@ -43,7 +43,7 @@ packaging==24.2 # build pathspec==0.12.1 # via black -pip==25.0 +pip==25.0.1 # via pip-tools pip-tools==7.4.1 # via -r requirements/../../../requirements/devenv.txt @@ -54,7 +54,7 @@ platformdirs==4.3.6 # virtualenv pre-commit==4.1.0 # via -r requirements/../../../requirements/devenv.txt -pylint==3.3.3 +pylint==3.3.5 # via -r requirements/../../../requirements/devenv.txt pyproject-hooks==1.2.0 # via @@ -66,9 +66,9 @@ pyyaml==6.0.2 # -c 
requirements/_base.txt # pre-commit # watchdog -ruff==0.9.3 +ruff==0.9.10 # via -r requirements/../../../requirements/devenv.txt -setuptools==75.8.0 +setuptools==76.0.0 # via pip-tools tomlkit==0.13.2 # via pylint @@ -77,7 +77,7 @@ typing-extensions==4.12.2 # -c requirements/_base.txt # -c requirements/_test.txt # mypy -virtualenv==20.29.1 +virtualenv==20.29.3 # via pre-commit watchdog==6.0.0 # via -r requirements/_tools.in diff --git a/services/invitations/src/simcore_service_invitations/core/application.py b/services/invitations/src/simcore_service_invitations/core/application.py index 98b798862c1..b3f96c07a82 100644 --- a/services/invitations/src/simcore_service_invitations/core/application.py +++ b/services/invitations/src/simcore_service_invitations/core/application.py @@ -3,7 +3,7 @@ from servicelib.fastapi.prometheus_instrumentation import ( setup_prometheus_instrumentation, ) -from servicelib.fastapi.tracing import setup_tracing +from servicelib.fastapi.tracing import initialize_tracing from .._meta import ( API_VERSION, @@ -41,7 +41,7 @@ def create_app(settings: ApplicationSettings | None = None) -> FastAPI: if app.state.settings.INVITATIONS_PROMETHEUS_INSTRUMENTATION_ENABLED: setup_prometheus_instrumentation(app) if app.state.settings.INVITATIONS_TRACING: - setup_tracing(app, app.state.settings.INVITATIONS_TRACING, APP_NAME) + initialize_tracing(app, app.state.settings.INVITATIONS_TRACING, APP_NAME) # ERROR HANDLERS exceptions_handlers.setup(app) diff --git a/services/invitations/tests/unit/test__model_examples.py b/services/invitations/tests/unit/test__model_examples.py index 78dfdd96669..b295c457ea8 100644 --- a/services/invitations/tests/unit/test__model_examples.py +++ b/services/invitations/tests/unit/test__model_examples.py @@ -5,14 +5,16 @@ # pylint: disable=unused-variable import itertools -import json from typing import Any import pytest import simcore_service_invitations import simcore_service_invitations.api._invitations from pydantic import BaseModel -from pytest_simcore.pydantic_models import iter_model_examples_in_module +from pytest_simcore.pydantic_models import ( + assert_validation_model, + iter_model_examples_in_module, +) @pytest.mark.parametrize( @@ -23,7 +25,8 @@ ), ) def test_model_examples( - model_cls: type[BaseModel], example_name: int, example_data: Any + model_cls: type[BaseModel], example_name: str, example_data: Any ): - print(example_name, ":", json.dumps(example_data)) - assert model_cls.model_validate(example_data) + assert_validation_model( + model_cls, example_name=example_name, example_data=example_data + ) diff --git a/services/migration/requirements/_test.txt b/services/migration/requirements/_test.txt index 528fc61d228..4bed24a2e59 100644 --- a/services/migration/requirements/_test.txt +++ b/services/migration/requirements/_test.txt @@ -3,13 +3,13 @@ attrs==25.1.0 # jsonschema # pytest-docker # referencing -certifi==2024.12.14 +certifi==2025.1.31 # via # -c requirements/../../../requirements/constraints.txt # requests charset-normalizer==3.4.1 # via requests -coverage==7.6.10 +coverage==7.6.12 # via pytest-cov docker==7.1.0 # via -r requirements/_test.in @@ -23,7 +23,7 @@ jsonschema==4.23.0 # via -r requirements/_test.in jsonschema-specifications==2024.10.1 # via jsonschema -mypy==1.14.1 +mypy==1.15.0 # via sqlalchemy mypy-extensions==1.0.0 # via mypy @@ -31,7 +31,7 @@ packaging==24.2 # via pytest pluggy==1.5.0 # via pytest -pytest==8.3.4 +pytest==8.3.5 # via # -r requirements/_test.in # pytest-asyncio @@ -44,7 +44,7 @@ 
pytest-asyncio==0.23.8 # -r requirements/_test.in pytest-cov==6.0.0 # via -r requirements/_test.in -pytest-docker==3.1.1 +pytest-docker==3.2.0 # via -r requirements/_test.in pytest-mock==3.14.0 # via -r requirements/_test.in @@ -63,7 +63,7 @@ referencing==0.35.1 # jsonschema-specifications requests==2.32.3 # via docker -rpds-py==0.22.3 +rpds-py==0.23.1 # via # jsonschema # referencing diff --git a/services/migration/requirements/_tools.txt b/services/migration/requirements/_tools.txt index cec972d940b..19e0de20677 100644 --- a/services/migration/requirements/_tools.txt +++ b/services/migration/requirements/_tools.txt @@ -1,6 +1,6 @@ astroid==3.3.8 # via pylint -black==24.10.0 +black==25.1.0 # via -r requirements/../../../requirements/devenv.txt build==1.2.2.post1 # via pip-tools @@ -18,15 +18,15 @@ distlib==0.3.9 # via virtualenv filelock==3.17.0 # via virtualenv -identify==2.6.6 +identify==2.6.8 # via pre-commit -isort==5.13.2 +isort==6.0.1 # via # -r requirements/../../../requirements/devenv.txt # pylint mccabe==0.7.0 # via pylint -mypy==1.14.1 +mypy==1.15.0 # via # -c requirements/_test.txt # -r requirements/../../../requirements/devenv.txt @@ -44,7 +44,7 @@ packaging==24.2 # build pathspec==0.12.1 # via black -pip==25.0 +pip==25.0.1 # via pip-tools pip-tools==7.4.1 # via -r requirements/../../../requirements/devenv.txt @@ -55,7 +55,7 @@ platformdirs==4.3.6 # virtualenv pre-commit==4.1.0 # via -r requirements/../../../requirements/devenv.txt -pylint==3.3.3 +pylint==3.3.4 # via -r requirements/../../../requirements/devenv.txt pyproject-hooks==1.2.0 # via @@ -67,9 +67,9 @@ pyyaml==6.0.2 # -c requirements/_test.txt # pre-commit # watchdog -ruff==0.9.3 +ruff==0.9.9 # via -r requirements/../../../requirements/devenv.txt -setuptools==75.8.0 +setuptools==75.8.2 # via pip-tools tomlkit==0.13.2 # via pylint @@ -77,7 +77,7 @@ typing-extensions==4.12.2 # via # -c requirements/_test.txt # mypy -virtualenv==20.29.1 +virtualenv==20.29.2 # via pre-commit watchdog==6.0.0 # via -r requirements/_tools.in diff --git a/services/payments/docker/boot.sh b/services/payments/docker/boot.sh index eb77d928809..fa3024433dc 100755 --- a/services/payments/docker/boot.sh +++ b/services/payments/docker/boot.sh @@ -24,7 +24,7 @@ if [ "${SC_BUILD_TARGET}" = "development" ]; then command -v python | sed 's/^/ /' cd services/payments - uv pip --quiet --no-cache-dir sync requirements/dev.txt + uv pip --quiet sync requirements/dev.txt cd - echo "$INFO" "PIP :" uv pip list @@ -32,7 +32,7 @@ fi if [ "${SC_BOOT_MODE}" = "debug" ]; then # NOTE: production does NOT pre-installs debugpy - uv pip install --no-cache-dir debugpy + uv pip install debugpy fi # diff --git a/services/payments/requirements/_base.txt b/services/payments/requirements/_base.txt index a3a8b15b9d7..0d9c9bb03a5 100644 --- a/services/payments/requirements/_base.txt +++ b/services/payments/requirements/_base.txt @@ -124,6 +124,9 @@ fastapi==0.115.5 # via # -r requirements/../../../packages/service-library/requirements/_fastapi.in # -r requirements/_base.in + # fastapi-lifespan-manager +fastapi-lifespan-manager==0.1.4 + # via -r requirements/../../../packages/service-library/requirements/_fastapi.in faststream==0.5.31 # via -r requirements/../../../packages/service-library/requirements/_base.in frozenlist==1.5.0 @@ -355,6 +358,8 @@ pyasn1==0.6.1 # rsa pycparser==2.22 # via cffi +pycryptodome==3.21.0 + # via stream-zip pydantic==2.10.2 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -403,6 +408,20 @@ 
pydantic-extra-types==2.10.0 # -r requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in pydantic-settings==2.6.1 # via + # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt # -r requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in @@ -462,6 +481,20 @@ redis==5.2.1 # -r requirements/../../../packages/service-library/requirements/_base.in referencing==0.35.1 # via + # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt # jsonschema # jsonschema-specifications requests==2.32.3 @@ -540,6 +573,8 @@ starlette==0.41.3 # -c requirements/../../../requirements/constraints.txt # fastapi # prometheus-fastapi-instrumentator +stream-zip==0.0.83 + # via -r requirements/../../../packages/service-library/requirements/_base.in tenacity==9.0.0 # via -r requirements/../../../packages/service-library/requirements/_base.in toolz==1.0.0 diff --git a/services/payments/requirements/_test.txt b/services/payments/requirements/_test.txt index 010e419a179..011201390a5 100644 --- a/services/payments/requirements/_test.txt +++ b/services/payments/requirements/_test.txt @@ -36,13 +36,13 @@ charset-normalizer==3.4.0 # via # -c requirements/_base.txt # requests -coverage==7.6.10 +coverage==7.6.12 # via # -r requirements/_test.in # pytest-cov docker==7.1.0 # via -r requirements/_test.in -faker==35.0.0 +faker==36.1.1 # via -r requirements/_test.in frozenlist==1.5.0 # via @@ -85,7 +85,7 @@ multidict==6.1.0 # -c requirements/_base.txt # aiohttp # yarl -mypy==1.14.1 +mypy==1.15.0 # via sqlalchemy mypy-extensions==1.0.0 # via mypy @@ -103,7 +103,7 @@ propcache==0.2.0 # -c requirements/_base.txt # aiohttp # yarl -pytest==8.3.4 +pytest==8.3.5 # via # -r requirements/_test.in # pytest-asyncio @@ -125,10 +125,6 @@ pytest-runner==6.0.1 # via -r requirements/_test.in pytest-sugar==1.0.0 # via -r requirements/_test.in -python-dateutil==2.9.0.post0 - # via - # -c requirements/_base.txt - # faker python-dotenv==1.0.1 # via # -c requirements/_base.txt @@ -151,10 +147,6 @@ simple-websocket==1.1.0 # via # -c requirements/_base.txt # python-engineio -six==1.16.0 - # via - # -c requirements/_base.txt - # python-dateutil sniffio==1.3.1 # via # -c requirements/_base.txt @@ -172,18 +164,19 @@ termcolor==2.5.0 # via pytest-sugar types-aiofiles==24.1.0.20241221 # via -r requirements/_test.in -types-pyasn1==0.6.0.20240913 +types-pyasn1==0.6.0.20250208 # via types-python-jose -types-python-jose==3.3.4.20240106 +types-python-jose==3.4.0.20250224 # via -r requirements/_test.in types-pyyaml==6.0.12.20241230 # via -r requirements/_test.in typing-extensions==4.12.2 # via # -c requirements/_base.txt - # faker # mypy # sqlalchemy2-stubs +tzdata==2025.1 + # via faker urllib3==2.2.3 # via # -c requirements/../../../requirements/constraints.txt diff --git a/services/payments/requirements/_tools.txt b/services/payments/requirements/_tools.txt index 554c8afc184..c49f6c3693d 100644 --- a/services/payments/requirements/_tools.txt +++ b/services/payments/requirements/_tools.txt @@ -1,6 +1,6 @@ astroid==3.3.8 # via pylint -black==24.10.0 +black==25.1.0 # via -r 
requirements/../../../requirements/devenv.txt build==1.2.2.post1 # via pip-tools @@ -19,15 +19,15 @@ distlib==0.3.9 # via virtualenv filelock==3.17.0 # via virtualenv -identify==2.6.6 +identify==2.6.8 # via pre-commit -isort==5.13.2 +isort==6.0.1 # via # -r requirements/../../../requirements/devenv.txt # pylint mccabe==0.7.0 # via pylint -mypy==1.14.1 +mypy==1.15.0 # via # -c requirements/_test.txt # -r requirements/../../../requirements/devenv.txt @@ -46,7 +46,7 @@ packaging==24.2 # build pathspec==0.12.1 # via black -pip==25.0 +pip==25.0.1 # via pip-tools pip-tools==7.4.1 # via -r requirements/../../../requirements/devenv.txt @@ -57,7 +57,7 @@ platformdirs==4.3.6 # virtualenv pre-commit==4.1.0 # via -r requirements/../../../requirements/devenv.txt -pylint==3.3.3 +pylint==3.3.4 # via -r requirements/../../../requirements/devenv.txt pyproject-hooks==1.2.0 # via @@ -68,9 +68,9 @@ pyyaml==6.0.2 # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt # pre-commit -ruff==0.9.3 +ruff==0.9.9 # via -r requirements/../../../requirements/devenv.txt -setuptools==75.8.0 +setuptools==75.8.2 # via pip-tools tomlkit==0.13.2 # via pylint @@ -79,7 +79,7 @@ typing-extensions==4.12.2 # -c requirements/_base.txt # -c requirements/_test.txt # mypy -virtualenv==20.29.1 +virtualenv==20.29.2 # via pre-commit wheel==0.45.1 # via pip-tools diff --git a/services/payments/src/simcore_service_payments/core/application.py b/services/payments/src/simcore_service_payments/core/application.py index c85c7f91420..6bfaf9b2dec 100644 --- a/services/payments/src/simcore_service_payments/core/application.py +++ b/services/payments/src/simcore_service_payments/core/application.py @@ -3,7 +3,7 @@ from servicelib.fastapi.prometheus_instrumentation import ( setup_prometheus_instrumentation, ) -from servicelib.fastapi.tracing import setup_tracing +from servicelib.fastapi.tracing import initialize_tracing from .._meta import ( API_VERSION, @@ -71,7 +71,7 @@ def create_app(settings: ApplicationSettings | None = None) -> FastAPI: if app.state.settings.PAYMENTS_PROMETHEUS_INSTRUMENTATION_ENABLED: setup_prometheus_instrumentation(app) if app.state.settings.PAYMENTS_TRACING: - setup_tracing(app, app.state.settings.PAYMENTS_TRACING, APP_NAME) + initialize_tracing(app, app.state.settings.PAYMENTS_TRACING, APP_NAME) # ERROR HANDLERS # ... add here ... 
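The `setup_tracing` → `initialize_tracing` rename above repeats verbatim across efs-guardian, invitations, and payments: each service's `create_app` gates the call on its own nullable tracing settings field and passes its `APP_NAME` constant. Below is a minimal sketch of that gating pattern, assuming hypothetical `ExampleSettings` / `EXAMPLE_TRACING` / `example-service` names invented for illustration; only `initialize_tracing`'s call shape and the `if app.state.settings.<SERVICE>_TRACING:` guard come from this diff, and the stub body stands in for the real helper in `servicelib.fastapi.tracing`.

from fastapi import FastAPI


class TracingSettings:
    """Stand-in for the real tracing settings model (assumption)."""


class ExampleSettings:
    # A None/null field disables tracing entirely, mirroring the
    # `if app.state.settings.PAYMENTS_TRACING:` style guards in this diff.
    EXAMPLE_TRACING: TracingSettings | None = None


APP_NAME = "example-service"  # each service passes its own APP_NAME constant


def initialize_tracing(
    app: FastAPI, tracing_settings: TracingSettings, service_name: str
) -> None:
    # Placeholder body: the real helper in servicelib.fastapi.tracing wires
    # OpenTelemetry instrumentation onto the given FastAPI app.
    ...


def create_app(settings: ExampleSettings) -> FastAPI:
    app = FastAPI()
    app.state.settings = settings
    if settings.EXAMPLE_TRACING:  # tracing stays strictly opt-in
        initialize_tracing(app, settings.EXAMPLE_TRACING, APP_NAME)
    return app

The rename itself is mechanical, which is why every call site in this diff changes the import and the call in lockstep with no surrounding behavioural edits.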
diff --git a/services/payments/src/simcore_service_payments/db/payments_methods_repo.py b/services/payments/src/simcore_service_payments/db/payments_methods_repo.py index 4eb43b667b1..cea7b8e6158 100644 --- a/services/payments/src/simcore_service_payments/db/payments_methods_repo.py +++ b/services/payments/src/simcore_service_payments/db/payments_methods_repo.py @@ -1,6 +1,6 @@ import datetime -import simcore_postgres_database.errors as db_errors +import simcore_postgres_database.aiopg_errors as db_errors import sqlalchemy as sa from arrow import utcnow from models_library.api_schemas_payments.errors import ( diff --git a/services/payments/src/simcore_service_payments/db/payments_transactions_repo.py b/services/payments/src/simcore_service_payments/db/payments_transactions_repo.py index 8b2eef6f228..d7f6b893668 100644 --- a/services/payments/src/simcore_service_payments/db/payments_transactions_repo.py +++ b/services/payments/src/simcore_service_payments/db/payments_transactions_repo.py @@ -13,7 +13,7 @@ from models_library.users import UserID from models_library.wallets import WalletID from pydantic import HttpUrl, PositiveInt, TypeAdapter -from simcore_postgres_database import errors as pg_errors +from simcore_postgres_database import aiopg_errors as pg_errors from simcore_postgres_database.models.payments_transactions import ( PaymentTransactionState, payments_transactions, diff --git a/services/payments/tests/unit/test__model_examples.py b/services/payments/tests/unit/test__model_examples.py index c97e35a4686..c52525a65db 100644 --- a/services/payments/tests/unit/test__model_examples.py +++ b/services/payments/tests/unit/test__model_examples.py @@ -4,13 +4,15 @@ # pylint: disable=unused-argument # pylint: disable=unused-variable -import json from typing import Any import pytest import simcore_service_payments.models -from pydantic import BaseModel, ValidationError -from pytest_simcore.pydantic_models import walk_model_examples_in_package +from pydantic import BaseModel +from pytest_simcore.pydantic_models import ( + assert_validation_model, + walk_model_examples_in_package, +) @pytest.mark.parametrize( @@ -18,13 +20,8 @@ walk_model_examples_in_package(simcore_service_payments.models), ) def test_api_server_model_examples( - model_cls: type[BaseModel], example_name: int, example_data: Any + model_cls: type[BaseModel], example_name: str, example_data: Any ): - try: - assert model_cls.model_validate(example_data) is not None - except ValidationError as err: - pytest.fail( - f"{example_name} is invalid {model_cls.__module__}.{model_cls.__name__}:" - f"\n{json.dumps(example_data, indent=1)}" - f"\nError: {err}" - ) + assert_validation_model( + model_cls, example_name=example_name, example_data=example_data + ) diff --git a/services/resource-usage-tracker/docker/boot.sh b/services/resource-usage-tracker/docker/boot.sh index 28854b7b2b5..ddfbfaf306e 100755 --- a/services/resource-usage-tracker/docker/boot.sh +++ b/services/resource-usage-tracker/docker/boot.sh @@ -24,7 +24,7 @@ if [ "${SC_BUILD_TARGET}" = "development" ]; then command -v python | sed 's/^/ /' cd services/resource-usage-tracker - uv pip --quiet --no-cache-dir sync requirements/dev.txt + uv pip --quiet sync requirements/dev.txt cd - echo "$INFO" "PIP :" uv pip list @@ -32,7 +32,7 @@ fi if [ "${SC_BOOT_MODE}" = "debug" ]; then # NOTE: production does NOT pre-installs debugpy - uv pip install --no-cache-dir debugpy + uv pip install debugpy fi # @@ -48,7 +48,7 @@ if [ "${SC_BOOT_MODE}" = "debug" ]; then exec sh -c " cd 
services/resource-usage-tracker/src/simcore_service_resource_usage_tracker && \ - python -m debugpy --listen 0.0.0.0:${RESOURCE_USAGE_TRACKER_REMOTE_DEBUGGING_PORT} -m uvicorn web_main:the_app \ + python -m debugpy --listen 0.0.0.0:${RESOURCE_USAGE_TRACKER_REMOTE_DEBUGGING_PORT} -m uvicorn main:the_app \ --host 0.0.0.0 \ --reload \ $reload_dir_packages diff --git a/services/resource-usage-tracker/openapi.json b/services/resource-usage-tracker/openapi.json index b267c3f0a9e..cef757856bf 100644 --- a/services/resource-usage-tracker/openapi.json +++ b/services/resource-usage-tracker/openapi.json @@ -184,7 +184,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/PricingPlanGet" + "$ref": "#/components/schemas/RutPricingPlanGet" } } } @@ -249,7 +249,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/PricingUnitGet" + "$ref": "#/components/schemas/RutPricingUnitGet" } } } @@ -383,7 +383,7 @@ ], "title": "PricingPlanClassification" }, - "PricingPlanGet": { + "RutPricingPlanGet": { "properties": { "pricing_plan_id": { "type": "integer", @@ -415,7 +415,7 @@ "anyOf": [ { "items": { - "$ref": "#/components/schemas/PricingUnitGet" + "$ref": "#/components/schemas/RutPricingUnitGet" }, "type": "array" }, @@ -441,9 +441,9 @@ "pricing_units", "is_active" ], - "title": "PricingPlanGet" + "title": "RutPricingPlanGet" }, - "PricingUnitGet": { + "RutPricingUnitGet": { "properties": { "pricing_unit_id": { "type": "integer", @@ -456,7 +456,15 @@ "title": "Unit Name" }, "unit_extra_info": { - "$ref": "#/components/schemas/UnitExtraInfo" + "anyOf": [ + { + "$ref": "#/components/schemas/UnitExtraInfoTier" + }, + { + "$ref": "#/components/schemas/UnitExtraInfoLicense" + } + ], + "title": "Unit Extra Info" }, "current_cost_per_unit": { "type": "string", @@ -486,9 +494,25 @@ "default", "specific_info" ], - "title": "PricingUnitGet" + "title": "RutPricingUnitGet" + }, + "UnitExtraInfoLicense": { + "properties": { + "num_of_seats": { + "type": "integer", + "minimum": 0, + "title": "Num Of Seats" + } + }, + "additionalProperties": true, + "type": "object", + "required": [ + "num_of_seats" + ], + "title": "UnitExtraInfoLicense", + "description": "Custom information that is propagated to the frontend. Defined fields are mandatory." }, - "UnitExtraInfo": { + "UnitExtraInfoTier": { "properties": { "CPU": { "type": "integer", @@ -513,7 +537,7 @@ "RAM", "VRAM" ], - "title": "UnitExtraInfo", + "title": "UnitExtraInfoTier", "description": "Custom information that is propagated to the frontend. Defined fields are mandatory." 
}, "ValidationError": { diff --git a/services/resource-usage-tracker/requirements/_base.txt b/services/resource-usage-tracker/requirements/_base.txt index b20058f6992..97d6e5b8891 100644 --- a/services/resource-usage-tracker/requirements/_base.txt +++ b/services/resource-usage-tracker/requirements/_base.txt @@ -93,7 +93,34 @@ attrs==23.2.0 # jsonschema # referencing boto3==1.34.131 - # via aiobotocore + # via + # -c requirements/../../../packages/aws-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt + # aiobotocore botocore==1.34.131 # via # aiobotocore @@ -160,7 +187,10 @@ fastapi==0.115.5 # via # -r requirements/../../../packages/service-library/requirements/_fastapi.in # -r requirements/_base.in + # fastapi-lifespan-manager # prometheus-fastapi-instrumentator +fastapi-lifespan-manager==0.1.4 + # via -r requirements/../../../packages/service-library/requirements/_fastapi.in faststream==0.5.31 # via # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in @@ -452,6 +482,8 @@ psutil==6.0.0 # -r requirements/../../../packages/service-library/requirements/_base.in psycopg2-binary==2.9.9 # via sqlalchemy +pycryptodome==3.21.0 + # via stream-zip pydantic==2.10.2 # via # -c requirements/../../../packages/aws-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -532,6 +564,32 @@ pydantic-extra-types==2.9.0 # -r requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in pydantic-settings==2.6.1 # via + # -c requirements/../../../packages/aws-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt 
+ # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt # -r requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in @@ -626,6 +684,32 @@ redis==5.2.1 # -r requirements/../../../packages/service-library/requirements/_base.in referencing==0.29.3 # via + # -c requirements/../../../packages/aws-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt # jsonschema 
# jsonschema-specifications regex==2023.12.25 @@ -721,6 +805,10 @@ starlette==0.41.2 # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt # fastapi +stream-zip==0.0.83 + # via + # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/_base.in tenacity==8.5.0 # via # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in @@ -740,13 +828,13 @@ typer==0.12.3 # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/_base.in -types-aiobotocore==2.12.1 +types-aiobotocore==2.19.0 # via -r requirements/../../../packages/aws-library/requirements/_base.in -types-aiobotocore-ec2==2.12.1 +types-aiobotocore-ec2==2.19.0 # via types-aiobotocore -types-aiobotocore-s3==2.12.1 +types-aiobotocore-s3==2.19.0.post1 # via types-aiobotocore -types-aiobotocore-ssm==2.12.3 +types-aiobotocore-ssm==2.19.0 # via types-aiobotocore types-awscrt==0.20.5 # via botocore-stubs diff --git a/services/resource-usage-tracker/requirements/_test.txt b/services/resource-usage-tracker/requirements/_test.txt index e7bea4661cb..144675d1046 100644 --- a/services/resource-usage-tracker/requirements/_test.txt +++ b/services/resource-usage-tracker/requirements/_test.txt @@ -19,7 +19,7 @@ attrs==23.2.0 # -c requirements/_base.txt # jsonschema # referencing -aws-sam-translator==1.94.0 +aws-sam-translator==1.95.0 # via cfn-lint aws-xray-sdk==2.14.0 # via moto @@ -47,7 +47,7 @@ certifi==2024.2.2 # requests cffi==1.17.1 # via cryptography -cfn-lint==1.22.7 +cfn-lint==1.27.0 # via moto charset-normalizer==3.3.2 # via @@ -57,11 +57,11 @@ click==8.1.7 # via # -c requirements/_base.txt # flask -coverage==7.6.10 +coverage==7.6.12 # via # -r requirements/_test.in # pytest-cov -cryptography==44.0.0 +cryptography==44.0.2 # via # -c requirements/../../../requirements/constraints.txt # joserfc @@ -70,15 +70,15 @@ docker==7.1.0 # via # -r requirements/_test.in # moto -faker==35.0.0 +faker==36.1.1 # via -r requirements/_test.in -fakeredis==2.26.2 +fakeredis==2.27.0 # via -r requirements/_test.in flask==3.1.0 # via # flask-cors # moto -flask-cors==5.0.0 +flask-cors==5.0.1 # via moto graphql-core==3.2.6 # via moto @@ -120,9 +120,7 @@ jmespath==1.0.1 # -c requirements/_base.txt # boto3 # botocore -joserfc==1.0.2 - # via moto -jsondiff==2.2.1 +joserfc==1.0.4 # via moto jsonpatch==1.33 # via cfn-lint @@ -158,13 +156,11 @@ markupsafe==2.1.5 # jinja2 # mako # werkzeug -moto==5.0.20 - # via - # -c requirements/../../../requirements/constraints.txt - # -r requirements/_test.in +moto==5.1.1 + # via -r requirements/_test.in mpmath==1.3.0 # via sympy -mypy==1.14.1 +mypy==1.15.0 # via sqlalchemy mypy-extensions==1.0.0 # via mypy @@ -185,7 +181,7 @@ pluggy==1.5.0 # via pytest ply==3.11 # via jsonpath-ng -py-partiql-parser==0.5.6 +py-partiql-parser==0.6.1 # via moto pycparser==2.22 # via cffi @@ -202,7 +198,7 @@ pyparsing==3.1.2 # via # -c requirements/_base.txt # moto -pytest==8.3.4 +pytest==8.3.5 # via # -r requirements/_test.in # pytest-asyncio @@ -225,7 +221,6 @@ python-dateutil==2.9.0.post0 # via # -c requirements/_base.txt # botocore - # faker # moto python-dotenv==1.0.1 # via @@ -236,7 +231,6 @@ pyyaml==6.0.1 # -c 
requirements/../../../requirements/constraints.txt # -c requirements/_base.txt # cfn-lint - # jsondiff # jsonschema-path # moto # responses @@ -310,7 +304,7 @@ sympy==1.13.3 # via cfn-lint termcolor==2.5.0 # via pytest-sugar -types-requests==2.32.0.20241016 +types-requests==2.32.0.20250301 # via -r requirements/_test.in typing-extensions==4.12.2 # via @@ -318,11 +312,14 @@ typing-extensions==4.12.2 # alembic # aws-sam-translator # cfn-lint - # faker # mypy # pydantic # pydantic-core # sqlalchemy2-stubs +tzdata==2024.1 + # via + # -c requirements/_base.txt + # faker urllib3==2.2.3 # via # -c requirements/../../../requirements/constraints.txt @@ -335,6 +332,7 @@ urllib3==2.2.3 werkzeug==3.1.3 # via # flask + # flask-cors # moto wrapt==1.16.0 # via diff --git a/services/resource-usage-tracker/requirements/_tools.txt b/services/resource-usage-tracker/requirements/_tools.txt index 7bc674ecdf6..4ae88566afd 100644 --- a/services/resource-usage-tracker/requirements/_tools.txt +++ b/services/resource-usage-tracker/requirements/_tools.txt @@ -1,6 +1,6 @@ astroid==3.3.8 # via pylint -black==24.10.0 +black==25.1.0 # via -r requirements/../../../requirements/devenv.txt build==1.2.2.post1 # via pip-tools @@ -20,15 +20,15 @@ distlib==0.3.9 # via virtualenv filelock==3.17.0 # via virtualenv -identify==2.6.6 +identify==2.6.8 # via pre-commit -isort==5.13.2 +isort==6.0.1 # via # -r requirements/../../../requirements/devenv.txt # pylint mccabe==0.7.0 # via pylint -mypy==1.14.1 +mypy==1.15.0 # via # -c requirements/_test.txt # -r requirements/../../../requirements/devenv.txt @@ -47,7 +47,7 @@ packaging==24.0 # build pathspec==0.12.1 # via black -pip==25.0 +pip==25.0.1 # via pip-tools pip-tools==7.4.1 # via -r requirements/../../../requirements/devenv.txt @@ -58,7 +58,7 @@ platformdirs==4.3.6 # virtualenv pre-commit==4.1.0 # via -r requirements/../../../requirements/devenv.txt -pylint==3.3.3 +pylint==3.3.4 # via -r requirements/../../../requirements/devenv.txt pyproject-hooks==1.2.0 # via @@ -71,7 +71,7 @@ pyyaml==6.0.1 # -c requirements/_test.txt # pre-commit # watchdog -ruff==0.9.3 +ruff==0.9.9 # via -r requirements/../../../requirements/devenv.txt setuptools==74.0.0 # via @@ -85,7 +85,7 @@ typing-extensions==4.12.2 # -c requirements/_base.txt # -c requirements/_test.txt # mypy -virtualenv==20.29.1 +virtualenv==20.29.2 # via pre-commit watchdog==6.0.0 # via -r requirements/_tools.in diff --git a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/api/rest/_resource_tracker.py b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/api/rest/_resource_tracker.py index 749e47f7938..a7397244928 100644 --- a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/api/rest/_resource_tracker.py +++ b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/api/rest/_resource_tracker.py @@ -7,8 +7,8 @@ WalletTotalCredits, ) from models_library.api_schemas_resource_usage_tracker.pricing_plans import ( - PricingPlanGet, - PricingUnitGet, + RutPricingPlanGet, + RutPricingUnitGet, ) from models_library.resource_tracker import CreditTransactionId @@ -63,14 +63,14 @@ async def create_credit_transaction( @router.get( "/services/{service_key:path}/{service_version}/pricing-plan", - response_model=PricingPlanGet, + response_model=RutPricingPlanGet, operation_id="get_service_default_pricing_plan", description="Returns a default pricing plan with pricing details for a specified service", tags=["pricing-plans"], ) async def 
get_service_default_pricing_plan( service_pricing_plans: Annotated[ - PricingPlanGet, + RutPricingPlanGet, Depends(pricing_plans.get_service_default_pricing_plan), ], ): @@ -79,14 +79,14 @@ async def get_service_default_pricing_plan( @router.get( "/pricing-plans/{pricing_plan_id}/pricing-units/{pricing_unit_id}", - response_model=PricingUnitGet, + response_model=RutPricingUnitGet, operation_id="list_service_pricing_plans", description="Returns a list of service pricing plans with pricing details for a specified service", tags=["pricing-plans"], ) async def get_pricing_plan_unit( pricing_unit: Annotated[ - PricingUnitGet, + RutPricingUnitGet, Depends(pricing_units.get_pricing_unit), ] ): diff --git a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/api/rpc/_licensed_items_checkouts.py b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/api/rpc/_licensed_items_checkouts.py index e37c4269045..859b501d4bd 100644 --- a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/api/rpc/_licensed_items_checkouts.py +++ b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/api/rpc/_licensed_items_checkouts.py @@ -3,7 +3,7 @@ LicensedItemCheckoutGet, LicensedItemsCheckoutsPage, ) -from models_library.licensed_items import LicensedItemID +from models_library.licenses import LicensedItemID, LicensedItemKey, LicensedItemVersion from models_library.products import ProductName from models_library.resource_tracker_licensed_items_checkouts import ( LicensedItemCheckoutID, @@ -62,6 +62,8 @@ async def checkout_licensed_item( app: FastAPI, *, licensed_item_id: LicensedItemID, + key: LicensedItemKey, + version: LicensedItemVersion, wallet_id: WalletID, product_name: ProductName, num_of_seats: int, @@ -72,6 +74,8 @@ async def checkout_licensed_item( return await licensed_items_checkouts.checkout_licensed_item( db_engine=app.state.engine, licensed_item_id=licensed_item_id, + key=key, + version=version, wallet_id=wallet_id, product_name=product_name, num_of_seats=num_of_seats, diff --git a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/api/rpc/_pricing_plans.py b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/api/rpc/_pricing_plans.py index 963ea4b7fd9..f5499437c0a 100644 --- a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/api/rpc/_pricing_plans.py +++ b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/api/rpc/_pricing_plans.py @@ -1,9 +1,9 @@ from fastapi import FastAPI from models_library.api_schemas_resource_usage_tracker.pricing_plans import ( - PricingPlanGet, - PricingPlanPage, PricingPlanToServiceGet, - PricingUnitGet, + RutPricingPlanGet, + RutPricingPlanPage, + RutPricingUnitGet, ) from models_library.products import ProductName from models_library.resource_tracker import ( @@ -16,6 +16,9 @@ ) from models_library.services import ServiceKey, ServiceVersion from servicelib.rabbitmq import RPCRouter +from servicelib.rabbitmq.rpc_interfaces.resource_usage_tracker.errors import ( + PricingUnitDuplicationError, +) from ...services import pricing_plans, pricing_units @@ -31,7 +34,7 @@ async def get_pricing_plan( *, product_name: ProductName, pricing_plan_id: PricingPlanId, -) -> PricingPlanGet: +) -> RutPricingPlanGet: return await pricing_plans.get_pricing_plan( product_name=product_name, pricing_plan_id=pricing_plan_id, @@ -40,7 +43,7 @@ async def get_pricing_plan( @router.expose(reraise_if_error_type=()) 
-async def list_pricing_plans( +async def list_pricing_plans_without_pricing_units( app: FastAPI, *, product_name: ProductName, @@ -48,8 +51,8 @@ async def list_pricing_plans( # pagination offset: int, limit: int, -) -> PricingPlanPage: - return await pricing_plans.list_pricing_plans_by_product( +) -> RutPricingPlanPage: + return await pricing_plans.list_pricing_plans_without_pricing_units( db_engine=app.state.engine, product_name=product_name, exclude_inactive=exclude_inactive, @@ -63,7 +66,7 @@ async def create_pricing_plan( app: FastAPI, *, data: PricingPlanCreate, -) -> PricingPlanGet: +) -> RutPricingPlanGet: return await pricing_plans.create_pricing_plan( data=data, db_engine=app.state.engine, @@ -76,7 +79,7 @@ async def update_pricing_plan( *, product_name: ProductName, data: PricingPlanUpdate, -) -> PricingPlanGet: +) -> RutPricingPlanGet: return await pricing_plans.update_pricing_plan( product_name=product_name, data=data, @@ -94,7 +97,7 @@ async def get_pricing_unit( product_name: ProductName, pricing_plan_id: PricingPlanId, pricing_unit_id: PricingUnitId, -) -> PricingUnitGet: +) -> RutPricingUnitGet: return await pricing_units.get_pricing_unit( product_name=product_name, pricing_plan_id=pricing_plan_id, @@ -103,13 +106,13 @@ async def get_pricing_unit( ) -@router.expose(reraise_if_error_type=()) +@router.expose(reraise_if_error_type=(PricingUnitDuplicationError,)) async def create_pricing_unit( app: FastAPI, *, product_name: ProductName, data: PricingUnitWithCostCreate, -) -> PricingUnitGet: +) -> RutPricingUnitGet: return await pricing_units.create_pricing_unit( product_name=product_name, data=data, @@ -123,7 +126,7 @@ async def update_pricing_unit( *, product_name: ProductName, data: PricingUnitWithCostUpdate, -) -> PricingUnitGet: +) -> RutPricingUnitGet: return await pricing_units.update_pricing_unit( product_name=product_name, data=data, diff --git a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/core/application.py b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/core/application.py index 7299703a6bd..ca07c55f20c 100644 --- a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/core/application.py +++ b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/core/application.py @@ -2,7 +2,7 @@ from fastapi import FastAPI from servicelib.fastapi.openapi import override_fastapi_openapi_method -from servicelib.fastapi.tracing import setup_tracing +from servicelib.fastapi.tracing import initialize_tracing from .._meta import ( API_VERSION, @@ -67,7 +67,7 @@ def create_app(settings: ApplicationSettings) -> FastAPI: setup_process_message_running_service(app) # Requires Rabbit if app.state.settings.RESOURCE_USAGE_TRACKER_TRACING: - setup_tracing( + initialize_tracing( app, app.state.settings.RESOURCE_USAGE_TRACKER_TRACING, app.state.settings.APP_NAME, diff --git a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/models/licensed_items_checkouts.py b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/models/licensed_items_checkouts.py index 774e4505230..8dd1ff5e929 100644 --- a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/models/licensed_items_checkouts.py +++ b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/models/licensed_items_checkouts.py @@ -1,6 +1,6 @@ from datetime import datetime -from models_library.licensed_items import LicensedItemID +from models_library.licenses 
import LicensedItemID, LicensedItemKey, LicensedItemVersion from models_library.products import ProductName from models_library.resource_tracker_licensed_items_checkouts import ( LicensedItemCheckoutID, @@ -14,6 +14,8 @@ class LicensedItemCheckoutDB(BaseModel): licensed_item_checkout_id: LicensedItemCheckoutID licensed_item_id: LicensedItemID + key: LicensedItemKey + version: LicensedItemVersion wallet_id: WalletID user_id: UserID user_email: str @@ -29,6 +31,8 @@ class LicensedItemCheckoutDB(BaseModel): class CreateLicensedItemCheckoutDB(BaseModel): licensed_item_id: LicensedItemID + key: LicensedItemKey + version: LicensedItemVersion wallet_id: WalletID user_id: UserID user_email: str diff --git a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/models/licensed_items_purchases.py b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/models/licensed_items_purchases.py index 32630844fe7..dd23b87e4e8 100644 --- a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/models/licensed_items_purchases.py +++ b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/models/licensed_items_purchases.py @@ -2,7 +2,7 @@ from decimal import Decimal from models_library.emails import LowerCaseEmailStr -from models_library.licensed_items import LicensedItemID +from models_library.licenses import LicensedItemID, LicensedItemKey, LicensedItemVersion from models_library.products import ProductName from models_library.resource_tracker import PricingUnitCostId from models_library.resource_tracker_licensed_items_purchases import ( @@ -17,6 +17,8 @@ class LicensedItemsPurchasesDB(BaseModel): licensed_item_purchase_id: LicensedItemPurchaseID product_name: ProductName licensed_item_id: LicensedItemID + key: LicensedItemKey + version: LicensedItemVersion wallet_id: WalletID wallet_name: str pricing_unit_cost_id: PricingUnitCostId @@ -35,6 +37,8 @@ class LicensedItemsPurchasesDB(BaseModel): class CreateLicensedItemsPurchasesDB(BaseModel): product_name: ProductName licensed_item_id: LicensedItemID + key: LicensedItemKey + version: LicensedItemVersion wallet_id: WalletID wallet_name: str pricing_unit_cost_id: PricingUnitCostId diff --git a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/models/pricing_units.py b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/models/pricing_units.py index bffc25e951c..3cb59c161d4 100644 --- a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/models/pricing_units.py +++ b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/models/pricing_units.py @@ -7,7 +7,8 @@ PricingPlanId, PricingUnitCostId, PricingUnitId, - UnitExtraInfo, + UnitExtraInfoLicense, + UnitExtraInfoTier, ) from pydantic import BaseModel, ConfigDict, field_validator @@ -16,7 +17,7 @@ class PricingUnitsDB(BaseModel): pricing_unit_id: PricingUnitId pricing_plan_id: PricingPlanId unit_name: str - unit_extra_info: UnitExtraInfo + unit_extra_info: UnitExtraInfoTier | UnitExtraInfoLicense default: bool specific_info: HardwareInfo created: datetime diff --git a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/licensed_items_checkouts.py b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/licensed_items_checkouts.py index a973fd95ea5..549118884a9 100644 --- a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/licensed_items_checkouts.py 
+++ b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/licensed_items_checkouts.py @@ -6,7 +6,7 @@ LicensedItemCheckoutGet, LicensedItemsCheckoutsPage, ) -from models_library.licensed_items import LicensedItemID +from models_library.licenses import LicensedItemID, LicensedItemKey, LicensedItemVersion from models_library.products import ProductName from models_library.resource_tracker import ServiceRunStatus from models_library.resource_tracker_licensed_items_checkouts import ( @@ -58,6 +58,8 @@ async def list_licensed_items_checkouts( LicensedItemCheckoutGet( licensed_item_checkout_id=licensed_item_checkout_db.licensed_item_checkout_id, licensed_item_id=licensed_item_checkout_db.licensed_item_id, + key=licensed_item_checkout_db.key, + version=licensed_item_checkout_db.version, wallet_id=licensed_item_checkout_db.wallet_id, user_id=licensed_item_checkout_db.user_id, user_email=licensed_item_checkout_db.user_email, @@ -89,6 +91,8 @@ async def get_licensed_item_checkout( return LicensedItemCheckoutGet( licensed_item_checkout_id=licensed_item_checkout_db.licensed_item_checkout_id, licensed_item_id=licensed_item_checkout_db.licensed_item_id, + key=licensed_item_checkout_db.key, + version=licensed_item_checkout_db.version, wallet_id=licensed_item_checkout_db.wallet_id, user_id=licensed_item_checkout_db.user_id, user_email=licensed_item_checkout_db.user_email, @@ -104,6 +108,8 @@ async def checkout_licensed_item( db_engine: Annotated[AsyncEngine, Depends(get_resource_tracker_db_engine)], *, licensed_item_id: LicensedItemID, + key: LicensedItemKey, + version: LicensedItemVersion, wallet_id: WalletID, product_name: ProductName, num_of_seats: int, @@ -112,20 +118,20 @@ async def checkout_licensed_item( user_email: str, ) -> LicensedItemCheckoutGet: - _active_purchased_seats: int = await licensed_items_purchases_db.get_active_purchased_seats_for_item_and_wallet( + _active_purchased_seats: int = await licensed_items_purchases_db.get_active_purchased_seats_for_key_version_wallet( db_engine, - licensed_item_id=licensed_item_id, + key=key, + version=version, wallet_id=wallet_id, product_name=product_name, ) - _currently_used_seats = ( - await licensed_items_checkouts_db.get_currently_used_seats_for_item_and_wallet( - db_engine, - licensed_item_id=licensed_item_id, - wallet_id=wallet_id, - product_name=product_name, - ) + _currently_used_seats = await licensed_items_checkouts_db.get_currently_used_seats_for_key_version_wallet( + db_engine, + key=key, + version=version, + wallet_id=wallet_id, + product_name=product_name, ) available_seats = _active_purchased_seats - _currently_used_seats @@ -155,6 +161,8 @@ async def checkout_licensed_item( _create_item_checkout = CreateLicensedItemCheckoutDB( licensed_item_id=licensed_item_id, + key=key, + version=version, wallet_id=wallet_id, user_id=user_id, user_email=user_email, @@ -171,6 +179,8 @@ async def checkout_licensed_item( return LicensedItemCheckoutGet( licensed_item_checkout_id=licensed_item_checkout_db.licensed_item_checkout_id, licensed_item_id=licensed_item_checkout_db.licensed_item_id, + key=licensed_item_checkout_db.key, + version=licensed_item_checkout_db.version, wallet_id=licensed_item_checkout_db.wallet_id, user_id=licensed_item_checkout_db.user_id, user_email=licensed_item_checkout_db.user_email, @@ -201,6 +211,8 @@ async def release_licensed_item( return LicensedItemCheckoutGet( licensed_item_checkout_id=licensed_item_checkout_db.licensed_item_checkout_id, 
licensed_item_id=licensed_item_checkout_db.licensed_item_id, + key=licensed_item_checkout_db.key, + version=licensed_item_checkout_db.version, wallet_id=licensed_item_checkout_db.wallet_id, user_id=licensed_item_checkout_db.user_id, user_email=licensed_item_checkout_db.user_email, diff --git a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/licensed_items_purchases.py b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/licensed_items_purchases.py index f085de18406..0e5c7abef81 100644 --- a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/licensed_items_purchases.py +++ b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/licensed_items_purchases.py @@ -54,6 +54,8 @@ async def list_licensed_items_purchases( licensed_item_purchase_id=licensed_item_purchase_db.licensed_item_purchase_id, product_name=licensed_item_purchase_db.product_name, licensed_item_id=licensed_item_purchase_db.licensed_item_id, + key=licensed_item_purchase_db.key, + version=licensed_item_purchase_db.version, wallet_id=licensed_item_purchase_db.wallet_id, wallet_name=licensed_item_purchase_db.wallet_name, pricing_unit_cost_id=licensed_item_purchase_db.pricing_unit_cost_id, @@ -89,6 +91,8 @@ async def get_licensed_item_purchase( licensed_item_purchase_id=licensed_item_purchase_db.licensed_item_purchase_id, product_name=licensed_item_purchase_db.product_name, licensed_item_id=licensed_item_purchase_db.licensed_item_id, + key=licensed_item_purchase_db.key, + version=licensed_item_purchase_db.version, wallet_id=licensed_item_purchase_db.wallet_id, wallet_name=licensed_item_purchase_db.wallet_name, pricing_unit_cost_id=licensed_item_purchase_db.pricing_unit_cost_id, @@ -114,6 +118,8 @@ async def create_licensed_item_purchase( item_purchase_create = CreateLicensedItemsPurchasesDB( product_name=data.product_name, licensed_item_id=data.licensed_item_id, + key=data.key, + version=data.version, wallet_id=data.wallet_id, wallet_name=data.wallet_name, pricing_unit_cost_id=data.pricing_unit_cost_id, @@ -167,6 +173,8 @@ async def create_licensed_item_purchase( licensed_item_purchase_id=licensed_item_purchase_db.licensed_item_purchase_id, product_name=licensed_item_purchase_db.product_name, licensed_item_id=licensed_item_purchase_db.licensed_item_id, + key=licensed_item_purchase_db.key, + version=licensed_item_purchase_db.version, wallet_id=licensed_item_purchase_db.wallet_id, wallet_name=licensed_item_purchase_db.wallet_name, pricing_unit_cost_id=licensed_item_purchase_db.pricing_unit_cost_id, diff --git a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/modules/db/credit_transactions_db.py b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/modules/db/credit_transactions_db.py index c0030898e45..d51e344d11c 100644 --- a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/modules/db/credit_transactions_db.py +++ b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/modules/db/credit_transactions_db.py @@ -17,6 +17,7 @@ resource_tracker_service_runs, ) from simcore_postgres_database.utils_repos import transaction_context +from sqlalchemy.engine import CursorResult from sqlalchemy.ext.asyncio import AsyncConnection, AsyncEngine from ....exceptions.errors import CreditTransactionNotCreatedDBError @@ -165,6 +166,8 @@ async def 
batch_update_credit_transaction_status_for_in_debt_transactions( ) async with transaction_context(engine, connection) as conn: result = await conn.execute(update_stmt) + # NOTE: see https://docs.sqlalchemy.org/en/20/tutorial/data_update.html#getting-affected-row-count-from-update-delete + assert isinstance(result, CursorResult) # nosec if result.rowcount: _logger.info( "Wallet %s and project %s transactions in DEBT were changed to BILLED. Num. of transaction %s", diff --git a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/modules/db/licensed_items_checkouts_db.py b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/modules/db/licensed_items_checkouts_db.py index 5035a637199..96d98359cb6 100644 --- a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/modules/db/licensed_items_checkouts_db.py +++ b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/modules/db/licensed_items_checkouts_db.py @@ -3,7 +3,6 @@ from typing import cast import sqlalchemy as sa -from models_library.licensed_items import LicensedItemID from models_library.products import ProductName from models_library.resource_tracker_licensed_items_checkouts import ( LicensedItemCheckoutID, @@ -28,6 +27,7 @@ CreateLicensedItemCheckoutDB, LicensedItemCheckoutDB, ) +from . import utils as db_utils _logger = logging.getLogger(__name__) @@ -35,6 +35,8 @@ _SELECTION_ARGS = ( resource_tracker_licensed_items_checkouts.c.licensed_item_checkout_id, resource_tracker_licensed_items_checkouts.c.licensed_item_id, + resource_tracker_licensed_items_checkouts.c.key, + resource_tracker_licensed_items_checkouts.c.version, resource_tracker_licensed_items_checkouts.c.wallet_id, resource_tracker_licensed_items_checkouts.c.user_id, resource_tracker_licensed_items_checkouts.c.user_email, @@ -62,6 +64,8 @@ async def create( resource_tracker_licensed_items_checkouts.insert() .values( licensed_item_id=data.licensed_item_id, + key=data.key, + version=data.version, wallet_id=data.wallet_id, user_id=data.user_id, user_email=data.user_email, @@ -107,12 +111,16 @@ async def list_( # Ordering and pagination if order_by.direction == OrderDirection.ASC: list_query = base_query.order_by( - sa.asc(getattr(resource_tracker_licensed_items_checkouts.c, order_by.field)) + sa.asc( + getattr(resource_tracker_licensed_items_checkouts.c, order_by.field) + ), + resource_tracker_licensed_items_checkouts.c.licensed_item_checkout_id, ) else: list_query = base_query.order_by( sa.desc( getattr(resource_tracker_licensed_items_checkouts.c, order_by.field) - ) + ), + resource_tracker_licensed_items_checkouts.c.licensed_item_checkout_id, ) list_query = list_query.offset(offset).limit(limit) @@ -194,21 +202,25 @@ async def update( return LicensedItemCheckoutDB.model_validate(row) -async def get_currently_used_seats_for_item_and_wallet( +async def get_currently_used_seats_for_key_version_wallet( engine: AsyncEngine, connection: AsyncConnection | None = None, *, - licensed_item_id: LicensedItemID, + key: str, + version: str, wallet_id: WalletID, product_name: ProductName, ) -> int: + sum_stmt = sa.select( sa.func.sum(resource_tracker_licensed_items_checkouts.c.num_of_seats) ).where( (resource_tracker_licensed_items_checkouts.c.wallet_id == wallet_id) + & (resource_tracker_licensed_items_checkouts.c.key == key) + # If purchased version >= requested version, it covers that version &
( - resource_tracker_licensed_items_checkouts.c.licensed_item_id - == licensed_item_id + db_utils.version(resource_tracker_licensed_items_checkouts.c.version) + >= db_utils.version(version) ) & (resource_tracker_licensed_items_checkouts.c.product_name == product_name) & (resource_tracker_licensed_items_checkouts.c.stopped_at.is_(None)) diff --git a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/modules/db/licensed_items_purchases_db.py b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/modules/db/licensed_items_purchases_db.py index fab4391628a..36cffd230a0 100644 --- a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/modules/db/licensed_items_purchases_db.py +++ b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/modules/db/licensed_items_purchases_db.py @@ -2,7 +2,6 @@ from typing import cast import sqlalchemy as sa -from models_library.licensed_items import LicensedItemID from models_library.products import ProductName from models_library.resource_tracker_licensed_items_purchases import ( LicensedItemPurchaseID, @@ -24,11 +23,14 @@ CreateLicensedItemsPurchasesDB, LicensedItemsPurchasesDB, ) +from . import utils as db_utils _SELECTION_ARGS = ( resource_tracker_licensed_items_purchases.c.licensed_item_purchase_id, resource_tracker_licensed_items_purchases.c.product_name, resource_tracker_licensed_items_purchases.c.licensed_item_id, + resource_tracker_licensed_items_purchases.c.key, + resource_tracker_licensed_items_purchases.c.version, resource_tracker_licensed_items_purchases.c.wallet_id, resource_tracker_licensed_items_purchases.c.wallet_name, resource_tracker_licensed_items_purchases.c.pricing_unit_cost_id, @@ -59,6 +61,8 @@ async def create( .values( product_name=data.product_name, licensed_item_id=data.licensed_item_id, + key=data.key, + version=data.version, wallet_id=data.wallet_id, wallet_name=data.wallet_name, pricing_unit_cost_id=data.pricing_unit_cost_id, @@ -106,13 +110,17 @@ async def list_( # Ordering and pagination if order_by.direction == OrderDirection.ASC: list_query = base_query.order_by( - sa.asc(getattr(resource_tracker_licensed_items_purchases.c, order_by.field)) + sa.asc( + getattr(resource_tracker_licensed_items_purchases.c, order_by.field) + ), + resource_tracker_licensed_items_purchases.c.licensed_item_purchase_id, ) else: list_query = base_query.order_by( sa.desc( getattr(resource_tracker_licensed_items_purchases.c, order_by.field) - ) + ), + resource_tracker_licensed_items_purchases.c.licensed_item_purchase_id, ) list_query = list_query.offset(offset).limit(limit) @@ -158,11 +166,12 @@ async def get( return LicensedItemsPurchasesDB.model_validate(row) -async def get_active_purchased_seats_for_item_and_wallet( +async def get_active_purchased_seats_for_key_version_wallet( engine: AsyncEngine, connection: AsyncConnection | None = None, *, - licensed_item_id: LicensedItemID, + key: str, + version: str, wallet_id: WalletID, product_name: ProductName, ) -> int: @@ -175,9 +184,11 @@ async def get_active_purchased_seats_for_item_and_wallet( sa.func.sum(resource_tracker_licensed_items_purchases.c.num_of_seats) ).where( (resource_tracker_licensed_items_purchases.c.wallet_id == wallet_id) + & (resource_tracker_licensed_items_purchases.c.key == key) + # If purchased version >= requested version, it covers that version & ( - resource_tracker_licensed_items_purchases.c.licensed_item_id - == licensed_item_id + 
db_utils.version(resource_tracker_licensed_items_purchases.c.version) + >= db_utils.version(version) ) & (resource_tracker_licensed_items_purchases.c.product_name == product_name) & (resource_tracker_licensed_items_purchases.c.start_at <= _current_time) diff --git a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/modules/db/pricing_plans_db.py b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/modules/db/pricing_plans_db.py index b205fb03997..397ba7e94e1 100644 --- a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/modules/db/pricing_plans_db.py +++ b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/modules/db/pricing_plans_db.py @@ -13,6 +13,9 @@ PricingUnitWithCostUpdate, ) from models_library.services import ServiceKey, ServiceVersion +from servicelib.rabbitmq.rpc_interfaces.resource_usage_tracker.errors import ( + PricingUnitDuplicationError, +) from simcore_postgres_database.models.resource_tracker_pricing_plan_to_service import ( resource_tracker_pricing_plan_to_service, ) @@ -27,6 +30,7 @@ ) from simcore_postgres_database.utils_repos import transaction_context from sqlalchemy.dialects.postgresql import ARRAY, INTEGER +from sqlalchemy.exc import IntegrityError as SqlAlchemyIntegrityError from sqlalchemy.ext.asyncio import AsyncConnection, AsyncEngine from ....exceptions.errors import ( @@ -208,7 +212,10 @@ async def list_pricing_plans_by_product( count_query = sa.select(sa.func.count()).select_from(subquery) # Default ordering - list_query = base_query.order_by(resource_tracker_pricing_plans.c.created.asc()) + list_query = base_query.order_by( + resource_tracker_pricing_plans.c.created.asc(), + resource_tracker_pricing_plans.c.pricing_plan_id, + ) total_count = await conn.scalar(count_query) if total_count is None: @@ -556,7 +563,10 @@ async def create_pricing_unit_with_cost( ) .returning(resource_tracker_pricing_units.c.pricing_unit_id) ) - result = await conn.execute(insert_stmt) + try: + result = await conn.execute(insert_stmt) + except SqlAlchemyIntegrityError as exc: + raise PricingUnitDuplicationError from exc row = result.first() if row is None: raise PricingUnitNotCreatedDBError(data=data) diff --git a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/modules/db/service_runs_db.py b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/modules/db/service_runs_db.py index 335c743baec..ed19570b523 100644 --- a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/modules/db/service_runs_db.py +++ b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/modules/db/service_runs_db.py @@ -9,7 +9,7 @@ from models_library.api_schemas_resource_usage_tracker.credit_transactions import ( WalletTotalCredits, ) -from models_library.api_schemas_storage import S3BucketName +from models_library.api_schemas_storage.storage_schemas import S3BucketName from models_library.products import ProductName from models_library.projects import ProjectID from models_library.resource_tracker import ( @@ -265,7 +265,7 @@ async def list_service_runs_by_product_and_user_and_wallet( isouter=True, ).join( _project_tags_subquery, - resource_tracker_service_runs.c.project_id + resource_tracker_service_runs.c.root_parent_project_id == _project_tags_subquery.c.project_uuid_for_rut, isouter=True, ) @@ -545,7 +545,7 @@ async def 
export_service_runs_table_to_s3( isouter=True, ).join( _project_tags_subquery, - resource_tracker_service_runs.c.project_id + resource_tracker_service_runs.c.root_parent_project_id == _project_tags_subquery.c.project_uuid_for_rut, isouter=True, ) diff --git a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/modules/db/utils.py b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/modules/db/utils.py new file mode 100644 index 00000000000..aa2c5d79926 --- /dev/null +++ b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/modules/db/utils.py @@ -0,0 +1,7 @@ +import sqlalchemy as sa +from sqlalchemy.dialects.postgresql import ARRAY, INTEGER + + +def version(column_or_value): + # converts version value string to array[integer] that can be compared + return sa.func.string_to_array(column_or_value, ".").cast(ARRAY(INTEGER)) diff --git a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/modules/s3.py b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/modules/s3.py index 54770c70fc1..b83ce3f49db 100644 --- a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/modules/s3.py +++ b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/modules/s3.py @@ -3,7 +3,7 @@ from aws_library.s3 import S3NotConnectedError, SimcoreS3API from fastapi import FastAPI -from models_library.api_schemas_storage import S3BucketName +from models_library.api_schemas_storage.storage_schemas import S3BucketName from pydantic import TypeAdapter from servicelib.logging_utils import log_context from settings_library.s3 import S3Settings diff --git a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/pricing_plans.py b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/pricing_plans.py index 65493d7f0b4..4ef13f1de09 100644 --- a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/pricing_plans.py +++ b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/pricing_plans.py @@ -2,10 +2,10 @@ from fastapi import Depends from models_library.api_schemas_resource_usage_tracker.pricing_plans import ( - PricingPlanGet, - PricingPlanPage, PricingPlanToServiceGet, - PricingUnitGet, + RutPricingPlanGet, + RutPricingPlanPage, + RutPricingUnitGet, ) from models_library.products import ProductName from models_library.resource_tracker import ( @@ -26,8 +26,8 @@ async def _create_pricing_plan_get( pricing_plan_db: PricingPlansDB, pricing_plan_unit_db: list[PricingUnitsDB] -) -> PricingPlanGet: - return PricingPlanGet( +) -> RutPricingPlanGet: + return RutPricingPlanGet( pricing_plan_id=pricing_plan_db.pricing_plan_id, display_name=pricing_plan_db.display_name, description=pricing_plan_db.description, @@ -35,7 +35,7 @@ async def _create_pricing_plan_get( created_at=pricing_plan_db.created, pricing_plan_key=pricing_plan_db.pricing_plan_key, pricing_units=[ - PricingUnitGet( + RutPricingUnitGet( pricing_unit_id=unit.pricing_unit_id, unit_name=unit.unit_name, unit_extra_info=unit.unit_extra_info, @@ -55,7 +55,7 @@ async def get_service_default_pricing_plan( service_key: ServiceKey, service_version: ServiceVersion, db_engine: Annotated[AsyncEngine, Depends(get_resource_tracker_db_engine)], -) -> PricingPlanGet: +) -> RutPricingPlanGet: active_service_pricing_plans = ( await 
pricing_plans_db.list_active_service_pricing_plans_by_product_and_service( db_engine, @@ -118,14 +118,14 @@ async def connect_service_to_pricing_plan( return TypeAdapter(PricingPlanToServiceGet).validate_python(output.model_dump()) -async def list_pricing_plans_by_product( +async def list_pricing_plans_without_pricing_units( db_engine: Annotated[AsyncEngine, Depends(get_resource_tracker_db_engine)], product_name: ProductName, exclude_inactive: bool, # pagination offset: int, limit: int, -) -> PricingPlanPage: +) -> RutPricingPlanPage: total, pricing_plans_list_db = await pricing_plans_db.list_pricing_plans_by_product( db_engine, product_name=product_name, @@ -133,9 +133,9 @@ async def list_pricing_plans_by_product( offset=offset, limit=limit, ) - return PricingPlanPage( + return RutPricingPlanPage( items=[ - PricingPlanGet( + RutPricingPlanGet( pricing_plan_id=pricing_plan_db.pricing_plan_id, display_name=pricing_plan_db.display_name, description=pricing_plan_db.description, @@ -155,7 +155,7 @@ async def get_pricing_plan( product_name: ProductName, pricing_plan_id: PricingPlanId, db_engine: Annotated[AsyncEngine, Depends(get_resource_tracker_db_engine)], -) -> PricingPlanGet: +) -> RutPricingPlanGet: pricing_plan_db = await pricing_plans_db.get_pricing_plan( db_engine, product_name=product_name, pricing_plan_id=pricing_plan_id ) @@ -168,7 +168,7 @@ async def get_pricing_plan( async def create_pricing_plan( data: PricingPlanCreate, db_engine: Annotated[AsyncEngine, Depends(get_resource_tracker_db_engine)], -) -> PricingPlanGet: +) -> RutPricingPlanGet: pricing_plan_db = await pricing_plans_db.create_pricing_plan(db_engine, data=data) pricing_plan_unit_db = await pricing_plans_db.list_pricing_units_by_pricing_plan( db_engine, pricing_plan_id=pricing_plan_db.pricing_plan_id @@ -180,7 +180,7 @@ async def update_pricing_plan( product_name: ProductName, data: PricingPlanUpdate, db_engine: Annotated[AsyncEngine, Depends(get_resource_tracker_db_engine)], -) -> PricingPlanGet: +) -> RutPricingPlanGet: # Check whether pricing plan exists pricing_plan_db = await pricing_plans_db.get_pricing_plan( db_engine, product_name=product_name, pricing_plan_id=data.pricing_plan_id diff --git a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/pricing_units.py b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/pricing_units.py index 0a1e72cad65..e0867fad494 100644 --- a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/pricing_units.py +++ b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/pricing_units.py @@ -2,7 +2,7 @@ from fastapi import Depends from models_library.api_schemas_resource_usage_tracker.pricing_plans import ( - PricingUnitGet, + RutPricingUnitGet, ) from models_library.products import ProductName from models_library.resource_tracker import ( @@ -22,7 +22,7 @@ async def get_pricing_unit( pricing_plan_id: PricingPlanId, pricing_unit_id: PricingUnitId, db_engine: Annotated[AsyncEngine, Depends(get_resource_tracker_db_engine)], -) -> PricingUnitGet: +) -> RutPricingUnitGet: pricing_unit = await pricing_plans_db.get_valid_pricing_unit( db_engine, product_name=product_name, @@ -30,7 +30,7 @@ async def get_pricing_unit( pricing_unit_id=pricing_unit_id, ) - return PricingUnitGet( + return RutPricingUnitGet( pricing_unit_id=pricing_unit.pricing_unit_id, unit_name=pricing_unit.unit_name, unit_extra_info=pricing_unit.unit_extra_info, @@ -45,7 +45,7 @@ async def 
create_pricing_unit( product_name: ProductName, data: PricingUnitWithCostCreate, db_engine: Annotated[AsyncEngine, Depends(get_resource_tracker_db_engine)], -) -> PricingUnitGet: +) -> RutPricingUnitGet: # Check whether pricing plan exists pricing_plan_db = await pricing_plans_db.get_pricing_plan( db_engine, product_name=product_name, pricing_plan_id=data.pricing_plan_id @@ -61,7 +61,7 @@ async def create_pricing_unit( pricing_plan_id=data.pricing_plan_id, pricing_unit_id=pricing_unit_id, ) - return PricingUnitGet( + return RutPricingUnitGet( pricing_unit_id=pricing_unit.pricing_unit_id, unit_name=pricing_unit.unit_name, unit_extra_info=pricing_unit.unit_extra_info, @@ -76,7 +76,7 @@ async def update_pricing_unit( product_name: ProductName, data: PricingUnitWithCostUpdate, db_engine: Annotated[AsyncEngine, Depends(get_resource_tracker_db_engine)], -) -> PricingUnitGet: +) -> RutPricingUnitGet: # Check whether pricing unit exists await pricing_plans_db.get_valid_pricing_unit( db_engine, @@ -100,7 +100,7 @@ async def update_pricing_unit( pricing_plan_id=data.pricing_plan_id, pricing_unit_id=data.pricing_unit_id, ) - return PricingUnitGet( + return RutPricingUnitGet( pricing_unit_id=pricing_unit.pricing_unit_id, unit_name=pricing_unit.unit_name, unit_extra_info=pricing_unit.unit_extra_info, diff --git a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/service_runs.py b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/service_runs.py index db9b8096f32..9a9a1398712 100644 --- a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/service_runs.py +++ b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/service_runs.py @@ -12,7 +12,7 @@ ServiceRunGet, ServiceRunPage, ) -from models_library.api_schemas_storage import S3BucketName +from models_library.api_schemas_storage.storage_schemas import S3BucketName from models_library.products import ProductName from models_library.projects import ProjectID from models_library.resource_tracker import ( diff --git a/services/resource-usage-tracker/tests/unit/with_dbs/test_api_licensed_items_checkouts.py b/services/resource-usage-tracker/tests/unit/with_dbs/test_api_licensed_items_checkouts.py index b1036c49aef..95c15a38652 100644 --- a/services/resource-usage-tracker/tests/unit/with_dbs/test_api_licensed_items_checkouts.py +++ b/services/resource-usage-tracker/tests/unit/with_dbs/test_api_licensed_items_checkouts.py @@ -22,9 +22,15 @@ licensed_items_checkouts, licensed_items_purchases, ) +from servicelib.rabbitmq.rpc_interfaces.resource_usage_tracker.errors import ( + NotEnoughAvailableSeatsError, +) from simcore_postgres_database.models.resource_tracker_licensed_items_checkouts import ( resource_tracker_licensed_items_checkouts, ) +from simcore_postgres_database.models.resource_tracker_licensed_items_purchases import ( + resource_tracker_licensed_items_purchases, +) from simcore_postgres_database.models.resource_tracker_service_runs import ( resource_tracker_service_runs, ) @@ -62,6 +68,7 @@ def resource_tracker_service_run_id( yield row[0] con.execute(resource_tracker_licensed_items_checkouts.delete()) + con.execute(resource_tracker_licensed_items_purchases.delete()) con.execute(resource_tracker_service_runs.delete()) @@ -83,6 +90,8 @@ async def test_rpc_licensed_items_checkouts_workflow( _create_data = LicensedItemsPurchasesCreate( product_name="osparc", licensed_item_id="beb16d18-d57d-44aa-a638-9727fa4a72ef", + key="Duke", 
+ version="1.0.0", wallet_id=_WALLET_ID, wallet_name="My Wallet", pricing_plan_id=1, @@ -104,6 +113,8 @@ async def test_rpc_licensed_items_checkouts_workflow( checkout = await licensed_items_checkouts.checkout_licensed_item( rpc_client, licensed_item_id=created_item.licensed_item_id, + key=created_item.key, + version=created_item.version, wallet_id=_WALLET_ID, product_name="osparc", num_of_seats=3, @@ -137,3 +148,134 @@ async def test_rpc_licensed_items_checkouts_workflow( ) assert license_item_checkout assert isinstance(license_item_checkout.stopped_at, datetime) + + +async def test_rpc_licensed_items_checkouts_can_checkout_older_version( + mocked_redis_server: None, + resource_tracker_service_run_id: str, + rpc_client: RabbitMQRPCClient, +): + # List licensed items checkouts + output = await licensed_items_checkouts.get_licensed_items_checkouts_page( + rpc_client, + product_name="osparc", + filter_wallet_id=_WALLET_ID, + ) + assert output.total == 0 + assert output.items == [] + + # Purchase license item + _create_data = LicensedItemsPurchasesCreate( + product_name="osparc", + licensed_item_id="beb16d18-d57d-44aa-a638-9727fa4a72ef", + key="Duke", + version="2.0.0", + wallet_id=_WALLET_ID, + wallet_name="My Wallet", + pricing_plan_id=1, + pricing_unit_id=1, + pricing_unit_cost_id=1, + pricing_unit_cost=Decimal(10), + start_at=datetime.now(tz=UTC), + expire_at=datetime.now(tz=UTC) + timedelta(days=1), + num_of_seats=5, + purchased_by_user=_USER_ID_1, + user_email="test@test.com", + purchased_at=datetime.now(tz=UTC), + ) + created_item = await licensed_items_purchases.create_licensed_item_purchase( + rpc_client, data=_create_data + ) + + # Checkout with num of seats + checkout = await licensed_items_checkouts.checkout_licensed_item( + rpc_client, + licensed_item_id=created_item.licensed_item_id, + key="Duke", + version="1.0.0", # <-- Older version + wallet_id=_WALLET_ID, + product_name="osparc", + num_of_seats=3, + service_run_id=resource_tracker_service_run_id, + user_id=_USER_ID_1, + user_email="test@test.com", + ) + + # List licensed items checkouts + output = await licensed_items_checkouts.get_licensed_items_checkouts_page( + rpc_client, + product_name="osparc", + filter_wallet_id=_WALLET_ID, + ) + assert output.total == 1 + assert isinstance(output, LicensedItemsCheckoutsPage) + + # Get licensed items checkouts + output = await licensed_items_checkouts.get_licensed_item_checkout( + rpc_client, + product_name="osparc", + licensed_item_checkout_id=output.items[0].licensed_item_checkout_id, + ) + assert isinstance(output, LicensedItemCheckoutGet) + + # Release num of seats + license_item_checkout = await licensed_items_checkouts.release_licensed_item( + rpc_client, + licensed_item_checkout_id=checkout.licensed_item_checkout_id, + product_name="osparc", + ) + assert license_item_checkout + assert isinstance(license_item_checkout.stopped_at, datetime) + + +async def test_rpc_licensed_items_checkouts_can_not_checkout_newer_version( + mocked_redis_server: None, + resource_tracker_service_run_id: str, + rpc_client: RabbitMQRPCClient, +): + # List licensed items checkouts + output = await licensed_items_checkouts.get_licensed_items_checkouts_page( + rpc_client, + product_name="osparc", + filter_wallet_id=_WALLET_ID, + ) + assert output.total == 0 + assert output.items == [] + + # Purchase license item + _create_data = LicensedItemsPurchasesCreate( + product_name="osparc", + licensed_item_id="beb16d18-d57d-44aa-a638-9727fa4a72ef", + key="Duke", + version="2.0.0", # <-- Older version + 
wallet_id=_WALLET_ID, + wallet_name="My Wallet", + pricing_plan_id=1, + pricing_unit_id=1, + pricing_unit_cost_id=1, + pricing_unit_cost=Decimal(10), + start_at=datetime.now(tz=UTC), + expire_at=datetime.now(tz=UTC) + timedelta(days=1), + num_of_seats=5, + purchased_by_user=_USER_ID_1, + user_email="test@test.com", + purchased_at=datetime.now(tz=UTC), + ) + created_item = await licensed_items_purchases.create_licensed_item_purchase( + rpc_client, data=_create_data + ) + + # Checkout with num of seats + with pytest.raises(NotEnoughAvailableSeatsError): + await licensed_items_checkouts.checkout_licensed_item( + rpc_client, + licensed_item_id=created_item.licensed_item_id, + key="Duke", + version="3.0.0", # <-- Newer version + wallet_id=_WALLET_ID, + product_name="osparc", + num_of_seats=3, + service_run_id=resource_tracker_service_run_id, + user_id=_USER_ID_1, + user_email="test@test.com", + ) diff --git a/services/resource-usage-tracker/tests/unit/with_dbs/test_api_licensed_items_purchases.py b/services/resource-usage-tracker/tests/unit/with_dbs/test_api_licensed_items_purchases.py index e5920728d3c..bead8f804b3 100644 --- a/services/resource-usage-tracker/tests/unit/with_dbs/test_api_licensed_items_purchases.py +++ b/services/resource-usage-tracker/tests/unit/with_dbs/test_api_licensed_items_purchases.py @@ -43,6 +43,8 @@ async def test_rpc_licensed_items_purchases_workflow( _create_data = LicensedItemsPurchasesCreate( product_name="osparc", licensed_item_id="beb16d18-d57d-44aa-a638-9727fa4a72ef", + key="Duke", + version="1.0.0", wallet_id=1, wallet_name="My Wallet", pricing_plan_id=1, diff --git a/services/resource-usage-tracker/tests/unit/with_dbs/test_api_pricing_plans.py b/services/resource-usage-tracker/tests/unit/with_dbs/test_api_pricing_plans.py index 8aea2c291bf..02bc5758038 100644 --- a/services/resource-usage-tracker/tests/unit/with_dbs/test_api_pricing_plans.py +++ b/services/resource-usage-tracker/tests/unit/with_dbs/test_api_pricing_plans.py @@ -14,7 +14,7 @@ import httpx import pytest import sqlalchemy as sa -from models_library.resource_tracker import UnitExtraInfo +from models_library.resource_tracker import UnitExtraInfoTier from simcore_postgres_database.models.resource_tracker_pricing_plan_to_service import ( resource_tracker_pricing_plan_to_service, ) @@ -76,7 +76,7 @@ def resource_tracker_pricing_tables_db(postgres_db: sa.engine.Engine) -> Iterato resource_tracker_pricing_units.insert().values( pricing_plan_id=_PRICING_PLAN_ID, unit_name="S", - unit_extra_info=UnitExtraInfo.model_config["json_schema_extra"][ + unit_extra_info=UnitExtraInfoTier.model_config["json_schema_extra"][ "examples" ][0], default=False, @@ -103,7 +103,7 @@ def resource_tracker_pricing_tables_db(postgres_db: sa.engine.Engine) -> Iterato resource_tracker_pricing_units.insert().values( pricing_plan_id=_PRICING_PLAN_ID, unit_name="M", - unit_extra_info=UnitExtraInfo.model_config["json_schema_extra"][ + unit_extra_info=UnitExtraInfoTier.model_config["json_schema_extra"][ "examples" ][0], default=True, @@ -130,7 +130,7 @@ def resource_tracker_pricing_tables_db(postgres_db: sa.engine.Engine) -> Iterato resource_tracker_pricing_units.insert().values( pricing_plan_id=_PRICING_PLAN_ID, unit_name="L", - unit_extra_info=UnitExtraInfo.model_config["json_schema_extra"][ + unit_extra_info=UnitExtraInfoTier.model_config["json_schema_extra"][ "examples" ][0], default=False, @@ -171,7 +171,7 @@ def resource_tracker_pricing_tables_db(postgres_db: sa.engine.Engine) -> Iterato 
resource_tracker_pricing_units.insert().values( pricing_plan_id=_PRICING_PLAN_ID_2, unit_name="XXL", - unit_extra_info=UnitExtraInfo.model_config["json_schema_extra"][ + unit_extra_info=UnitExtraInfoTier.model_config["json_schema_extra"][ "examples" ][0], default=True, diff --git a/services/resource-usage-tracker/tests/unit/with_dbs/test_api_pricing_plans_rpc.py b/services/resource-usage-tracker/tests/unit/with_dbs/test_api_pricing_plans_rpc.py index ee342c0081c..59087705de0 100644 --- a/services/resource-usage-tracker/tests/unit/with_dbs/test_api_pricing_plans_rpc.py +++ b/services/resource-usage-tracker/tests/unit/with_dbs/test_api_pricing_plans_rpc.py @@ -5,10 +5,10 @@ import sqlalchemy as sa from faker import Faker from models_library.api_schemas_resource_usage_tracker.pricing_plans import ( - PricingPlanGet, - PricingPlanPage, PricingPlanToServiceGet, - PricingUnitGet, + RutPricingPlanGet, + RutPricingPlanPage, + RutPricingUnitGet, ) from models_library.resource_tracker import ( PricingPlanClassification, @@ -18,7 +18,8 @@ PricingUnitWithCostCreate, PricingUnitWithCostUpdate, SpecificInfo, - UnitExtraInfo, + UnitExtraInfoLicense, + UnitExtraInfoTier, ) from models_library.services import ServiceKey, ServiceVersion from servicelib.rabbitmq import RabbitMQRPCClient @@ -26,6 +27,9 @@ pricing_plans, pricing_units, ) +from servicelib.rabbitmq.rpc_interfaces.resource_usage_tracker.errors import ( + PricingUnitDuplicationError, +) from simcore_postgres_database.models.resource_tracker_pricing_plan_to_service import ( resource_tracker_pricing_plan_to_service, ) @@ -111,7 +115,7 @@ async def test_rpc_pricing_plans_workflow( pricing_plan_key=faker.word(), ), ) - assert isinstance(result, PricingPlanGet) + assert isinstance(result, RutPricingPlanGet) assert result.pricing_units == [] assert result.display_name == _display_name _pricing_plan_id = result.pricing_plan_id @@ -128,7 +132,7 @@ async def test_rpc_pricing_plans_workflow( is_active=True, ), ) - assert isinstance(result, PricingPlanGet) + assert isinstance(result, RutPricingPlanGet) assert result.pricing_units == [] assert result.display_name == _update_display_name assert result.description == _update_description @@ -138,20 +142,20 @@ async def test_rpc_pricing_plans_workflow( product_name="osparc", pricing_plan_id=_pricing_plan_id, ) - assert isinstance(result, PricingPlanGet) + assert isinstance(result, RutPricingPlanGet) assert result.pricing_units == [] assert result.display_name == _update_display_name assert result.description == _update_description assert result.is_active is True - result = await pricing_plans.list_pricing_plans( + result = await pricing_plans.list_pricing_plans_without_pricing_units( rpc_client, product_name="osparc", ) - assert isinstance(result, PricingPlanPage) + assert isinstance(result, RutPricingPlanPage) assert result.total == 1 assert len(result.items) == 1 - assert isinstance(result.items[0], PricingPlanGet) + assert isinstance(result.items[0], RutPricingPlanGet) assert result.items[0].pricing_units is None # Now I will deactivate the pricing plan @@ -165,7 +169,7 @@ async def test_rpc_pricing_plans_workflow( is_active=False, # <-- deactivate ), ) - assert isinstance(result, PricingPlanGet) + assert isinstance(result, RutPricingPlanGet) assert result.is_active is False @@ -186,7 +190,7 @@ async def test_rpc_pricing_plans_with_units_workflow( pricing_plan_key=faker.word(), ), ) - assert isinstance(result, PricingPlanGet) + assert isinstance(result, RutPricingPlanGet) assert result.pricing_units == [] 
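# NOTE: the hunk right below adds a `with pytest.raises(PricingUnitDuplicationError)` check.
# It exercises the error translation introduced earlier in this diff in
# pricing_plans_db.create_pricing_unit_with_cost:
#
#     try:
#         result = await conn.execute(insert_stmt)
#     except SqlAlchemyIntegrityError as exc:
#         raise PricingUnitDuplicationError from exc
#
# Creating a second unit with the same unit_name on the same pricing plan is
# expected to violate a DB constraint (presumably a unique constraint over
# (pricing_plan_id, unit_name); the schema change itself is not shown in this
# excerpt), and the repository re-raises it as a stable domain error so RPC
# clients never have to catch sqlalchemy.exc.IntegrityError directly.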
assert result.display_name == _display_name _pricing_plan_id = result.pricing_plan_id @@ -197,27 +201,44 @@ async def test_rpc_pricing_plans_with_units_workflow( data=PricingUnitWithCostCreate( pricing_plan_id=_pricing_plan_id, unit_name="SMALL", - unit_extra_info=UnitExtraInfo.model_config["json_schema_extra"]["examples"][ - 0 - ], + unit_extra_info=UnitExtraInfoTier.model_config["json_schema_extra"][ + "examples" + ][0], default=True, specific_info=SpecificInfo(aws_ec2_instances=[]), cost_per_unit=Decimal(10), comment=faker.sentence(), ), ) - assert isinstance(result, PricingUnitGet) + assert isinstance(result, RutPricingUnitGet) assert result _first_pricing_unit_id = result.pricing_unit_id _current_cost_per_unit_id = result.current_cost_per_unit_id + with pytest.raises(PricingUnitDuplicationError): + await pricing_units.create_pricing_unit( + rpc_client, + product_name="osparc", + data=PricingUnitWithCostCreate( + pricing_plan_id=_pricing_plan_id, + unit_name="SMALL", + unit_extra_info=UnitExtraInfoTier.model_config["json_schema_extra"][ + "examples" + ][0], + default=True, + specific_info=SpecificInfo(aws_ec2_instances=[]), + cost_per_unit=Decimal(10), + comment=faker.sentence(), + ), + ) + # Get pricing plan result = await pricing_plans.get_pricing_plan( rpc_client, product_name="osparc", pricing_plan_id=_pricing_plan_id, ) - assert isinstance(result, PricingPlanGet) + assert isinstance(result, RutPricingPlanGet) assert result.pricing_units assert len(result.pricing_units) == 1 assert result.pricing_units[0].pricing_unit_id == _first_pricing_unit_id @@ -231,15 +252,15 @@ async def test_rpc_pricing_plans_with_units_workflow( pricing_plan_id=_pricing_plan_id, pricing_unit_id=_first_pricing_unit_id, unit_name=_unit_name, - unit_extra_info=UnitExtraInfo.model_config["json_schema_extra"]["examples"][ - 0 - ], + unit_extra_info=UnitExtraInfoTier.model_config["json_schema_extra"][ + "examples" + ][0], default=True, specific_info=SpecificInfo(aws_ec2_instances=[]), pricing_unit_cost_update=None, ), ) - assert isinstance(result, PricingUnitGet) + assert isinstance(result, RutPricingUnitGet) assert result.unit_name == _unit_name assert result.current_cost_per_unit == Decimal(10) assert result.current_cost_per_unit_id == _current_cost_per_unit_id @@ -252,9 +273,9 @@ async def test_rpc_pricing_plans_with_units_workflow( pricing_plan_id=_pricing_plan_id, pricing_unit_id=_first_pricing_unit_id, unit_name="MEDIUM", - unit_extra_info=UnitExtraInfo.model_config["json_schema_extra"]["examples"][ - 0 - ], + unit_extra_info=UnitExtraInfoTier.model_config["json_schema_extra"][ + "examples" + ][0], default=True, specific_info=SpecificInfo(aws_ec2_instances=[]), pricing_unit_cost_update=PricingUnitCostUpdate( @@ -263,7 +284,7 @@ async def test_rpc_pricing_plans_with_units_workflow( ), ), ) - assert isinstance(result, PricingUnitGet) + assert isinstance(result, RutPricingUnitGet) assert result.unit_name == "MEDIUM" assert result.current_cost_per_unit == Decimal(15) assert result.current_cost_per_unit_id != _current_cost_per_unit_id @@ -275,7 +296,7 @@ async def test_rpc_pricing_plans_with_units_workflow( pricing_plan_id=_pricing_plan_id, pricing_unit_id=_first_pricing_unit_id, ) - assert isinstance(result, PricingUnitGet) + assert isinstance(result, RutPricingUnitGet) assert result.current_cost_per_unit == Decimal(15) # Create one more unit @@ -285,16 +306,16 @@ async def test_rpc_pricing_plans_with_units_workflow( data=PricingUnitWithCostCreate( pricing_plan_id=_pricing_plan_id, unit_name="LARGE", - 
unit_extra_info=UnitExtraInfo.model_config["json_schema_extra"]["examples"][ - 0 - ], + unit_extra_info=UnitExtraInfoTier.model_config["json_schema_extra"][ + "examples" + ][0], default=False, specific_info=SpecificInfo(aws_ec2_instances=[]), cost_per_unit=Decimal(20), comment=faker.sentence(), ), ) - assert isinstance(result, PricingUnitGet) + assert isinstance(result, RutPricingUnitGet) assert result _second_pricing_unit_id = result.pricing_unit_id @@ -304,7 +325,7 @@ async def test_rpc_pricing_plans_with_units_workflow( product_name="osparc", pricing_plan_id=_pricing_plan_id, ) - assert isinstance(result, PricingPlanGet) + assert isinstance(result, RutPricingPlanGet) assert result.pricing_units assert len(result.pricing_units) == 2 assert result.pricing_units[0].pricing_unit_id == _first_pricing_unit_id @@ -327,7 +348,7 @@ async def test_rpc_pricing_plans_to_service_workflow( pricing_plan_key=faker.word(), ), ) - assert isinstance(result, PricingPlanGet) + assert isinstance(result, RutPricingPlanGet) _pricing_plan_id = result.pricing_plan_id result = ( @@ -407,3 +428,145 @@ async def test_rpc_pricing_plans_to_service_workflow( ) assert isinstance(result, list) assert len(result) == 3 + + +async def test_rpc_pricing_plans_with_units_workflow__for_licenses( + mocked_redis_server: None, + resource_tracker_setup_db: None, + rpc_client: RabbitMQRPCClient, + faker: Faker, +): + _display_name = faker.word() + result = await pricing_plans.create_pricing_plan( + rpc_client, + data=PricingPlanCreate( + product_name="osparc", + display_name=_display_name, + description=faker.sentence(), + classification=PricingPlanClassification.LICENSE, + pricing_plan_key=faker.word(), + ), + ) + assert isinstance(result, RutPricingPlanGet) + assert result.pricing_units == [] + assert result.display_name == _display_name + _pricing_plan_id = result.pricing_plan_id + + result = await pricing_units.create_pricing_unit( + rpc_client, + product_name="osparc", + data=PricingUnitWithCostCreate( + pricing_plan_id=_pricing_plan_id, + unit_name="VIP MODEL", + unit_extra_info=UnitExtraInfoLicense.model_config["json_schema_extra"][ + "examples" + ][0], + default=True, + specific_info=SpecificInfo(aws_ec2_instances=[]), + cost_per_unit=Decimal(10), + comment=faker.sentence(), + ), + ) + assert isinstance(result, RutPricingUnitGet) + assert result + _first_pricing_unit_id = result.pricing_unit_id + _current_cost_per_unit_id = result.current_cost_per_unit_id + + # Get pricing plan + result = await pricing_plans.get_pricing_plan( + rpc_client, + product_name="osparc", + pricing_plan_id=_pricing_plan_id, + ) + assert isinstance(result, RutPricingPlanGet) + assert result.pricing_units + assert len(result.pricing_units) == 1 + assert result.pricing_units[0].pricing_unit_id == _first_pricing_unit_id + + # Update only pricing unit info without COST update + _unit_name = "1 seat" + result = await pricing_units.update_pricing_unit( + rpc_client, + product_name="osparc", + data=PricingUnitWithCostUpdate( + pricing_plan_id=_pricing_plan_id, + pricing_unit_id=_first_pricing_unit_id, + unit_name=_unit_name, + unit_extra_info=UnitExtraInfoLicense.model_config["json_schema_extra"][ + "examples" + ][0], + default=True, + specific_info=SpecificInfo(aws_ec2_instances=[]), + pricing_unit_cost_update=None, + ), + ) + assert isinstance(result, RutPricingUnitGet) + assert result.unit_name == _unit_name + assert result.current_cost_per_unit == Decimal(10) + assert result.current_cost_per_unit_id == _current_cost_per_unit_id + + # Update pricing unit
with COST update! + result = await pricing_units.update_pricing_unit( + rpc_client, + product_name="osparc", + data=PricingUnitWithCostUpdate( + pricing_plan_id=_pricing_plan_id, + pricing_unit_id=_first_pricing_unit_id, + unit_name=_unit_name, + unit_extra_info=UnitExtraInfoLicense.model_config["json_schema_extra"][ + "examples" + ][0], + default=True, + specific_info=SpecificInfo(aws_ec2_instances=[]), + pricing_unit_cost_update=PricingUnitCostUpdate( + cost_per_unit=Decimal(15), + comment="Comment update", + ), + ), + ) + assert isinstance(result, RutPricingUnitGet) + assert result.unit_name == _unit_name + assert result.current_cost_per_unit == Decimal(15) + assert result.current_cost_per_unit_id != _current_cost_per_unit_id + + # Test get pricing unit + result = await pricing_units.get_pricing_unit( + rpc_client, + product_name="osparc", + pricing_plan_id=_pricing_plan_id, + pricing_unit_id=_first_pricing_unit_id, + ) + assert isinstance(result, RutPricingUnitGet) + assert result.current_cost_per_unit == Decimal(15) + + # Create one more unit + result = await pricing_units.create_pricing_unit( + rpc_client, + product_name="osparc", + data=PricingUnitWithCostCreate( + pricing_plan_id=_pricing_plan_id, + unit_name="5 seats", + unit_extra_info=UnitExtraInfoLicense.model_config["json_schema_extra"][ + "examples" + ][0], + default=False, + specific_info=SpecificInfo(aws_ec2_instances=[]), + cost_per_unit=Decimal(20), + comment=faker.sentence(), + ), + ) + assert isinstance(result, RutPricingUnitGet) + assert result + _second_pricing_unit_id = result.pricing_unit_id + + # Get pricing plan with units + result = await pricing_plans.get_pricing_plan( + rpc_client, + product_name="osparc", + pricing_plan_id=_pricing_plan_id, + ) + assert isinstance(result, RutPricingPlanGet) + assert result.pricing_units + assert len(result.pricing_units) == 2 + assert result.pricing_units[0].pricing_unit_id == _first_pricing_unit_id + assert result.pricing_units[1].pricing_unit_id == _second_pricing_unit_id diff --git a/services/resource-usage-tracker/tests/unit/with_dbs/test_licensed_items_checkouts_db.py b/services/resource-usage-tracker/tests/unit/with_dbs/test_licensed_items_checkouts_db.py index 5f0fc5a1f5b..11a7015e490 100644 --- a/services/resource-usage-tracker/tests/unit/with_dbs/test_licensed_items_checkouts_db.py +++ b/services/resource-usage-tracker/tests/unit/with_dbs/test_licensed_items_checkouts_db.py @@ -71,6 +71,8 @@ async def test_licensed_items_checkouts_db__force_release_license_seats_by_run_i # SETUP _create_license_item_checkout_db_1 = CreateLicensedItemCheckoutDB( licensed_item_id="beb16d18-d57d-44aa-a638-9727fa4a72ef", + key="Duke", + version="1.0.0", wallet_id=_WALLET_ID, user_id=_USER_ID_1, user_email="test@test.com", diff --git a/services/resource-usage-tracker/tests/unit/with_dbs/test_process_rabbitmq_message_triggered_by_listening_with_billing.py b/services/resource-usage-tracker/tests/unit/with_dbs/test_process_rabbitmq_message_triggered_by_listening_with_billing.py index 6b0048edf61..04f6c19d02e 100644 --- a/services/resource-usage-tracker/tests/unit/with_dbs/test_process_rabbitmq_message_triggered_by_listening_with_billing.py +++ b/services/resource-usage-tracker/tests/unit/with_dbs/test_process_rabbitmq_message_triggered_by_listening_with_billing.py @@ -11,7 +11,7 @@ RabbitResourceTrackingStoppedMessage, SimcorePlatformStatus, ) -from models_library.resource_tracker import UnitExtraInfo +from models_library.resource_tracker import UnitExtraInfoTier from servicelib.rabbitmq 
import RabbitMQClient from simcore_postgres_database.models.resource_tracker_credit_transactions import ( resource_tracker_credit_transactions, @@ -58,7 +58,7 @@ def resource_tracker_pricing_tables_db(postgres_db: sa.engine.Engine) -> Iterato resource_tracker_pricing_units.insert().values( pricing_plan_id=1, unit_name="S", - unit_extra_info=UnitExtraInfo.model_config["json_schema_extra"][ + unit_extra_info=UnitExtraInfoTier.model_config["json_schema_extra"][ "examples" ][0], default=False, @@ -85,7 +85,7 @@ def resource_tracker_pricing_tables_db(postgres_db: sa.engine.Engine) -> Iterato resource_tracker_pricing_units.insert().values( pricing_plan_id=1, unit_name="M", - unit_extra_info=UnitExtraInfo.model_config["json_schema_extra"][ + unit_extra_info=UnitExtraInfoTier.model_config["json_schema_extra"][ "examples" ][0], default=True, @@ -112,7 +112,7 @@ def resource_tracker_pricing_tables_db(postgres_db: sa.engine.Engine) -> Iterato resource_tracker_pricing_units.insert().values( pricing_plan_id=1, unit_name="L", - unit_extra_info=UnitExtraInfo.model_config["json_schema_extra"][ + unit_extra_info=UnitExtraInfoTier.model_config["json_schema_extra"][ "examples" ][0], default=False, diff --git a/services/resource-usage-tracker/tests/unit/with_dbs/test_process_rabbitmq_message_with_billing.py b/services/resource-usage-tracker/tests/unit/with_dbs/test_process_rabbitmq_message_with_billing.py index 3dac5fffe1a..bdffb9cec4e 100644 --- a/services/resource-usage-tracker/tests/unit/with_dbs/test_process_rabbitmq_message_with_billing.py +++ b/services/resource-usage-tracker/tests/unit/with_dbs/test_process_rabbitmq_message_with_billing.py @@ -15,7 +15,7 @@ from models_library.resource_tracker import ( CreditClassification, CreditTransactionStatus, - UnitExtraInfo, + UnitExtraInfoTier, ) from pytest_mock.plugin import MockerFixture from servicelib.rabbitmq import RabbitMQClient @@ -73,7 +73,7 @@ def resource_tracker_pricing_tables_db(postgres_db: sa.engine.Engine) -> Iterato resource_tracker_pricing_units.insert().values( pricing_plan_id=1, unit_name="S", - unit_extra_info=UnitExtraInfo.model_config["json_schema_extra"][ + unit_extra_info=UnitExtraInfoTier.model_config["json_schema_extra"][ "examples" ][0], default=False, @@ -100,7 +100,7 @@ def resource_tracker_pricing_tables_db(postgres_db: sa.engine.Engine) -> Iterato resource_tracker_pricing_units.insert().values( pricing_plan_id=1, unit_name="M", - unit_extra_info=UnitExtraInfo.model_config["json_schema_extra"][ + unit_extra_info=UnitExtraInfoTier.model_config["json_schema_extra"][ "examples" ][0], default=True, @@ -127,7 +127,7 @@ def resource_tracker_pricing_tables_db(postgres_db: sa.engine.Engine) -> Iterato resource_tracker_pricing_units.insert().values( pricing_plan_id=1, unit_name="L", - unit_extra_info=UnitExtraInfo.model_config["json_schema_extra"][ + unit_extra_info=UnitExtraInfoTier.model_config["json_schema_extra"][ "examples" ][0], default=False, diff --git a/services/resource-usage-tracker/tests/unit/with_dbs/test_process_rabbitmq_message_with_billing_cost_0.py b/services/resource-usage-tracker/tests/unit/with_dbs/test_process_rabbitmq_message_with_billing_cost_0.py index ccffbc9f42e..6c14cc32e13 100644 --- a/services/resource-usage-tracker/tests/unit/with_dbs/test_process_rabbitmq_message_with_billing_cost_0.py +++ b/services/resource-usage-tracker/tests/unit/with_dbs/test_process_rabbitmq_message_with_billing_cost_0.py @@ -12,7 +12,7 @@ SimcorePlatformStatus, WalletCreditsLimitReachedMessage, ) -from models_library.resource_tracker 
import UnitExtraInfo +from models_library.resource_tracker import UnitExtraInfoTier from pytest_mock.plugin import MockerFixture from servicelib.rabbitmq import RabbitMQClient from simcore_postgres_database.models.resource_tracker_credit_transactions import ( resource_tracker_credit_transactions, @@ -65,7 +65,7 @@ def resource_tracker_pricing_tables_db(postgres_db: sa.engine.Engine) -> Iterato resource_tracker_pricing_units.insert().values( pricing_plan_id=1, unit_name="S", - unit_extra_info=UnitExtraInfo.model_config["json_schema_extra"][ + unit_extra_info=UnitExtraInfoTier.model_config["json_schema_extra"][ "examples" ][0], default=False, diff --git a/services/static-webserver/client/.gitignore b/services/static-webserver/client/.gitignore index e661698d295..64479ebc19f 100644 --- a/services/static-webserver/client/.gitignore +++ b/services/static-webserver/client/.gitignore @@ -14,6 +14,3 @@ source/resource/iconfont/material # generator outputs /api/ /test/ - -# translations for the moment ignored -*.po diff --git a/services/static-webserver/client/Makefile b/services/static-webserver/client/Makefile index 9e01133e058..89fafcca1ae 100644 --- a/services/static-webserver/client/Makefile +++ b/services/static-webserver/client/Makefile @@ -88,6 +88,17 @@ serve: compile ## serves site compiled in image in 127.0.0.1:8080 docker run --rm -p 8080:8080 $(docker_image) $(qx_serve) --target=build +# qx translate -------------------------- + +define qx_translate_extract = + qx compile --update-po-files +endef + +.PHONY: translate-extract +translate-extract: ## the generated .po files go to source/translation https://qooxdoo.org/documentation/v7.8/#/development/howto/internationalization?id=translation + # qx compile --update-po-files + $(docker_compose) run $(if $(detached),--detach --name=$(detached),--rm) qooxdoo-kit $(qx_translate_extract) + # misc -------------------------- .PHONY: shell diff --git a/services/static-webserver/client/source/class/osparc/Application.js b/services/static-webserver/client/source/class/osparc/Application.js index 463ddbd3492..fc167e79592 100644 --- a/services/static-webserver/client/source/class/osparc/Application.js +++ b/services/static-webserver/client/source/class/osparc/Application.js @@ -65,7 +65,6 @@ qx.Class.define("osparc.Application", { // libs osparc.wrapper.IntlTelInput.getInstance().init(); - osparc.wrapper.Three.getInstance().init(); // trackers osparc.announcement.Tracker.getInstance().startTracker(); @@ -75,7 +74,7 @@ qx.Class.define("osparc.Application", { const webSocket = osparc.wrapper.WebSocket.getInstance(); webSocket.addListener("connect", () => osparc.WatchDog.getInstance().setOnline(true)); webSocket.addListener("disconnect", () => osparc.WatchDog.getInstance().setOnline(false)); - webSocket.addListener("logout", () => this.logout(qx.locale.Manager.tr("You were logged out"))); + webSocket.addListener("logout", () => this.logout(qx.locale.Manager.tr("You have been logged out"))); // alert the users that they are about to navigate away // from osparc.
unfortunately it is not possible // to provide our own message here @@ -372,7 +371,7 @@ qx.Class.define("osparc.Application", { __checkNewRelease: function() { if (osparc.NewRelease.firstTimeISeeThisFrontend()) { const newRelease = new osparc.NewRelease(); - const title = this.tr("New Release"); + const title = this.tr("New Version Released"); const win = osparc.ui.window.Window.popUpInWindow(newRelease, title, 350, 135).set({ clickAwayClose: false, resizable: false, @@ -461,7 +460,7 @@ qx.Class.define("osparc.Application", { if (osparc.auth.Data.getInstance().isGuest()) { const msg = osparc.utils.Utils.createAccountMessage(); - osparc.FlashMessenger.getInstance().logAs(msg, "WARNING"); + osparc.FlashMessenger.logAs(msg, "WARNING"); } else if (profile["expirationDate"]) { const now = new Date(); const today = new Date(now.toISOString().slice(0, 10)); @@ -469,7 +468,7 @@ qx.Class.define("osparc.Application", { const daysToExpiration = osparc.utils.Utils.daysBetween(today, expirationDay); if (daysToExpiration < 7) { const msg = osparc.utils.Utils.expirationMessage(daysToExpiration); - osparc.FlashMessenger.getInstance().logAs(msg, "WARNING"); + osparc.FlashMessenger.logAs(msg, "WARNING"); } } @@ -562,15 +561,15 @@ qx.Class.define("osparc.Application", { __loggedOut: function(forcedReason) { if (forcedReason) { - osparc.FlashMessenger.getInstance().logAs(forcedReason, "WARNING", 0); + osparc.FlashMessenger.logAs(forcedReason, "WARNING", 0); } else { - osparc.FlashMessenger.getInstance().logAs(this.tr("You are logged out"), "INFO"); + osparc.FlashMessenger.logAs(this.tr("You have been logged out"), "INFO"); } this.__closeAllAndToLoginPage(); }, __closeAllAndToLoginPage: function() { - osparc.data.PollTasks.getInstance().removeTasks(); + osparc.store.PollTasks.getInstance().removeTasks(); osparc.MaintenanceTracker.getInstance().stopTracker(); osparc.CookieExpirationTracker.getInstance().stopTracker(); osparc.NewUITracker.getInstance().stopTracker(); diff --git a/services/static-webserver/client/source/class/osparc/CookieExpirationTracker.js b/services/static-webserver/client/source/class/osparc/CookieExpirationTracker.js index 92282bde074..f3584c4c39d 100644 --- a/services/static-webserver/client/source/class/osparc/CookieExpirationTracker.js +++ b/services/static-webserver/client/source/class/osparc/CookieExpirationTracker.js @@ -103,7 +103,7 @@ qx.Class.define("osparc.CookieExpirationTracker", { const timeout = osparc.utils.Utils.formatSeconds(timeoutSec); const text = qx.locale.Manager.tr(`Your session will expire in ${timeout}.
Please log out and log in again.`); if (this.__message === null) { - this.__message = osparc.FlashMessenger.getInstance().logAs(text, "WARNING", timeoutSec*1000); + this.__message = osparc.FlashMessenger.logAs(text, "WARNING", timeoutSec*1000); this.__message.getChildControl("closebutton").exclude(); } else { this.__message.setMessage(text); @@ -112,7 +112,7 @@ qx.Class.define("osparc.CookieExpirationTracker", { // /FLASH MESSAGE // __logoutUser: function() { - const reason = qx.locale.Manager.tr("Session expired"); + const reason = qx.locale.Manager.tr("Your session has expired"); qx.core.Init.getApplication().logout(reason); } } diff --git a/services/static-webserver/client/source/class/osparc/DownloadLinkTracker.js b/services/static-webserver/client/source/class/osparc/DownloadLinkTracker.js index 27128e42ad0..af259e97a8c 100644 --- a/services/static-webserver/client/source/class/osparc/DownloadLinkTracker.js +++ b/services/static-webserver/client/source/class/osparc/DownloadLinkTracker.js @@ -33,9 +33,10 @@ qx.Class.define("osparc.DownloadLinkTracker", { downloadAnchorNode.setAttribute("href", url); downloadAnchorNode.setAttribute("download", fileName); downloadAnchorNode.setAttribute("osparc", "downloadFile"); + document.body.appendChild(downloadAnchorNode); this.setDownloading(true); downloadAnchorNode.click(); - downloadAnchorNode.remove(); + document.body.removeChild(downloadAnchorNode); // This is needed to make it work in Firefox setTimeout(() => this.setDownloading(false), 100); } diff --git a/services/static-webserver/client/source/class/osparc/FlashMessenger.js b/services/static-webserver/client/source/class/osparc/FlashMessenger.js index eb0dfe7cab3..d9c886fd506 100644 --- a/services/static-webserver/client/source/class/osparc/FlashMessenger.js +++ b/services/static-webserver/client/source/class/osparc/FlashMessenger.js @@ -28,7 +28,7 @@ * Here is a little example of how to use the class. * *
- *   osparc.FlashMessenger.getInstance().logAs(log);
+ *   osparc.FlashMessenger.logAs(log);
  * 
*/ @@ -55,9 +55,67 @@ qx.Class.define("osparc.FlashMessenger", { statics: { MAX_DISPLAYED: 3, + + extractMessage: function(input, defaultMessage = "") { + const isValidString = val => { + return ( + typeof val === "string" || + (osparc.utils.Utils.isObject(val) && ("basename" in val) && (val.basename === "LocalizedString")) + ); + } + if (input) { + if (isValidString(input)) { + return input; + } else if (osparc.utils.Utils.isObject(input) && "message" in input) { + if (isValidString(input["message"])) { + return input["message"]; + } else if (osparc.utils.Utils.isObject(input["message"]) && "message" in input["message"] && isValidString(input["message"]["message"])) { + return input["message"]["message"]; + } + } + } + return defaultMessage; + }, + logAs: function(message, level, duration) { return this.getInstance().logAs(message, level, duration); - } + }, + + logError: function(error, defaultMessage = qx.locale.Manager.tr("Oops... something went wrong"), duration = null) { + if (error) { + console.error(error); + } + const msg = this.extractMessage(error, defaultMessage); + const flashMessage = this.getInstance().logAs(msg, "ERROR", duration); + if (error && error["supportId"]) { + flashMessage.addWidget(this.__createCopyOECWidget(msg, error["supportId"])); + flashMessage.setDuration(flashMessage.getDuration()*2); + } + return flashMessage; + }, + + __createCopyOECWidget: function(message, supportId) { + const errorLabel = new qx.ui.basic.Atom().set({ + label: supportId, + icon: "@FontAwesome5Solid/copy/10", + iconPosition: "right", + gap: 8, + cursor: "pointer", + alignX: "center", + allowGrowX: false, + }); + errorLabel.addListener("tap", () => { + const dataToClipboard = { + message, + supportId, + timestamp: new Date().toString(), + url: window.location.href, + studyId: osparc.store.Store.getInstance().getCurrentStudy() || "", + } + osparc.utils.Utils.copyTextToClipboard(JSON.stringify(dataToClipboard)); + }); + return errorLabel; + }, }, members: { @@ -68,7 +126,7 @@ qx.Class.define("osparc.FlashMessenger", { /** * Public function to log a FlashMessage to the user. * - * @param {String} message Message that the message will show. + * @param {String || Object} message Message (or Object containing the message) that the message will show. * @param {String="INFO","DEBUG","WARNING","ERROR"} level Level of the warning. The color of the badge will change accordingly. * @param {Number} duration */ @@ -81,9 +139,7 @@ qx.Class.define("osparc.FlashMessenger", { }, log: function(logMessage) { - const message = osparc.utils.Utils.isObject(logMessage.message) && "message" in logMessage.message ? 
- logMessage.message.message : - logMessage.message; + const message = this.self().extractMessage(logMessage); const level = logMessage.level.toUpperCase(); // "DEBUG", "INFO", "WARNING", "ERROR" @@ -115,14 +171,9 @@ qx.Class.define("osparc.FlashMessenger", { } this.__displayedMessagesCount++; - let duration = flashMessage.getDuration(); - if (duration === null) { - const message = flashMessage.getMessage(); - const wordCount = message.split(" ").length; - duration = Math.max(5500, wordCount*500); // An average reader takes 300ms to read a word - } + const duration = flashMessage.getDuration(); if (duration !== 0) { - qx.event.Timer.once(() => this.removeMessage(flashMessage), this, duration); + flashMessage.timer = setTimeout(() => this.removeMessage(flashMessage), duration); } }, diff --git a/services/static-webserver/client/source/class/osparc/MaintenanceTracker.js b/services/static-webserver/client/source/class/osparc/MaintenanceTracker.js index d6aa69a3084..4a8d552deff 100644 --- a/services/static-webserver/client/source/class/osparc/MaintenanceTracker.js +++ b/services/static-webserver/client/source/class/osparc/MaintenanceTracker.js @@ -166,7 +166,7 @@ qx.Class.define("osparc.MaintenanceTracker", { end: null, reason: null }); - const reason = qx.locale.Manager.tr("We are under maintenance. Please check back later"); + const reason = qx.locale.Manager.tr("The service is under maintenance. Please check back later"); qx.core.Init.getApplication().logout(reason); }, diff --git a/services/static-webserver/client/source/class/osparc/NewRelease.js b/services/static-webserver/client/source/class/osparc/NewRelease.js index bac9d1efb25..939c4509234 100644 --- a/services/static-webserver/client/source/class/osparc/NewRelease.js +++ b/services/static-webserver/client/source/class/osparc/NewRelease.js @@ -70,11 +70,11 @@ qx.Class.define("osparc.NewRelease", { }); this._add(introLabel); - const rData = osparc.store.StaticInfo.getInstance().getReleaseData(); - const url = rData["url"] || osparc.utils.LibVersions.getVcsRefUrl(); + const releaseTag = osparc.utils.Utils.getReleaseTag(); + const releaseLink = osparc.utils.Utils.getReleaseLink(); const linkLabel = new osparc.ui.basic.LinkLabel().set({ - value: this.tr("What's new"), - url, + value: this.tr("What's new in ") + releaseTag, + url: releaseLink, font: "link-label-14" }); this._add(linkLabel); diff --git a/services/static-webserver/client/source/class/osparc/NewUITracker.js b/services/static-webserver/client/source/class/osparc/NewUITracker.js index c85fb3f9390..3955b4ef2b5 100644 --- a/services/static-webserver/client/source/class/osparc/NewUITracker.js +++ b/services/static-webserver/client/source/class/osparc/NewUITracker.js @@ -36,7 +36,7 @@ qx.Class.define("osparc.NewUITracker", { msg += "
"; msg += qx.locale.Manager.tr("Click the Reload button to get the latest features."); // permanent message - const flashMessage = osparc.FlashMessenger.getInstance().logAs(msg, "INFO", 0).set({ + const flashMessage = osparc.FlashMessenger.logAs(msg, "INFO", 0).set({ maxWidth: 500 }); const reloadButton = osparc.utils.Utils.reloadNoCacheButton(); diff --git a/services/static-webserver/client/source/class/osparc/Preferences.js b/services/static-webserver/client/source/class/osparc/Preferences.js index 6d5dea18b5d..b2c6977853d 100644 --- a/services/static-webserver/client/source/class/osparc/Preferences.js +++ b/services/static-webserver/client/source/class/osparc/Preferences.js @@ -173,8 +173,7 @@ qx.Class.define("osparc.Preferences", { osparc.Preferences.patchPreference(preferenceId, newValue) .then(() => preferencesSettings.set(preferenceId, newValue)) .catch(err => { - console.error(err); - osparc.FlashMessenger.logAs(err.message, "ERROR"); + osparc.FlashMessenger.logError(err); preferenceField.setValue(oldValue); }) .finally(() => preferenceField.setEnabled(true)); @@ -208,10 +207,7 @@ qx.Class.define("osparc.Preferences", { wallets.forEach(wallet => wallet.setPreferredWallet(wallet.getWalletId() === walletId)); this.setPreferredWalletId(walletId); }) - .catch(err => { - console.error(err); - osparc.FlashMessenger.logAs(err.message, "ERROR"); - }); + .catch(err => osparc.FlashMessenger.logError(err)); }, __patchPreference: function(value, _, propName) { diff --git a/services/static-webserver/client/source/class/osparc/TooSmallDialog.js b/services/static-webserver/client/source/class/osparc/TooSmallDialog.js index b60219820f6..16b2e679118 100644 --- a/services/static-webserver/client/source/class/osparc/TooSmallDialog.js +++ b/services/static-webserver/client/source/class/osparc/TooSmallDialog.js @@ -19,7 +19,7 @@ qx.Class.define("osparc.TooSmallDialog", { extend: osparc.ui.window.SingletonWindow, construct: function() { - this.base(arguments, "too-small-logout", this.tr("Window too small")); + this.base(arguments, "too-small-logout", this.tr("Window size too small")); this.set({ layout: new qx.ui.layout.VBox(10), diff --git a/services/static-webserver/client/source/class/osparc/WatchDog.js b/services/static-webserver/client/source/class/osparc/WatchDog.js index 2eacf138d59..61817320ce0 100644 --- a/services/static-webserver/client/source/class/osparc/WatchDog.js +++ b/services/static-webserver/client/source/class/osparc/WatchDog.js @@ -61,7 +61,7 @@ qx.Class.define("osparc.WatchDog", { init: false, nullable: false, event: "changeOnline", - apply: "_applyOnline" + apply: "__applyOnline" }, heartbeatInterval: { @@ -75,8 +75,8 @@ qx.Class.define("osparc.WatchDog", { members: { __clientHeartbeatWWPinger: null, - _applyOnline: function(value) { - let logo = osparc.navigation.LogoOnOff.getInstance(); + __applyOnline: function(value) { + const logo = osparc.navigation.LogoOnOff.getInstance(); if (logo) { logo.setOnline(value); } diff --git a/services/static-webserver/client/source/class/osparc/activityManager/ActivityManager.js b/services/static-webserver/client/source/class/osparc/activityManager/ActivityManager.js index b313750f99a..05ce9b1ca28 100644 --- a/services/static-webserver/client/source/class/osparc/activityManager/ActivityManager.js +++ b/services/static-webserver/client/source/class/osparc/activityManager/ActivityManager.js @@ -119,15 +119,15 @@ qx.Class.define("osparc.activityManager.ActivityManager", { /* const runButton = new qx.ui.toolbar.Button(this.tr("Run"), 
"@FontAwesome5Solid/play/14"); actionsPart.add(runButton); - runButton.addListener("execute", () => osparc.FlashMessenger.getInstance().logAs("Not implemented")); + runButton.addListener("execute", () => osparc.FlashMessenger.logAs("Not implemented")); const stopButton = new qx.ui.toolbar.Button(this.tr("Stop"), "@FontAwesome5Solid/stop-circle/14"); actionsPart.add(stopButton); - stopButton.addListener("execute", () => osparc.FlashMessenger.getInstance().logAs("Not implemented")); + stopButton.addListener("execute", () => osparc.FlashMessenger.logAs("Not implemented")); const infoButton = new qx.ui.toolbar.Button(this.tr("Info"), "@FontAwesome5Solid/info/14"); actionsPart.add(infoButton); - infoButton.addListener("execute", () => osparc.FlashMessenger.getInstance().logAs("Not implemented")); + infoButton.addListener("execute", () => osparc.FlashMessenger.logAs("Not implemented")); [runButton, stopButton, infoButton].map(button => this.__tree.bind("selected", button, "enabled", { converter: data => data.length > 0 diff --git a/services/static-webserver/client/source/class/osparc/activityManager/ActivityTree.js b/services/static-webserver/client/source/class/osparc/activityManager/ActivityTree.js index 2baea59d4d9..fc1eb0e8011 100644 --- a/services/static-webserver/client/source/class/osparc/activityManager/ActivityTree.js +++ b/services/static-webserver/client/source/class/osparc/activityManager/ActivityTree.js @@ -241,9 +241,7 @@ qx.Class.define("osparc.activityManager.ActivityTree", { } this.fireEvent("treeUpdated"); }) - .catch(e => { - console.error(e); - }) + .catch(err => console.error(err)) .then(() => { // Give a 2 seconds delay setTimeout(() => { diff --git a/services/static-webserver/client/source/class/osparc/admin/AdminCenter.js b/services/static-webserver/client/source/class/osparc/admin/AdminCenter.js index 36329e85b97..bd821027575 100644 --- a/services/static-webserver/client/source/class/osparc/admin/AdminCenter.js +++ b/services/static-webserver/client/source/class/osparc/admin/AdminCenter.js @@ -24,7 +24,7 @@ qx.Class.define("osparc.admin.AdminCenter", { const miniProfile = osparc.desktop.account.MyAccount.createMiniProfileView().set({ paddingRight: 10 }); - this.addWidgetOnTopOfTheTabs(miniProfile); + this.addWidgetToTabs(miniProfile); this.__addPricingPlansPage(); this.__addMaintenancePage(); diff --git a/services/static-webserver/client/source/class/osparc/admin/Announcements.js b/services/static-webserver/client/source/class/osparc/admin/Announcements.js index 84ca9f99079..85576321eeb 100644 --- a/services/static-webserver/client/source/class/osparc/admin/Announcements.js +++ b/services/static-webserver/client/source/class/osparc/admin/Announcements.js @@ -115,7 +115,7 @@ qx.Class.define("osparc.admin.Announcements", { } if (widgets.length === 0) { const msg = "Select at least one widget"; - osparc.FlashMessenger.getInstance().logAs(msg, "WARNING"); + osparc.FlashMessenger.logAs(msg, "WARNING"); } const announcementData = { "id": osparc.utils.Utils.uuidV4(), diff --git a/services/static-webserver/client/source/class/osparc/announcement/Tracker.js b/services/static-webserver/client/source/class/osparc/announcement/Tracker.js index 368fbd6dd00..42c5790acff 100644 --- a/services/static-webserver/client/source/class/osparc/announcement/Tracker.js +++ b/services/static-webserver/client/source/class/osparc/announcement/Tracker.js @@ -26,7 +26,7 @@ qx.Class.define("osparc.announcement.Tracker", { members: { __checkInterval: null, - checkAnnouncements: async function() { + 
checkAnnouncements: function() { osparc.data.Resources.get("announcements") .then(announcements => { osparc.announcement.AnnouncementUIFactory.getInstance().setAnnouncementsData((announcements && announcements.length) ? announcements : []); diff --git a/services/static-webserver/client/source/class/osparc/auth/Data.js b/services/static-webserver/client/source/class/osparc/auth/Data.js index 2a4b27a9646..cda4da711d4 100644 --- a/services/static-webserver/client/source/class/osparc/auth/Data.js +++ b/services/static-webserver/client/source/class/osparc/auth/Data.js @@ -26,7 +26,7 @@ qx.Class.define("osparc.auth.Data", { properties: { /** - * Basic authentification with a token + * Basic authentication with a token */ auth: { init: null, diff --git a/services/static-webserver/client/source/class/osparc/auth/LoginPageS4L.js b/services/static-webserver/client/source/class/osparc/auth/LoginPageS4L.js index 1ff204745d9..bc53f7f9e67 100644 --- a/services/static-webserver/client/source/class/osparc/auth/LoginPageS4L.js +++ b/services/static-webserver/client/source/class/osparc/auth/LoginPageS4L.js @@ -34,9 +34,9 @@ qx.Class.define("osparc.auth.LoginPageS4L", { _getBackgroundImage: function() { let backgroundImage = ""; - const defaultBG = `url(${osparc.product.Utils.getProductBackgroundUrl("Sim4Life-head-default.png")}), url(${osparc.product.Utils.getProductBackgroundUrl("clouds_11.png")})`; - const liteBG = `url(${osparc.product.Utils.getProductBackgroundUrl("Sim4Life-head-lite.png")}), url(${osparc.product.Utils.getProductBackgroundUrl("clouds_11.png")})`; - const academyBG = `url(${osparc.product.Utils.getProductBackgroundUrl("Sim4Life-head-academy.png")}), url(${osparc.product.Utils.getProductBackgroundUrl("clouds_11.png")})`; + const defaultBG = `url(${osparc.product.Utils.getBackgroundUrl("Sim4Life-head-default.png")}), url(${osparc.product.Utils.getBackgroundUrl("clouds_11.png")})`; + const liteBG = `url(${osparc.product.Utils.getBackgroundUrl("Sim4Life-head-lite.png")}), url(${osparc.product.Utils.getBackgroundUrl("clouds_11.png")})`; + const academyBG = `url(${osparc.product.Utils.getBackgroundUrl("Sim4Life-head-academy.png")}), url(${osparc.product.Utils.getBackgroundUrl("clouds_11.png")})`; switch (osparc.product.Utils.getProductName()) { case "s4llite": diff --git a/services/static-webserver/client/source/class/osparc/auth/LoginWithDecorators.js b/services/static-webserver/client/source/class/osparc/auth/LoginWithDecorators.js index 2901520fe28..68e67bab29c 100644 --- a/services/static-webserver/client/source/class/osparc/auth/LoginWithDecorators.js +++ b/services/static-webserver/client/source/class/osparc/auth/LoginWithDecorators.js @@ -225,7 +225,7 @@ qx.Class.define("osparc.auth.LoginWithDecorators", { pages.setSelection([resetPassword]); } } else if (urlFragment.params && urlFragment.params.registered) { - osparc.FlashMessenger.getInstance().logAs(this.tr("Your account has been created.
You can now use your credentials to login.")); + osparc.FlashMessenger.logAs(this.tr("Your account has been created.
You can now use your credentials to login.")); } login.addListener("toRegister", () => { diff --git a/services/static-webserver/client/source/class/osparc/auth/Manager.js b/services/static-webserver/client/source/class/osparc/auth/Manager.js index fdd082cff96..85c738cc66b 100644 --- a/services/static-webserver/client/source/class/osparc/auth/Manager.js +++ b/services/static-webserver/client/source/class/osparc/auth/Manager.js @@ -176,13 +176,13 @@ qx.Class.define("osparc.auth.Manager", { } else { const resp = JSON.parse(xhr.responseText); if (resp.error == null) { - reject(this.tr("Login failed")); + reject(this.tr("Unsuccessful Login")); } else { reject(resp.error.message); } } }; - xhr.onerror = () => reject(this.tr("Login failed")); + xhr.onerror = () => reject(this.tr("Unsuccessful Login")); xhr.open("POST", url, true); xhr.setRequestHeader("Content-Type", "application/json"); xhr.send(JSON.stringify(params)); diff --git a/services/static-webserver/client/source/class/osparc/auth/ui/Login2FAValidationCodeView.js b/services/static-webserver/client/source/class/osparc/auth/ui/Login2FAValidationCodeView.js index 27aaf2e6f61..ca2beb0757a 100644 --- a/services/static-webserver/client/source/class/osparc/auth/ui/Login2FAValidationCodeView.js +++ b/services/static-webserver/client/source/class/osparc/auth/ui/Login2FAValidationCodeView.js @@ -97,7 +97,7 @@ qx.Class.define("osparc.auth.ui.Login2FAValidationCodeView", { alignY: "middle" })); const resendCodeDesc = new qx.ui.basic.Label().set({ - value: this.tr("Didn't receive the code? Resend code") + value: this.tr("Didn't receive the code? Click to resend") }); resendLayout.add(resendCodeDesc); @@ -125,7 +125,7 @@ qx.Class.define("osparc.auth.ui.Login2FAValidationCodeView", { }); this.restartSMSButton(retryAfter); }) - .catch(err => osparc.FlashMessenger.logAs(err.message, "ERROR")) + .catch(err => osparc.FlashMessenger.logError(err)) .finally(() => resendCodeSMSBtn.setFetching(false)); }, this); @@ -145,7 +145,7 @@ qx.Class.define("osparc.auth.ui.Login2FAValidationCodeView", { }); this.restartEmailButton(retryAfter); }) - .catch(err => osparc.FlashMessenger.logAs(err.message, "ERROR")) + .catch(err => osparc.FlashMessenger.logError(err)) .finally(() => resendCodeEmailBtn.setFetching(false)); }, this); this.add(resendLayout); @@ -188,7 +188,7 @@ qx.Class.define("osparc.auth.ui.Login2FAValidationCodeView", { msg = String(msg) || this.tr("Invalid code"); validationCodeTF.setInvalidMessage(msg); - osparc.FlashMessenger.getInstance().logAs(msg, "ERROR"); + osparc.FlashMessenger.logError(msg); }; if (this._form.validate()) { diff --git a/services/static-webserver/client/source/class/osparc/auth/ui/LoginView.js b/services/static-webserver/client/source/class/osparc/auth/ui/LoginView.js index 5143bc8dabb..8610da065e0 100644 --- a/services/static-webserver/client/source/class/osparc/auth/ui/LoginView.js +++ b/services/static-webserver/client/source/class/osparc/auth/ui/LoginView.js @@ -235,7 +235,7 @@ qx.Class.define("osparc.auth.ui.LoginView", { const twoFactorAuthCbk = (nextStep, message, retryAfter) => { this.__loginBtn.setFetching(false); - osparc.FlashMessenger.getInstance().logAs(message, "INFO"); + osparc.FlashMessenger.logAs(message, "INFO"); this.fireDataEvent("to2FAValidationCode", { userEmail: email.getValue(), nextStep, @@ -251,7 +251,7 @@ qx.Class.define("osparc.auth.ui.LoginView", { const failFun = msg => { this.__loginBtn.setFetching(false); // TODO: can get field info from response here - msg = String(msg) || this.tr("Typed an invalid 
email or password"); + msg = String(msg) || this.tr("email or password don't look correct"); [email, pass].forEach(item => { item.set({ invalidMessage: msg, @@ -259,7 +259,7 @@ qx.Class.define("osparc.auth.ui.LoginView", { }); }); - osparc.FlashMessenger.getInstance().logAs(msg, "ERROR"); + osparc.FlashMessenger.logError(msg); }; const manager = osparc.auth.Manager.getInstance(); diff --git a/services/static-webserver/client/source/class/osparc/auth/ui/RegistrationView.js b/services/static-webserver/client/source/class/osparc/auth/ui/RegistrationView.js index 370f892b902..97749ea860e 100644 --- a/services/static-webserver/client/source/class/osparc/auth/ui/RegistrationView.js +++ b/services/static-webserver/client/source/class/osparc/auth/ui/RegistrationView.js @@ -74,10 +74,7 @@ qx.Class.define("osparc.auth.ui.RegistrationView", { }); } }) - .catch(err => { - console.error(err); - osparc.FlashMessenger.logAs(err.message, "ERROR"); - }); + .catch(err => osparc.FlashMessenger.logError(err)); } // validation @@ -137,10 +134,7 @@ qx.Class.define("osparc.auth.ui.RegistrationView", { this.fireDataEvent("done", log.message); osparc.FlashMessenger.getInstance().log(log); }) - .catch(err => { - const msg = err.message || this.tr("Cannot register user"); - osparc.FlashMessenger.getInstance().logAs(msg, "ERROR"); - }) + .catch(err => osparc.FlashMessenger.logError(err, this.tr("Cannot register user"))) .finally(() => submitButton.setFetching(false)); }, diff --git a/services/static-webserver/client/source/class/osparc/auth/ui/RequestAccount.js b/services/static-webserver/client/source/class/osparc/auth/ui/RequestAccount.js index 49140f0630d..8ab05f16cd3 100644 --- a/services/static-webserver/client/source/class/osparc/auth/ui/RequestAccount.js +++ b/services/static-webserver/client/source/class/osparc/auth/ui/RequestAccount.js @@ -408,13 +408,12 @@ qx.Class.define("osparc.auth.ui.RequestAccount", { }; osparc.data.Resources.fetch("auth", "postRequestAccount", params) .then(() => { - const msg = this.tr("The request is being processed, you will hear from us in the coming hours"); - osparc.FlashMessenger.getInstance().logAs(msg, "INFO"); + const msg = this.tr("Your request is being processed. 
You will hear from us soon"); + osparc.FlashMessenger.logAs(msg, "INFO"); this.fireDataEvent("done"); }) .catch(err => { - console.error(err); - osparc.FlashMessenger.logAs(err.message, "ERROR"); + osparc.FlashMessenger.logError(err); this.__restartCaptcha(); }); }, diff --git a/services/static-webserver/client/source/class/osparc/auth/ui/ResetPassRequestView.js b/services/static-webserver/client/source/class/osparc/auth/ui/ResetPassRequestView.js index 89f8bafc40b..096ba9967a6 100644 --- a/services/static-webserver/client/source/class/osparc/auth/ui/ResetPassRequestView.js +++ b/services/static-webserver/client/source/class/osparc/auth/ui/ResetPassRequestView.js @@ -90,9 +90,8 @@ qx.Class.define("osparc.auth.ui.ResetPassRequestView", { osparc.FlashMessenger.getInstance().log(log); }; - const failFun = msg => { - msg = msg || this.tr("Could not request password reset"); - osparc.FlashMessenger.getInstance().logAs(msg, "ERROR"); + const failFun = err => { + osparc.FlashMessenger.logError(err, this.tr("Could not request password reset")); }; manager.resetPasswordRequest(email.getValue(), successFun, failFun, this); diff --git a/services/static-webserver/client/source/class/osparc/auth/ui/ResetPassView.js b/services/static-webserver/client/source/class/osparc/auth/ui/ResetPassView.js index 7d47d6b14f3..196a0364821 100644 --- a/services/static-webserver/client/source/class/osparc/auth/ui/ResetPassView.js +++ b/services/static-webserver/client/source/class/osparc/auth/ui/ResetPassView.js @@ -102,9 +102,8 @@ qx.Class.define("osparc.auth.ui.ResetPassView", { osparc.FlashMessenger.getInstance().log(log); }; - const failFun = msg => { - msg = msg || this.tr("Could not reset password"); - osparc.FlashMessenger.getInstance().logAs(msg, "ERROR"); + const failFun = err => { + osparc.FlashMessenger.logError(err, this.tr("Could not reset password")); }; const manager = osparc.auth.Manager.getInstance(); diff --git a/services/static-webserver/client/source/class/osparc/auth/ui/VerifyPhoneNumberView.js b/services/static-webserver/client/source/class/osparc/auth/ui/VerifyPhoneNumberView.js index ea57b141db2..65c5996f1f8 100644 --- a/services/static-webserver/client/source/class/osparc/auth/ui/VerifyPhoneNumberView.js +++ b/services/static-webserver/client/source/class/osparc/auth/ui/VerifyPhoneNumberView.js @@ -61,7 +61,7 @@ qx.Class.define("osparc.auth.ui.VerifyPhoneNumberView", { this.add(verificationInfoTitle); const verificationInfoDesc = new qx.ui.basic.Label().set({ - value: this.tr("We will send you a text message to your mobile phone to authenticate you each time you log in."), + value: this.tr("A text message will be sent to your mobile phone for authentication each time you log in."), rich: true, wrap: true }); @@ -138,7 +138,7 @@ qx.Class.define("osparc.auth.ui.VerifyPhoneNumberView", { this.__enableEnterCommand(this.__validateCodeBtn); }) .catch(err => { - osparc.FlashMessenger.logAs(err.message, "ERROR"); + osparc.FlashMessenger.logError(err); this.__verifyPhoneNumberBtn.setFetching(false); this.__itiInput.setEnabled(true); }); @@ -157,13 +157,13 @@ qx.Class.define("osparc.auth.ui.VerifyPhoneNumberView", { this.fireDataEvent("done", log.message); }; - const failFun = msg => { - osparc.FlashMessenger.getInstance().logAs(msg, "ERROR"); + const failFun = err => { + osparc.FlashMessenger.logError(err); this.__validateCodeBtn.setFetching(false); // TODO: can get field info from response here - msg = String(msg) || this.tr("Invalid code"); + err = String(err) || this.tr("Invalid code"); 
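Note on the recurring refactor in the surrounding hunks: ad-hoc calls like osparc.FlashMessenger.getInstance().logAs(err.message, "ERROR") are being replaced with a static osparc.FlashMessenger.logError(err), which optionally takes a fallback message (see the RegistrationView hunk above). The helper itself is not part of this diff; the following is only a sketch of the shape the call sites imply, with the member name taken from the calls and the default message assumed:

    // Sketch only, not part of this patch: a static on osparc.FlashMessenger.
    // It accepts an Error, a localized string or a plain string, plus an optional
    // fallback, and funnels everything into the existing logAs(msg, "ERROR").
    logError: function(err, fallbackMsg) {
      console.error(err); // keep the console trace the old call sites emitted
      let msg = fallbackMsg || qx.locale.Manager.tr("Oops... something went wrong");
      if (err && err.message) {
        msg = err.message;
      } else if (typeof err === "string" && err) {
        msg = err;
      }
      return this.logAs(msg, "ERROR");
    }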
this.__validateCodeField.set({ - invalidMessage: msg, + invalidMessage: err, valid: false }); }; @@ -185,7 +185,7 @@ qx.Class.define("osparc.auth.ui.VerifyPhoneNumberView", { retryAfter }); }) - .catch(err => osparc.FlashMessenger.logAs(err.message, "ERROR")) + .catch(err => osparc.FlashMessenger.logError(err)) .finally(() => this.__sendViaEmail.setFetching(false)); }, diff --git a/services/static-webserver/client/source/class/osparc/dashboard/CardBase.js b/services/static-webserver/client/source/class/osparc/dashboard/CardBase.js index 6d74de34376..fc5c41c1999 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/CardBase.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/CardBase.js @@ -55,8 +55,9 @@ qx.Class.define("osparc.dashboard.CardBase", { MODE_APP: "@FontAwesome5Solid/desktop/13", NEW_ICON: "@FontAwesome5Solid/plus/", LOADING_ICON: "@FontAwesome5Solid/circle-notch/", - // Get the default thumbnail for each product else add the image and extension osparc.product.Utils.getProductThumbUrl(Thumbnail-01.png) - PRODUCT_ICON: osparc.product.Utils.getProductThumbUrl(), + PRODUCT_ICON: osparc.product.Utils.getIconUrl(), + // Get the default thumbnail for each product else add the image and extension osparc.product.Utils.getThumbnailUrl(Thumbnail-01.png) + PRODUCT_THUMBNAIL: osparc.product.Utils.getThumbnailUrl(), CARD_PRIORITY: { NEW: 0, @@ -65,6 +66,23 @@ qx.Class.define("osparc.dashboard.CardBase", { LOADER: 3 }, + ICON_SIZE: 32, + + createCardIcon: function() { + const iconSize = osparc.dashboard.CardBase.ICON_SIZE; + const icon = new osparc.ui.basic.Thumbnail(null, iconSize, iconSize).set({ + minHeight: iconSize, + minWidth: iconSize, + }); + icon.getChildControl("image").set({ + anonymous: true, + decorator: "rounded", + minWidth: iconSize, + minHeight: iconSize, + }); + return icon + }, + createTSRLayout: function() { const layout = new qx.ui.container.Composite(new qx.ui.layout.HBox(2).set({ alignY: "middle" @@ -269,6 +287,13 @@ qx.Class.define("osparc.dashboard.CardBase", { apply: "_applyIcon", }, + thumbnail: { + check: "String", + init: null, + nullable: true, + apply: "_applyThumbnail", + }, + resourceData: { check: "Object", nullable: false, @@ -348,11 +373,18 @@ qx.Class.define("osparc.dashboard.CardBase", { workbench: { check: "Object", nullable: true, - apply: "__applyWorkbench" + }, + + services: { + check: "Array", + init: true, + nullable: false, + apply: "__applyServices", + event: "changeServices", }, uiMode: { - check: ["workbench", "guided", "app"], + check: ["workbench", "guided", "app", "standalone"], // "guided" is no longer used nullable: true, apply: "__applyUiMode" }, @@ -453,6 +485,7 @@ qx.Class.define("osparc.dashboard.CardBase", { let owner = null; let workbench = null; let defaultHits = null; + let icon = null; switch (resourceData["resourceType"]) { case "study": uuid = resourceData.uuid ? resourceData.uuid : null; @@ -467,6 +500,7 @@ qx.Class.define("osparc.dashboard.CardBase", { case "service": uuid = resourceData.key ? resourceData.key : null; owner = resourceData.owner ? resourceData.owner : resourceData.contact; + icon = resourceData["icon"] || osparc.dashboard.CardBase.PRODUCT_ICON; defaultHits = 0; break; } @@ -481,7 +515,8 @@ qx.Class.define("osparc.dashboard.CardBase", { lastChangeDate: resourceData.lastChangeDate ? new Date(resourceData.lastChangeDate) : null, trashedAt: resourceData.trashedAt ? 
new Date(resourceData.trashedAt) : null, trashedBy: resourceData.trashedBy || null, - icon: resourceData.thumbnail || this.self().PRODUCT_ICON, + icon, + thumbnail: resourceData.thumbnail || this.self().PRODUCT_THUMBNAIL, state: resourceData.state ? resourceData.state : {}, classifiers: resourceData.classifiers && resourceData.classifiers ? resourceData.classifiers : [], quality: resourceData.quality ? resourceData.quality : null, @@ -489,6 +524,23 @@ qx.Class.define("osparc.dashboard.CardBase", { hits: resourceData.hits ? resourceData.hits : defaultHits, workbench }); + + if (resourceData["resourceType"] === "study" || resourceData["resourceType"] === "template") { + const params = { + url: { + studyId: this.getResourceData()["uuid"] + } + }; + osparc.data.Resources.fetch("studies", "getServices", params) + .then(resp => { + const services = resp["services"]; + resourceData["services"] = services; + this.setServices(services); + }); + + osparc.study.Utils.guessIcon(resourceData) + .then(iconSource => this.setIcon(iconSource)); + } }, __applyMultiSelectionMode: function(value) { @@ -500,22 +552,20 @@ qx.Class.define("osparc.dashboard.CardBase", { __evalSelectedButton: function() { if ( + this.hasChildControl("menu-selection-stack") && this.hasChildControl("menu-button") && this.hasChildControl("tick-selected") && this.hasChildControl("tick-unselected") ) { - const menuButton = this.getChildControl("menu-button"); - const tick = this.getChildControl("tick-selected"); - const untick = this.getChildControl("tick-unselected"); + const menuButtonStack = this.getChildControl("menu-selection-stack"); if (this.isResourceType("study") && this.isMultiSelectionMode()) { + const tick = this.getChildControl("tick-selected"); + const untick = this.getChildControl("tick-unselected"); const selected = this.getSelected(); - menuButton.setVisibility("excluded"); - tick.setVisibility(selected ? "visible" : "excluded"); - untick.setVisibility(selected ? "excluded" : "visible"); + menuButtonStack.setSelection(selected ? 
[tick] : [untick]); } else { - menuButton.setVisibility("visible"); - tick.setVisibility("excluded"); - untick.setVisibility("excluded"); + const menuButton = this.getChildControl("menu-button"); + menuButtonStack.setSelection([menuButton]); } } }, @@ -531,6 +581,10 @@ qx.Class.define("osparc.dashboard.CardBase", { throw new Error("Abstract method called!"); }, + _applyThumbnail: function(value, old) { + throw new Error("Abstract method called!"); + }, + _applyTitle: function(value, old) { throw new Error("Abstract method called!"); }, @@ -582,21 +636,16 @@ qx.Class.define("osparc.dashboard.CardBase", { }, __applyUiMode: function(uiMode) { - let source = null; - let toolTipText = null; switch (uiMode) { case "guided": - case "app": - source = osparc.dashboard.CardBase.MODE_APP; - toolTipText = this.tr("App mode"); + case "app": { + const uiModeIcon = this.getChildControl("workbench-mode"); + uiModeIcon.set({ + source: osparc.dashboard.CardBase.MODE_APP, + toolTipText: this.tr("App mode"), + }); break; - } - if (source) { - const uiModeIcon = this.getChildControl("workbench-mode"); - uiModeIcon.set({ - source, - toolTipText, - }); + } } }, @@ -607,39 +656,31 @@ qx.Class.define("osparc.dashboard.CardBase", { } }, - __applyWorkbench: function(workbench) { - if (workbench === null) { - // it is a service - return; - } - - if (this.isResourceType("study") || this.isResourceType("template")) { - this.setEmptyWorkbench(Object.keys(workbench).length === 0); - } + __applyServices: function(services) { + this.setEmptyWorkbench(services.length === 0); // Updatable study - if (osparc.study.Utils.isWorkbenchRetired(workbench)) { + if (osparc.study.Utils.anyServiceRetired(services)) { this.setUpdatable("retired"); - } else if (osparc.study.Utils.isWorkbenchDeprecated(workbench)) { + } else if (osparc.study.Utils.anyServiceDeprecated(services)) { this.setUpdatable("deprecated"); - } else { - const updatable = osparc.study.Utils.isWorkbenchUpdatable(workbench) - if (updatable) { - this.setUpdatable("updatable"); - } + } else if (osparc.study.Utils.anyServiceUpdatable(services)) { + this.setUpdatable("updatable"); } // Block card - const unaccessibleServices = osparc.study.Utils.getInaccessibleServices(workbench) + const unaccessibleServices = osparc.study.Utils.getCantExecuteServices(services); if (unaccessibleServices.length) { this.setBlocked("UNKNOWN_SERVICES"); - let image = "@FontAwesome5Solid/ban/"; - let toolTipText = this.tr("Service info missing"); + const image = "@FontAwesome5Solid/ban/"; + let toolTipText = this.tr("Inaccessible service(s):"); unaccessibleServices.forEach(unSrv => { - toolTipText += "<br>
" + unSrv.key + ":" + unSrv.version; + toolTipText += "<br>
" + unSrv.key + ":" + osparc.service.Utils.extractVersionDisplay(unSrv.release); }); this.__showBlockedCard(image, toolTipText); } + + this.evaluateMenuButtons(); }, __applyEmptyWorkbench: function(isEmpty) { @@ -726,7 +767,7 @@ qx.Class.define("osparc.dashboard.CardBase", { break; case "FAILED": iconSource = "@FontAwesome5Solid/exclamation/10"; - toolTipText = this.tr("Ran with error"); + toolTipText = this.tr("Unsuccessful Run"); borderColor = "error"; break; case "UNKNOWN": @@ -882,6 +923,10 @@ qx.Class.define("osparc.dashboard.CardBase", { if (duplicateButton) { duplicateButton.setEnabled(osparc.study.Utils.canBeDuplicated(resourceData)); } + const convertToPipelineButton = menuButtons.find(menuBtn => "convertToPipelineButton" in menuBtn); + if (convertToPipelineButton) { + convertToPipelineButton.setEnabled(osparc.study.Utils.canBeDuplicated(resourceData)); + } const exportCMISButton = menuButtons.find(menuBtn => "exportCMISButton" in menuBtn); if (exportCMISButton) { exportCMISButton.setEnabled(osparc.study.Utils.canBeExported(resourceData)); @@ -922,9 +967,14 @@ qx.Class.define("osparc.dashboard.CardBase", { } }, - __openMoreOptions: function() { + __openResourceDetails: function(openWindowCB) { const resourceData = this.getResourceData(); const resourceDetails = new osparc.dashboard.ResourceDetails(resourceData); + resourceDetails.addListenerOnce("pagesAdded", () => { + if (openWindowCB in resourceDetails) { + resourceDetails[openWindowCB](); + } + }) const win = osparc.dashboard.ResourceDetails.popUpInWindow(resourceDetails); [ "updateStudy", @@ -951,33 +1001,28 @@ qx.Class.define("osparc.dashboard.CardBase", { }, openData: function() { - const moreOpts = this.__openMoreOptions(); - moreOpts.openData(); + const resourceData = this.getResourceData(); + osparc.widget.StudyDataManager.popUpInWindow(resourceData["uuid"]); }, openBilling: function() { - const moreOpts = this.__openMoreOptions(); - moreOpts.openBillingSettings(); + this.__openResourceDetails("openBillingSettings"); }, openAccessRights: function() { - const moreOpts = this.__openMoreOptions(); - moreOpts.openAccessRights(); + this.__openResourceDetails("openAccessRights"); }, openTags: function() { - const moreOpts = this.__openMoreOptions(); - moreOpts.openTags(); + this.__openResourceDetails("openTags"); }, __openQualityEditor: function() { - const moreOpts = this.__openMoreOptions(); - moreOpts.openQuality(); + this.__openResourceDetails("openQuality"); }, __openUpdateServices: function() { - const moreOpts = this.__openMoreOptions(); - moreOpts.openUpdateServices(); + this.__openResourceDetails("openUpdateServices"); }, _getEmptyWorkbenchIcon: function() { diff --git a/services/static-webserver/client/source/class/osparc/dashboard/Dashboard.js b/services/static-webserver/client/source/class/osparc/dashboard/Dashboard.js index b40fd6fc1be..ee1c1bbbe76 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/Dashboard.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/Dashboard.js @@ -38,9 +38,8 @@ qx.Class.define("osparc.dashboard.Dashboard", { construct: function() { this.base(arguments); - this.getChildControl("bar").set({ - visibility: "excluded", - }); + osparc.utils.Utils.setIdToWidget(this.getChildControl("bar"), "dashboardTabs"); + osparc.utils.Utils.setIdToWidget(this, "dashboard"); this.set({ contentPadding: this.self().PADDING, @@ -48,7 +47,8 @@ qx.Class.define("osparc.dashboard.Dashboard", { barPosition: "top" }); - osparc.wrapper.Plotly.getInstance().init(); + // 
osparc.wrapper.Plotly.getInstance().init(); + // osparc.wrapper.Three.getInstance().init(); osparc.wrapper.Svg.getInstance().init(); osparc.wrapper.JsonDiffPatch.getInstance().init(); osparc.wrapper.JsonTreeViewer.getInstance().init(); diff --git a/services/static-webserver/client/source/class/osparc/dashboard/DataBrowser.js b/services/static-webserver/client/source/class/osparc/dashboard/DataBrowser.js index f8ffd3d73d0..2305feaa8a4 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/DataBrowser.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/DataBrowser.js @@ -60,7 +60,7 @@ qx.Class.define("osparc.dashboard.DataBrowser", { this.addListener("appear", () => { const treeFolderView = this.getChildControl("tree-folder-view"); - treeFolderView.getChildControl("folder-tree").populateTree(); + treeFolderView.getChildControl("folder-tree").populateLocations(); treeFolderView.getChildControl("folder-viewer").setFolder(treeFolderView.getChildControl("folder-tree").getModel()); }, this); }, @@ -84,7 +84,7 @@ qx.Class.define("osparc.dashboard.DataBrowser", { const foldersTree = treeFolderView.getChildControl("folder-tree"); foldersTree.resetCache(); - foldersTree.populateTree(); + foldersTree.populateLocations(); const folderViewer = treeFolderView.getChildControl("folder-viewer"); folderViewer.resetFolder(); @@ -94,7 +94,7 @@ qx.Class.define("osparc.dashboard.DataBrowser", { // After deleting a file, try to keep the user in the same folder. // If the folder doesn't longer exist, open the closest available parent - const path = fileMetadata["fileUuid"].split("/"); + const pathParts = fileMetadata["fileUuid"].split("/"); const treeFolderView = this.getChildControl("tree-folder-view"); const foldersTree = treeFolderView.getChildControl("folder-tree"); @@ -102,18 +102,18 @@ qx.Class.define("osparc.dashboard.DataBrowser", { const openSameFolder = () => { // drop last, which is the file - path.pop(); - treeFolderView.openPath(path); + pathParts.pop(); + treeFolderView.openPath(pathParts); }; folderViewer.resetFolder(); const locationId = fileMetadata["locationId"]; - const datasetId = path[0]; + const path = pathParts[0]; foldersTree.resetCache(); - foldersTree.populateTree() + foldersTree.populateLocations() .then(datasetPromises => { Promise.all(datasetPromises) - .then(() => foldersTree.requestDatasetFiles(locationId, datasetId)) + .then(() => foldersTree.requestPathItems(locationId, path)) .then(() => openSameFolder()); }) .catch(err => console.error(err)); diff --git a/services/static-webserver/client/source/class/osparc/dashboard/DragWidget.js b/services/static-webserver/client/source/class/osparc/dashboard/DragWidget.js index 4685f93caaf..9fc43ac7304 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/DragWidget.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/DragWidget.js @@ -71,7 +71,7 @@ qx.Class.define("osparc.dashboard.DragWidget", { __dropAllowed: function(allowed) { this.getChildControl("allowed-icon").set({ source: allowed ? "@FontAwesome5Solid/check/14" : "@FontAwesome5Solid/times/14", - textColor: allowed ? "default-button-text" : "danger-red", + textColor: allowed ? 
"white" : "danger-red", }); }, diff --git a/services/static-webserver/client/source/class/osparc/dashboard/FolderButtonItem.js b/services/static-webserver/client/source/class/osparc/dashboard/FolderButtonItem.js index 18dad5fc65d..dfd0b2453a3 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/FolderButtonItem.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/FolderButtonItem.js @@ -253,7 +253,7 @@ qx.Class.define("osparc.dashboard.FolderButtonItem", { const dateBy = this.getChildControl("date-by"); dateBy.set({ date: value, - toolTipText: this.tr("Moved to the bin"), + toolTipText: this.tr("Deleted"), }); } }, @@ -269,9 +269,10 @@ qx.Class.define("osparc.dashboard.FolderButtonItem", { const menuButton = this.getChildControl("menu-button"); menuButton.setVisibility("visible"); - const menu = new qx.ui.menu.Menu(); - menu.setPosition("bottom-right"); - osparc.utils.Utils.prettifyMenu(menu); + const menu = new qx.ui.menu.Menu().set({ + appearance: "menu-wider", + position: "bottom-right", + }); const studyBrowserContext = osparc.store.Store.getInstance().getStudyBrowserContext(); if ( @@ -302,7 +303,7 @@ qx.Class.define("osparc.dashboard.FolderButtonItem", { menu.addSeparator(); - const trashButton = new qx.ui.menu.Button(this.tr("Move to Bin"), "@FontAwesome5Solid/trash/12"); + const trashButton = new qx.ui.menu.Button(this.tr("Delete"), "@FontAwesome5Solid/trash/12"); trashButton.addListener("execute", () => this.fireDataEvent("trashFolderRequested", this.getFolderId()), this); menu.add(trashButton); } else if (studyBrowserContext === "trash") { diff --git a/services/static-webserver/client/source/class/osparc/dashboard/GridButtonBase.js b/services/static-webserver/client/source/class/osparc/dashboard/GridButtonBase.js index ebda818baf0..34796bcec18 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/GridButtonBase.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/GridButtonBase.js @@ -44,11 +44,10 @@ qx.Class.define("osparc.dashboard.GridButtonBase", { ITEM_WIDTH: 190, ITEM_HEIGHT: 220, PADDING: 10, + TITLE_PADDING: 6, SPACING_IN: 5, SPACING: 15, - // TITLE_MAX_HEIGHT: 34, // two lines in Roboto - TITLE_MAX_HEIGHT: 40, // two lines in Manrope - ICON_SIZE: 50, + THUMBNAIL_SIZE: 50, POS: { TITLE: { row: 0, @@ -117,7 +116,6 @@ qx.Class.define("osparc.dashboard.GridButtonBase", { grid.setSpacing(this.self().SPACING_IN); grid.setRowFlex(2, 1); grid.setColumnFlex(0, 1); - grid.setRowMaxHeight(0, this.self().TITLE_MAX_HEIGHT); control = new qx.ui.container.Composite().set({ maxWidth: this.self().ITEM_WIDTH, @@ -138,18 +136,26 @@ qx.Class.define("osparc.dashboard.GridButtonBase", { }); break; } - case "header": - control = new qx.ui.container.Composite(new qx.ui.layout.VBox(5)).set({ - anonymous: true, - allowGrowX: true, - allowShrinkX: false, - alignY: "middle", - padding: this.self().PADDING + case "header": { + const hGrid = new qx.ui.layout.Grid().set({ + spacing: 6, }); - control.set({ - backgroundColor: "background-card-overlay" + hGrid.setRowFlex(0, 1); + hGrid.setColumnFlex(1, 1); + hGrid.setColumnAlign(0, "right", "middle"); + hGrid.setColumnAlign(1, "left", "middle"); + hGrid.setColumnAlign(2, "center", "middle"); + control = new qx.ui.container.Composite().set({ + backgroundColor: "background-card-overlay", + paddingBottom: 6, + paddingRight: 4, + maxWidth: this.self().ITEM_WIDTH, + minHeight: 32 + 6, + maxHeight: this.self().ITEM_HEIGHT }); + control.setLayout(hGrid); break; + } case "body": 
control = new qx.ui.container.Composite(new qx.ui.layout.VBox(5)).set({ decorator: "main", @@ -163,58 +169,48 @@ qx.Class.define("osparc.dashboard.GridButtonBase", { }); break; case "footer": { - const fgrid = new qx.ui.layout.Grid(); - fgrid.setSpacing(2); - fgrid.setColumnFlex(0, 1); + const fGrid = new qx.ui.layout.Grid(); + fGrid.setSpacing(2); + fGrid.setColumnFlex(0, 1); control = new qx.ui.container.Composite().set({ backgroundColor: "background-card-overlay", padding: this.self().PADDING - 2, maxWidth: this.self().ITEM_WIDTH, maxHeight: this.self().ITEM_HEIGHT }); - control.setLayout(fgrid); + control.setLayout(fGrid); break; } - case "title-row": - control = new qx.ui.container.Composite(new qx.ui.layout.VBox(6)).set({ - anonymous: true, - maxWidth: this.self().ITEM_WIDTH - 2*this.self().PADDING - }); + case "icon": { + control = osparc.dashboard.CardBase.createCardIcon(); layout = this.getChildControl("header"); - layout.addAt(control, 1, { - flex: 1 + layout.add(control, { + column: 0, + row: 0, }); break; + } case "title": control = new qx.ui.basic.Label().set({ textColor: "contrasted-text-light", font: "text-14", - maxWidth: this.self().ITEM_WIDTH - 2*this.self().PADDING, - maxHeight: this.self().TITLE_MAX_HEIGHT - }); - layout = this.getChildControl("title-row"); - layout.addAt(control, 0, { - flex: 1 }); - break; - case "subtitle": - control = new qx.ui.container.Composite(new qx.ui.layout.HBox(6)).set({ - anonymous: true, - height: 20 - }); - layout = this.getChildControl("title-row"); - layout.addAt(control, 1, { - flex: 1 + layout = this.getChildControl("header"); + layout.add(control, { + column: 1, + row: 0, }); break; case "subtitle-icon": { control = new qx.ui.basic.Image().set({ - alignY: "middle", allowGrowX: false, - allowShrinkX: false + allowShrinkX: false, + }); + layout = this.getChildControl("header"); + layout.add(control, { + column: 0, + row: 1, }); - const subtitleLayout = this.getChildControl("subtitle"); - subtitleLayout.addAt(control, 0); break; } case "subtitle-text": { @@ -228,13 +224,14 @@ qx.Class.define("osparc.dashboard.GridButtonBase", { font: "text-12", allowGrowY: false }); - const subtitleLayout = this.getChildControl("subtitle"); - subtitleLayout.addAt(control, 1, { - flex: 1 + layout = this.getChildControl("header"); + layout.add(control, { + column: 1, + row: 1, }); break; } - case "icon": { + case "thumbnail": { layout = this.getChildControl("body"); const maxWidth = this.self().ITEM_WIDTH; control = new osparc.ui.basic.Thumbnail(null, maxWidth, 124); @@ -259,14 +256,15 @@ qx.Class.define("osparc.dashboard.GridButtonBase", { case "project-status": control = new qx.ui.basic.Image().set({ alignY: "middle", - textColor: "status_icon", + textColor: "text", height: 13, width: 13, margin: [0, 1] }); - layout = this.getChildControl("subtitle"); - layout.set({ - visibility: "visible" + layout = this.getChildControl("header"); + layout.add(control, { + column: 2, + row: 1, }); layout.addAt(control, 2); break; @@ -276,27 +274,39 @@ qx.Class.define("osparc.dashboard.GridButtonBase", { // overridden _applyIcon: function(value, old) { - if (value.includes("@FontAwesome5Solid/")) { - value += this.self().ICON_SIZE; + if (value) { const image = this.getChildControl("icon").getChildControl("image"); image.set({ - source: value + source: value, }); + osparc.utils.Utils.setAltToImage(image, "card-icon"); + } + }, - [ - "appear", - "loaded" - ].forEach(eventName => { - image.addListener(eventName, () => this.__fitIconHeight(), this); + // overridden + 
_applyThumbnail: function(value, old) { + if (qx.util.ResourceManager.getInstance().isFontUri(value)) { + value += this.self().THUMBNAIL_SIZE; + this.getChildControl("thumbnail").set({ + source: value, }); } else { - this.getContentElement().setStyles({ - "background-image": `url(${value})`, - "background-repeat": "no-repeat", - "background-size": "cover", // auto width, 85% height - "background-position": "center center", - "background-origin": "border-box" - }); + let source = osparc.product.Utils.getThumbnailUrl(); + osparc.utils.Utils.checkImageExists(value) + .then(exists => { + if (exists) { + source = value; + } + }) + .finally(() => { + this.getContentElement().setStyles({ + "background-image": `url(${source})`, + "background-repeat": "no-repeat", + "background-size": "cover", // auto width, 85% height + "background-position": "center center", + "background-origin": "border-box" + }); + }); } }, @@ -312,8 +322,8 @@ qx.Class.define("osparc.dashboard.GridButtonBase", { return; }, - __fitIconHeight: function() { - const iconLayout = this.getChildControl("icon"); + __fitThumbnailHeight: function() { + const thumbnailLayout = this.getChildControl("thumbnail"); let maxHeight = this.getHeight() - this.getPaddingTop() - this.getPaddingBottom() - 5; const checkThis = [ "title", @@ -333,17 +343,9 @@ qx.Class.define("osparc.dashboard.GridButtonBase", { }); // maxHeight -= 4; // for Roboto maxHeight -= 18; // for Manrope - iconLayout.getChildControl("image").setMaxHeight(maxHeight); - iconLayout.setMaxHeight(maxHeight); - iconLayout.recheckSize(); - }, - - replaceIcon: function(newIcon) { - const plusIcon = this.getChildControl("icon"); - plusIcon.exclude(); - - const bodyLayout = this.getChildControl("body"); - bodyLayout.add(newIcon, {flex: 1}); + thumbnailLayout.getChildControl("image").setMaxHeight(maxHeight); + thumbnailLayout.setMaxHeight(maxHeight); + thumbnailLayout.recheckSize(); }, /** diff --git a/services/static-webserver/client/source/class/osparc/dashboard/GridButtonItem.js b/services/static-webserver/client/source/class/osparc/dashboard/GridButtonItem.js index 22779e7831b..ad0bf2ec3ae 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/GridButtonItem.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/GridButtonItem.js @@ -91,9 +91,20 @@ qx.Class.define("osparc.dashboard.GridButtonItem", { layout = this.getChildControl("main-layout"); layout.add(control, osparc.dashboard.GridButtonBase.POS.TAGS); break; - case "menu-button": + case "menu-selection-stack": + control = new qx.ui.container.Stack(); + control.set({ + alignX: "center", + alignY: "middle" + }); + this.getChildControl("header").add(control, { + column: 2, + row: 0, + }); + break; + case "menu-button": { this.getChildControl("title").set({ - maxWidth: osparc.dashboard.GridButtonBase.ITEM_WIDTH - 2*osparc.dashboard.GridButtonBase.PADDING - this.self().MENU_BTN_DIMENSIONS + maxWidth: osparc.dashboard.GridButtonBase.ITEM_WIDTH - osparc.dashboard.CardBase.ICON_SIZE - this.self().MENU_BTN_DIMENSIONS - 2, }); control = new qx.ui.form.MenuButton().set({ appearance: "form-button-outlined", @@ -110,12 +121,11 @@ qx.Class.define("osparc.dashboard.GridButtonItem", { "border-radius": `${this.self().MENU_BTN_DIMENSIONS / 2}px` }); osparc.utils.Utils.setIdToWidget(control, "studyItemMenuButton"); - this._add(control, { - top: 8, - right: 8 - }); + const menuSelectionStack = this.getChildControl("menu-selection-stack"); + menuSelectionStack.addAt(control, 0); break; - case "tick-unselected": 
+ } + case "tick-unselected": { control = new qx.ui.basic.Atom().set({ appearance: "form-button-outlined", width: this.self().MENU_BTN_DIMENSIONS, @@ -126,12 +136,11 @@ qx.Class.define("osparc.dashboard.GridButtonItem", { control.getContentElement().setStyles({ "border-radius": `${this.self().MENU_BTN_DIMENSIONS / 2}px` }); - this._add(control, { - top: 8, - right: 8 - }); + const menuSelectionStack = this.getChildControl("menu-selection-stack"); + menuSelectionStack.addAt(control, 1); break; - case "tick-selected": + } + case "tick-selected": { control = new qx.ui.basic.Image().set({ appearance: "form-button", width: this.self().MENU_BTN_DIMENSIONS, @@ -146,11 +155,10 @@ qx.Class.define("osparc.dashboard.GridButtonItem", { control.getContentElement().setStyles({ "border-radius": `${this.self().MENU_BTN_DIMENSIONS / 2}px` }); - this._add(control, { - top: 8, - right: 8 - }); + const menuSelectionStack = this.getChildControl("menu-selection-stack"); + menuSelectionStack.addAt(control, 2); break; + } case "lock-status": control = new osparc.ui.basic.Thumbnail(); this._add(control, { @@ -194,7 +202,7 @@ qx.Class.define("osparc.dashboard.GridButtonItem", { const dateBy = this.getChildControl("date-by"); dateBy.set({ date: value, - toolTipText: this.tr("Moved to the bin"), + toolTipText: this.tr("Deleted"), }); } } @@ -244,10 +252,12 @@ qx.Class.define("osparc.dashboard.GridButtonItem", { _applyTags: function(tags) { if (osparc.data.Permissions.getInstance().canDo("study.tag")) { + const maxTags = 2; const tagsContainer = this.getChildControl("tags"); tagsContainer.setVisibility(tags.length ? "visible" : "excluded"); tagsContainer.removeAll(); - tags.forEach(tag => { + for (let i=0; i<tags.length && i<maxTags; i++) { + const tag = tags[i]; const tagUI = new osparc.ui.basic.Tag(tag, "searchBarFilter"); tagUI.addListener("tap", () => this.fireDataEvent("tagClicked", tag)); tagsContainer.add(tagUI); - }); + } + if (tags.length > maxTags) { + const moreButton = new qx.ui.basic.Label(this.tr("More...")).set({ + font: "text-12", + backgroundColor: "strong-main", + appearance: "tag", + }); + tagsContainer.add(moreButton); + } } }, @@ -263,9 +281,10 @@ qx.Class.define("osparc.dashboard.GridButtonItem", { _applyMenu: function(menu, old) { const menuButton = this.getChildControl("menu-button"); if (menu) { - menuButton.setMenu(menu); - menu.setPosition("bottom-left"); - osparc.utils.Utils.prettifyMenu(menu); + menuButton.setMenu(menu).set({ + appearance: "menu-wider", + position: "bottom-left", + }); osparc.utils.Utils.setIdToWidget(menu, "studyItemMenuMenu"); this.evaluateMenuButtons(); } diff --git a/services/static-webserver/client/source/class/osparc/dashboard/GridButtonLoadMore.js b/services/static-webserver/client/source/class/osparc/dashboard/GridButtonLoadMore.js index af3bf1ae666..47ccb900cbd 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/GridButtonLoadMore.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/GridButtonLoadMore.js @@ -39,12 +39,12 @@ qx.Class.define("osparc.dashboard.GridButtonLoadMore", { members: { _applyFetching: function(value) { - this.setIcon(osparc.dashboard.CardBase.LOADING_ICON); + this.setThumbnail(osparc.dashboard.CardBase.LOADING_ICON); if (value) { - this.getChildControl("icon").getChildControl("image").getContentElement() + this.getChildControl("thumbnail").getChildControl("image").getContentElement() .addClass("rotate"); } else { - this.getChildControl("icon").getChildControl("image").getContentElement() + this.getChildControl("thumbnail").getChildControl("image").getContentElement() .removeClass("rotate"); } this.setEnabled(!value); diff --git 
a/services/static-webserver/client/source/class/osparc/dashboard/GridButtonNew.js b/services/static-webserver/client/source/class/osparc/dashboard/GridButtonNew.js index 3cb8a8c92b7..f4f0a9077e3 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/GridButtonNew.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/GridButtonNew.js @@ -61,7 +61,7 @@ qx.Class.define("osparc.dashboard.GridButtonNew", { descLabel.setValue(description.toString()); } - this.setIcon(osparc.dashboard.CardBase.NEW_ICON); + this.setThumbnail(osparc.dashboard.CardBase.NEW_ICON); this.getChildControl("footer").exclude(); }, diff --git a/services/static-webserver/client/source/class/osparc/dashboard/GridButtonPlaceholder.js b/services/static-webserver/client/source/class/osparc/dashboard/GridButtonTaskPlaceholder.js similarity index 93% rename from services/static-webserver/client/source/class/osparc/dashboard/GridButtonPlaceholder.js rename to services/static-webserver/client/source/class/osparc/dashboard/GridButtonTaskPlaceholder.js index b6eb9906ef7..f3e8a24e718 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/GridButtonPlaceholder.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/GridButtonTaskPlaceholder.js @@ -15,7 +15,7 @@ ************************************************************************ */ -qx.Class.define("osparc.dashboard.GridButtonPlaceholder", { +qx.Class.define("osparc.dashboard.GridButtonTaskPlaceholder", { extend: osparc.dashboard.GridButtonBase, construct: function() { @@ -77,7 +77,6 @@ qx.Class.define("osparc.dashboard.GridButtonPlaceholder", { break; } case "progress-bar": { - layout = this.getChildControl("title-row"); control = new qx.ui.indicator.ProgressBar().set({ maxHeight: 6, alignX: "center", @@ -89,7 +88,11 @@ qx.Class.define("osparc.dashboard.GridButtonPlaceholder", { control.getChildControl("progress").set({ backgroundColor: "strong-main" }); - layout.addAt(control, 1); + layout = this.getChildControl("header"); + layout.add(control, { + column: 1, + row: 2, + }); break; } } @@ -102,7 +105,7 @@ qx.Class.define("osparc.dashboard.GridButtonPlaceholder", { title.setValue(titleText); } if (icon) { - this.setIcon(icon); + this.setThumbnail(icon); } const stateLabel = this.getChildControl("state-label"); diff --git a/services/static-webserver/client/source/class/osparc/dashboard/ListButtonBase.js b/services/static-webserver/client/source/class/osparc/dashboard/ListButtonBase.js index d99d33f6608..8e7bdd14488 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/ListButtonBase.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/ListButtonBase.js @@ -66,14 +66,7 @@ qx.Class.define("osparc.dashboard.ListButtonBase", { let control; switch (id) { case "icon": { - control = new osparc.ui.basic.Thumbnail(null, this.self().ITEM_HEIGHT, this.self().ITEM_HEIGHT-2*5).set({ - minHeight: this.self().ITEM_HEIGHT, - minWidth: this.self().ITEM_HEIGHT - }); - control.getChildControl("image").set({ - anonymous: true, - decorator: "rounded", - }); + control = osparc.dashboard.CardBase.createCardIcon(); this._add(control, { row: 0, column: osparc.dashboard.ListButtonBase.POS.THUMBNAIL @@ -124,7 +117,7 @@ qx.Class.define("osparc.dashboard.ListButtonBase", { case "project-status": control = new qx.ui.basic.Image().set({ alignY: "middle", - textColor: "status_icon", + textColor: "text", height: 12, width: 12 }); @@ -135,13 +128,21 @@ 
qx.Class.define("osparc.dashboard.ListButtonBase", { }, _applyIcon: function(value, old) { - if (value.includes("@FontAwesome5Solid/")) { - value += "22"; + if (value) { + if (value.includes("@FontAwesome5Solid/")) { + value += "22"; + } + const image = this.getChildControl("icon").getChildControl("image"); + image.set({ + source: value + }); + osparc.utils.Utils.setAltToImage(image, "card-icon"); } - const image = this.getChildControl("icon").getChildControl("image"); - image.set({ - source: value - }); + }, + + _applyThumbnail: function(value, old) { + // do not show thumbnail in list button, icon takes over + return; }, _applyTitle: function(value, old) { @@ -150,7 +151,7 @@ qx.Class.define("osparc.dashboard.ListButtonBase", { }, _applyDescription: function(value, old) { - return + return; } }, diff --git a/services/static-webserver/client/source/class/osparc/dashboard/ListButtonItem.js b/services/static-webserver/client/source/class/osparc/dashboard/ListButtonItem.js index 7065f6dd20f..3cfb70d3b7d 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/ListButtonItem.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/ListButtonItem.js @@ -17,8 +17,6 @@ /** * Widget used mainly by StudyBrowser for displaying Studies - * - * It consists of a thumbnail and creator and last change as caption */ qx.Class.define("osparc.dashboard.ListButtonItem", { @@ -134,8 +132,25 @@ qx.Class.define("osparc.dashboard.ListButtonItem", { column: osparc.dashboard.ListButtonBase.POS.OPTIONS }); break; - case "tick-unselected": { + case "menu-button": { + control = new qx.ui.form.MenuButton().set({ + appearance: "form-button-outlined", + padding: [0, 8], + maxWidth: this.self().MENU_BTN_DIMENSIONS, + maxHeight: this.self().MENU_BTN_DIMENSIONS, + icon: "@FontAwesome5Solid/ellipsis-v/14", + focusable: false + }); + // make it circular + control.getContentElement().setStyles({ + "border-radius": `${this.self().MENU_BTN_DIMENSIONS / 2}px` + }); + osparc.utils.Utils.setIdToWidget(control, "studyItemMenuButton"); const menuSelectionStack = this.getChildControl("menu-selection-stack"); + menuSelectionStack.addAt(control, 0); + break; + } + case "tick-unselected": { control = new qx.ui.basic.Atom().set({ appearance: "form-button-outlined", width: this.self().MENU_BTN_DIMENSIONS, @@ -145,11 +160,11 @@ control.getContentElement().setStyles({ "border-radius": `${this.self().MENU_BTN_DIMENSIONS / 2}px` }); + const menuSelectionStack = this.getChildControl("menu-selection-stack"); menuSelectionStack.addAt(control, 1); break; } case "tick-selected": { - const menuSelectionStack = this.getChildControl("menu-selection-stack"); control = new qx.ui.basic.Image("@FontAwesome5Solid/check/12").set({ appearance: "form-button-outlined", width: this.self().MENU_BTN_DIMENSIONS, @@ -160,25 +175,8 @@ control.getContentElement().setStyles({ "border-radius": 
`${this.self().MENU_BTN_DIMENSIONS / 2}px` - }); - osparc.utils.Utils.setIdToWidget(control, "studyItemMenuButton"); - menuSelectionStack.addAt(control, 0); + menuSelectionStack.addAt(control, 2); break; } } @@ -205,7 +203,7 @@ qx.Class.define("osparc.dashboard.ListButtonItem", { const dateBy = this.getChildControl("date-by"); dateBy.set({ date: value, - toolTipText: this.tr("Moved to the bin"), + toolTipText: this.tr("Deleted"), }); } } @@ -255,9 +253,11 @@ qx.Class.define("osparc.dashboard.ListButtonItem", { _applyTags: function(tags) { if (osparc.data.Permissions.getInstance().canDo("study.tag")) { + const maxTags = 2; const tagsContainer = this.getChildControl("tags"); tagsContainer.removeAll(); - tags.forEach(tag => { + for (let i=0; i<tags.length && i<maxTags; i++) { + const tag = tags[i]; const tagUI = new osparc.ui.basic.Tag(tag, "searchBarFilter"); tagUI.addListener("tap", () => this.fireDataEvent("tagClicked", tag)); tagsContainer.add(tagUI); - }); + } + if (tags.length > maxTags) { + const moreButton = new qx.ui.basic.Label(this.tr("More...")).set({ + font: "text-12", + backgroundColor: "strong-main", + appearance: "tag", + }); + tagsContainer.add(moreButton); + } this.__makeItemResponsive(tagsContainer); } }, @@ -286,9 +294,10 @@ qx.Class.define("osparc.dashboard.ListButtonItem", { _applyMenu: function(menu, old) { const menuButton = this.getChildControl("menu-button"); if (menu) { - menuButton.setMenu(menu); - menu.setPosition("bottom-left"); - osparc.utils.Utils.prettifyMenu(menu); + menuButton.setMenu(menu).set({ + appearance: "menu-wider", + position: "bottom-left", + }); osparc.utils.Utils.setIdToWidget(menu, "studyItemMenuMenu"); this.evaluateMenuButtons(); } diff --git a/services/static-webserver/client/source/class/osparc/dashboard/ListButtonPlaceholder.js b/services/static-webserver/client/source/class/osparc/dashboard/ListButtonTaskPlaceholder.js similarity index 98% rename from services/static-webserver/client/source/class/osparc/dashboard/ListButtonPlaceholder.js rename to services/static-webserver/client/source/class/osparc/dashboard/ListButtonTaskPlaceholder.js index d813261ef3c..2f6a43e21d5 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/ListButtonPlaceholder.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/ListButtonTaskPlaceholder.js @@ -15,7 +15,7 @@ ************************************************************************ */ -qx.Class.define("osparc.dashboard.ListButtonPlaceholder", { +qx.Class.define("osparc.dashboard.ListButtonTaskPlaceholder", { extend: osparc.dashboard.ListButtonBase, construct: function() { diff --git a/services/static-webserver/client/source/class/osparc/dashboard/NewPlusMenu.js b/services/static-webserver/client/source/class/osparc/dashboard/NewPlusMenu.js index fb21a72cc61..6d8c391fb93 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/NewPlusMenu.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/NewPlusMenu.js @@ -15,19 +15,67 @@ ************************************************************************ */ +/** + Supports: + "categories": [{ + "id": "string", // required + "title": "string", // required + "description": "string" // optional + }], + "resources": [{ + "resourceType": "study", // it will start an empty study + "title": "string", // required + "icon": "fontAwesome inner link | url", // optional + "newStudyLabel": "string", // optional + "idToWidget": "string" // optional + }, { + "resourceType": "template", // it will create a study from the template + "expectedTemplateLabel": "string", // required + "title": "string", // required + "icon": "fontAwesome 
inner link | url", // optional + "newStudyLabel": "string", // optional + "category": "categories.id", // optional + "idToWidget": "string" // optional + }, { + "resourceType": "service", // it will create a study from the service + "expectedKey": "service.key", // required + "title": "string", // required + "icon": "fontAwesome inner link | url", // optional + "newStudyLabel": "string", // optional + "category": "categories.id", // optional + "idToWidget": "string" // optional + }, { + "resourceType": "service", // it will create a study from the service + "myMostUsed": 2, // required + "category": "categories.id", // optional + }, { + "showDisabled": true, // it will show a disabled button on the defined item + "title": "string", // required + "icon": "fontAwesome inner link | url", // optional + "reason": "string", // optional + "newStudyLabel": "string", // optional + "category": "categories.id", // optional + "idToWidget": "string" // optional + }] + */ qx.Class.define("osparc.dashboard.NewPlusMenu", { extend: qx.ui.menu.Menu, construct: function() { this.base(arguments); - osparc.utils.Utils.prettifyMenu(this); - this.set({ + appearance: "menu-wider", position: "bottom-left", spacingX: 20, }); + osparc.utils.Utils.setIdToWidget(this, "newPlusMenu"); + + this.getContentElement().setStyles({ + "border-color": qx.theme.manager.Color.getInstance().resolve("strong-main"), + }); + this.__categoryHeaders = []; this.__addItems(); @@ -47,6 +95,7 @@ qx.Class.define("osparc.dashboard.NewPlusMenu", { "replace_me_product_name", osparc.store.StaticInfo.getInstance().getDisplayName() ); + title = title.replace(/
<br>
/g, " "); const menuButton = new qx.ui.menu.Button().set({ icon: icon || null, label: title, @@ -62,7 +111,7 @@ qx.Class.define("osparc.dashboard.NewPlusMenu", { }); if (infoText) { infoText = osparc.utils.Utils.replaceTokens( - title, + infoText, "replace_me_product_name", osparc.store.StaticInfo.getInstance().getDisplayName() ); @@ -112,27 +161,26 @@ qx.Class.define("osparc.dashboard.NewPlusMenu", { }, __addNewStudyItems: async function() { - await Promise.all([ - osparc.store.Products.getInstance().getNewStudyConfig(), - osparc.data.Resources.get("templates") - ]).then(values => { - const newStudiesData = values[0]; - const templates = values[1]; - if (newStudiesData["categories"]) { - this.__addCategories(newStudiesData["categories"]); - } - newStudiesData["resources"].forEach(newStudyData => { - if (newStudyData["showDisabled"]) { - this.__addDisabledButton(newStudyData); - } else if (newStudyData["resourceType"] === "study") { - this.__addEmptyStudyButton(newStudyData); - } else if (newStudyData["resourceType"] === "template") { - this.__addFromTemplateButton(newStudyData, templates); - } else if (newStudyData["resourceType"] === "service") { - this.__addFromServiceButton(newStudyData); - } - }); - }); + const plusButtonConfig = osparc.store.Products.getInstance().getPlusButtonUiConfig(); + if (plusButtonConfig) { + await osparc.data.Resources.get("templates") + .then(templates => { + if (plusButtonConfig["categories"]) { + this.__addCategories(plusButtonConfig["categories"]); + } + plusButtonConfig["resources"].forEach(buttonConfig => { + if (buttonConfig["showDisabled"]) { + this.__addDisabledButton(buttonConfig); + } else if (buttonConfig["resourceType"] === "study") { + this.__addEmptyStudyButton(buttonConfig); + } else if (buttonConfig["resourceType"] === "template") { + this.__addFromTemplateButton(buttonConfig, templates); + } else if (buttonConfig["resourceType"] === "service") { + this.__addFromServiceButton(buttonConfig); + } + }); + }); + } }, __getLastIdxFromCategory: function(categoryId) { @@ -160,12 +208,10 @@ qx.Class.define("osparc.dashboard.NewPlusMenu", { __addIcon: function(menuButton, resourceInfo, resourceMetadata) { let source = null; - if (resourceInfo && "icon" in resourceInfo) { - // first the one set in the new_studies + if (resourceInfo && resourceInfo["icon"]) { source = resourceInfo["icon"]; - } else if (resourceMetadata && "thumbnail" in resourceMetadata) { - // second the one from the resource - source = resourceMetadata["thumbnail"]; + } else { + source = osparc.utils.Utils.getIconFromResource(resourceMetadata); } if (source) { @@ -195,92 +241,118 @@ qx.Class.define("osparc.dashboard.NewPlusMenu", { } }, - __addDisabledButton: function(newStudyData) { - const menuButton = this.self().createMenuButton(null, newStudyData.title, newStudyData.reason); - osparc.utils.Utils.setIdToWidget(menuButton, newStudyData.idToWidget); + __addDisabledButton: function(buttonConfig) { + const menuButton = this.self().createMenuButton(null, buttonConfig["title"], buttonConfig["reason"]); + osparc.utils.Utils.setIdToWidget(menuButton, buttonConfig["idToWidget"]); menuButton.setEnabled(false); - this.__addIcon(menuButton, newStudyData); - this.__addFromResourceButton(menuButton, newStudyData.category); + this.__addIcon(menuButton, buttonConfig); + this.__addFromResourceButton(menuButton, buttonConfig["category"]); }, - __addEmptyStudyButton: function(newStudyData) { - const menuButton = this.self().createMenuButton(null, newStudyData.title); - 
osparc.utils.Utils.setIdToWidget(menuButton, newStudyData.idToWidget); + __addEmptyStudyButton: function(buttonConfig) { + const menuButton = this.self().createMenuButton(null, buttonConfig["title"]); + osparc.utils.Utils.setIdToWidget(menuButton, buttonConfig["idToWidget"]); menuButton.addListener("tap", () => { this.fireDataEvent("newEmptyStudyClicked", { - newStudyLabel: newStudyData.newStudyLabel, + newStudyLabel: buttonConfig["newStudyLabel"], }); }); - this.__addIcon(menuButton, newStudyData); - this.__addFromResourceButton(menuButton, newStudyData.category); + this.__addIcon(menuButton, buttonConfig); + this.__addFromResourceButton(menuButton, buttonConfig["category"]); }, - __addFromTemplateButton: function(newStudyData, templates) { - const menuButton = this.self().createMenuButton(null, newStudyData.title); - osparc.utils.Utils.setIdToWidget(menuButton, newStudyData.idToWidget); + __addFromTemplateButton: function(buttonConfig, templates) { + const menuButton = this.self().createMenuButton(null, buttonConfig["title"]); + osparc.utils.Utils.setIdToWidget(menuButton, buttonConfig["idToWidget"]); // disable it until found in templates store menuButton.setEnabled(false); - let templateMetadata = templates.find(t => t.name === newStudyData.expectedTemplateLabel); + let templateMetadata = templates.find(t => t.name === buttonConfig["expectedTemplateLabel"]); if (templateMetadata) { menuButton.setEnabled(true); menuButton.addListener("tap", () => { this.fireDataEvent("newStudyFromTemplateClicked", { templateData: templateMetadata, - newStudyLabel: newStudyData.newStudyLabel, + newStudyLabel: buttonConfig["newStudyLabel"], }); }); - this.__addIcon(menuButton, newStudyData, templateMetadata); - this.__addFromResourceButton(menuButton, newStudyData.category); + this.__addIcon(menuButton, buttonConfig, templateMetadata); + this.__addFromResourceButton(menuButton, buttonConfig["category"]); } }, - __addFromServiceButton: function(newStudyData) { - const menuButton = this.self().createMenuButton(null, newStudyData.title); - osparc.utils.Utils.setIdToWidget(menuButton, newStudyData.idToWidget); - // disable it until found in services store - menuButton.setEnabled(false); + __addFromServiceButton: function(buttonConfig) { + const addListenerToButton = (menuButton, latestMetadata) => { + menuButton.addListener("tap", () => { + this.fireDataEvent("newStudyFromServiceClicked", { + serviceMetadata: latestMetadata, + newStudyLabel: buttonConfig["newStudyLabel"], + }); + }); - const key = newStudyData.expectedKey; - // Include deprecated versions, they should all be updatable to a non deprecated version - const versions = osparc.service.Utils.getVersions(key, false); - if (versions.length && newStudyData) { - // scale to latest compatible - const latestVersion = versions[0]; - const latestCompatible = osparc.service.Utils.getLatestCompatible(key, latestVersion); - osparc.store.Services.getService(latestCompatible["key"], latestCompatible["version"]) - .then(latestMetadata => { - // make sure this one is not deprecated - if (osparc.service.Utils.isDeprecated(latestMetadata)) { - return; - } - menuButton.setEnabled(true); - menuButton.addListener("tap", () => { - this.fireDataEvent("newStudyFromServiceClicked", { - serviceMetadata: latestMetadata, - newStudyLabel: newStudyData.newStudyLabel, - }); + const cb = e => { + this.hide(); + // so that is not consumed by the menu button itself + e.stopPropagation(); + latestMetadata["resourceType"] = "service"; + const resourceDetails = new 
osparc.dashboard.ResourceDetails(latestMetadata); + const win = osparc.dashboard.ResourceDetails.popUpInWindow(resourceDetails); + resourceDetails.addListener("openService", ev => { + win.close(); + const openServiceData = ev.getData(); + this.fireDataEvent("newStudyFromServiceClicked", { + serviceMetadata: openServiceData, + newStudyLabel: buttonConfig["newStudyLabel"], }); - - const cb = e => { - this.hide(); - // so that is not consumed by the menu button itself - e.stopPropagation(); - latestMetadata["resourceType"] = "service"; - const resourceDetails = new osparc.dashboard.ResourceDetails(latestMetadata); - osparc.dashboard.ResourceDetails.popUpInWindow(resourceDetails); + }); + } + const infoButton = new osparc.ui.basic.IconButton(osparc.ui.hint.InfoHint.INFO_ICON + "/16", cb); + // where the shortcut is supposed to go + // eslint-disable-next-line no-underscore-dangle + menuButton._add(infoButton, {column: 2}); + }; + + if ("expectedKey" in buttonConfig) { + const menuButton = this.self().createMenuButton(null, buttonConfig["title"]); + osparc.utils.Utils.setIdToWidget(menuButton, buttonConfig["idToWidget"]); + // disable it until found in services store + menuButton.setEnabled(false); + + const key = buttonConfig["expectedKey"]; + const latestMetadata = osparc.store.Services.getLatest(key); + if (!latestMetadata) { + return; + } + menuButton.setEnabled(true); + this.__addIcon(menuButton, buttonConfig, latestMetadata); + this.__addFromResourceButton(menuButton, buttonConfig["category"]); + addListenerToButton(menuButton, latestMetadata); + } else if ("myMostUsed" in buttonConfig) { + const excludeFrontend = true; + const excludeDeprecated = true; + osparc.store.Services.getServicesLatestList(excludeFrontend, excludeDeprecated) + .then(servicesList => { + osparc.service.Utils.sortObjectsBasedOn(servicesList, { + "sort": "hits", + "order": "down" + }); + for (let i=0; i<buttonConfig["myMostUsed"]; i++) { + const latestMetadata = servicesList[i]; + if (latestMetadata["hits"] > 0) { + const menuButton = new qx.ui.menu.Button().set({ + label: latestMetadata["name"], + font: "text-16", + allowGrowX: true, + }); + this.__addIcon(menuButton, null, latestMetadata); + this.__addFromResourceButton(menuButton, buttonConfig["category"]); + addListenerToButton(menuButton, latestMetadata); + } } - const infoButton = new osparc.ui.basic.IconButton(osparc.ui.hint.InfoHint.INFO_ICON + "/16", cb); - // where the shortcut is supposed to go - // eslint-disable-next-line no-underscore-dangle - menuButton._add(infoButton, {column: 2}); - - this.__addIcon(menuButton, newStudyData, latestMetadata); - this.__addFromResourceButton(menuButton, newStudyData.category); - }) } }, diff --git a/services/static-webserver/client/source/class/osparc/dashboard/NewStudies.js b/services/static-webserver/client/source/class/osparc/dashboard/NewStudies.js index a4c961b82fc..847d29d84a8 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/NewStudies.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/NewStudies.js @@ -182,8 +182,10 @@ qx.Class.define("osparc.dashboard.NewStudies", { const title = templateInfo.title; const desc = templateInfo.description; const newPlanButton = new osparc.dashboard.GridButtonNew(title, desc); - newPlanButton.setCardKey(templateInfo.idToWidget); - osparc.utils.Utils.setIdToWidget(newPlanButton, templateInfo.idToWidget); + if (templateInfo["idToWidget"]) { + newPlanButton.setCardKey(templateInfo["idToWidget"]); + osparc.utils.Utils.setIdToWidget(newPlanButton, templateInfo["idToWidget"]); + } newPlanButton.addListener("tap", () => newStudyClicked()); 
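The JSDoc block added at the top of NewPlusMenu.js (above) describes the plus-button UI config this menu consumes. To make the shape concrete, here is a hypothetical instance; every id, label and service key below is invented for illustration:

    // Illustrative config only; all values are made up.
    const plusButtonUiConfig = {
      "categories": [
        {"id": "sims", "title": "Simulations", "description": "Start from a solver"}
      ],
      "resources": [{
        "resourceType": "study", // starts an empty study
        "title": "Empty Study",
        "newStudyLabel": "New Study",
        "idToWidget": "emptyStudyBtn"
      }, {
        "resourceType": "template", // instantiates the matching template
        "expectedTemplateLabel": "My Template",
        "title": "New Plan",
        "category": "sims"
      }, {
        "resourceType": "service", // wraps a single service in a study
        "expectedKey": "simcore/services/dynamic/my-service",
        "title": "My Service",
        "category": "sims"
      }, {
        "resourceType": "service",
        "myMostUsed": 2, // adds buttons for the two most-used services
        "category": "sims"
      }]
    };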
return newPlanButton; }, diff --git a/services/static-webserver/client/source/class/osparc/dashboard/ResourceBrowserBase.js b/services/static-webserver/client/source/class/osparc/dashboard/ResourceBrowserBase.js index 2ad6467669a..3bf13ec4a46 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/ResourceBrowserBase.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/ResourceBrowserBase.js @@ -76,6 +76,16 @@ qx.Class.define("osparc.dashboard.ResourceBrowserBase", { this.addListener("appear", () => this._moreResourcesRequired()); }, + properties: { + multiSelection: { + check: "Boolean", + init: false, + nullable: false, + event: "changeMultiSelection", + apply: "_applyMultiSelection" + }, + }, + events: { "changeTab": "qx.event.type.Data", "publishTemplate": "qx.event.type.Data" @@ -90,7 +100,7 @@ qx.Class.define("osparc.dashboard.ResourceBrowserBase", { const isLogged = osparc.auth.Manager.getInstance().isLoggedIn(); if (!isLogged) { const msg = qx.locale.Manager.tr("You need to be logged in to create a study"); - osparc.FlashMessenger.getInstance().logAs(msg); + osparc.FlashMessenger.logAs(msg); } return isLogged; }, @@ -153,10 +163,7 @@ qx.Class.define("osparc.dashboard.ResourceBrowserBase", { openStudy(); } }) - .catch(err => { - console.error(err); - osparc.FlashMessenger.logAs(err.message, "ERROR"); - }); + .catch(err => osparc.FlashMessenger.logError(err)); } else { openStudy(); } @@ -170,6 +177,10 @@ qx.Class.define("osparc.dashboard.ResourceBrowserBase", { return (card instanceof osparc.dashboard.GridButtonItem || card instanceof osparc.dashboard.ListButtonItem); }, + isCardTaskPlaceholder: function(card) { + return (card instanceof osparc.dashboard.GridButtonTaskPlaceholder || card instanceof osparc.dashboard.ListButtonTaskPlaceholder); + }, + createToolbarRadioButton: function(label, icon, toolTipText, pos) { const rButton = new qx.ui.toolbar.RadioButton().set({ label, @@ -446,7 +457,7 @@ qx.Class.define("osparc.dashboard.ResourceBrowserBase", { let isLogged = osparc.auth.Manager.getInstance().isLoggedIn(); if (!isLogged) { const msg = this.tr("You need to be logged in to create a study"); - osparc.FlashMessenger.getInstance().logAs(msg); + osparc.FlashMessenger.logAs(msg); } return isLogged; }, @@ -467,8 +478,26 @@ qx.Class.define("osparc.dashboard.ResourceBrowserBase", { } }, - _taskDataReceived: function(taskData) { - throw new Error("Abstract method called!"); + _addTaskCard: function(task, cardTitle, cardIcon) { + if (task) { + const taskPlaceholders = this._resourcesContainer.getCards().filter(card => osparc.dashboard.ResourceBrowserBase.isCardTaskPlaceholder(card)); + if (taskPlaceholders.find(taskPlaceholder => taskPlaceholder.getTask() === task)) { + return null; + } + } + + const isGrid = this._resourcesContainer.getMode() === "grid"; + const taskCard = isGrid ? new osparc.dashboard.GridButtonTaskPlaceholder() : new osparc.dashboard.ListButtonTaskPlaceholder(); + taskCard.setTask(task); + taskCard.buildLayout( + cardTitle, + cardIcon + (isGrid ? 
"/60" : "/24"), + null, + true + ); + taskCard.subscribeToFilterGroup("searchBarFilter"); + this._resourcesContainer.addNonResourceCard(taskCard); + return taskCard; }, _populateCardMenu: function(card) { @@ -494,12 +523,147 @@ qx.Class.define("osparc.dashboard.ResourceBrowserBase", { this.self().startStudyById(studyId, openCB, cancelCB, isStudyCreation); }, - _createStudyFromTemplate: function() { - throw new Error("Abstract method called!"); + _createStudyFromTemplate: function(templateData) { + if (!this._checkLoggedIn()) { + return; + } + + const studyAlias = osparc.product.Utils.getStudyAlias({firstUpperCase: true}); + this._showLoadingPage(this.tr("Creating ") + (templateData.name || studyAlias)); + + if (osparc.desktop.credits.Utils.areWalletsEnabled()) { + const studyOptions = new osparc.study.StudyOptions(); + // they will be patched once the study is created + studyOptions.setPatchStudy(false); + studyOptions.setStudyData(templateData); + studyOptions.getChildControl("open-button").setLabel(this.tr("New")); + const win = osparc.study.StudyOptions.popUpInWindow(studyOptions); + win.moveItUp(); + const cancelStudyOptions = () => { + this._hideLoadingPage(); + win.close(); + } + win.addListener("cancel", () => cancelStudyOptions()); + studyOptions.addListener("cancel", () => cancelStudyOptions()); + studyOptions.addListener("startStudy", () => { + const newName = studyOptions.getChildControl("title-field").getValue(); + const walletSelection = studyOptions.getChildControl("wallet-selector").getSelection(); + const nodesPricingUnits = studyOptions.getChildControl("study-pricing-units").getNodePricingUnits(); + win.close(); + + this._showLoadingPage(this.tr("Creating ") + (newName || studyAlias)); + osparc.study.Utils.createStudyFromTemplate(templateData, this._loadingPage) + .then(newStudyData => { + const studyId = newStudyData["uuid"]; + const openCB = () => { + this._hideLoadingPage(); + }; + const cancelCB = () => { + this._hideLoadingPage(); + const params = { + url: { + studyId + } + }; + osparc.data.Resources.fetch("studies", "delete", params); + }; + + const promises = []; + // patch the name + if (newStudyData["name"] !== newName) { + promises.push(osparc.study.StudyOptions.updateName(newStudyData, newName)); + } + // patch the wallet + if (walletSelection.length && walletSelection[0]["walletId"]) { + const walletId = walletSelection[0]["walletId"]; + promises.push(osparc.study.StudyOptions.updateWallet(newStudyData["uuid"], walletId)); + } + // patch the pricing units + // the nodeIds are coming from the original template, they need to be mapped to the newStudy + const workbench = newStudyData["workbench"]; + const nodesIdsListed = []; + Object.keys(workbench).forEach(nodeId => { + const nodeData = workbench[nodeId]; + if (osparc.study.StudyPricingUnits.includeInList(nodeData)) { + nodesIdsListed.push(nodeId); + } + }); + nodesPricingUnits.forEach((nodePricingUnits, idx) => { + const selectedPricingUnitId = nodePricingUnits.getPricingUnits().getSelectedUnitId(); + if (selectedPricingUnitId) { + const nodeId = nodesIdsListed[idx]; + const pricingPlanId = nodePricingUnits.getPricingPlanId(); + promises.push(osparc.study.NodePricingUnits.patchPricingUnitSelection(studyId, nodeId, pricingPlanId, selectedPricingUnitId)); + } + }); + + Promise.all(promises) + .then(() => { + win.close(); + const showStudyOptions = false; + this._startStudyById(studyId, openCB, cancelCB, showStudyOptions); + }); + }) + .catch(err => { + this._hideLoadingPage(); + 
osparc.FlashMessenger.logError(err); + }); + }); + } else { + osparc.study.Utils.createStudyFromTemplate(templateData, this._loadingPage) + .then(newStudyData => { + const studyId = newStudyData["uuid"]; + const openCB = () => this._hideLoadingPage(); + const cancelCB = () => { + this._hideLoadingPage(); + const params = { + url: { + studyId + } + }; + osparc.data.Resources.fetch("studies", "delete", params); + }; + const isStudyCreation = true; + this._startStudyById(studyId, openCB, cancelCB, isStudyCreation); + }) + .catch(err => { + this._hideLoadingPage(); + osparc.FlashMessenger.logError(err); + }); + } }, - _createStudyFromService: function() { - throw new Error("Abstract method called!"); + _createStudyFromService: function(key, version) { + if (!this._checkLoggedIn()) { + return; + } + + const studyAlias = osparc.product.Utils.getStudyAlias({firstUpperCase: true}); + this._showLoadingPage(this.tr("Creating ") + studyAlias); + + osparc.study.Utils.createStudyFromService(key, version) + .then(studyId => { + const openCB = () => this._hideLoadingPage(); + const cancelCB = () => { + this._hideLoadingPage(); + const params = { + url: { + studyId + } + }; + osparc.data.Resources.fetch("studies", "delete", params); + }; + const isStudyCreation = true; + this._startStudyById(studyId, openCB, cancelCB, isStudyCreation); + }) + .catch(err => { + this._hideLoadingPage(); + osparc.FlashMessenger.logError(err); + }); + }, + + _applyMultiSelection: function(value) { + return; }, _deleteResourceRequested: function(resourceId) { @@ -555,7 +719,9 @@ qx.Class.define("osparc.dashboard.ResourceBrowserBase", { }, _getOpenMenuButton: function(resourceData) { - const openButton = new qx.ui.menu.Button(this.tr("Open")); + const studyAlias = osparc.product.Utils.getStudyAlias({firstUpperCase: true}); + const openText = (resourceData["resourceType"] === "study") ? 
this.tr("Open") : this.tr("New") + " " + studyAlias; + const openButton = new qx.ui.menu.Button(openText); openButton["openResourceButton"] = true; openButton.addListener("execute", () => { switch (resourceData["resourceType"]) { diff --git a/services/static-webserver/client/source/class/osparc/dashboard/ResourceContainerManager.js b/services/static-webserver/client/source/class/osparc/dashboard/ResourceContainerManager.js index 55ac1f85697..fe82d290aaa 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/ResourceContainerManager.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/ResourceContainerManager.js @@ -248,15 +248,17 @@ qx.Class.define("osparc.dashboard.ResourceContainerManager", { container.add(card); if (this.getMode() === "list") { + const fitToContainer = () => { + const bounds = container.getBounds() || container.getSizeHint(); + card.setWidth(bounds.width); + }; [ "appear", "resize", ].forEach(ev => { - container.addListener(ev, () => { - const bounds = container.getBounds() || container.getSizeHint(); - card.setWidth(bounds.width); - }); + container.addListener(ev, () => fitToContainer()); }); + fitToContainer(); } }, diff --git a/services/static-webserver/client/source/class/osparc/dashboard/ResourceDetails.js b/services/static-webserver/client/source/class/osparc/dashboard/ResourceDetails.js index 5f6bb97a02e..74ae7a7538d 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/ResourceDetails.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/ResourceDetails.js @@ -23,29 +23,58 @@ qx.Class.define("osparc.dashboard.ResourceDetails", { this.__resourceData = resourceData; - this.__resourceModel = null; + let latestPromise = null; switch (resourceData["resourceType"]) { case "study": - case "template": - this.__resourceModel = new osparc.data.model.Study(resourceData); + case "template": { + const params = { + url: { + "studyId": resourceData["uuid"] + } + }; + latestPromise = osparc.data.Resources.fetch("studies", "getOne", params); break; - case "service": - this.__resourceModel = new osparc.data.model.Service(resourceData); + } + case "service": { + latestPromise = osparc.store.Services.getService(resourceData["key"], resourceData["version"]); break; + } } - this.__resourceModel["resourceType"] = resourceData["resourceType"]; - this.__addPages(); + latestPromise + .then(latestResourceData => { + this.__resourceData = latestResourceData; + this.__resourceData["resourceType"] = resourceData["resourceType"]; + switch (resourceData["resourceType"]) { + case "study": + case "template": { + osparc.store.Services.getStudyServicesMetadata(latestResourceData) + .then(() => { + this.__resourceModel = new osparc.data.model.Study(latestResourceData); + this.__resourceModel["resourceType"] = resourceData["resourceType"]; + this.__addPages(); + }) + break; + } + case "service": { + this.__resourceModel = new osparc.data.model.Service(latestResourceData); + this.__resourceModel["resourceType"] = resourceData["resourceType"]; + this.__addPages(); + break; + } + } + }) + .catch(err => osparc.FlashMessenger.logError(err)); }, events: { - "openStudy": "qx.event.type.Data", + "pagesAdded": "qx.event.type.Event", "openTemplate": "qx.event.type.Data", "openService": "qx.event.type.Data", "updateStudy": "qx.event.type.Data", "updateTemplate": "qx.event.type.Data", "updateService": "qx.event.type.Data", - "publishTemplate": "qx.event.type.Data" + "publishTemplate": "qx.event.type.Data", }, @@ -93,7 +122,6 @@ 
qx.Class.define("osparc.dashboard.ResourceDetails", { __resourceData: null, __resourceModel: null, __infoPage: null, - __dataPage: null, __servicesUpdatePage: null, __permissionsPage: null, __tagsPage: null, @@ -164,10 +192,15 @@ qx.Class.define("osparc.dashboard.ResourceDetails", { "studyId": this.__resourceData["uuid"] } }; - osparc.data.Resources.getOne("studies", params) - .then(updatedStudyData => { + Promise.all([ + osparc.data.Resources.fetch("studies", "getOne", params), + osparc.data.Resources.fetch("studies", "getServices", params) + ]) + .then(values => { + const updatedStudyData = values[0]; + const studyServices = values[1]; openButton.setFetching(false); - const updatableServices = osparc.metadata.ServicesInStudyUpdate.updatableNodeIds(updatedStudyData.workbench); + const updatableServices = osparc.study.Utils.updatableNodeIds(updatedStudyData.workbench, studyServices["services"]); if (updatableServices.length && osparc.data.model.Study.canIWrite(updatedStudyData["accessRights"])) { this.__confirmUpdate(); } else { @@ -175,8 +208,7 @@ qx.Class.define("osparc.dashboard.ResourceDetails", { } }) .catch(err => { - console.error(err); - osparc.FlashMessenger.logAs(err.message, "ERROR"); + osparc.FlashMessenger.logError(err); openButton.setFetching(false); }); }, @@ -189,12 +221,13 @@ qx.Class.define("osparc.dashboard.ResourceDetails", { }); win.center(); win.open(); - win.addListenerOnce("close", () => { + win.addListener("changeConfirmed", e => { if (win.getConfirmed()) { this.openUpdateServices(); } else { this.__openResource(); } + win.close(); }); }, @@ -216,10 +249,6 @@ qx.Class.define("osparc.dashboard.ResourceDetails", { this._openPage(this.__infoPage); }, - openData: function() { - this._openPage(this.__dataPage); - }, - openUpdateServices: function() { this._openPage(this.__servicesUpdatePage); }, @@ -258,34 +287,31 @@ qx.Class.define("osparc.dashboard.ResourceDetails", { hBox.add(versionsBox); - const versions = osparc.service.Utils.getVersions(this.__resourceData["key"]); - let selectedItem = null; - - // first setSelection - versions.forEach(version => { - selectedItem = osparc.service.Utils.versionToListItem(this.__resourceData["key"], version); - versionsBox.add(selectedItem); - if (this.__resourceData["version"] === version) { - versionsBox.setSelection([selectedItem]); - } - }); - osparc.utils.Utils.growSelectBox(versionsBox, 200); - - // then listen to changes - versionsBox.addListener("changeSelection", e => { - const selection = e.getData(); - if (selection.length) { - const serviceVersion = selection[0].version; - if (serviceVersion !== this.__resourceData["version"]) { - osparc.store.Services.getService(this.__resourceData["key"], serviceVersion) - .then(serviceData => { - serviceData["resourceType"] = "service"; - this.__resourceData = serviceData; - this.__addPages(); - }); + osparc.store.Services.populateVersionsSelectBox(this.__resourceData["key"], versionsBox) + .then(() => { + // first setSelection + const versionFound = versionsBox.getSelectables().find(selectable => selectable.version === this.__resourceData["version"]); + if (versionFound) { + versionsBox.setSelection([versionFound]); } - } - }, this); + osparc.utils.Utils.growSelectBox(versionsBox, 200); + + // then listen to changes + versionsBox.addListener("changeSelection", e => { + const selection = e.getData(); + if (selection.length) { + const serviceVersion = selection[0].version; + if (serviceVersion !== this.__resourceData["version"]) { + 
osparc.store.Services.getService(this.__resourceData["key"], serviceVersion) + .then(serviceData => { + serviceData["resourceType"] = "service"; + this.__resourceData = serviceData; + this.__addPages(); + }); + } + } + }, this); + }); return hBox; }, @@ -306,7 +332,6 @@ qx.Class.define("osparc.dashboard.ResourceDetails", { this.__getBillingPage, this.__getServicesUpdatePage, this.__getServicesBootOptionsPage, - this.__getDataPage, this.__getCommentsPage, this.__getPermissionsPage, this.__getSaveAsTemplatePage, @@ -323,12 +348,36 @@ qx.Class.define("osparc.dashboard.ResourceDetails", { } }); + const resourceData = this.__resourceData; + if (!osparc.utils.Resources.isService(resourceData)) { + const title = osparc.product.Utils.getStudyAlias({firstUpperCase: true}) + this.tr(" Files..."); + const iconSrc = "@FontAwesome5Solid/file/22"; + const dataAccess = new qx.ui.basic.Atom().set({ + label: title, + icon: iconSrc, + font: "text-14", + padding: 8, + paddingLeft: 12, + gap: 14, + cursor: "pointer", + }); + dataAccess.addListener("tap", () => osparc.widget.StudyDataManager.popUpInWindow(resourceData["uuid"])); + this.addWidgetToTabs(dataAccess); + + if (resourceData["resourceType"] === "study") { + const canShowData = osparc.study.Utils.canShowStudyData(resourceData); + dataAccess.setEnabled(canShowData); + } + } + if (selectedTabId) { const pageFound = tabsView.getChildren().find(page => page.tabId === selectedTabId); if (pageFound) { tabsView.setSelection([pageFound]); } } + + this.fireEvent("pagesAdded"); }, __getInfoPage: function() { @@ -431,7 +480,7 @@ qx.Class.define("osparc.dashboard.ResourceDetails", { if ( osparc.utils.Resources.isService(resourceData) || !osparc.product.Utils.showStudyPreview() || - osparc.data.model.Study.getUiMode(resourceData) === "app" + !(osparc.study.Utils.getUiMode(resourceData) === "workbench") ) { // there is no pipelining or don't show it return null; @@ -486,33 +535,6 @@ qx.Class.define("osparc.dashboard.ResourceDetails", { return page; }, - __getDataPage: function() { - const resourceData = this.__resourceData; - if (osparc.utils.Resources.isService(resourceData)) { - return null; - } - - const id = "Data"; - const title = osparc.product.Utils.getStudyAlias({firstUpperCase: true}) + this.tr(" Files"); - const iconSrc = "@FontAwesome5Solid/file/22"; - const page = this.__dataPage = new osparc.dashboard.resources.pages.BasePage(title, iconSrc, id); - this.__addOpenButton(page); - - if (this.__resourceData["resourceType"] === "study") { - const studyData = this.__resourceData; - const canBeOpened = osparc.study.Utils.canShowStudyData(studyData); - page.setEnabled(canBeOpened); - } - - const lazyLoadContent = () => { - const studyDataManager = new osparc.widget.NodeDataManager(resourceData["uuid"]); - page.addToContent(studyDataManager); - } - page.addListenerOnce("appear", lazyLoadContent, this); - - return page; - }, - __getPermissionsPage: function() { const id = "Permissions"; const title = this.tr("Sharing"); diff --git a/services/static-webserver/client/source/class/osparc/dashboard/ResourceFilter.js b/services/static-webserver/client/source/class/osparc/dashboard/ResourceFilter.js index 2b35fcad22d..352e45955a6 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/ResourceFilter.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/ResourceFilter.js @@ -54,21 +54,31 @@ qx.Class.define("osparc.dashboard.ResourceFilter", { __buildLayout: function() { const filtersSpacer = new qx.ui.core.Spacer(10, 10); switch 
(this.__resourceType) { - case "study": + case "study": { this._add(this.__createWorkspacesAndFoldersTree()); this._add(this.__createTrashBin()); - this._add(this.__createResourceTypeContextButtons()); + // this._add(this.__createResourceTypeContextButtons()); this._add(filtersSpacer); - this._add(this.__createTagsFilterLayout()); + const scrollView = new qx.ui.container.Scroll(); + scrollView.add(this.__createTagsFilterLayout()); + this._add(scrollView, { + flex: 1 + }); break; - case "template": - this._add(this.__createResourceTypeContextButtons()); + } + case "template": { + // this._add(this.__createResourceTypeContextButtons()); this._add(filtersSpacer); this._add(this.__createSharedWithFilterLayout()); - this._add(this.__createTagsFilterLayout()); + const scrollView = new qx.ui.container.Scroll(); + scrollView.add(this.__createTagsFilterLayout()); + this._add(scrollView, { + flex: 1 + }); break; + } case "service": - this._add(this.__createResourceTypeContextButtons()); + // this._add(this.__createResourceTypeContextButtons()); this._add(filtersSpacer); this._add(this.__createSharedWithFilterLayout()); this._add(this.__createServiceTypeFilterLayout()); @@ -114,7 +124,7 @@ qx.Class.define("osparc.dashboard.ResourceFilter", { const trashButton = this.__trashButton = new qx.ui.toolbar.RadioButton().set({ value: false, appearance: "filter-toggle-button", - label: this.tr("Bin"), + label: this.tr("Recently Deleted"), icon: "@FontAwesome5Solid/trash-alt/16", paddingLeft: 10, // align it with the context }); @@ -350,15 +360,15 @@ qx.Class.define("osparc.dashboard.ResourceFilter", { /* TAGS */ __createTagsFilterLayout: function() { - const layout = new qx.ui.container.Composite(new qx.ui.layout.VBox(2)); - osparc.utils.Utils.setIdToWidget(layout, this.__resourceType + "-tagsFilter"); + const tagsLayout = new qx.ui.container.Composite(new qx.ui.layout.VBox(2)); + osparc.utils.Utils.setIdToWidget(tagsLayout, this.__resourceType + "-tagsFilter"); - this.__populateTags(layout, []); + this.__populateTags(tagsLayout, []); osparc.store.Tags.getInstance().addListener("tagsChanged", () => { - this.__populateTags(layout, this.__getSelectedTagIds()); + this.__populateTags(tagsLayout, this.__getSelectedTagIds()); }, this); - return layout; + return tagsLayout; }, __getSelectedTagIds: function() { @@ -366,14 +376,15 @@ qx.Class.define("osparc.dashboard.ResourceFilter", { return selectedTagIds; }, - __populateTags: function(layout, selectedTagIds) { + __populateTags: function(tagsLayout, selectedTagIds) { const maxTags = 5; this.__tagButtons = []; - layout.removeAll(); + tagsLayout.removeAll(); osparc.store.Tags.getInstance().getTags().forEach((tag, idx) => { const button = new qx.ui.form.ToggleButton(null, "@FontAwesome5Solid/tag/16"); button.id = tag.getTagId(); tag.bind("name", button, "label"); + tag.bind("name", button, "toolTipText"); tag.bind("color", button.getChildControl("icon"), "textColor"); osparc.utils.Utils.setIdToWidget(button, this.__resourceType + "-tagFilterItem"); button.set({ @@ -381,7 +392,7 @@ qx.Class.define("osparc.dashboard.ResourceFilter", { value: selectedTagIds.includes(tag.getTagId()) }); - layout.add(button); + tagsLayout.add(button); button.addListener("execute", () => { const selection = this.__getSelectedTagIds(); @@ -411,7 +422,7 @@ qx.Class.define("osparc.dashboard.ResourceFilter", { showAllButton.showingAll = true; } }); - layout.add(showAllButton); + tagsLayout.add(showAllButton); } const editTagsButton = new qx.ui.form.Button(this.tr("Edit Tags..."), 
"@FontAwesome5Solid/pencil-alt/14"); @@ -419,13 +430,13 @@ qx.Class.define("osparc.dashboard.ResourceFilter", { appearance: "filter-toggle-button" }); editTagsButton.addListener("execute", () => { - const preferencesWindow = osparc.desktop.preferences.PreferencesWindow.openWindow(); - preferencesWindow.openTags(); + const myAccountWindow = osparc.desktop.account.MyAccountWindow.openWindow(); + myAccountWindow.openTags(); }); - layout.add(editTagsButton); + tagsLayout.add(editTagsButton); if (this.__resourceType === "study") { - layout.getChildren().forEach(item => item.setPaddingLeft(10)); // align them with the context + tagsLayout.getChildren().forEach(item => item.setPaddingLeft(10)); // align them with the context } }, /* /TAGS */ diff --git a/services/static-webserver/client/source/class/osparc/dashboard/ResourceUpgradeHelper.js b/services/static-webserver/client/source/class/osparc/dashboard/ResourceUpgradeHelper.js index 882389c1ce0..43f39c55419 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/ResourceUpgradeHelper.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/ResourceUpgradeHelper.js @@ -37,7 +37,6 @@ qx.Class.define("osparc.dashboard.ResourceUpgradeHelper", { this.bind("secondaryText", secondaryButton, "label"); secondaryButton.addListener("execute", () => { this.setConfirmed(false); - this.close(1); }, this); this.addButton(secondaryButton); @@ -50,7 +49,6 @@ qx.Class.define("osparc.dashboard.ResourceUpgradeHelper", { this.bind("primaryText", primaryButton, "label"); primaryButton.addListener("execute", () => { this.setConfirmed(true); - this.close(1); }, this); const command = new qx.ui.command.Command("Enter"); primaryButton.setCommand(command); @@ -86,7 +84,8 @@ qx.Class.define("osparc.dashboard.ResourceUpgradeHelper", { confirmed: { check: "Boolean", - init: false + init: null, + event: "changeConfirmed" } }, diff --git a/services/static-webserver/client/source/class/osparc/dashboard/ServiceBrowser.js b/services/static-webserver/client/source/class/osparc/dashboard/ServiceBrowser.js index fec523a2341..6c29b754cf6 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/ServiceBrowser.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/ServiceBrowser.js @@ -32,25 +32,30 @@ qx.Class.define("osparc.dashboard.ServiceBrowser", { this.__sortBy = osparc.service.SortServicesButtons.DefaultSorting; }, - properties: { - multiSelection: { - check: "Boolean", - init: false, - nullable: false, - event: "changeMultiSelection", - apply: "__applyMultiSelection" - } - }, - members: { __sortBy: null, // overridden initResources: function() { this._resourcesList = []; - this.getChildControl("resources-layout"); - this.reloadResources(); - this._hideLoadingPage(); + osparc.store.Services.getServicesLatest() + .then(services => { + // Show "Contact Us" message if services.length === 0 + // Most probably is a product-stranger user (it can also be that the catalog is down) + if (Object.keys(services).length === 0) { + let msg = this.tr("It seems you don't have access to this product."); + msg += "
"; + msg += this.tr("Please contact us:"); + msg += "
"; + const supportEmail = osparc.store.VendorInfo.getInstance().getSupportEmail(); + msg += supportEmail; + osparc.FlashMessenger.getInstance().logAs(msg, "WARNING"); + } + + this.getChildControl("resources-layout"); + this.reloadResources(); + this._hideLoadingPage(); + }); }, reloadResources: function() { @@ -98,36 +103,6 @@ qx.Class.define("osparc.dashboard.ServiceBrowser", { this.resetSelection(); }, - _createStudyFromService: function(key, version) { - if (!this._checkLoggedIn()) { - return; - } - - const studyAlias = osparc.product.Utils.getStudyAlias({firstUpperCase: true}); - this._showLoadingPage(this.tr("Creating ") + studyAlias); - - osparc.study.Utils.createStudyFromService(key, version) - .then(studyId => { - const openCB = () => this._hideLoadingPage(); - const cancelCB = () => { - this._hideLoadingPage(); - const params = { - url: { - studyId - } - }; - osparc.data.Resources.fetch("studies", "delete", params); - }; - const isStudyCreation = true; - this._startStudyById(studyId, openCB, cancelCB, isStudyCreation); - }) - .catch(err => { - this._hideLoadingPage(); - osparc.FlashMessenger.getInstance().logAs(err.message, "ERROR"); - console.error(err); - }); - }, - // LAYOUT // _createLayout: function() { this._createSearchBar(); diff --git a/services/static-webserver/client/source/class/osparc/dashboard/StudyBrowser.js b/services/static-webserver/client/source/class/osparc/dashboard/StudyBrowser.js index 9c04ac3ef7d..6328a0c5ade 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/StudyBrowser.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/StudyBrowser.js @@ -15,10 +15,6 @@ ************************************************************************ */ -/** - * @asset(osparc/new_studies.json") - */ - /** * Widget that shows lists user's studies. 
* @@ -73,14 +69,6 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { event: "changeCurrentFolderId" }, - multiSelection: { - check: "Boolean", - init: false, - nullable: false, - event: "changeMultiSelection", - apply: "__applyMultiSelection" - }, - // Ordering by Possibilities: // field: type | uuid | name | description | prj_owner | creation_date | last_change_date // direction: asc | desc @@ -127,10 +115,7 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { osparc.notification.Notifications.getInstance().addNotifications(notifications); }); }) - .catch(err => { - console.error(err); - osparc.FlashMessenger.logAs(err.message, "ERROR"); - }); + .catch(err => osparc.FlashMessenger.logError(err)); }, __getActiveStudy: function() { @@ -197,8 +182,7 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { this.__setWorkspacesToList(workspaces); if (this.getCurrentContext() === "trash") { if (workspaces.length) { - // Not yet implemented - // this.__header.getChildControl("empty-trash-button").show(); + this.__header.getChildControl("empty-trash-button").show(); } } }) @@ -244,8 +228,7 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { this.__setFoldersToList(folders); if (this.getCurrentContext() === "trash") { if (folders.length) { - // Not yet implemented - // this.__header.getChildControl("empty-trash-button").show(); + this.__header.getChildControl("empty-trash-button").show(); } } }) @@ -268,34 +251,7 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { return; } - osparc.data.Resources.get("tasks") - .then(tasks => { - if (tasks && tasks.length) { - this.__tasksReceived(tasks); - } - }); - - // Show "Contact Us" message if services.length === 0 - // Most probably is a product-stranger user (it can also be that the catalog is down) - osparc.store.Services.getServicesLatest() - .then(services => { - if (Object.keys(services).length === 0) { - const noAccessText = new qx.ui.basic.Label().set({ - selectable: true, - rich: true, - font: "text-18", - paddingTop: 20 - }); - let msg = this.tr("It seems you don't have access to this product."); - msg += "
"; - msg += "
"; - msg += this.tr("Please contact us:"); - msg += "
"; - const supportEmail = osparc.store.VendorInfo.getInstance().getSupportEmail(); - noAccessText.setValue(msg + supportEmail); - this._addToLayout(noAccessText); - } - }); + this.__tasksToCards(); this._loadingResourcesBtn.setFetching(true); this._loadingResourcesBtn.setVisibility("visible"); @@ -316,8 +272,7 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { if (this.getCurrentContext() === "trash") { if (this._resourcesList.length) { - // Not yet implemented - // this.__header.getChildControl("empty-trash-button").show(); + this.__header.getChildControl("empty-trash-button").show(); } } @@ -345,8 +300,7 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { } }) .catch(err => { - console.error(err); - osparc.FlashMessenger.logAs(err.message, "ERROR"); + osparc.FlashMessenger.logError(err); // stop fetching if (this._resourcesContainer.getFlatList()) { this._resourcesContainer.getFlatList().nextRequest = null; @@ -390,7 +344,7 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { studyId } }; - osparc.data.Resources.getOne("studies", params) + osparc.data.Resources.fetch("studies", "getOne", params) .then(studyData => { this.__studyStateReceived(study["uuid"], studyData["state"]); }); @@ -476,14 +430,11 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { osparc.store.Workspaces.getInstance().trashWorkspace(workspaceId) .then(() => { this.__reloadWorkspaces(); - const msg = this.tr("Successfully moved to Bin"); - osparc.FlashMessenger.getInstance().logAs(msg, "INFO"); + const msg = this.tr("Successfully deleted"); + osparc.FlashMessenger.logAs(msg, "INFO"); this._resourceFilter.setTrashEmpty(false); }) - .catch(err => { - console.error(err); - osparc.FlashMessenger.getInstance().logAs(err, "ERROR"); - }); + .catch(err => osparc.FlashMessenger.logError(err)); }, _untrashWorkspaceRequested: function(workspace) { @@ -491,13 +442,10 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { .then(() => { this.__reloadWorkspaces(); const msg = this.tr("Successfully restored"); - osparc.FlashMessenger.getInstance().logAs(msg, "INFO"); + osparc.FlashMessenger.logAs(msg, "INFO"); this._resourceFilter.evaluateTrashEmpty(); }) - .catch(err => { - console.error(err); - osparc.FlashMessenger.getInstance().logAs(err, "ERROR"); - }); + .catch(err => osparc.FlashMessenger.logError(err)); }, _deleteWorkspaceRequested: function(workspaceId) { @@ -505,13 +453,10 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { .then(() => { this.__reloadWorkspaces(); const msg = this.tr("Successfully deleted"); - osparc.FlashMessenger.getInstance().logAs(msg, "INFO"); + osparc.FlashMessenger.logAs(msg, "INFO"); this._resourceFilter.evaluateTrashEmpty(); }) - .catch(err => { - console.error(err); - osparc.FlashMessenger.logAs(err.message, "ERROR"); - }) + .catch(err => osparc.FlashMessenger.logError(err)) }, // /WORKSPACES @@ -591,10 +536,7 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { osparc.store.Folders.getInstance().moveFolderToWorkspace(folderId, destWorkspaceId) // first move to workspace .then(() => osparc.store.Folders.getInstance().moveFolderToFolder(folderId, destFolderId)) // then move to folder .then(() => this.__reloadFolders()) - .catch(err => { - console.error(err); - osparc.FlashMessenger.getInstance().logAs(err, "ERROR"); - }); + .catch(err => osparc.FlashMessenger.logError(err)); }, __folderToFolderRequested: function(folderId, workspaceId, destWorkspaceId, destFolderId) { @@ -614,23 +556,20 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { 
osparc.store.Folders.getInstance().trashFolder(folderId, this.getCurrentWorkspaceId()) .then(() => { this.__reloadFolders(); - const msg = this.tr("Successfully moved to Bin"); - osparc.FlashMessenger.getInstance().logAs(msg, "INFO"); + const msg = this.tr("Successfully deleted"); + osparc.FlashMessenger.logAs(msg, "INFO"); this._resourceFilter.setTrashEmpty(false); }) - .catch(err => { - console.error(err); - osparc.FlashMessenger.getInstance().logAs(err, "ERROR"); - }); + .catch(err => osparc.FlashMessenger.logError(err)); }, _trashFolderRequested: function(folderId) { const trashDays = osparc.store.StaticInfo.getInstance().getTrashRetentionDays(); - let msg = this.tr("Are you sure you want to move the Folder and all its content to the Bin?"); + let msg = this.tr("Are you sure you want to delete the Folder and all its content?"); msg += "<br>
<br>
" + this.tr("It will be permanently deleted after ") + trashDays + " days."; const confirmationWin = new osparc.ui.window.Confirmation(msg).set({ - caption: this.tr("Move to Bin"), - confirmText: this.tr("Move to Bin"), + caption: this.tr("Delete"), + confirmText: this.tr("Delete"), confirmAction: "warning", }); confirmationWin.center(); @@ -647,13 +586,10 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { .then(() => { this.__reloadFolders(); const msg = this.tr("Successfully restored"); - osparc.FlashMessenger.getInstance().logAs(msg, "INFO"); + osparc.FlashMessenger.logAs(msg, "INFO"); this._resourceFilter.evaluateTrashEmpty(); }) - .catch(err => { - console.error(err); - osparc.FlashMessenger.getInstance().logAs(err, "ERROR"); - }) + .catch(err => osparc.FlashMessenger.logError(err)) }, _deleteFolderRequested: function(folderId) { @@ -661,7 +597,7 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { .then(() => { this.__reloadFolders(); const msg = this.tr("Successfully deleted"); - osparc.FlashMessenger.getInstance().logAs(msg, "INFO"); + osparc.FlashMessenger.logAs(msg, "INFO"); this._resourceFilter.evaluateTrashEmpty(); }) .catch(err => console.error(err)); @@ -939,36 +875,83 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { if (!osparc.product.Utils.hasNewPlusButton()) { switch (osparc.product.Utils.getProductName()) { - case "osparc": - this.__addEmptyStudyPlusButton(); - break; case "tis": case "tiplite": + // this one is different since it groups all new buttons in one new button this.__addTIPPlusButton(); break; - case "s4l": - case "s4lacad": - case "s4llite": - this.__addPlusButtonsFromServices(); + default: + this.__addPlusButtons(); break; } } }, - __addEmptyStudyPlusButton: function() { + __addPlusButtons: function() { + const plusButtonConfig = osparc.store.Products.getInstance().getNewStudiesUiConfig(); + if (plusButtonConfig) { + plusButtonConfig["resources"].forEach(newStudyData => { + if (newStudyData["resourceType"] === "study") { + this.__addEmptyStudyPlusButton(newStudyData); + } else if (newStudyData["resourceType"] === "service") { + this.__addNewStudyFromServiceButton(newStudyData); + } + }); + } + }, + + __addEmptyStudyPlusButton: function(newStudyData) { const mode = this._resourcesContainer.getMode(); - const title = this.tr("Empty") + " " + osparc.product.Utils.getStudyAlias({ + const defTitle = this.tr("Empty") + " " + osparc.product.Utils.getStudyAlias({ firstUpperCase: true - }) - const desc = this.tr("Start with an empty study"); + }); + const title = newStudyData["title"] || defTitle; + const desc = newStudyData["description"] || this.tr("Start with an empty study"); const newEmptyStudyBtn = (mode === "grid") ? 
new osparc.dashboard.GridButtonNew(title, desc) : new osparc.dashboard.ListButtonNew(title, desc); newEmptyStudyBtn.setCardKey("new-study"); newEmptyStudyBtn.subscribeToFilterGroup("searchBarFilter"); - osparc.utils.Utils.setIdToWidget(newEmptyStudyBtn, "emptyStudyBtn"); - newEmptyStudyBtn.addListener("tap", () => this.__newEmptyStudyBtnClicked("New Study")); + osparc.utils.Utils.setIdToWidget(newEmptyStudyBtn, newStudyData["idToWidget"]); + newEmptyStudyBtn.addListener("tap", () => this.__newEmptyStudyBtnClicked(newStudyData["newStudyLabel"])); this._resourcesContainer.addNonResourceCard(newEmptyStudyBtn); }, + __addNewStudyFromServiceButton: function(newStudyData) { + if ("expectedKey" in newStudyData) { + const key = newStudyData["expectedKey"]; + const latestMetadata = osparc.store.Services.getLatest(key); + if (!latestMetadata) { + return; + } + const title = newStudyData.title + " " + osparc.service.Utils.extractVersionDisplay(latestMetadata); + const desc = newStudyData.description; + const mode = this._resourcesContainer.getMode(); + const newStudyFromServiceButton = (mode === "grid") ? new osparc.dashboard.GridButtonNew(title, desc) : new osparc.dashboard.ListButtonNew(title, desc); + newStudyFromServiceButton.setCardKey("new-"+key); + if (newStudyData["idToWidget"]) { + osparc.utils.Utils.setIdToWidget(newStudyFromServiceButton, newStudyData["idToWidget"]); + } + newStudyFromServiceButton.addListener("tap", () => this.__newStudyFromServiceBtnClicked(latestMetadata["key"], latestMetadata["version"], newStudyData.newStudyLabel)); + this._resourcesContainer.addNonResourceCard(newStudyFromServiceButton); + } else if ("myMostUsed" in newStudyData) { + const excludeFrontend = true; + const excludeDeprecated = true + osparc.store.Services.getServicesLatestList(excludeFrontend, excludeDeprecated) + .then(servicesList => { + osparc.service.Utils.sortObjectsBasedOn(servicesList, { + "sort": "hits", + "order": "down" + }); + for (let i=0; i<newStudyData["myMostUsed"]; i++) { + const latestMetadata = servicesList[i]; + const title = latestMetadata["name"]; + const desc = latestMetadata["description"]; + const mode = this._resourcesContainer.getMode(); + const newStudyFromServiceButton = (mode === "grid") ? new osparc.dashboard.GridButtonNew(title, desc) : new osparc.dashboard.ListButtonNew(title, desc); + newStudyFromServiceButton.setCardKey("new-"+latestMetadata["key"]); + newStudyFromServiceButton.addListener("tap", () => this.__newStudyFromServiceBtnClicked(latestMetadata["key"], latestMetadata["version"], latestMetadata["name"])); + this._resourcesContainer.addNonResourceCard(newStudyFromServiceButton); + } + }); + } + }, + __addTIPPlusButton: function() { const mode = this._resourcesContainer.getMode(); const title = this.tr("New Plan"); @@ -979,87 +962,44 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { this._resourcesContainer.addNonResourceCard(newPlansBtn); newPlansBtn.setEnabled(false); - osparc.utils.Utils.fetchJSON("/resource/osparc/new_studies.json") - .then(newStudiesData => { - const product = osparc.product.Utils.getProductName() - if (product in newStudiesData) { - newPlansBtn.setEnabled(true); - - newPlansBtn.addListener("tap", () => { - osparc.data.Resources.get("templates") - .then(templates => { - if (templates) { - const newStudies = new osparc.dashboard.NewStudies(newStudiesData[product]); - newStudies.addListener("templatesLoaded", () => { - newStudies.setGroupBy("category"); - const winTitle = this.tr("New Plan"); - const win = osparc.ui.window.Window.popUpInWindow(newStudies, winTitle, osparc.dashboard.NewStudies.WIDTH+40, 300).set({ - clickAwayClose: false, - resizable: true - }); - newStudies.addListener("newStudyClicked", e => { - win.close(); - const templateInfo = e.getData(); - const templateData = templates.find(t => t.name === templateInfo.expectedTemplateLabel); - if (templateData) { - this.__newPlanBtnClicked(templateData, templateInfo.newStudyLabel); - } - }); - osparc.utils.Utils.setIdToWidget(win, "newStudiesWindow"); - }); - } +
const newStudiesConfig = osparc.store.Products.getInstance().getNewStudiesUiConfig(); + if (newStudiesConfig) { + newPlansBtn.setEnabled(true); + + newPlansBtn.addListener("tap", () => { + osparc.data.Resources.get("templates") + .then(templates => { + if (templates) { + const newStudies = new osparc.dashboard.NewStudies(newStudiesConfig); + newStudies.addListener("templatesLoaded", () => { + newStudies.setGroupBy("category"); + const winTitle = this.tr("New Plan"); + const win = osparc.ui.window.Window.popUpInWindow(newStudies, winTitle, osparc.dashboard.NewStudies.WIDTH+40, 300).set({ + clickAwayClose: false, + resizable: true + }); + newStudies.addListener("newStudyClicked", e => { + win.close(); + const templateInfo = e.getData(); + const templateData = templates.find(t => t.name === templateInfo.expectedTemplateLabel); + if (templateData) { + this.__newPlanBtnClicked(templateData, templateInfo.newStudyLabel); + } + }); + osparc.utils.Utils.setIdToWidget(win, "newStudiesWindow"); }); + } }); - } }); - }, - - // Used in S4L products - __addNewStudyFromServiceButtons: function(key, newButtonInfo) { - // Include deprecated versions, they should all be updatable to a non deprecated version - const versions = osparc.service.Utils.getVersions(key, false); - if (versions.length && newButtonInfo) { - // scale to latest compatible - const latestVersion = versions[0]; - const latestCompatible = osparc.service.Utils.getLatestCompatible(key, latestVersion); - osparc.store.Services.getService(latestCompatible["key"], latestCompatible["version"]) - .then(latestMetadata => { - // make sure this one is not deprecated - if (osparc.service.Utils.isDeprecated(latestMetadata)) { - return; - } - const title = newButtonInfo.title + " " + osparc.service.Utils.extractVersionDisplay(latestMetadata); - const desc = newButtonInfo.description; - const mode = this._resourcesContainer.getMode(); - const newStudyFromServiceButton = (mode === "grid") ? new osparc.dashboard.GridButtonNew(title, desc) : new osparc.dashboard.ListButtonNew(title, desc); - newStudyFromServiceButton.setCardKey("new-"+key); - osparc.utils.Utils.setIdToWidget(newStudyFromServiceButton, newButtonInfo.idToWidget); - newStudyFromServiceButton.addListener("tap", () => this.__newStudyFromServiceBtnClicked(latestMetadata["key"], latestMetadata["version"], newButtonInfo.newStudyLabel)); - this._resourcesContainer.addNonResourceCard(newStudyFromServiceButton); - }) } }, - __addPlusButtonsFromServices: function() { - // add new plus buttons if key services exists - osparc.utils.Utils.fetchJSON("/resource/osparc/new_studies.json") - .then(newStudiesData => { - const product = osparc.product.Utils.getProductName() - if (product in newStudiesData) { - const newButtonsInfo = newStudiesData[product].resources; - newButtonsInfo.forEach(newButtonInfo => { - this.__addNewStudyFromServiceButtons(newButtonInfo.expectedKey, newButtonInfo); - }); - } - }); - }, - // LAYOUT // _createLayout: function() { this._createSearchBar(); const header = this.__header = new osparc.dashboard.StudyBrowserHeader(); - this.__header.addListener("emptyTrashRequested", () => this.__emptyTrash(), this); + this.__header.addListener("trashEmptied", () => this.reloadResources(), this); this._addToLayout(header); this._createResourcesLayout("studiesList"); @@ -1119,17 +1059,17 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { studiesMoveButton.set({ visibility: selection.length && currentContext === "studiesAndFolders" ? 
"visible" : "excluded", - label: this.tr("Move") + (selection.length > 1 ? this.tr(" selected ") + `(${selection.length})` : ""), + label: this.tr("Move") + (selection.length > 1 ? ` (${selection.length})` : ""), }); studiesTrashButton.set({ visibility: selection.length && currentContext === "studiesAndFolders" ? "visible" : "excluded", - label: this.tr("Move to Bin") + (selection.length > 1 ? this.tr(" selected ") + `(${selection.length})` : ""), + label: this.tr("Delete") + (selection.length > 1 ? ` (${selection.length})` : ""), }); studiesDeleteButton.set({ visibility: selection.length && currentContext === "trash" ? "visible" : "excluded", - label: this.tr("Delete permanently") + (selection.length > 1 ? this.tr(" selected ") + `(${selection.length})` : ""), + label: this.tr("Delete permanently") + (selection.length > 1 ? ` (${selection.length})` : ""), }); }); @@ -1388,7 +1328,7 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { }, __createTrashStudiesButton: function() { - const trashButton = new qx.ui.form.Button(this.tr("Move to Bin"), "@FontAwesome5Solid/trash/14").set({ + const trashButton = new qx.ui.form.Button(this.tr("Delete"), "@FontAwesome5Solid/trash/14").set({ appearance: "warning-button", visibility: "excluded" }); @@ -1437,19 +1377,7 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { return deleteButton; }, - __emptyTrash: function() { - const win = this.__createConfirmEmptyTrashWindow(); - win.center(); - win.open(); - win.addListener("close", () => { - if (win.getConfirmed()) { - osparc.data.Resources.fetch("trash", "delete") - .then(() => { - this.__resetStudiesList(); - }); - } - }, this); - }, + __createSelectButton: function() { const selectButton = new qx.ui.form.ToggleButton().set({ @@ -1470,7 +1398,8 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { return selectButton; }, - __applyMultiSelection: function(value) { + // override + _applyMultiSelection: function(value) { this._resourcesContainer.getCards().forEach(studyItem => { if (osparc.dashboard.ResourceBrowserBase.isCardButtonItem(studyItem)) { studyItem.setMultiSelectionMode(value); @@ -1526,8 +1455,7 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { .then(studyData => this.__startStudyAfterCreating(studyData["uuid"])) .catch(err => { this._hideLoadingPage(); - osparc.FlashMessenger.getInstance().logAs(err.message, "ERROR"); - console.error(err); + osparc.FlashMessenger.logError(err); }); }, @@ -1546,8 +1474,7 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { .then(studyData => this.__startStudyAfterCreating(studyData["uuid"])) .catch(err => { this._hideLoadingPage(); - osparc.FlashMessenger.getInstance().logAs(err.message, "ERROR"); - console.error(err); + osparc.FlashMessenger.logError(err); }); }, @@ -1561,8 +1488,7 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { .then(studyId => this.__startStudyAfterCreating(studyId)) .catch(err => { this._hideLoadingPage(); - osparc.FlashMessenger.getInstance().logAs(err.message, "ERROR"); - console.error(err); + osparc.FlashMessenger.logError(err); }); }, @@ -1648,8 +1574,13 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { const duplicateStudyButton = this.__getDuplicateMenuButton(studyData); menu.add(duplicateStudyButton); - if (osparc.product.Utils.isProduct("osparc")) { - const exportStudyButton = this.__getExportMenuButton(studyData); + if (osparc.product.Utils.hasConvertToPipelineEnabled()) { + const convertToPipelineButton = this.__getConvertToPipelineMenuButton(studyData); + menu.add(convertToPipelineButton); + } + + if 
(osparc.product.Utils.hasExportCMisEnabled()) { + const exportStudyButton = this.__getExportCMisMenuButton(studyData); menu.add(exportStudyButton); } @@ -1744,23 +1675,15 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { }, __updateName: function(studyData, name) { - osparc.info.StudyUtils.patchStudyData(studyData, "name", name) + osparc.store.Study.patchStudyData(studyData, "name", name) .then(() => this._updateStudyData(studyData)) - .catch(err => { - console.error(err); - const msg = err.message || this.tr("Something went wrong Renaming"); - osparc.FlashMessenger.logAs(msg, "ERROR"); - }); + .catch(err => osparc.FlashMessenger.logError(err, this.tr("Something went wrong while renaming"))); }, __updateThumbnail: function(studyData, url) { - osparc.info.StudyUtils.patchStudyData(studyData, "thumbnail", url) + osparc.store.Study.patchStudyData(studyData, "thumbnail", url) .then(() => this._updateStudyData(studyData)) - .catch(err => { - console.error(err); - const msg = err.message || this.tr("Something went wrong updating the Thumbnail"); - osparc.FlashMessenger.logAs(msg, "ERROR"); - }); + .catch(err => osparc.FlashMessenger.logError(err, this.tr("Something went wrong while updating the thumbnail"))); }, __getStudyDataMenuButton: function(card) { @@ -1783,10 +1706,7 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { this.__moveStudyToWorkspace(studyData, destWorkspaceId) // first move to workspace .then(() => this.__moveStudyToFolder(studyData, destFolderId)) // then move to folder .then(() => this.__removeFromStudyList(studyData["uuid"])) - .catch(err => { - console.error(err); - osparc.FlashMessenger.logAs(err.message, "ERROR"); - }); + .catch(err => osparc.FlashMessenger.logError(err)); }, __studyToFolderRequested: function(studyData, destWorkspaceId, destFolderId) { @@ -1860,7 +1780,26 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { return duplicateButton; }, - __getExportMenuButton: function(studyData) { + __getConvertToPipelineMenuButton: function(studyData) { + const convertToPipelineButton = new qx.ui.menu.Button(this.tr("Convert to Pipeline"), null); + convertToPipelineButton["convertToPipelineButton"] = true; + const uiMode = osparc.study.Utils.getUiMode(studyData); + convertToPipelineButton.setVisibility(uiMode === "standalone" ? 
"visible" : "excluded"); + convertToPipelineButton.addListener("execute", () => { + this.__updateUIMode(studyData, "workbench") + .catch(err => osparc.FlashMessenger.logError(err, this.tr("Something went wrong while converting to pipeline"))); + }, this); + return convertToPipelineButton; + }, + + __updateUIMode: function(studyData, uiMode) { + const studyUI = osparc.utils.Utils.deepCloneObject(studyData["ui"]); + studyUI["mode"] = uiMode; + return osparc.info.StudyUtils.patchStudyData(studyData, "ui", studyUI) + .then(() => this._updateStudyData(studyData)) + }, + + __getExportCMisMenuButton: function(studyData) { const exportButton = new qx.ui.menu.Button(this.tr("Export cMIS"), "@FontAwesome5Solid/cloud-download-alt/12"); exportButton["exportCMISButton"] = true; const isDisabled = osparc.utils.DisabledPlugins.isExportDisabled(); @@ -1910,7 +1849,7 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { }, __getTrashStudyMenuButton: function(studyData) { - const trashButton = new qx.ui.menu.Button(this.tr("Move to Bin"), "@FontAwesome5Solid/trash/12"); + const trashButton = new qx.ui.menu.Button(this.tr("Delete"), "@FontAwesome5Solid/trash/12"); trashButton["trashButton"] = true; trashButton.set({ appearance: "menu-button" @@ -1945,21 +1884,9 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { return this._resourcesList.find(study => study.uuid === id); }, - __createDuplicateCard: function(studyName) { - const isGrid = this._resourcesContainer.getMode() === "grid"; - const duplicatingStudyCard = isGrid ? new osparc.dashboard.GridButtonPlaceholder() : new osparc.dashboard.ListButtonPlaceholder(); - duplicatingStudyCard.buildLayout( - this.tr("Duplicating ") + studyName, - osparc.task.Duplicate.ICON + (isGrid ? "60" : "24"), - null, - true - ); - return duplicatingStudyCard; - }, - __duplicateStudy: function(studyData) { const text = this.tr("Duplicate process started and added to the background tasks"); - osparc.FlashMessenger.getInstance().logAs(text, "INFO"); + osparc.FlashMessenger.logAs(text, "INFO"); const params = { url: { @@ -1971,59 +1898,53 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { }; const fetchPromise = osparc.data.Resources.fetch("studies", "duplicate", params, options); const interval = 1000; - const pollTasks = osparc.data.PollTasks.getInstance(); + const pollTasks = osparc.store.PollTasks.getInstance(); pollTasks.createPollingTask(fetchPromise, interval) .then(task => this.__taskDuplicateReceived(task, studyData["name"])) - .catch(err => { - console.error(err); - const msg = err.message || this.tr("Something went wrong Duplicating"); - osparc.FlashMessenger.logAs(msg, "ERROR"); - }); + .catch(err => osparc.FlashMessenger.logError(err, this.tr("Something went wrong while duplicating"))); }, __exportStudy: function(studyData) { - const exportTask = new osparc.task.Export(studyData); - exportTask.start(); - exportTask.setSubtitle(this.tr("Preparing files")); + const exportTaskUI = new osparc.task.Export(studyData); + exportTaskUI.setSubtitle(this.tr("Preparing files")); + + osparc.task.TasksContainer.getInstance().addTaskUI(exportTaskUI); + const text = this.tr("Exporting process started and added to the background tasks"); - osparc.FlashMessenger.getInstance().logAs(text, "INFO"); + osparc.FlashMessenger.logAs(text, "INFO"); const url = window.location.href + "v0/projects/" + studyData["uuid"] + ":xport"; const progressCB = () => { const textSuccess = this.tr("Download started"); - exportTask.setSubtitle(textSuccess); + exportTaskUI.setSubtitle(textSuccess); }; 
osparc.utils.Utils.downloadLink(url, "POST", null, progressCB) .catch(err => { - console.error(err); - const msg = osparc.data.Resources.getErrorMsg(JSON.parse(err.response)) || this.tr("Something went wrong Exporting the study"); - osparc.FlashMessenger.logAs(msg, "ERROR"); + const msg = osparc.data.Resources.getErrorMsg(JSON.parse(err.response)) || this.tr("Something went wrong while exporting the study"); + osparc.FlashMessenger.logError(err, msg); }) - .finally(() => { - exportTask.stop(); - }); + .finally(() => osparc.task.TasksContainer.getInstance().removeTaskUI(exportTaskUI)); }, __importStudy: function(file) { const uploadingLabel = this.tr("Uploading file"); - const importTask = new osparc.task.Import(); - importTask.start(); - importTask.setSubtitle(uploadingLabel); + const importTaskUI = new osparc.task.Import(); + importTaskUI.setSubtitle(uploadingLabel); + + osparc.task.TasksContainer.getInstance().addTaskUI(importTaskUI); const text = this.tr("Importing process started and added to the background tasks"); - osparc.FlashMessenger.getInstance().logAs(text, "INFO"); - - const isGrid = this._resourcesContainer.getMode() === "grid"; - const importingStudyCard = isGrid ? new osparc.dashboard.GridButtonPlaceholder() : new osparc.dashboard.ListButtonPlaceholder(); - importingStudyCard.buildLayout( - this.tr("Importing Study..."), - "@FontAwesome5Solid/cloud-upload-alt/" + (isGrid ? "60" : "24"), - uploadingLabel, - true - ); - importingStudyCard.subscribeToFilterGroup("searchBarFilter"); - this._resourcesContainer.addNonResourceCard(importingStudyCard); + osparc.FlashMessenger.logAs(text, "INFO"); + + const cardTitle = this.tr("Importing Study..."); + const cardIcon = "@FontAwesome5Solid/cloud-upload-alt"; + const importingStudyCard = this._addTaskCard(null, cardTitle, cardIcon); + if (importingStudyCard) { + this.__attachImportEventHandler(file, importTaskUI, importingStudyCard); + } + }, + __attachImportEventHandler: function(file, importTaskUI, importingStudyCard) { const body = new FormData(); body.append("fileName", file); @@ -2036,7 +1957,7 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { if (percentComplete === 100) { const processingLabel = this.tr("Processing study"); importingStudyCard.getChildControl("state-label").setValue(processingLabel); - importTask.setSubtitle(processingLabel); + importTaskUI.setSubtitle(processingLabel); importingStudyCard.getChildControl("progress-bar").exclude(); } } else { @@ -2048,7 +1969,7 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { if (req.status == 200) { const processingLabel = this.tr("Processing study"); importingStudyCard.getChildControl("state-label").setValue(processingLabel); - importTask.setSubtitle(processingLabel); + importTaskUI.setSubtitle(processingLabel); importingStudyCard.getChildControl("progress-bar").exclude(); const data = JSON.parse(req.responseText); const params = { @@ -2056,37 +1977,33 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { "studyId": data["data"]["uuid"] } }; - osparc.data.Resources.getOne("studies", params) + osparc.data.Resources.fetch("studies", "getOne", params) .then(studyData => this._updateStudyData(studyData)) - .catch(err => { - console.error(err); - const msg = this.tr("Something went wrong Fetching the study"); - osparc.FlashMessenger.logAs(msg, "ERROR"); - }) + .catch(err => osparc.FlashMessenger.logError(err, this.tr("Something went wrong while fetching the study"))) .finally(() => { - importTask.stop(); + osparc.task.TasksContainer.getInstance().removeTaskUI(importTaskUI); 
this._resourcesContainer.removeNonResourceCard(importingStudyCard); }); } else if (req.status == 400) { - importTask.stop(); + osparc.task.TasksContainer.getInstance().removeTaskUI(importTaskUI); this._resourcesContainer.removeNonResourceCard(importingStudyCard); - const msg = osparc.data.Resources.getErrorMsg(JSON.parse(req.response)) || this.tr("Something went wrong Importing the study"); - osparc.FlashMessenger.logAs(msg, "ERROR"); + const msg = osparc.data.Resources.getErrorMsg(JSON.parse(req.response)) || this.tr("Something went wrong while importing the study"); + osparc.FlashMessenger.logError(msg); } }); req.addEventListener("error", e => { // transferFailed - importTask.stop(); + osparc.task.TasksContainer.getInstance().removeTaskUI(importTaskUI); this._resourcesContainer.removeNonResourceCard(importingStudyCard); - const msg = osparc.data.Resources.getErrorMsg(e) || this.tr("Something went wrong Importing the study"); - osparc.FlashMessenger.logAs(msg, "ERROR"); + const msg = osparc.data.Resources.getErrorMsg(e) || this.tr("Something went wrong while importing the study"); + osparc.FlashMessenger.logError(msg); }); req.addEventListener("abort", e => { // transferAborted - importTask.stop(); + osparc.task.TasksContainer.getInstance().removeTaskUI(importTaskUI); this._resourcesContainer.removeNonResourceCard(importingStudyCard); - const msg = osparc.data.Resources.getErrorMsg(e) || this.tr("Something went wrong Importing the study"); - osparc.FlashMessenger.logAs(msg, "ERROR"); + const msg = osparc.data.Resources.getErrorMsg(e) || this.tr("Something went wrong while importing the study"); + osparc.FlashMessenger.logError(msg); }); req.open("POST", "/v0/projects:import", true); req.send(body); @@ -2097,13 +2014,10 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { .then(() => { this.__removeFromStudyList(studyData.uuid); const msg = this.tr("Successfully restored"); - osparc.FlashMessenger.getInstance().logAs(msg, "INFO"); + osparc.FlashMessenger.logAs(msg, "INFO"); this._resourceFilter.evaluateTrashEmpty(); }) - .catch(err => { - console.error(err); - osparc.FlashMessenger.getInstance().logAs(err, "ERROR"); - }) + .catch(err => osparc.FlashMessenger.logError(err)) .finally(() => this.resetSelection()); }, @@ -2111,14 +2025,11 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { osparc.store.Store.getInstance().trashStudy(studyData.uuid) .then(() => { this.__removeFromStudyList(studyData.uuid); - const msg = this.tr("Successfully moved to Bin"); - osparc.FlashMessenger.getInstance().logAs(msg, "INFO"); + const msg = this.tr("Successfully deleted"); + osparc.FlashMessenger.logAs(msg, "INFO"); this._resourceFilter.setTrashEmpty(false); }) - .catch(err => { - console.error(err); - osparc.FlashMessenger.getInstance().logAs(err, "ERROR"); - }) + .catch(err => osparc.FlashMessenger.logError(err)) .finally(() => this.resetSelection()); }, @@ -2139,7 +2050,7 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { // remove me from collaborators const myGid = osparc.auth.Data.getInstance().getGroupId(); delete arCopy[myGid]; - return osparc.info.StudyUtils.patchStudyData(studyData, "accessRights", arCopy); + return osparc.store.Study.patchStudyData(studyData, "accessRights", arCopy); }, __doDeleteStudy: function(studyData) { @@ -2152,10 +2063,7 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { } operationPromise .then(() => this.__removeFromStudyList(studyData.uuid)) - .catch(err => { - console.error(err); - osparc.FlashMessenger.getInstance().logAs(err, "ERROR"); - }) + .catch(err => 
osparc.FlashMessenger.logError(err)) .finally(() => this.resetSelection()); }, @@ -2164,19 +2072,18 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { }, __createConfirmTrashWindow: function(studyNames) { - let msg = this.tr("Are you sure you want to move"); + let msg = this.tr("Are you sure you want to delete"); if (studyNames.length > 1) { const studiesText = osparc.product.Utils.getStudyAlias({plural: true}); - msg += ` ${studyNames.length} ${studiesText} ` + msg += ` ${studyNames.length} ${studiesText}?`; } else { - msg += ` '${studyNames[0]}' `; + msg += ` '${studyNames[0]}'?`; } - msg += this.tr("to the Bin?"); const trashDays = osparc.store.StaticInfo.getInstance().getTrashRetentionDays(); msg += "
<br><br>
" + (studyNames.length > 1 ? "They" : "It") + this.tr(` will be permanently deleted after ${trashDays} days.`); const confirmationWin = new osparc.ui.window.Confirmation(msg).set({ - caption: this.tr("Move to Bin"), - confirmText: this.tr("Move to Bin"), + caption: this.tr("Delete"), + confirmText: this.tr("Delete"), confirmAction: "warning", }); osparc.utils.Utils.setIdToWidget(confirmationWin.getConfirmButton(), "confirmDeleteStudyBtn"); @@ -2207,45 +2114,26 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { return confirmationWin; }, - __createConfirmEmptyTrashWindow: function() { - const msg = this.tr("Items in the bin will be permanently deleted"); - const confirmationWin = new osparc.ui.window.Confirmation(msg).set({ - caption: this.tr("Delete"), - confirmText: this.tr("Delete permanently"), - confirmAction: "delete" - }); - return confirmationWin; - }, - // TASKS // - __tasksReceived: function(tasks) { - tasks.forEach(taskData => this._taskDataReceived(taskData)); - }, - - _taskDataReceived: function(taskData) { - // a bit hacky - if (taskData["task_id"].includes("from_study") && !taskData["task_id"].includes("as_template")) { - const interval = 1000; - const pollTasks = osparc.data.PollTasks.getInstance(); - const task = pollTasks.addTask(taskData, interval); - if (task === null) { - return; - } - // ask backend for studyData? + __tasksToCards: function() { + const tasks = osparc.store.PollTasks.getInstance().getDuplicateStudyTasks(); + tasks.forEach(task => { const studyName = ""; this.__taskDuplicateReceived(task, studyName); - } + }); }, __taskDuplicateReceived: function(task, studyName) { const duplicateTaskUI = new osparc.task.Duplicate(studyName); duplicateTaskUI.setTask(task); - duplicateTaskUI.start(); - const duplicatingStudyCard = this.__createDuplicateCard(studyName); - duplicatingStudyCard.setTask(task); - duplicatingStudyCard.subscribeToFilterGroup("searchBarFilter"); - this._resourcesContainer.addNonResourceCard(duplicatingStudyCard); - this.__attachDuplicateEventHandler(task, duplicateTaskUI, duplicatingStudyCard); + + osparc.task.TasksContainer.getInstance().addTaskUI(duplicateTaskUI); + + const cardTitle = this.tr("Duplicating ") + studyName; + const duplicatingStudyCard = this._addTaskCard(task, cardTitle, osparc.task.Duplicate.ICON); + if (duplicatingStudyCard) { + this.__attachDuplicateEventHandler(task, duplicateTaskUI, duplicatingStudyCard); + } }, __attachDuplicateEventHandler: function(task, taskUI, duplicatingStudyCard) { @@ -2253,22 +2141,24 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { if (msg) { osparc.FlashMessenger.logAs(msg, msgLevel); } - taskUI.stop(); + osparc.store.PollTasks.getInstance().removeTask(task); + osparc.task.TasksContainer.getInstance().removeTaskUI(taskUI); this._resourcesContainer.removeNonResourceCard(duplicatingStudyCard); }; task.addListener("taskAborted", () => { const msg = this.tr("Duplication cancelled"); - finished(msg, "INFO"); + finished(msg, "WARNING"); }); task.addListener("resultReceived", e => { - finished(); + const msg = this.tr("Duplication completed"); + finished(msg, "INFO"); const duplicatedStudyData = e.getData(); this._updateStudyData(duplicatedStudyData); }); task.addListener("pollingError", e => { const err = e.getData(); - const msg = this.tr("Something went wrong Duplicating the study
") + err.message; + const msg = this.tr("Something went wrong while duplicating the study
") + err.message; finished(msg, "ERROR"); }); } diff --git a/services/static-webserver/client/source/class/osparc/dashboard/StudyBrowserHeader.js b/services/static-webserver/client/source/class/osparc/dashboard/StudyBrowserHeader.js index afe3d47bd95..3a6ba34f6b3 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/StudyBrowserHeader.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/StudyBrowserHeader.js @@ -45,7 +45,7 @@ qx.Class.define("osparc.dashboard.StudyBrowserHeader", { "locationChanged": "qx.event.type.Data", "workspaceUpdated": "qx.event.type.Data", "deleteWorkspaceRequested": "qx.event.type.Data", - "emptyTrashRequested": "qx.event.type.Event", + "trashEmptied": "qx.event.type.Event", }, properties: { @@ -193,13 +193,28 @@ qx.Class.define("osparc.dashboard.StudyBrowserHeader", { break; } case "empty-trash-button": { - control = new qx.ui.form.Button(this.tr("Empty Bin"), "@FontAwesome5Solid/trash/14").set({ + control = new osparc.ui.form.FetchButton(this.tr("Delete all"), "@FontAwesome5Solid/trash/14").set({ appearance: "danger-button", allowGrowY: false, alignY: "middle", - visibility: "excluded", // Not yet implemented }); - control.addListener("execute", () => this.fireEvent("emptyTrashRequested")); + control.addListener("execute", () => { + const win = this.__createConfirmEmptyTrashWindow(); + win.center(); + win.open(); + win.addListener("close", () => { + control.setFetching(true); + if (win.getConfirmed()) { + osparc.data.Resources.fetch("trash", "delete") + .then(() => { + this.fireEvent("trashEmptied") + }) + .finally(() => control.setFetching(false)); + } else { + control.setFetching(false) + } + }, this); + }); this._addAt(control, this.self().POS.EMPTY_TRASH_BUTTON); break; } @@ -208,14 +223,26 @@ qx.Class.define("osparc.dashboard.StudyBrowserHeader", { return control || this.base(arguments, id); }, - __titleTapped: function() { - const workspaceId = this.getCurrentWorkspaceId(); - const folderId = null; - this.setCurrentFolderId(folderId); - this.fireDataEvent("locationChanged", { - workspaceId, - folderId, + __createConfirmEmptyTrashWindow: function() { + const msg = this.tr("All items will be permanently deleted"); + const confirmationWin = new osparc.ui.window.Confirmation(msg).set({ + caption: this.tr("Delete"), + confirmText: this.tr("Delete permanently"), + confirmAction: "delete" }); + return confirmationWin; + }, + + __titleTapped: function() { + if (osparc.store.Store.getInstance().getStudyBrowserContext() === "studiesAndFolders") { + const workspaceId = this.getCurrentWorkspaceId(); + const folderId = null; + this.setCurrentFolderId(folderId); + this.fireDataEvent("locationChanged", { + workspaceId, + folderId, + }); + } }, __buildLayout: function() { @@ -276,10 +303,10 @@ qx.Class.define("osparc.dashboard.StudyBrowserHeader", { break; case "trash": { this.__setIcon("@FontAwesome5Solid/trash/20"); - title.setValue(this.tr("Bin")); + title.setValue(this.tr("Recently Deleted")); const trashDays = osparc.store.StaticInfo.getInstance().getTrashRetentionDays(); description.set({ - value: this.tr(`Items in the Bin will be permanently deleted after ${trashDays} days.`), + value: this.tr(`Items here will be permanently deleted after ${trashDays} days.`), visibility: "visible", }); break; diff --git a/services/static-webserver/client/source/class/osparc/dashboard/TemplateBrowser.js b/services/static-webserver/client/source/class/osparc/dashboard/TemplateBrowser.js index e9443c4ecbb..bcaebf17aa6 100644 --- 
a/services/static-webserver/client/source/class/osparc/dashboard/TemplateBrowser.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/TemplateBrowser.js @@ -23,16 +23,6 @@ qx.Class.define("osparc.dashboard.TemplateBrowser", { this.base(arguments); }, - properties: { - multiSelection: { - check: "Boolean", - init: false, - nullable: false, - event: "changeMultiSelection", - apply: "__applyMultiSelection" - } - }, - members: { __updateAllButton: null, @@ -85,6 +75,8 @@ qx.Class.define("osparc.dashboard.TemplateBrowser", { }, __reloadTemplates: function() { + this.__tasksToCards(); + osparc.data.Resources.getInstance().getAllPages("templates") .then(templates => this.__setResourcesToList(templates)) .catch(err => { @@ -131,118 +123,6 @@ qx.Class.define("osparc.dashboard.TemplateBrowser", { this.resetSelection(); }, - _createStudyFromTemplate: function(templateData) { - if (!this._checkLoggedIn()) { - return; - } - - const studyAlias = osparc.product.Utils.getStudyAlias({firstUpperCase: true}); - this._showLoadingPage(this.tr("Creating ") + (templateData.name || studyAlias)); - - if (osparc.desktop.credits.Utils.areWalletsEnabled()) { - const studyOptions = new osparc.study.StudyOptions(); - // they will be patched once the study is created - studyOptions.setPatchStudy(false); - studyOptions.setStudyData(templateData); - studyOptions.getChildControl("open-button").setLabel(this.tr("New")); - const win = osparc.study.StudyOptions.popUpInWindow(studyOptions); - win.moveItUp(); - const cancelStudyOptions = () => { - this._hideLoadingPage(); - win.close(); - } - win.addListener("cancel", () => cancelStudyOptions()); - studyOptions.addListener("cancel", () => cancelStudyOptions()); - studyOptions.addListener("startStudy", () => { - const newName = studyOptions.getChildControl("title-field").getValue(); - const walletSelection = studyOptions.getChildControl("wallet-selector").getSelection(); - const nodesPricingUnits = studyOptions.getChildControl("study-pricing-units").getNodePricingUnits(); - win.close(); - - this._showLoadingPage(this.tr("Creating ") + (newName || studyAlias)); - osparc.study.Utils.createStudyFromTemplate(templateData, this._loadingPage) - .then(newStudyData => { - const studyId = newStudyData["uuid"]; - const openCB = () => { - this._hideLoadingPage(); - }; - const cancelCB = () => { - this._hideLoadingPage(); - const params = { - url: { - studyId - } - }; - osparc.data.Resources.fetch("studies", "delete", params); - }; - - const promises = []; - // patch the name - if (newStudyData["name"] !== newName) { - promises.push(osparc.study.StudyOptions.updateName(newStudyData, newName)); - } - // patch the wallet - if (walletSelection.length && walletSelection[0]["walletId"]) { - const walletId = walletSelection[0]["walletId"]; - promises.push(osparc.study.StudyOptions.updateWallet(newStudyData["uuid"], walletId)); - } - // patch the pricing units - // the nodeIds are coming from the original template, they need to be mapped to the newStudy - const workbench = newStudyData["workbench"]; - const nodesIdsListed = []; - Object.keys(workbench).forEach(nodeId => { - const node = workbench[nodeId]; - if (osparc.study.StudyPricingUnits.includeInList(node)) { - nodesIdsListed.push(nodeId); - } - }); - nodesPricingUnits.forEach((nodePricingUnits, idx) => { - const selectedPricingUnitId = nodePricingUnits.getPricingUnits().getSelectedUnitId(); - if (selectedPricingUnitId) { - const nodeId = nodesIdsListed[idx]; - const pricingPlanId = nodePricingUnits.getPricingPlanId(); - 
promises.push(osparc.study.NodePricingUnits.patchPricingUnitSelection(studyId, nodeId, pricingPlanId, selectedPricingUnitId)); - } - }); - - Promise.all(promises) - .then(() => { - win.close(); - const showStudyOptions = false; - this._startStudyById(studyId, openCB, cancelCB, showStudyOptions); - }); - }) - .catch(err => { - this._hideLoadingPage(); - osparc.FlashMessenger.getInstance().logAs(err.message, "ERROR"); - console.error(err); - }); - }); - } else { - osparc.study.Utils.createStudyFromTemplate(templateData, this._loadingPage) - .then(newStudyData => { - const studyId = newStudyData["uuid"]; - const openCB = () => this._hideLoadingPage(); - const cancelCB = () => { - this._hideLoadingPage(); - const params = { - url: { - studyId - } - }; - osparc.data.Resources.fetch("studies", "delete", params); - }; - const isStudyCreation = true; - this._startStudyById(studyId, openCB, cancelCB, isStudyCreation); - }) - .catch(err => { - this._hideLoadingPage(); - osparc.FlashMessenger.getInstance().logAs(err.message, "ERROR"); - console.error(err); - }); - } - }, - // LAYOUT // _createLayout: function() { this._createSearchBar(); @@ -326,7 +206,7 @@ qx.Class.define("osparc.dashboard.TemplateBrowser", { const templatePromises = []; for (const nodeId in studyData["workbench"]) { const node = studyData["workbench"][nodeId]; - const latestCompatible = osparc.service.Utils.getLatestCompatible(node["key"], node["version"]); + const latestCompatible = osparc.store.Services.getLatestCompatible(node["key"], node["version"]); if (latestCompatible && (node["key"] !== latestCompatible["key"] || node["version"] !== latestCompatible["version"])) { const patchData = {}; if (node["key"] !== latestCompatible["key"]) { @@ -335,17 +215,13 @@ qx.Class.define("osparc.dashboard.TemplateBrowser", { if (node["version"] !== latestCompatible["version"]) { patchData["version"] = latestCompatible["version"]; } - templatePromises.push(osparc.info.StudyUtils.patchNodeData(uniqueTemplateData, nodeId, patchData)); + templatePromises.push(osparc.store.Study.patchNodeData(uniqueTemplateData, nodeId, patchData)); } } Promise.all(templatePromises) .then(() => this._updateTemplateData(uniqueTemplateData)) .catch(err => { - if ("message" in err) { - osparc.FlashMessenger.getInstance().logAs(err.message, "ERROR"); - } else { - osparc.FlashMessenger.getInstance().logAs(this.tr("Something went wrong"), "ERROR"); - } + osparc.FlashMessenger.logError(err); }); } }, @@ -390,7 +266,7 @@ qx.Class.define("osparc.dashboard.TemplateBrowser", { return null; } - const editButton = new qx.ui.menu.Button(this.tr("Edit")); + const editButton = new qx.ui.menu.Button(this.tr("Open")); editButton.addListener("execute", () => this.__editTemplate(templateData), this); return editButton; }, @@ -450,17 +326,14 @@ qx.Class.define("osparc.dashboard.TemplateBrowser", { const arCopy = osparc.utils.Utils.deepCloneObject(studyData["accessRights"]); // remove collaborator delete arCopy[myGid]; - operationPromise = osparc.info.StudyUtils.patchStudyData(studyData, "accessRights", arCopy); + operationPromise = osparc.store.Study.patchStudyData(studyData, "accessRights", arCopy); } else { // delete study operationPromise = osparc.store.Store.getInstance().deleteStudy(studyData.uuid); } operationPromise .then(() => this.__removeFromTemplateList(studyData.uuid)) - .catch(err => { - console.error(err); - osparc.FlashMessenger.getInstance().logAs(err, "ERROR"); - }); + .catch(err => osparc.FlashMessenger.logError(err)); }, __removeFromTemplateList: 
function(templateId) { @@ -473,18 +346,40 @@ qx.Class.define("osparc.dashboard.TemplateBrowser", { // MENU // // TASKS // + __tasksToCards: function() { + const tasks = osparc.store.PollTasks.getInstance().getPublishTemplateTasks(); + tasks.forEach(task => { + const studyName = ""; + this.taskToTemplateReceived(task, studyName); + }); + }, + + taskToTemplateReceived: function(task, studyName) { + const toTemplateTaskUI = new osparc.task.ToTemplate(studyName); + toTemplateTaskUI.setTask(task); + + osparc.task.TasksContainer.getInstance().addTaskUI(toTemplateTaskUI); + + const cardTitle = this.tr("Publishing ") + studyName; + const toTemplateCard = this._addTaskCard(task, cardTitle, osparc.task.ToTemplate.ICON); + if (toTemplateCard) { + this.__attachToTemplateEventHandler(task, toTemplateTaskUI, toTemplateCard); + } + }, + __attachToTemplateEventHandler: function(task, taskUI, toTemplateCard) { const finished = (msg, msgLevel) => { if (msg) { osparc.FlashMessenger.logAs(msg, msgLevel); } - taskUI.stop(); + osparc.store.PollTasks.getInstance().removeTask(task); + osparc.task.TasksContainer.getInstance().removeTaskUI(taskUI); this._resourcesContainer.removeNonResourceCard(toTemplateCard); }; task.addListener("taskAborted", () => { const msg = this.tr("Study to Template cancelled"); - finished(msg, "INFO"); + finished(msg, "WARNING"); }); task.addListener("updateReceived", e => { const updateData = e.getData(); @@ -499,53 +394,16 @@ qx.Class.define("osparc.dashboard.TemplateBrowser", { } }, this); task.addListener("resultReceived", e => { - finished(); + const msg = this.tr("Template created"); + finished(msg, "INFO"); this.reloadResources(); }); task.addListener("pollingError", e => { const err = e.getData(); - const msg = this.tr("Something went wrong Publishing the study
") + err.message; + const msg = this.tr("Something went wrong while publishing the study
") + err.message; finished(msg, "ERROR"); }); }, - - _taskDataReceived: function(taskData) { - // a bit hacky - if (taskData["task_id"].includes("from_study") && taskData["task_id"].includes("as_template")) { - const interval = 1000; - const pollTasks = osparc.data.PollTasks.getInstance(); - const task = pollTasks.addTask(taskData, interval); - if (task === null) { - return; - } - // ask backend for studyData? - const studyName = ""; - this.taskToTemplateReceived(task, studyName); - } - }, - - taskToTemplateReceived: function(task, studyName) { - const toTemplateTaskUI = new osparc.task.ToTemplate(studyName); - toTemplateTaskUI.setTask(task); - toTemplateTaskUI.start(); - const toTemplateCard = this.__createToTemplateCard(studyName); - toTemplateCard.setTask(task); - this.__attachToTemplateEventHandler(task, toTemplateTaskUI, toTemplateCard); - }, - - __createToTemplateCard: function(studyName) { - const isGrid = this._resourcesContainer.getMode() === "grid"; - const toTemplateCard = isGrid ? new osparc.dashboard.GridButtonPlaceholder() : new osparc.dashboard.ListButtonPlaceholder(); - toTemplateCard.buildLayout( - this.tr("Publishing ") + studyName, - osparc.task.ToTemplate.ICON + (isGrid ? "60" : "24"), - null, - true - ); - toTemplateCard.subscribeToFilterGroup("searchBarFilter"); - this._resourcesContainer.addNonResourceCard(toTemplateCard); - return toTemplateCard; - } // TASKS // } }); diff --git a/services/static-webserver/client/source/class/osparc/dashboard/WorkspaceButtonBase.js b/services/static-webserver/client/source/class/osparc/dashboard/WorkspaceButtonBase.js index a712666f7fd..80f08813217 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/WorkspaceButtonBase.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/WorkspaceButtonBase.js @@ -51,11 +51,11 @@ qx.Class.define("osparc.dashboard.WorkspaceButtonBase", { nullable: true }, - icon: { + thumbnail: { check: "String", init: null, nullable: true, - apply: "_applyIcon", + apply: "__applyThumbnail", }, resourceType: { @@ -184,7 +184,7 @@ qx.Class.define("osparc.dashboard.WorkspaceButtonBase", { flex: 1 }); break; - case "icon": { + case "thumbnail": { layout = this.getChildControl("body"); const maxWidth = this.self().ITEM_WIDTH; control = new osparc.ui.basic.Thumbnail(null, maxWidth, 124); @@ -269,9 +269,8 @@ qx.Class.define("osparc.dashboard.WorkspaceButtonBase", { return layout; }, - // overridden - _applyIcon: function(value) { - const image = this.getChildControl("icon").getChildControl("image"); + __applyThumbnail: function(value) { + const image = this.getChildControl("thumbnail").getChildControl("image"); if ( value.includes("@FontAwesome5Solid/") || value.includes("@MaterialIcons/") diff --git a/services/static-webserver/client/source/class/osparc/dashboard/WorkspaceButtonItem.js b/services/static-webserver/client/source/class/osparc/dashboard/WorkspaceButtonItem.js index e2a5ab98086..d645d5799bc 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/WorkspaceButtonItem.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/WorkspaceButtonItem.js @@ -161,7 +161,7 @@ qx.Class.define("osparc.dashboard.WorkspaceButtonItem", { workspace.bind("workspaceId", this, "workspaceId"); workspace.bind("name", this, "title"); workspace.bind("description", this, "description"); - workspace.bind("thumbnail", this, "icon", { + workspace.bind("thumbnail", this, "thumbnail", { converter: thumbnail => thumbnail ? 
thumbnail : osparc.store.Workspaces.iconPath(-1) }); workspace.bind("accessRights", this, "accessRights"); @@ -188,9 +188,10 @@ qx.Class.define("osparc.dashboard.WorkspaceButtonItem", { const menuButton = this.getChildControl("menu-button"); menuButton.setVisibility("visible"); - const menu = new qx.ui.menu.Menu(); - menu.setPosition("bottom-right"); - osparc.utils.Utils.prettifyMenu(menu); + const menu = new qx.ui.menu.Menu().set({ + appearance: "menu-wider", + position: "bottom-right", + }); const studyBrowserContext = osparc.store.Store.getInstance().getStudyBrowserContext(); if ( @@ -217,7 +218,7 @@ qx.Class.define("osparc.dashboard.WorkspaceButtonItem", { menu.addSeparator(); - const trashButton = new qx.ui.menu.Button(this.tr("Move to Bin"), "@FontAwesome5Solid/trash/12"); + const trashButton = new qx.ui.menu.Button(this.tr("Delete"), "@FontAwesome5Solid/trash/12"); trashButton.addListener("execute", () => this.__trashWorkspaceRequested(), this); menu.add(trashButton); } else if (studyBrowserContext === "trash") { @@ -263,7 +264,7 @@ qx.Class.define("osparc.dashboard.WorkspaceButtonItem", { const dateBy = this.getChildControl("date-by"); dateBy.set({ date: value, - toolTipText: this.tr("Moved to the bin"), + toolTipText: this.tr("Deleted"), }); } }, @@ -300,11 +301,11 @@ qx.Class.define("osparc.dashboard.WorkspaceButtonItem", { __trashWorkspaceRequested: function() { const trashDays = osparc.store.StaticInfo.getInstance().getTrashRetentionDays(); - let msg = this.tr("Are you sure you want to move the Workspace and all its content to the Bin?"); + let msg = this.tr("Are you sure you want to delete the Workspace and all its content?"); msg += "
<br><br>
" + this.tr("It will be permanently deleted after ") + trashDays + " days."; const confirmationWin = new osparc.ui.window.Confirmation(msg).set({ - caption: this.tr("Move to Bin"), - confirmText: this.tr("Move to Bin"), + caption: this.tr("Delete"), + confirmText: this.tr("Delete"), confirmAction: "delete" }); confirmationWin.center(); diff --git a/services/static-webserver/client/source/class/osparc/dashboard/WorkspaceButtonNew.js b/services/static-webserver/client/source/class/osparc/dashboard/WorkspaceButtonNew.js index aa8425858a8..8545d665811 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/WorkspaceButtonNew.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/WorkspaceButtonNew.js @@ -37,7 +37,7 @@ qx.Class.define("osparc.dashboard.WorkspaceButtonNew", { const title = this.getChildControl("title"); title.setValue(this.tr("New Workspace")); - this.setIcon(osparc.dashboard.CardBase.NEW_ICON); + this.setThumbnail(osparc.dashboard.CardBase.NEW_ICON); this.getChildControl("header").set({ opacity: 1 diff --git a/services/static-webserver/client/source/class/osparc/data/Converters.js b/services/static-webserver/client/source/class/osparc/data/Converters.js index c5cae8bd25c..d567b63e561 100644 --- a/services/static-webserver/client/source/class/osparc/data/Converters.js +++ b/services/static-webserver/client/source/class/osparc/data/Converters.js @@ -24,40 +24,6 @@ qx.Class.define("osparc.data.Converters", { type: "static", statics: { - __mergeFileTreeChildren: function(one, two) { - let newDir = true; - for (let i=0; i { - if (a["label"] > b["label"]) { - return 1; - } - if (a["label"] < b["label"]) { - return -1; - } - return 0; - }); - children.forEach(child => { - if ("children" in child) { - this.sortFiles(child["children"]); - } - }); - } - }, - sortModelByLabel: function(model) { model.getChildren().sort((a, b) => { if (a.getLabel() > b.getLabel()) { @@ -70,96 +36,44 @@ qx.Class.define("osparc.data.Converters", { }); }, - fromDSMToVirtualTreeModel: function(datasetId, files) { - let children = []; - for (let i=0; i 0) { + // simcore: studyId + nodeId + fileId + // datcore: datasetId + return splitted[0]; + } + return null; + }, + + displayPathToLabel: function(encodedDisplayPath, options) { + const parts = encodedDisplayPath.split("/"); + const decodedParts = parts.map(decodeURIComponent); + if (options.first) { + return decodedParts[0]; + } else if (options.last) { + return decodedParts[decodedParts.length-1]; + } else if ("pos" in options && options["pos"] < decodedParts.length) { + return decodedParts[options["pos"]]; + } + return decodedParts[0]; + }, } }); diff --git a/services/static-webserver/client/source/class/osparc/data/Job.js b/services/static-webserver/client/source/class/osparc/data/Job.js new file mode 100644 index 00000000000..fa8046d3a94 --- /dev/null +++ b/services/static-webserver/client/source/class/osparc/data/Job.js @@ -0,0 +1,84 @@ +/* ************************************************************************ + + osparc - the simcore frontend + + https://osparc.io + + Copyright: + 2025 IT'IS Foundation, https://itis.swiss + + License: + MIT: https://opensource.org/licenses/MIT + + Authors: + * Odei Maiz (odeimaiz) + +************************************************************************ */ + +qx.Class.define("osparc.data.Job", { + extend: qx.core.Object, + + construct: function(jobData) { + this.base(arguments); + + this.set({ + jobId: jobData["job_id"], + solver: jobData["solver"], + status: jobData["status"], + 
progress: jobData["progress"], + submittedAt: jobData["submitted_at"] ? new Date(jobData["submitted_at"]) : null, + startedAt: jobData["started_at"] ? new Date(jobData["started_at"]) : null, + instance: jobData["instance"], + }); + }, + + properties: { + jobId: { + check: "String", + nullable: false, + init: null, + }, + + solver: { + check: "String", + nullable: false, + init: null, + }, + + status: { + check: "String", + nullable: false, + init: null, + }, + + progress: { + check: "Number", + init: null, + nullable: true, + }, + + submittedAt: { + check: "Date", + init: null, + nullable: true, + }, + + startedAt: { + check: "Date", + init: null, + nullable: true, + }, + + instance: { + check: "String", + nullable: false, + init: null, + }, + + info: { + check: "Object", + nullable: false, + init: null, + }, + }, +}); diff --git a/services/static-webserver/client/source/class/osparc/data/Permissions.js b/services/static-webserver/client/source/class/osparc/data/Permissions.js index 0cc74fc9cc0..c5029458feb 100644 --- a/services/static-webserver/client/source/class/osparc/data/Permissions.js +++ b/services/static-webserver/client/source/class/osparc/data/Permissions.js @@ -262,7 +262,7 @@ qx.Class.define("osparc.data.Permissions", { if (["anonymous", "guest"].includes(this.getRole())) { msg = "Please register to use this functionality"; } - osparc.FlashMessenger.getInstance().logAs(msg, "ERROR"); + osparc.FlashMessenger.logError(msg); } return canDo; }, diff --git a/services/static-webserver/client/source/class/osparc/data/PollTask.js b/services/static-webserver/client/source/class/osparc/data/PollTask.js index bd0d948c864..ce9ee8dc707 100644 --- a/services/static-webserver/client/source/class/osparc/data/PollTask.js +++ b/services/static-webserver/client/source/class/osparc/data/PollTask.js @@ -92,9 +92,13 @@ qx.Class.define("osparc.data.PollTask", { statics: { extractPathname: function(href) { - // For the long running tasks, only the pathname is relevant to the frontend - const url = new URL(href); - return url.pathname; + try { + // For the long running tasks, only the pathname is relevant to the frontend + const url = new URL(href); + return url.pathname; + } catch (_) { + return href; + } } }, @@ -112,7 +116,7 @@ qx.Class.define("osparc.data.PollTask", { if (resp.status === 200) { return resp.json(); } - const errMsg = qx.locale.Manager.tr("Failed polling status"); + const errMsg = qx.locale.Manager.tr("Unsuccessful polling status"); const err = new Error(errMsg); this.fireDataEvent("pollingError", err); throw err; diff --git a/services/static-webserver/client/source/class/osparc/data/Resources.js b/services/static-webserver/client/source/class/osparc/data/Resources.js index 86bff1eb550..c738cd8c3b6 100644 --- a/services/static-webserver/client/source/class/osparc/data/Resources.js +++ b/services/static-webserver/client/source/class/osparc/data/Resources.js @@ -125,6 +125,10 @@ qx.Class.define("osparc.data.Resources", { method: "GET", url: statics.API + "/projects/{studyId}" }, + getServices: { + method: "GET", + url: statics.API + "/projects/{studyId}/nodes/-/services" + }, getActive: { useCache: false, method: "GET", @@ -458,8 +462,8 @@ qx.Class.define("osparc.data.Resources", { "trash": { endpoints: { delete: { - method: "DELETE", - url: statics.API + "/trash" + method: "POST", + url: statics.API + "/trash:empty" } } }, @@ -549,7 +553,6 @@ qx.Class.define("osparc.data.Resources", { */ "tasks": { useCache: false, - idField: "id", endpoints: { get: { method: "GET", @@ -574,24 
+577,20 @@ qx.Class.define("osparc.data.Resources", { }, /* - * SERVICES V2 (web-api >=0.42.0) + * SERVICES V2 */ "servicesV2": { useCache: false, // handled in osparc.store.Services idField: ["key", "version"], endpoints: { - get: { + getOne: { method: "GET", - url: statics.API + "/catalog/services/-/latest" + url: statics.API + "/catalog/services/{key}/{version}" }, getPage: { method: "GET", url: statics.API + "/catalog/services/-/latest?offset={offset}&limit={limit}" }, - getOne: { - method: "GET", - url: statics.API + "/catalog/services/{key}/{version}" - }, patch: { method: "PATCH", url: statics.API + "/catalog/services/{key}/{version}" @@ -981,7 +980,11 @@ qx.Class.define("osparc.data.Resources", { updateEmailTemplate: { method: "PUT", url: statics.API + "/products/{productName}/templates/{templateId}" - } + }, + getUiConfig: { + method: "GET", + url: statics.API + "/products/current/ui" + }, } }, "invitations": { @@ -1172,46 +1175,54 @@ qx.Class.define("osparc.data.Resources", { "storageLocations": { useCache: true, endpoints: { - get: { + getLocations: { method: "GET", url: statics.API + "/storage/locations" } } }, /* - * STORAGE DATASETS + * STORAGE FILES */ - "storageDatasets": { + "storageFiles": { useCache: false, endpoints: { - getByLocation: { - method: "GET", - url: statics.API + "/storage/locations/{locationId}/datasets" + copy: { + method: "PUT", + url: statics.API + "/storage/locations/{toLoc}/files/{fileName}?extra_location={fromLoc}&extra_source={fileUuid}" + }, + delete: { + method: "DELETE", + url: statics.API + "/storage/locations/{locationId}/files/{fileUuid}" } } }, /* - * STORAGE FILES + * STORAGE PATHS */ - "storageFiles": { + "storagePaths": { useCache: false, endpoints: { - getByLocationAndDataset: { + getDatasets: { method: "GET", - url: statics.API + "/storage/locations/{locationId}/datasets/{datasetId}/metadata" + url: statics.API + "/storage/locations/{locationId}/paths?size=1000" }, - getByNode: { + getDatasetsPage: { method: "GET", - url: statics.API + "/storage/locations/0/files/metadata?uuid_filter={nodeId}" + url: statics.API + "/storage/locations/{locationId}/paths?cursor={cursor}&size=1000" }, - put: { - method: "PUT", - url: statics.API + "/storage/locations/{toLoc}/files/{fileName}?extra_location={fromLoc}&extra_source={fileUuid}" + getPaths: { + method: "GET", + url: statics.API + "/storage/locations/{locationId}/paths?file_filter={path}&size=1000" + }, + getPathsPage: { + method: "GET", + url: statics.API + "/storage/locations/{locationId}/paths?file_filter={path}&cursor={cursor}&size=1000" + }, + requestSize: { + method: "POST", + url: statics.API + "/storage/locations/0/paths/{pathId}:size" }, - delete: { - method: "DELETE", - url: statics.API + "/storage/locations/{locationId}/files/{fileUuid}" - } } }, /* @@ -1230,6 +1241,26 @@ qx.Class.define("osparc.data.Resources", { } } }, + /* + * STORAGE ASYNC + */ + "storageAsyncJobs": { + useCache: false, + endpoints: { + jobStatus: { + method: "GET", + url: statics.API + "/storage/async-jobs/{jobId}/status" + }, + jobResult: { + method: "GET", + url: statics.API + "/storage/async-jobs/{jobId}/result" + }, + abortJob: { + method: "POST", + url: statics.API + "/storage/async-jobs/{jobId}/abort" + }, + } + }, /* * ACTIVITY */ @@ -1460,13 +1491,16 @@ qx.Class.define("osparc.data.Resources", { if ("status" in err && err.status === 401) { // Unauthorized again, the cookie might have expired. // We can assume that all calls after this will respond with 401, so bring the user ot the login page. 
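// ---------------------------------------------------------------------------
// Editor's sketch, not part of the patch: one way a client might walk the new
// cursor-based "storagePaths" endpoints defined above. The helper name and the
// "items"/"next_page" response fields are assumptions made for illustration;
// only the resource/endpoint names and the
// osparc.data.Resources.fetch(resource, endpoint, params) call pattern come
// from this diff.
async function fetchAllPaths(locationId, path) {
  const allItems = [];
  const params = { url: { locationId, path } };
  // the first page is requested without a cursor
  let resp = await osparc.data.Resources.fetch("storagePaths", "getPaths", params);
  allItems.push(...(resp["items"] || []));
  // keep following the cursor while the backend returns one (assumed field name)
  while (resp["next_page"]) {
    params.url.cursor = resp["next_page"];
    resp = await osparc.data.Resources.fetch("storagePaths", "getPathsPage", params);
    allItems.push(...(resp["items"] || []));
  }
  return allItems;
}
// ---------------------------------------------------------------------------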
- qx.core.Init.getApplication().logout(qx.locale.Manager.tr("You were logged out. Your cookie might have expired.")); + qx.core.Init.getApplication().logout(qx.locale.Manager.tr("You have been logged out. Your cookie might have expired.")); } }); } if ([404, 503].includes(status)) { - message += "
<br>Please try again later and/or contact support"; + // NOTE: a temporary solution to avoid duplicate information + if (!message.includes("contact") && !message.includes("try")) { + message += "<br>
Please try again later and/or contact support"; + } } const err = Error(message ? message : `Error while trying to fetch ${endpoint} ${resource}`); if (status) { diff --git a/services/static-webserver/client/source/class/osparc/data/model/IframeHandler.js b/services/static-webserver/client/source/class/osparc/data/model/IframeHandler.js index 2ca74c47274..850672e9657 100644 --- a/services/static-webserver/client/source/class/osparc/data/model/IframeHandler.js +++ b/services/static-webserver/client/source/class/osparc/data/model/IframeHandler.js @@ -94,18 +94,27 @@ qx.Class.define("osparc.data.model.IframeHandler", { osparc.utils.Utils.setIdToWidget(iframe.getIframe(), "iframe_"+this.getNode().getNodeId()); if (osparc.product.Utils.isProduct("s4llite")) { iframe.setShowToolbar(false); + } else { + this.getStudy().getUi().bind("mode", iframe, "showToolbar", { + converter: mode => mode !== "standalone" + }); } - iframe.addListener("restart", () => this.__restartIFrame(), this); + iframe.addListener("restart", () => this.restartIFrame(), this); iframe.getDiskUsageIndicator().setCurrentNode(this.getNode()) this.setIFrame(iframe); }, __initLoadingPage: function() { - const showZoomMaximizeButton = !osparc.product.Utils.isProduct("s4llite"); - const loadingPage = new osparc.ui.message.Loading(showZoomMaximizeButton); - loadingPage.set({ + const loadingPage = new osparc.ui.message.Loading().set({ header: this.__getLoadingPageHeader() }); + if (osparc.product.Utils.isProduct("s4llite")) { + loadingPage.setShowToolbar(false); + } else { + this.getStudy().getUi().bind("mode", loadingPage, "showToolbar", { + converter: mode => mode !== "standalone" + }); + } const node = this.getNode(); const thumbnail = node.getMetaData()["thumbnail"]; @@ -115,7 +124,9 @@ qx.Class.define("osparc.data.model.IframeHandler", { node.addListener("changeLabel", () => loadingPage.setHeader(this.__getLoadingPageHeader()), this); const nodeStatus = node.getStatus(); - const sequenceWidget = nodeStatus.getProgressSequence().getWidgetForLoadingPage(); + const sequenceWidget = nodeStatus.getProgressSequence().getWidgetForLoadingPage().set({ + width: 400 + }); nodeStatus.bind("interactive", sequenceWidget, "visibility", { converter: state => ["pending", "pulling", "starting", "connecting"].includes(state) ? 
"visible" : "excluded" }); @@ -159,7 +170,7 @@ qx.Class.define("osparc.data.model.IframeHandler", { if ("status" in err && err.status === 406) { errorMsg = node.getKey() + ":" + node.getVersion() + "is retired"; node.getStatus().setInteractive("retired"); - osparc.FlashMessenger.getInstance().logAs(node.getLabel() + this.tr(" is retired"), "ERROR"); + osparc.FlashMessenger.logAs(node.getLabel() + this.tr(" is retired"), "ERROR"); } const errorMsgData = { nodeId: node.getNodeId(), @@ -181,7 +192,7 @@ qx.Class.define("osparc.data.model.IframeHandler", { this.__unresponsiveRetries--; } else { node.getStatus().setInteractive("failed"); - osparc.FlashMessenger.getInstance().logAs(this.tr("There was an error starting") + " " + node.getLabel(), "ERROR"); + osparc.FlashMessenger.logAs(this.tr("There was an issue starting") + " " + node.getLabel(), "ERROR"); } }); }, @@ -352,7 +363,7 @@ qx.Class.define("osparc.data.model.IframeHandler", { node.fireDataEvent("showInLogger", msgData); // will switch to iframe's content - this.__restartIFrame(); + this.restartIFrame(); if (!node.isDynamicV2()) { node.callRetrieveInputs(); } @@ -374,7 +385,7 @@ qx.Class.define("osparc.data.model.IframeHandler", { } }, - __restartIFrame: function() { + restartIFrame: function() { const node = this.getNode(); if (node.getServiceUrl() !== null) { // restart button pushed diff --git a/services/static-webserver/client/source/class/osparc/data/model/LicensedItem.js b/services/static-webserver/client/source/class/osparc/data/model/LicensedItem.js new file mode 100644 index 00000000000..b184523a35d --- /dev/null +++ b/services/static-webserver/client/source/class/osparc/data/model/LicensedItem.js @@ -0,0 +1,185 @@ +/* ************************************************************************ + + osparc - the simcore frontend + + https://osparc.io + + Copyright: + 2025 IT'IS Foundation, https://itis.swiss + + License: + MIT: https://opensource.org/licenses/MIT + + Authors: + * Odei Maiz (odeimaiz) + +************************************************************************ */ + +qx.Class.define("osparc.data.model.LicensedItem", { + extend: qx.core.Object, + + /** + * @param licensedItemData {Object} Object containing the serialized LicensedItem Data + */ + construct: function(licensedItemData) { + this.base(arguments); + + let thumbnail = ""; + let date = null; + let licensedResources = []; + if (licensedItemData["licensedResources"]) { + if (licensedItemData["licensedResources"].length) { + const firstItem = licensedItemData["licensedResources"][0]["source"]; + if (firstItem["thumbnail"]) { + thumbnail = firstItem["thumbnail"]; + } + if (firstItem["features"] && firstItem["features"]["date"]) { + date = firstItem["features"]["date"]; + } + } + licensedItemData["licensedResources"].forEach(licensedRsrc => { + const licensedItemResource = new osparc.data.model.LicensedItemResource(licensedRsrc["source"]); + if (licensedItemData["termsOfUseUrl"]) { + licensedItemResource.set({ + termsOfUseUrl: licensedItemData["termsOfUseUrl"], + }) + } + licensedResources.push(licensedItemResource); + }); + } + let categoryIcon = "@FontAwesome5Solid/shopping-bag/20"; + if (licensedItemData.categoryIcon) { + categoryIcon = licensedItemData.categoryIcon; + } else if (qx.util.ResourceManager.getInstance().has(`osparc/market/${licensedItemData.categoryId}.svg`)) { + categoryIcon = `osparc/market/${licensedItemData.categoryId}.svg`; + } + + this.set({ + licensedItemId: licensedItemData.licensedItemId, + categoryId: licensedItemData.categoryId, + 
categoryDisplay: licensedItemData.categoryDisplay, + categoryIcon: categoryIcon, + pricingPlanId: licensedItemData.pricingPlanId, + key: licensedItemData.key, + version: licensedItemData.version, + thumbnail: thumbnail, + displayName: licensedItemData.displayName, + date: new Date(date), + licensedResources: licensedResources, + seats: licensedItemData.seats || [], + }); + }, + + properties: { + licensedItemId: { + check: "String", + nullable: false, + init: null, + event: "changeLicensedItemId", + }, + + categoryId: { + check: "String", + nullable: true, + init: null, + event: "changeCategoryId", + }, + + categoryDisplay: { + check: "String", + nullable: true, + init: null, + event: "changeCategoryDisplay", + }, + + categoryIcon: { + check: "String", + nullable: true, + init: null, + event: "changeCategoryIcon", + }, + + pricingPlanId: { + check: "Number", + nullable: false, + init: null, + event: "changePricingPlanId", + }, + + key: { + check: "String", + nullable: false, + init: null, + event: "changeKey", + }, + + version: { + check: "String", + nullable: false, + init: null, + event: "changeVersion", + }, + + thumbnail: { + check: "String", + nullable: true, + init: null, + event: "changeThumbnail", + }, + + displayName: { + check: "String", + nullable: false, + init: null, + event: "changeDisplayName", + }, + + date: { + check: "Date", + nullable: false, + init: null, + event: "changeDate", + }, + + licensedResources: { + check: "Array", + nullable: false, + init: [], + event: "changeLicensedResources", + }, + + seats: { + check: "Array", + nullable: false, + init: [], + event: "changeSeats", + }, + }, + + statics: { + addSeatsFromPurchases: function(licensedItems, purchases) { + // reset seats + Object.values(licensedItems).forEach(licensedItem => licensedItem.setSeats([])); + // populate seats + purchases.forEach(purchase => { + const { + key, + version, + } = purchase; + Object.values(licensedItems).forEach(licensedItem => { + if (licensedItem.getKey() === key && licensedItem.getVersion() <= version) { + licensedItem.getSeats().push({ + licensedItemId: purchase["licensedItemId"], + licensedItemPurchaseId: purchase["licensedItemPurchaseId"], + numOfSeats: purchase["numOfSeats"], + expireAt: new Date(purchase["expireAt"]), + }); + } + }); + }) + }, + }, + + members: { + } +}); diff --git a/services/static-webserver/client/source/class/osparc/data/model/LicensedItemResource.js b/services/static-webserver/client/source/class/osparc/data/model/LicensedItemResource.js new file mode 100644 index 00000000000..fa1a141677b --- /dev/null +++ b/services/static-webserver/client/source/class/osparc/data/model/LicensedItemResource.js @@ -0,0 +1,159 @@ +/* ************************************************************************ + + osparc - the simcore frontend + + https://osparc.io + + Copyright: + 2025 IT'IS Foundation, https://itis.swiss + + License: + MIT: https://opensource.org/licenses/MIT + + Authors: + * Odei Maiz (odeimaiz) + +************************************************************************ */ + +qx.Class.define("osparc.data.model.LicensedItemResource", { + extend: qx.core.Object, + + /** + * @param licensedItemResourceData {Object} Object containing the serialized LicensedItem Data + */ + construct: function(licensedItemResourceData) { + this.base(arguments); + + let description = licensedItemResourceData["description"] || ""; + let title = ""; + let subtitle = null; + description = description.replace(/SPEAG/g, " "); // remove SPEAG substring + const delimiter = " - "; + let 
titleAndSubtitle = description.split(delimiter); + if (titleAndSubtitle.length > 0) { + title = titleAndSubtitle[0]; + titleAndSubtitle.shift(); + } + if (titleAndSubtitle.length > 0) { + subtitle = titleAndSubtitle.join(delimiter); + } + + const manufacturerData = {}; + if (licensedItemResourceData["thumbnail"]) { + if (licensedItemResourceData["thumbnail"].includes("itis.swiss")) { + manufacturerData["label"] = "IT'IS Foundation"; + manufacturerData["link"] = "https://itis.swiss/virtual-population/"; + manufacturerData["icon"] = "https://media.licdn.com/dms/image/v2/C4D0BAQE_FGa66IyvrQ/company-logo_200_200/company-logo_200_200/0/1631341490431?e=2147483647&v=beta&t=7f_IK-ArGjPrz-1xuWolAT4S2NdaVH-e_qa8hsKRaAc"; + } else if (licensedItemResourceData["thumbnail"].includes("speag.swiss")) { + manufacturerData["label"] = "Speag"; + manufacturerData["link"] = "https://speag.swiss/products/em-phantoms/overview-2/"; + manufacturerData["icon"] = "https://media.licdn.com/dms/image/v2/D4E0BAQG2CYG28KAKbA/company-logo_200_200/company-logo_200_200/0/1700045977122/schmid__partner_engineering_ag_logo?e=2147483647&v=beta&t=6CZb1jjg5TnnzQWkrZBS9R3ebRKesdflg-_xYi4dwD8"; + } + } + + this.set({ + modelId: licensedItemResourceData.id, + description: description, + title: title, + subtitle: subtitle, + thumbnail: licensedItemResourceData.thumbnail || "", + features: licensedItemResourceData.features || {}, + doi: licensedItemResourceData.doi || null, + termsOfUseUrl: licensedItemResourceData.termsOfUseUrl || null, + manufacturerLabel: manufacturerData.label || null, + manufacturerLink: manufacturerData.link || null, + manufacturerIcon: manufacturerData.icon || null, + }); + }, + + properties: { + modelId: { + check: "Number", + nullable: false, + init: null, + event: "changeModelId", + }, + + description: { + check: "String", + nullable: false, + init: null, + event: "changeDescription", + }, + + title: { + check: "String", + nullable: false, + init: null, + event: "changeTitle", + }, + + subtitle: { + check: "String", + nullable: true, + init: null, + event: "changeSubtitle", + }, + + thumbnail: { + check: "String", + nullable: false, + init: null, + event: "changeThumbnail", + }, + + features: { + check: "Object", + nullable: false, + init: null, + event: "changeFeatures", + }, + + doi: { + check: "String", + nullable: true, + init: null, + event: "changeDoi", + }, + + termsOfUseUrl: { + check: "String", + nullable: true, + init: null, + event: "changeTermsOfUseUrl", + }, + + manufacturerLabel: { + check: "String", + nullable: true, + init: null, + event: "changeManufacturerLabel", + }, + + manufacturerLink: { + check: "String", + nullable: true, + init: null, + event: "changeManufacturerLink", + }, + + manufacturerIcon: { + check: "String", + nullable: true, + init: null, + event: "changeManufacturerIcon", + }, + }, + + statics: { + longName: function(licensedResource) { + const name = licensedResource.getFeatures()["name"] || licensedResource.getSubtitle(); + const version = licensedResource.getFeatures()["version"] || ""; + const functionality = licensedResource.getFeatures()["functionality"] || "Static"; + return `${name} ${version}, ${functionality}`; + }, + }, + + members: { + } +}); diff --git a/services/static-webserver/client/source/class/osparc/data/model/Node.js b/services/static-webserver/client/source/class/osparc/data/model/Node.js index 25e6c86e2b6..048f69ebfc0 100644 --- a/services/static-webserver/client/source/class/osparc/data/model/Node.js +++ 
b/services/static-webserver/client/source/class/osparc/data/model/Node.js @@ -480,13 +480,15 @@ qx.Class.define("osparc.data.model.Node", { } } - this.__initLogger(); - - this.initIframeHandler(); - if (this.isParameter()) { this.__initParameter(); } + + if (osparc.store.Store.getInstance().getCurrentStudy()) { + // do not initialize the logger and iframe if the study isn't open + this.__initLogger(); + this.initIframeHandler(); + } }, populateNodeUIData: function(nodeUIData) { @@ -528,7 +530,7 @@ qx.Class.define("osparc.data.model.Node", { }, initIframeHandler: function() { - if (this.isDynamic()) { + if (this.isDynamic() && this.__iframeHandler === null) { this.__iframeHandler = new osparc.data.model.IframeHandler(this.getStudy(), this); } }, @@ -549,7 +551,9 @@ qx.Class.define("osparc.data.model.Node", { return this.getIframeHandler() ? this.getIframeHandler().getLoadingPage() : null; }, - __applyPropsForm: function() { + __applyPropsForm: function(propsForm) { + osparc.utils.Utils.setIdToWidget(propsForm, "settingsForm_" + this.getNodeId()); + const checkIsPipelineRunning = () => { const isPipelineRunning = this.getStudy().isPipelineRunning(); this.getPropsForm().setEnabled(!isPipelineRunning); @@ -928,7 +932,7 @@ qx.Class.define("osparc.data.model.Node", { .then(() => this.checkState()) .catch(err => { if ("status" in err && (err.status === 409 || err.status === 402)) { - osparc.FlashMessenger.getInstance().logAs(err.message, "WARNING"); + osparc.FlashMessenger.logAs(err.message, "WARNING"); } else { console.error(err); } @@ -1061,7 +1065,7 @@ qx.Class.define("osparc.data.model.Node", { console.error(failure, error); const errorMsgData = { nodeId: this.getNodeId(), - msg: "Failed retrieving inputs", + msg: "Unsuccessful input retrieval", level: "ERROR" }; this.fireDataEvent("showInLogger", errorMsgData); @@ -1203,7 +1207,7 @@ qx.Class.define("osparc.data.model.Node", { if (!["int"].includes(type)) { return; } - const newMetadata = osparc.service.Utils.getParameterMetadata("integer"); + const newMetadata = osparc.store.Services.getParameterMetadata("integer"); if (newMetadata) { const value = this.__getInputData()["linspace_start"]; const label = this.getLabel(); @@ -1220,7 +1224,7 @@ qx.Class.define("osparc.data.model.Node", { if (!["int"].includes(type)) { return; } - const metadata = osparc.service.Utils.getLatest("simcore/services/frontend/data-iterator/int-range") + const metadata = osparc.store.Services.getLatest("simcore/services/frontend/data-iterator/int-range") if (metadata) { const value = this.__getOutputData("out_1"); const label = this.getLabel(); diff --git a/services/static-webserver/client/source/class/osparc/data/model/NodeProgressSequence.js b/services/static-webserver/client/source/class/osparc/data/model/NodeProgressSequence.js index eec18e05f5b..bb67236294e 100644 --- a/services/static-webserver/client/source/class/osparc/data/model/NodeProgressSequence.js +++ b/services/static-webserver/client/source/class/osparc/data/model/NodeProgressSequence.js @@ -123,7 +123,7 @@ qx.Class.define("osparc.data.model.NodeProgressSequence", { createDisclaimerText: function() { const disclaimerText = new qx.ui.basic.Atom().set({ - label: qx.locale.Manager.tr("Please be patient, this process can take a few minutes ..."), + label: qx.locale.Manager.tr("Please wait, this process may take a few minutes ..."), padding: [20, 10], gap: 15, icon: "@FontAwesome5Solid/exclamation-triangle/16", @@ -186,8 +186,11 @@ qx.Class.define("osparc.data.model.NodeProgressSequence", { getProgress: 
function(report) { if (report.unit) { + const attempt = ("attempt" in report && report["attempt"] > 1) ? `(attempt ${report["attempt"]}) ` : ""; + const currentValue = osparc.utils.Utils.bytesToSize(report["actual_value"], 1, false); + const totalValue = osparc.utils.Utils.bytesToSize(report["total"], 1, false) return { - progressLabel: `${osparc.utils.Utils.bytesToSize(report["actual_value"], 1, false)} / ${osparc.utils.Utils.bytesToSize(report["total"], 1, false)}`, + progressLabel: `${attempt}${currentValue} / ${totalValue}`, value: report["actual_value"] / report["total"] } } diff --git a/services/static-webserver/client/source/class/osparc/data/model/Study.js b/services/static-webserver/client/source/class/osparc/data/model/Study.js index 6a511f726b7..8f63b1bc79e 100644 --- a/services/static-webserver/client/source/class/osparc/data/model/Study.js +++ b/services/static-webserver/client/source/class/osparc/data/model/Study.js @@ -301,20 +301,6 @@ qx.Class.define("osparc.data.model.Study", { return false; }, - hasSlideshow: function(studyData) { - if ("ui" in studyData && "slideshow" in studyData["ui"] && Object.keys(studyData["ui"]["slideshow"]).length) { - return true; - } - return false; - }, - - getUiMode: function(studyData) { - if ("ui" in studyData && "mode" in studyData["ui"]) { - return studyData["ui"]["mode"]; - } - return null; - }, - getOutputValue: function(studyData, nodeId, portId) { if ("workbench" in studyData && nodeId in studyData["workbench"] && @@ -354,17 +340,6 @@ qx.Class.define("osparc.data.model.Study", { "RETRY" ].includes(state); }, - - __isAnyLinkedNodeMissing: function(studyData) { - const existingNodeIds = Object.keys(studyData["workbench"]); - const linkedNodeIds = osparc.data.model.Workbench.getLinkedNodeIds(studyData["workbench"]); - const allExist = linkedNodeIds.every(linkedNodeId => existingNodeIds.includes(linkedNodeId)); - return !allExist; - }, - - isCorrupt: function(studyData) { - return this.__isAnyLinkedNodeMissing(studyData); - }, }, members: { @@ -512,7 +487,7 @@ qx.Class.define("osparc.data.model.Study", { // Do not listen to output related backend updates if the node is a frontend node. // The frontend controls its output values, progress and states. // If a File Picker is uploading a file, the backend could override the current state with some older state. 
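// ---------------------------------------------------------------------------
// Editor's sketch, not part of the patch: how the getNonFrontendNodes() and
// isOnlyNodeDynamic() helpers introduced just below could pair with the new
// "standalone" ui mode (see the showToolbar converters in IframeHandler
// earlier in this patch, which hide the toolbar when the mode is
// "standalone"). The trigger condition is hypothetical; only the helper
// names, the mode value and the Store/StudyUI accessors appear in this diff.
const study = osparc.store.Store.getInstance().getCurrentStudy();
if (study && study.isOnlyNodeDynamic()) {
  // a study whose only computational/dynamic node is a dynamic service could
  // be shown full-screen, without the workbench chrome
  study.getUi().setMode("standalone");
}
// ---------------------------------------------------------------------------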
- if (node && nodeData && !osparc.data.model.Node.isFrontend(node)) { + if (node && nodeData && !osparc.data.model.Node.isFrontend(node.getMetaData())) { node.setOutputData(nodeData.outputs); if ("progress" in nodeData) { const progress = Number.parseInt(nodeData["progress"]); @@ -637,6 +612,19 @@ qx.Class.define("osparc.data.model.Study", { return parameters; }, + getNonFrontendNodes: function() { + const nodes = this.getWorkbench().getNodes(); + return Object.values(nodes).filter(node => node.isComputational() || node.isDynamic()); + }, + + isOnlyNodeDynamic: function() { + const validNodes = this.getNonFrontendNodes(); + if (validNodes.length === 1) { + return validNodes[0].isDynamic(); + } + return null; + }, + hasSlideshow: function() { return !this.getUi().getSlideshow().isEmpty(); }, diff --git a/services/static-webserver/client/source/class/osparc/data/model/StudyUI.js b/services/static-webserver/client/source/class/osparc/data/model/StudyUI.js index d6451f158e3..fcc2f7bd9f8 100644 --- a/services/static-webserver/client/source/class/osparc/data/model/StudyUI.js +++ b/services/static-webserver/client/source/class/osparc/data/model/StudyUI.js @@ -63,7 +63,7 @@ qx.Class.define("osparc.data.model.StudyUI", { }, mode: { - check: ["workbench", "guided", "app"], + check: ["workbench", "guided", "app", "standalone"], // "guided" is no longer used init: "workbench", nullable: true, event: "changeMode", diff --git a/services/static-webserver/client/source/class/osparc/data/model/User.js b/services/static-webserver/client/source/class/osparc/data/model/User.js index 7294987345c..47d665f847d 100644 --- a/services/static-webserver/client/source/class/osparc/data/model/User.js +++ b/services/static-webserver/client/source/class/osparc/data/model/User.js @@ -30,10 +30,20 @@ qx.Class.define("osparc.data.model.User", { const userId = ("id" in userData) ? parseInt(userData["id"]) : parseInt(userData["userId"]); const groupId = ("gid" in userData) ? parseInt(userData["gid"]) : parseInt(userData["groupId"]); - const username = userData["userName"]; + const username = userData["userName"] || "-"; const email = ("login" in userData) ? userData["login"] : userData["email"]; - const firstName = ("first_name" in userData) ? userData["first_name"] : userData["firstName"]; - const lastName = ("last_name" in userData) ? 
userData["last_name"] : userData["lastName"]; + let firstName = ""; + if (userData["first_name"]) { + firstName = userData["first_name"]; + } else if (userData["firstName"]) { + firstName = userData["firstName"]; + } + let lastName = ""; + if (userData["last_name"]) { + lastName = userData["last_name"]; + } else if (userData["lastName"]) { + lastName = userData["lastName"]; + } let description = [firstName, lastName].join(" ").trim(); // the null values will be replaced by empty strings if (email) { if (description) { @@ -50,7 +60,7 @@ qx.Class.define("osparc.data.model.User", { lastName, email, thumbnail, - label: username, + label: userData["userName"] || description, description, }); }, diff --git a/services/static-webserver/client/source/class/osparc/data/model/Workbench.js b/services/static-webserver/client/source/class/osparc/data/model/Workbench.js index d442cb03b63..55c64406cd4 100644 --- a/services/static-webserver/client/source/class/osparc/data/model/Workbench.js +++ b/services/static-webserver/client/source/class/osparc/data/model/Workbench.js @@ -283,11 +283,12 @@ qx.Class.define("osparc.data.model.Workbench", { createNode: async function(key, version) { if (!osparc.data.Permissions.getInstance().canDo("study.node.create", true)) { - osparc.FlashMessenger.getInstance().logAs(qx.locale.Manager.tr("You are not allowed to add nodes"), "ERROR"); + const msg = qx.locale.Manager.tr("You are not allowed to add nodes"); + osparc.FlashMessenger.logError(msg); return null; } if (this.getStudy().isPipelineRunning()) { - osparc.FlashMessenger.getInstance().logAs(this.self().CANT_ADD_NODE, "ERROR"); + osparc.FlashMessenger.logError(this.self().CANT_ADD_NODE); return null; } @@ -330,7 +331,7 @@ qx.Class.define("osparc.data.model.Workbench", { level: "ERROR" }; this.fireDataEvent("showInLogger", errorMsgData); - osparc.FlashMessenger.getInstance().logAs(errorMsg, "ERROR"); + osparc.FlashMessenger.logError(errorMsg); return null; } }, @@ -426,7 +427,7 @@ qx.Class.define("osparc.data.model.Workbench", { }, __filePickerNodeRequested: async function(nodeId, portId, file) { - const filePickerMetadata = osparc.service.Utils.getFilePicker(); + const filePickerMetadata = osparc.store.Services.getFilePicker(); const filePicker = await this.createNode(filePickerMetadata["key"], filePickerMetadata["version"]); if (filePicker === null) { return; @@ -459,7 +460,7 @@ qx.Class.define("osparc.data.model.Workbench", { } else { this.removeNode(filePickerId); const msg = qx.locale.Manager.tr("File couldn't be assigned"); - osparc.FlashMessenger.getInstance().logAs(msg, "ERROR"); + osparc.FlashMessenger.logError(msg); } }); }, @@ -469,7 +470,7 @@ qx.Class.define("osparc.data.model.Workbench", { // create a new ParameterNode const type = osparc.utils.Ports.getPortType(requesterNode.getMetaData()["inputs"], portId); - const parameterMetadata = osparc.service.Utils.getParameterMetadata(type); + const parameterMetadata = osparc.store.Services.getParameterMetadata(type); if (parameterMetadata) { const parameterNode = await this.createNode(parameterMetadata["key"], parameterMetadata["version"]); if (parameterNode === null) { @@ -487,7 +488,7 @@ qx.Class.define("osparc.data.model.Workbench", { if (requesterNode.getPropsForm().addPortLink(portId, pmId, "out_1") !== true) { this.removeNode(pmId); const msg = qx.locale.Manager.tr("Parameter couldn't be assigned"); - osparc.FlashMessenger.getInstance().logAs(msg, "ERROR"); + osparc.FlashMessenger.logError(msg); } this.fireEvent("reloadModel"); } @@ -499,7 +500,7 @@ 
qx.Class.define("osparc.data.model.Workbench", { // create a new ProbeNode const requesterPortMD = requesterNode.getMetaData()["outputs"][portId]; const type = osparc.utils.Ports.getPortType(requesterNode.getMetaData()["outputs"], portId); - const probeMetadata = osparc.service.Utils.getProbeMetadata(type); + const probeMetadata = osparc.store.Services.getProbeMetadata(type); if (probeMetadata) { const probeNode = await this.createNode(probeMetadata["key"], probeMetadata["version"]); if (probeNode === null) { @@ -519,7 +520,7 @@ qx.Class.define("osparc.data.model.Workbench", { if (probeNode.getPropsForm().addPortLink("in_1", nodeId, portId) !== true) { this.removeNode(probeId); const msg = qx.locale.Manager.tr("Probe couldn't be assigned"); - osparc.FlashMessenger.getInstance().logAs(msg, "ERROR"); + osparc.FlashMessenger.logError(msg); } this.fireEvent("reloadModel"); } @@ -536,7 +537,7 @@ qx.Class.define("osparc.data.model.Workbench", { return false; } if (this.getStudy().isPipelineRunning()) { - osparc.FlashMessenger.getInstance().logAs(this.self().CANT_DELETE_NODE, "ERROR"); + osparc.FlashMessenger.logAs(this.self().CANT_DELETE_NODE, "ERROR"); return false; } diff --git a/services/static-webserver/client/source/class/osparc/desktop/MainPage.js b/services/static-webserver/client/source/class/osparc/desktop/MainPage.js index d3eff170ad6..a17b42e9a8d 100644 --- a/services/static-webserver/client/source/class/osparc/desktop/MainPage.js +++ b/services/static-webserver/client/source/class/osparc/desktop/MainPage.js @@ -50,13 +50,13 @@ qx.Class.define("osparc.desktop.MainPage", { const navBar = this.__navBar = new osparc.navigation.NavigationBar(); navBar.populateLayout(); navBar.addListener("backToDashboardPressed", () => this.__backToDashboardPressed(), this); - navBar.addListener("downloadStudyLogs", () => this.__downloadStudyLogs(), this); + navBar.addListener("openLogger", () => this.__openLogger(), this); this._add(navBar); // Some resources request before building the main stack osparc.MaintenanceTracker.getInstance().startTracker(); osparc.CookieExpirationTracker.getInstance().startTracker(); - osparc.NewUITracker.getInstance().startTracker(); + // osparc.NewUITracker.getInstance().startTracker(); const store = osparc.store.Store.getInstance(); const preloadPromises = []; @@ -67,6 +67,11 @@ qx.Class.define("osparc.desktop.MainPage", { } preloadPromises.push(store.getAllClassifiers(true)); preloadPromises.push(osparc.store.Tags.getInstance().fetchTags()); + preloadPromises.push(osparc.store.Products.getInstance().fetchUiConfig()); + preloadPromises.push(osparc.store.PollTasks.getInstance().fetchTasks()); + if (osparc.utils.DisabledPlugins.isJobsEnabled()) { + preloadPromises.push(osparc.store.Jobs.getInstance().fetchJobs()); + } Promise.all(preloadPromises) .then(() => { const mainStack = this.__createMainStack(); @@ -171,9 +176,9 @@ qx.Class.define("osparc.desktop.MainPage", { } }, - __downloadStudyLogs: function() { + __openLogger: function() { if (this.__studyEditor) { - this.__studyEditor.getStudyLogger().downloadLogs(); + osparc.ui.window.Window.popUpInWindow(this.__studyEditor.getStudyLogger(), this.tr("Platform logger"), 950, 650); } }, @@ -199,6 +204,11 @@ qx.Class.define("osparc.desktop.MainPage", { __createDashboardLayout: function() { const dashboard = this.__dashboard = new osparc.dashboard.Dashboard(); + const tabsBar = dashboard.getChildControl("bar"); + tabsBar.set({ + paddingBottom: 6 + }); + this.__navBar.addDashboardTabButtons(tabsBar); const dashboardLayout = new 
qx.ui.container.Composite(new qx.ui.layout.HBox(5)); dashboardLayout.add(dashboard, { flex: 1 @@ -213,7 +223,7 @@ qx.Class.define("osparc.desktop.MainPage", { __publishTemplate: function(data) { const text = this.tr("Started template creation and added to the background tasks"); - osparc.FlashMessenger.getInstance().logAs(text, "INFO"); + osparc.FlashMessenger.logAs(text, "INFO"); const params = { url: { @@ -226,7 +236,7 @@ qx.Class.define("osparc.desktop.MainPage", { pollTask: true }; const fetchPromise = osparc.data.Resources.fetch("studies", "postToTemplate", params, options); - const pollTasks = osparc.data.PollTasks.getInstance(); + const pollTasks = osparc.store.PollTasks.getInstance(); const interval = 1000; pollTasks.createPollingTask(fetchPromise, interval) .then(task => { @@ -236,12 +246,12 @@ qx.Class.define("osparc.desktop.MainPage", { } task.addListener("resultReceived", e => { const templateData = e.getData(); - osparc.info.StudyUtils.addCollaborators(templateData, data["accessRights"]); + osparc.store.Study.addCollaborators(templateData, data["accessRights"]); }); }) .catch(errMsg => { - const msg = this.tr("Something went wrong Duplicating the study
") + errMsg; - osparc.FlashMessenger.logAs(msg, "ERROR"); + const msg = this.tr("Something went wrong while duplicating the study
") + errMsg; + osparc.FlashMessenger.logError(msg); }); }, @@ -294,7 +304,7 @@ qx.Class.define("osparc.desktop.MainPage", { osparc.data.Resources.fetch("snapshots", "checkout", params) .then(snapshotResp => { if (!snapshotResp) { - const msg = this.tr("Snapshot not found"); + const msg = this.tr("No snapshot found"); throw new Error(msg); } const params2 = { @@ -302,7 +312,7 @@ qx.Class.define("osparc.desktop.MainPage", { "studyId": studyId } }; - osparc.data.Resources.getOne("studies", params2) + osparc.data.Resources.fetch("studies", "getOne", params2) .then(studyData => { if (!studyData) { const msg = this.tr("Study not found"); @@ -312,7 +322,7 @@ qx.Class.define("osparc.desktop.MainPage", { }); }) .catch(err => { - osparc.FlashMessenger.getInstance().logAs(err.message, "ERROR"); + osparc.FlashMessenger.logError(err); this.__showDashboard(); return; }); @@ -342,7 +352,7 @@ qx.Class.define("osparc.desktop.MainPage", { } }; // OM TODO. DO NOT ADD ITERATIONS TO STUDIES CACHE - osparc.data.Resources.getOne("studies", params) + osparc.data.Resources.fetch("studies", "getOne", params) .then(studyData => { if (!studyData) { const msg = this.tr("Iteration not found"); @@ -351,7 +361,7 @@ qx.Class.define("osparc.desktop.MainPage", { osparc.desktop.MainPageHandler.getInstance().loadStudy(studyData); }) .catch(err => { - osparc.FlashMessenger.getInstance().logAs(err.message, "ERROR"); + osparc.FlashMessenger.logError(err); this.__showDashboard(); return; }); diff --git a/services/static-webserver/client/source/class/osparc/desktop/MainPageDesktop.js b/services/static-webserver/client/source/class/osparc/desktop/MainPageDesktop.js index ee935adab67..577ddece319 100644 --- a/services/static-webserver/client/source/class/osparc/desktop/MainPageDesktop.js +++ b/services/static-webserver/client/source/class/osparc/desktop/MainPageDesktop.js @@ -62,6 +62,8 @@ qx.Class.define("osparc.desktop.MainPageDesktop", { } preloadPromises.push(store.getAllClassifiers(true)); preloadPromises.push(osparc.store.Tags.getInstance().fetchTags()); + preloadPromises.push(osparc.store.Products.getInstance().fetchUiConfig()); + preloadPromises.push(osparc.store.PollTasks.getInstance().fetchTasks()); Promise.all(preloadPromises) .then(() => { const desktopCenter = new osparc.desktop.credits.DesktopCenter(); diff --git a/services/static-webserver/client/source/class/osparc/desktop/MainPageHandler.js b/services/static-webserver/client/source/class/osparc/desktop/MainPageHandler.js index 6e34be5d88c..40174728275 100644 --- a/services/static-webserver/client/source/class/osparc/desktop/MainPageHandler.js +++ b/services/static-webserver/client/source/class/osparc/desktop/MainPageHandler.js @@ -69,7 +69,7 @@ qx.Class.define("osparc.desktop.MainPageHandler", { "studyId": studyId } }; - osparc.data.Resources.getOne("studies", params) + osparc.data.Resources.fetch("studies", "getOne", params) .then(studyData => { if (!studyData) { const msg = qx.locale.Manager.tr("Study not found"); @@ -78,7 +78,7 @@ qx.Class.define("osparc.desktop.MainPageHandler", { this.loadStudy(studyData); }) .catch(err => { - osparc.FlashMessenger.getInstance().logAs(err.message, "ERROR"); + osparc.FlashMessenger.logError(err); this.showDashboard(); return; }); @@ -103,22 +103,26 @@ qx.Class.define("osparc.desktop.MainPageHandler", { } // check if it's corrupt - if (osparc.data.model.Study.isCorrupt(studyData)) { + if (osparc.study.Utils.isCorrupt(studyData)) { const msg = `${qx.locale.Manager.tr("We encountered an issue with the")} ${studyAlias}
${qx.locale.Manager.tr("Please contact support.")}`; throw new Error(msg); } this.setLoadingPageHeader(qx.locale.Manager.tr("Loading ") + studyData.name); this.showLoadingPage(); - const inaccessibleServices = osparc.study.Utils.getInaccessibleServices(studyData["workbench"]) - if (inaccessibleServices.length) { - const msg = osparc.study.Utils.getInaccessibleServicesMsg(inaccessibleServices, studyData["workbench"]); - osparc.FlashMessenger.getInstance().logAs(msg, "ERROR"); - this.showDashboard(); - return; - } - this.showStudyEditor(); - this.__studyEditor.setStudyData(studyData); + + osparc.store.Services.getStudyServicesMetadata(studyData) + .finally(() => { + const inaccessibleServices = osparc.store.Services.getInaccessibleServices(studyData["workbench"]) + if (inaccessibleServices.length) { + const msg = osparc.store.Services.getInaccessibleServicesMsg(inaccessibleServices, studyData["workbench"]); + osparc.FlashMessenger.getInstance().logError(msg); + this.showDashboard(); + return; + } + this.showStudyEditor(); + this.__studyEditor.setStudyData(studyData); + }); } } }); diff --git a/services/static-webserver/client/source/class/osparc/desktop/SlideshowView.js b/services/static-webserver/client/source/class/osparc/desktop/SlideshowView.js index e05a37f56a1..52c33d94be9 100644 --- a/services/static-webserver/client/source/class/osparc/desktop/SlideshowView.js +++ b/services/static-webserver/client/source/class/osparc/desktop/SlideshowView.js @@ -72,7 +72,7 @@ qx.Class.define("osparc.desktop.SlideshowView", { const nodeId = e.getData(); this.__hideNode(nodeId); }, this); - slideshowToolbar.addListener("slidesStop", () => this.fireEvent("slidesStop"), this); + slideshowToolbar.addListener("slidesStop", () => this.getStudy().getUi().setMode("workbench"), this); this._add(slideshowToolbar); const mainView = this.__mainView = new qx.ui.container.Composite(new qx.ui.layout.HBox().set({ @@ -109,7 +109,6 @@ qx.Class.define("osparc.desktop.SlideshowView", { }, events: { - "slidesStop": "qx.event.type.Event", "startPartialPipeline": "qx.event.type.Data", "stopPipeline": "qx.event.type.Event", "backToDashboardPressed": "qx.event.type.Event", @@ -131,12 +130,6 @@ qx.Class.define("osparc.desktop.SlideshowView", { apply: "__applyMaximized", event: "changeMaximized" }, - - pageContext: { - check: ["guided", "app"], - nullable: false, - init: "guided" - } }, statics: { @@ -270,9 +263,6 @@ qx.Class.define("osparc.desktop.SlideshowView", { view = new osparc.node.slideshow.NodeView(); } view.setNode(node); - if (node.isDynamic()) { - view.getSettingsLayout().setVisibility(this.getPageContext() === "app" ? 
"excluded" : "visible"); - } } this.__connectMaximizeEvents(node); this.__styleView(node, view); @@ -377,7 +367,6 @@ qx.Class.define("osparc.desktop.SlideshowView", { }); } } - this.setPageContext("app"); this.__slideshowToolbar.populateButtons(true); const currentNodeId = this.getStudy().getUi().getCurrentNodeId(); const isValid = slideshow.getPosition(currentNodeId) !== -1; diff --git a/services/static-webserver/client/source/class/osparc/desktop/StudyEditor.js b/services/static-webserver/client/source/class/osparc/desktop/StudyEditor.js index 3e6a0c943e2..deeb4f38372 100644 --- a/services/static-webserver/client/source/class/osparc/desktop/StudyEditor.js +++ b/services/static-webserver/client/source/class/osparc/desktop/StudyEditor.js @@ -37,8 +37,6 @@ qx.Class.define("osparc.desktop.StudyEditor", { }); workbenchView.addListener("slidesEdit", () => this.__editSlides(), this); - workbenchView.addListener("slidesAppStart", () => this.setPageContext(osparc.navigation.NavigationBar.PAGE_CONTEXT[2]), this); - slideshowView.addListener("slidesStop", () => this.setPageContext(osparc.navigation.NavigationBar.PAGE_CONTEXT[1]), this); workbenchView.addListener("takeSnapshot", () => this.__takeSnapshot(), this); workbenchView.addListener("takeSnapshot", () => this.__takeSnapshot(), this); @@ -72,7 +70,7 @@ qx.Class.define("osparc.desktop.StudyEditor", { const startStopButtons = workbenchView.getStartStopButtons(); startStopButtons.addListener("startPipeline", () => this.__startPipeline([]), this); startStopButtons.addListener("startPartialPipeline", () => { - const partialPipeline = this.getPageContext() === "workbench" ? this.__workbenchView.getSelectedNodeIDs() : this.__slideshowView.getSelectedNodeIDs(); + const partialPipeline = this.getStudy().getUi().getMode() === "app" ? this.__slideshowView.getSelectedNodeIDs() : this.__workbenchView.getSelectedNodeIDs(); this.__startPipeline(partialPipeline); }, this); startStopButtons.addListener("stopPipeline", () => this.__stopPipeline(), this); @@ -103,23 +101,14 @@ qx.Class.define("osparc.desktop.StudyEditor", { apply: "__applyStudy", event: "changeStudy" }, - - pageContext: { - check: ["workbench", "guided", "app"], - init: null, - nullable: false, - event: "changePageContext", - apply: "__applyPageContext" - } }, statics: { AUTO_SAVE_INTERVAL: 3000, - READ_ONLY_TEXT: qx.locale.Manager.tr("You do not have writing permissions.
<br>Your changes will not be saved.") + READ_ONLY_TEXT: qx.locale.Manager.tr("You do not have writing permissions.<br>
Your changes will not be saved."), }, members: { - __study: null, __settingStudy: null, __viewsStack: null, __workbenchView: null, @@ -145,7 +134,7 @@ qx.Class.define("osparc.desktop.StudyEditor", { "studyId": studyData.uuid } }; - osparc.data.Resources.getOne("studies", params) + osparc.data.Resources.fetch("studies", "getOne", params) .then(latestStudyData => { const study = new osparc.data.model.Study(latestStudyData); this.setStudy(study); @@ -179,6 +168,10 @@ qx.Class.define("osparc.desktop.StudyEditor", { } }, this); } + + study.getUi().addListener("changeMode", e => { + this.__uiModeChanged(e.getData(), e.getOldData()); + }); }) .catch(err => { console.error(err); @@ -212,7 +205,7 @@ qx.Class.define("osparc.desktop.StudyEditor", { msg += "
" + err["message"]; } } - osparc.FlashMessenger.getInstance().logAs(msg, "ERROR"); + osparc.FlashMessenger.logError(msg); this.fireEvent("forceBackToDashboard"); }) .finally(() => this._hideLoadingPage()); @@ -240,7 +233,7 @@ qx.Class.define("osparc.desktop.StudyEditor", { let msg = this.tr("The Study contains more than ") + maxNumber + this.tr(" Interactive services."); msg += "
"; msg += this.tr("Please start them manually."); - osparc.FlashMessenger.getInstance().logAs(msg, "WARNING"); + osparc.FlashMessenger.logAs(msg, "WARNING"); } } @@ -248,24 +241,12 @@ qx.Class.define("osparc.desktop.StudyEditor", { this.__startAutoSaveTimer(); } else { const msg = this.self().READ_ONLY_TEXT; - osparc.FlashMessenger.getInstance().logAs(msg, "WARNING"); + osparc.FlashMessenger.logAs(msg, "WARNING"); } - const pageContext = study.getUi().getMode(); - switch (pageContext) { - case "guided": - case "app": - this.__slideshowView.startSlides(); - break; - default: - this.__workbenchView.openFirstNode(); - break; - } - this.addListener("changePageContext", e => { - const pageCxt = e.getData(); - study.getUi().setMode(pageCxt); - }); - this.setPageContext(pageContext); + + const uiMode = study.getUi().getMode(); + this.__uiModeChanged(uiMode); const workbench = study.getWorkbench(); workbench.addListener("retrieveInputs", e => { @@ -401,7 +382,7 @@ qx.Class.define("osparc.desktop.StudyEditor", { const usedWallet = store.getWallets().find(wallet => wallet.getWalletId() === walletId); const walletName = usedWallet.getName(); const text = `Wallet "${walletName}", running your service(s) has run out of credits. Stopping service(s) gracefully.`; - osparc.FlashMessenger.getInstance().logAs(this.tr(text), "ERROR", flashMessageDisplayDuration); + osparc.FlashMessenger.logError(this.tr(text), null, flashMessageDisplayDuration); }, this); } }, @@ -425,7 +406,7 @@ qx.Class.define("osparc.desktop.StudyEditor", { const node = workbench.getNode(nodeId); const label = node.getLabel(); const text = `New inputs for service ${label}. Please reload to refresh service.`; - osparc.FlashMessenger.getInstance().logAs(text, "INFO"); + osparc.FlashMessenger.logAs(text, "INFO"); } } }, this); @@ -556,7 +537,8 @@ qx.Class.define("osparc.desktop.StudyEditor", { }, __editSlides: function() { - if (this.getPageContext() !== osparc.navigation.NavigationBar.PAGE_CONTEXT[1]) { + if (this.getStudy().getUi().getMode() !== "workbench") { + // if the user is not in "workbench" mode, return return; } @@ -627,9 +609,9 @@ qx.Class.define("osparc.desktop.StudyEditor", { }, this); } else if (e.getTarget().getStatus() == "402") { const msg = await e.getTarget().getResponse().error.errors[0].message; - osparc.FlashMessenger.getInstance().logAs(msg, "WARNING"); + osparc.FlashMessenger.logAs(msg, "WARNING"); } else { - this.getStudyLogger().error(null, "Failed submitting pipeline"); + this.getStudyLogger().error(null, "Unsuccessful pipeline submission"); } this.getStudy().setPipelineRunning(false); }, this); @@ -719,21 +701,41 @@ qx.Class.define("osparc.desktop.StudyEditor", { return this.__workbenchView.getLogger(); }, - __applyPageContext: function(newCtxt) { - switch (newCtxt) { - case "workbench": - this.__viewsStack.setSelection([this.__workbenchView]); - if (this.getStudy() && this.getStudy().getUi()) { - this.__workbenchView.nodeSelected(this.getStudy().getUi().getCurrentNodeId()); - } - break; + __uiModeChanged: function(newUIMode, oldUIMode) { + switch (newUIMode) { case "guided": case "app": this.__viewsStack.setSelection([this.__slideshowView]); - if (this.getStudy() && this.getStudy().getUi()) { - this.__slideshowView.startSlides(); + this.__slideshowView.startSlides(); + break; + case "standalone": { + this.__viewsStack.setSelection([this.__workbenchView]); + this.__workbenchView.openFirstNode(); + break; + } + case "workbench": + default: { + this.__viewsStack.setSelection([this.__workbenchView]); + if (oldUIMode 
=== "standalone") { + // in this transition, show workbenchUI + this.__workbenchView.setMaximized(false); + // uncomment this when we release the osparc<->s4l integration + // this.__workbenchView.showPipeline(); + } else { + const currentNodeId = this.getStudy().getUi().getCurrentNodeId(); + if (currentNodeId && this.getStudy().getWorkbench().getNode(currentNodeId)) { + const node = this.getStudy().getWorkbench().getNode(currentNodeId); + if (node && node.isDynamic()) { + this.__workbenchView.fullscreenNode(currentNodeId); + } else { + this.__workbenchView.nodeSelected(currentNodeId); + } + } else { + this.__workbenchView.openFirstNode(); + } } break; + } } }, @@ -764,7 +766,7 @@ qx.Class.define("osparc.desktop.StudyEditor", { const store = osparc.store.Store.getInstance(); store.getSnapshots().push(data); }) - .catch(err => osparc.FlashMessenger.getInstance().logAs(err.message, "ERROR")); + .catch(err => osparc.FlashMessenger.logError(err)); win.close(); }, this); @@ -893,8 +895,7 @@ qx.Class.define("osparc.desktop.StudyEditor", { if ("status" in error && error.status === 409) { console.log("Flash message blocked"); // Workaround for osparc-issues #1189 } else { - console.error(error); - osparc.FlashMessenger.getInstance().logAs(this.tr("Error saving the study"), "ERROR"); + osparc.FlashMessenger.logError(this.tr("Error saving the study")); } this.getStudyLogger().error(null, "Error updating pipeline"); // Need to throw the error to be able to handle it later diff --git a/services/static-webserver/client/source/class/osparc/desktop/StudyEditorIdlingTracker.js b/services/static-webserver/client/source/class/osparc/desktop/StudyEditorIdlingTracker.js index 3ffac59d401..4c8575f0442 100644 --- a/services/static-webserver/client/source/class/osparc/desktop/StudyEditorIdlingTracker.js +++ b/services/static-webserver/client/source/class/osparc/desktop/StudyEditorIdlingTracker.js @@ -42,7 +42,7 @@ qx.Class.define("osparc.desktop.StudyEditorIdlingTracker", { __updateFlashMessage: function(timeoutSec) { if (this.__idleFlashMessage === null) { - this.__idleFlashMessage = osparc.FlashMessenger.getInstance().logAs(qx.locale.Manager.tr("Are you still there?"), "WARNING", timeoutSec*1000); + this.__idleFlashMessage = osparc.FlashMessenger.logAs(qx.locale.Manager.tr("Are you still there?"), "WARNING", timeoutSec*1000); } let msg = qx.locale.Manager.tr("Are you still there?") + "
"; diff --git a/services/static-webserver/client/source/class/osparc/desktop/WorkbenchView.js b/services/static-webserver/client/source/class/osparc/desktop/WorkbenchView.js index a735b2532d6..e10cc5f62fd 100644 --- a/services/static-webserver/client/source/class/osparc/desktop/WorkbenchView.js +++ b/services/static-webserver/client/source/class/osparc/desktop/WorkbenchView.js @@ -49,16 +49,13 @@ qx.Class.define("osparc.desktop.WorkbenchView", { decorateSlider: function(slider) { slider.set({ width: 2, - backgroundColor: "#007fd4", // Visual Studio blue + backgroundColor: "visual-blue", opacity: 1 }); }, openNodeDataManager: function(node) { - const nodeDataManager = new osparc.widget.NodeDataManager(null, node.getNodeId()); - const win = osparc.ui.window.Window.popUpInWindow(nodeDataManager, node.getLabel(), 900, 600).set({ - appearance: "service-window" - }); + const win = osparc.widget.StudyDataManager.popUpInWindow(node.getStudy().getUuid(), node.getNodeId(), node.getLabel()); const closeBtn = win.getChildControl("close-button"); osparc.utils.Utils.setIdToWidget(closeBtn, "nodeDataManagerCloseBtn"); } @@ -70,7 +67,6 @@ qx.Class.define("osparc.desktop.WorkbenchView", { "expandNavBar": "qx.event.type.Event", "backToDashboardPressed": "qx.event.type.Event", "slidesEdit": "qx.event.type.Event", - "slidesAppStart": "qx.event.type.Event", "annotationRectStart": "qx.event.type.Event", "takeSnapshot": "qx.event.type.Event", "showSnapshots": "qx.event.type.Event", @@ -81,7 +77,7 @@ qx.Class.define("osparc.desktop.WorkbenchView", { properties: { study: { check: "osparc.data.model.Study", - apply: "_applyStudy", + apply: "__applyStudy", nullable: false }, @@ -244,7 +240,7 @@ qx.Class.define("osparc.desktop.WorkbenchView", { return sidePanelsNewWidth; }, - _applyStudy: function(study) { + __applyStudy: function(study) { if (study) { this.__initViews(); this.__connectEvents(); @@ -302,9 +298,12 @@ qx.Class.define("osparc.desktop.WorkbenchView", { if (study === null) { return; } + this.__initPrimaryColumn(); this.__initSecondaryColumn(); this.__initMainView(); + + this.setMaximized(false); }, __initPrimaryColumn: function() { @@ -370,7 +369,7 @@ qx.Class.define("osparc.desktop.WorkbenchView", { dragMechanism: true, hideRoot: true }); - filesTree.populateTree(); + filesTree.populateLocations(); const storagePage = this.__storagePage = this.__createTabPage("@FontAwesome5Solid/database", this.tr("Storage"), filesTree, this.self().PRIMARY_COL_BG_COLOR); tabViewPrimary.add(storagePage); @@ -443,12 +442,12 @@ qx.Class.define("osparc.desktop.WorkbenchView", { appearance: "form-button-outlined", label: this.tr("App Mode"), toolTipText: this.tr("Start App Mode"), - icon: "@FontAwesome5Solid/play/14", + icon: osparc.dashboard.CardBase.MODE_APP, marginRight: 10, marginTop: 7, ...osparc.navigation.NavigationBar.BUTTON_OPTIONS }); - startAppButtonTB.addListener("execute", () => this.fireEvent("slidesAppStart")); + startAppButtonTB.addListener("execute", () => study.getUi().setMode("app")); topBar.add(startAppButtonTB); const collapseWithUserMenu = this.__collapseWithUserMenu = new osparc.desktop.CollapseWithUserMenu(); @@ -497,10 +496,9 @@ qx.Class.define("osparc.desktop.WorkbenchView", { studyTreeItem.addListener("changeSelectedNode", () => { nodesTree.resetSelection(); - this.__populateSecondaryColumn(this.getStudy()); - this.__evalIframe(); - this.__openWorkbenchTab(); - this.__loggerView.setCurrentNodeId(null); + this.showPipeline(); + + this.getStudy().getUi().setCurrentNodeId(this.getStudy().getUuid()); 
}); nodesTree.addListener("changeSelectedNode", e => { studyTreeItem.resetSelection(); @@ -514,6 +512,8 @@ qx.Class.define("osparc.desktop.WorkbenchView", { this.__loggerView.setCurrentNodeId(nodeId); this.__workbenchUI.nodeSelected(nodeId); this.fireDataEvent("changeSelectedNode", nodeId); + + this.getStudy().getUi().setCurrentNodeId(nodeId); }); if (this.__workbenchUIConnected === null) { @@ -529,9 +529,13 @@ qx.Class.define("osparc.desktop.WorkbenchView", { this.__evalIframe(node); this.__loggerView.setCurrentNodeId(nodeId); this.fireDataEvent("changeSelectedNode", nodeId); + + this.getStudy().getUi().setCurrentNodeId(nodeId); } else { // empty selection this.__studyTreeItem.selectStudyItem(); + + this.getStudy().getUi().setCurrentNodeId(this.getStudy().getUuid()); } }); workbenchUI.addListener("nodeSelected", e => { @@ -545,6 +549,8 @@ qx.Class.define("osparc.desktop.WorkbenchView", { this.__populateSecondaryColumn(node); this.__openIframeTab(node); this.__loggerView.setCurrentNodeId(nodeId); + + this.getStudy().getUi().setCurrentNodeId(nodeId); } }, this); } @@ -553,16 +559,8 @@ qx.Class.define("osparc.desktop.WorkbenchView", { const nodeId = e.getData(); if (nodeId) { studyTreeItem.resetSelection(); - const workbench = this.getStudy().getWorkbench(); - const node = workbench.getNode(nodeId); - if (node) { - this.__populateSecondaryColumn(node); - this.__openIframeTab(node); - node.getLoadingPage().maximizeIFrame(true); - node.getIFrame().maximizeIFrame(true); - } - this.__loggerView.setCurrentNodeId(nodeId); - this.__workbenchUI.nodeSelected(nodeId); + this.fullscreenNode(nodeId); + this.getStudy().getUi().setCurrentNodeId(nodeId); } }, this); nodesTree.addListener("removeNode", e => { @@ -839,11 +837,11 @@ qx.Class.define("osparc.desktop.WorkbenchView", { const startAppBtn = this.__startAppButton = new qx.ui.form.Button().set({ label: this.tr("Start"), - icon: "@FontAwesome5Solid/play/14", + icon: osparc.dashboard.CardBase.MODE_APP, toolTipText: this.tr("Start App Mode"), height: buttonsHeight }); - startAppBtn.addListener("execute", () => this.fireEvent("slidesAppStart"), this); + startAppBtn.addListener("execute", () => this.getStudy().getUi().setMode("app"), this); slideshowButtons.add(startAppBtn); this.__evalSlidesButtons(); @@ -1017,7 +1015,7 @@ qx.Class.define("osparc.desktop.WorkbenchView", { // HEADER const nodeMetadata = node.getMetaData(); - const version = osparc.service.Utils.getVersionDisplay(nodeMetadata["key"], nodeMetadata["version"]); + const version = osparc.store.Services.getVersionDisplay(nodeMetadata["key"], nodeMetadata["version"]); const header = new qx.ui.basic.Label(`${nodeMetadata["name"]} ${version}`).set({ paddingLeft: 5 }); @@ -1183,21 +1181,38 @@ qx.Class.define("osparc.desktop.WorkbenchView", { this.__nodesTree.nodeSelected(this.__currentNodeId); }, + showPipeline: function() { + this.__populateSecondaryColumn(this.getStudy()); + this.__evalIframe(); + this.__openWorkbenchTab(); + this.__loggerView.setCurrentNodeId(null); + + this.getStudy().getUi().setCurrentNodeId(this.getStudy().getUuid()); + }, + + fullscreenNode: function(nodeId) { + const node = this.getStudy().getWorkbench().getNode(nodeId); + if (node && node.isDynamic()) { + qx.event.Timer.once(() => { + this.__populateSecondaryColumn(node); + this.__openIframeTab(node); + node.getIFrame().maximizeIFrame(true); + }, this, 10); + } + this.__loggerView.setCurrentNodeId(nodeId); + this.__workbenchUI.nodeSelected(nodeId); + }, + openFirstNode: function() { - const nodes = 
this.getStudy().getWorkbench().getNodes(); - const validNodes = Object.values(nodes).filter(node => node.isComputational() || node.isDynamic()); + const validNodes = this.getStudy().getNonFrontendNodes(); if (validNodes.length === 1 && validNodes[0].isDynamic()) { const dynamicNode = validNodes[0]; - this.nodeSelected(dynamicNode.getNodeId()); - qx.event.Timer.once(() => { - this.__openIframeTab(dynamicNode); - dynamicNode.getLoadingPage().maximizeIFrame(true); - dynamicNode.getIFrame().maximizeIFrame(true); - }, this, 10); - return; + this.fullscreenNode(dynamicNode.getNodeId()); + this.getStudy().getUi().setCurrentNodeId(dynamicNode.getNodeId()); + } else { + this.setMaximized(false); + this.nodeSelected(this.getStudy().getUuid()); } - this.setMaximized(false); - this.nodeSelected(this.getStudy().getUuid()); } } }); diff --git a/services/static-webserver/client/source/class/osparc/desktop/account/DeleteAccount.js b/services/static-webserver/client/source/class/osparc/desktop/account/DeleteAccount.js index 79c54bbc1a9..8c3d301923e 100644 --- a/services/static-webserver/client/source/class/osparc/desktop/account/DeleteAccount.js +++ b/services/static-webserver/client/source/class/osparc/desktop/account/DeleteAccount.js @@ -116,13 +116,10 @@ qx.Class.define("osparc.desktop.account.DeleteAccount", { osparc.data.Resources.fetch("auth", "unregister", params) .then(() => { const msg = this.tr(`Your account will be deleted in ${retentionDays} days`); - osparc.FlashMessenger.getInstance().logAs(msg, "INFO"); + osparc.FlashMessenger.logAs(msg, "INFO"); this.fireEvent("deleted"); }) - .catch(err => { - console.error(err); - osparc.FlashMessenger.logAs(err.message, "ERROR"); - }) + .catch(err => osparc.FlashMessenger.logError(err)) .finally(() => deleteBtn.setFetching(false)); } } }); diff --git a/services/static-webserver/client/source/class/osparc/desktop/account/MyAccount.js b/services/static-webserver/client/source/class/osparc/desktop/account/MyAccount.js index 0df1dc1d8be..a77f891f328 100644 --- a/services/static-webserver/client/source/class/osparc/desktop/account/MyAccount.js +++ b/services/static-webserver/client/source/class/osparc/desktop/account/MyAccount.js @@ -24,12 +24,24 @@ qx.Class.define("osparc.desktop.account.MyAccount", { const miniProfile = osparc.desktop.account.MyAccount.createMiniProfileView().set({ paddingRight: 10 }); - this.addWidgetOnTopOfTheTabs(miniProfile); + this.addWidgetToTabs(miniProfile); this.__profilePage = this.__addProfilePage(); - if (osparc.data.Permissions.getInstance().canDo("usage.all.read")) { - this.__usagePage = this.__addUsagePage(); + // show Usage in My Account if wallets are not enabled.
If they are enabled it will be in the Billing Center + if (!osparc.desktop.credits.Utils.areWalletsEnabled()) { + if (osparc.data.Permissions.getInstance().canDo("usage.all.read")) { + this.__usagePage = this.__addUsagePage(); + } + } + + this.__addGeneralSettings(); + this.__addConfirmationSettings(); + if (osparc.product.Utils.showPreferencesTokens()) { + this.__addTokensPage(); + } + if (osparc.data.Permissions.getInstance().canDo("user.tag")) { + this.__addTagsPage(); + } }, @@ -96,6 +108,7 @@ qx.Class.define("osparc.desktop.account.MyAccount", { members: { __profilePage: null, __usagePage: null, + __tagsPage: null, __addProfilePage: function() { const title = this.tr("Profile"); @@ -112,10 +125,39 @@ qx.Class.define("osparc.desktop.account.MyAccount", { const page = this.addTab(title, iconSrc, usageOverview); return page; }, + __addGeneralSettings: function() { + const title = this.tr("Settings"); + const iconSrc = "@FontAwesome5Solid/cogs/22"; + const generalPage = new osparc.desktop.preferences.pages.GeneralPage(); + this.addTab(title, iconSrc, generalPage); + }, - openProfile: function() { - this._openPage(this.__profilePage); - return true; - } + __addConfirmationSettings: function() { + const title = this.tr("Confirmations"); + const iconSrc = "@FontAwesome5Solid/question-circle/22"; + const confirmPage = new osparc.desktop.preferences.pages.ConfirmationsPage(); + this.addTab(title, iconSrc, confirmPage); + }, + + __addTokensPage: function() { + const title = this.tr("API Keys/Tokens"); + const iconSrc = "@FontAwesome5Solid/exchange-alt/22"; + const tokensPage = new osparc.desktop.preferences.pages.TokensPage(); + this.addTab(title, iconSrc, tokensPage); + }, + + __addTagsPage: function() { + const title = this.tr("Create/Edit Tags"); + const iconSrc = "@FontAwesome5Solid/tags/22"; + const tagsPage = new osparc.desktop.preferences.pages.TagsPage(); + const page = this.__tagsPage = this.addTab(title, iconSrc, tagsPage); + osparc.utils.Utils.setIdToWidget(page.getChildControl("button"), "preferencesTagsTabBtn"); + }, + + openTags: function() { + if (this.__tagsPage) { + this._openPage(this.__tagsPage); + } + }, } }); diff --git a/services/static-webserver/client/source/class/osparc/desktop/account/MyAccountWindow.js b/services/static-webserver/client/source/class/osparc/desktop/account/MyAccountWindow.js index a86eeeb9507..3dcaefa8160 100644 --- a/services/static-webserver/client/source/class/osparc/desktop/account/MyAccountWindow.js +++ b/services/static-webserver/client/source/class/osparc/desktop/account/MyAccountWindow.js @@ -21,7 +21,7 @@ qx.Class.define("osparc.desktop.account.MyAccountWindow", { construct: function() { this.base(arguments, "credits", this.tr("My Account")); - const width = 900; + const width = 990; const height = 700; const maxHeight = 700; this.set({ @@ -46,8 +46,8 @@ qx.Class.define("osparc.desktop.account.MyAccountWindow", { members: { __myAccount: null, - openProfile: function() { - return this.__myAccount.openProfile(); - } + openTags: function() { + this.__myAccount.openTags(); + }, } }); diff --git a/services/static-webserver/client/source/class/osparc/desktop/account/ProfilePage.js b/services/static-webserver/client/source/class/osparc/desktop/account/ProfilePage.js index 53fcc95d0b8..e522d53975f 100644 --- a/services/static-webserver/client/source/class/osparc/desktop/account/ProfilePage.js +++ b/services/static-webserver/client/source/class/osparc/desktop/account/ProfilePage.js @@ -31,12 +31,13 @@ qx.Class.define("osparc.desktop.account.ProfilePage",
{ this._setLayout(new qx.ui.layout.VBox(15)); + this.__userProfileData = {}; + this.__userPrivacyData = {}; + this.__fetchProfile(); this._add(this.__createProfileUser()); - if (osparc.utils.Utils.isDevelopmentPlatform()) { - this._add(this.__createPrivacySection()); - } + this._add(this.__createPrivacySection()); if (osparc.store.StaticInfo.getInstance().is2FARequired()) { this._add(this.__create2FASection()); } @@ -47,8 +48,12 @@ qx.Class.define("osparc.desktop.account.ProfilePage", { members: { __userProfileData: null, __userProfileModel: null, + __userProfileRenderer: null, + __updateProfileBtn: null, __userPrivacyData: null, __userPrivacyModel: null, + __updatePrivacyBtn: null, + __userProfileForm: null, __fetchProfile: function() { osparc.data.Resources.getOne("profile", {}, null, false) @@ -56,9 +61,7 @@ qx.Class.define("osparc.desktop.account.ProfilePage", { this.__setDataToProfile(profile); this.__setDataToPrivacy(profile["privacy"]); }) - .catch(err => { - console.error(err); - }); + .catch(err => console.error(err)); }, __setDataToProfile: function(data) { @@ -72,16 +75,29 @@ qx.Class.define("osparc.desktop.account.ProfilePage", { "expirationDate": data["expirationDate"] || null, }); } + this.__updateProfileBtn.setEnabled(false); }, __setDataToPrivacy: function(privacyData) { if (privacyData) { this.__userPrivacyData = privacyData; this.__userPrivacyModel.set({ + "hideUsername": "hideUsername" in privacyData ? privacyData["hideUsername"] : false, "hideFullname": "hideFullname" in privacyData ? privacyData["hideFullname"] : true, "hideEmail": "hideEmail" in privacyData ? privacyData["hideEmail"] : true, }); + + const visibleIcon = "@FontAwesome5Solid/eye/12"; + const hiddenIcon = "@FontAwesome5Solid/eye-slash/12"; + const icons = { + 0: this.__userPrivacyModel.getHideUsername() ? hiddenIcon : visibleIcon, + 1: this.__userPrivacyModel.getHideFullname() ? hiddenIcon : visibleIcon, + 2: this.__userPrivacyModel.getHideFullname() ? hiddenIcon : visibleIcon, + 3: this.__userPrivacyModel.getHideEmail() ? hiddenIcon : visibleIcon, + }; + this.__userProfileRenderer.setIcons(icons); } + this.__updatePrivacyBtn.setEnabled(false); }, __createProfileUser: function() { @@ -108,12 +124,13 @@ qx.Class.define("osparc.desktop.account.ProfilePage", { readOnly: true }); - const form = new qx.ui.form.Form(); - form.add(username, "Username", null, "username"); - form.add(firstName, "First Name", null, "firstName"); - form.add(lastName, "Last Name", null, "lastName"); - form.add(email, "Email", null, "email"); - box.add(new qx.ui.form.renderer.Single(form)); + const profileForm = this.__userProfileForm = new qx.ui.form.Form(); + profileForm.add(username, "Username", null, "username"); + profileForm.add(firstName, "First Name", null, "firstName"); + profileForm.add(lastName, "Last Name", null, "lastName"); + profileForm.add(email, "Email", null, "email"); + const singleWithIcon = this.__userProfileRenderer = new osparc.ui.form.renderer.SingleWithIcon(profileForm); + box.add(singleWithIcon); const expirationLayout = new qx.ui.container.Composite(new qx.ui.layout.HBox(5)).set({ paddingLeft: 16, @@ -125,7 +142,7 @@ qx.Class.define("osparc.desktop.account.ProfilePage", { expirationLayout.add(expirationDateLabel); const expirationDate = new qx.ui.basic.Label(); expirationLayout.add(expirationDate); - const infoLabel = this.tr("Please contact us by email:
"); + const infoLabel = this.tr("Please contact us via email:
"); const infoExtension = new osparc.ui.hint.InfoHint(infoLabel); const supportEmail = osparc.store.VendorInfo.getInstance().getSupportEmail(); infoExtension.setHintText(infoLabel + supportEmail); @@ -170,21 +187,23 @@ qx.Class.define("osparc.desktop.account.ProfilePage", { namesValidator.add(firstName, qx.util.Validate.regExp(/[^\.\d]+/), this.tr("Avoid dots or numbers in text")); namesValidator.add(lastName, qx.util.Validate.regExp(/^$|[^\.\d]+/), this.tr("Avoid dots or numbers in text")); // allow also empty last name - const updateBtn = new qx.ui.form.Button("Update Profile").set({ + const updateProfileBtn = this.__updateProfileBtn = new qx.ui.form.Button().set({ + label: this.tr("Update Profile"), appearance: "form-button", alignX: "right", - allowGrowX: false + allowGrowX: false, + enabled: false, }); - box.add(updateBtn); + box.add(updateProfileBtn); - updateBtn.addListener("execute", () => { + updateProfileBtn.addListener("execute", () => { if (!osparc.data.Permissions.getInstance().canDo("user.user.update", true)) { this.__resetUserData(); return; } const patchData = {}; - if (this.__userProfileData["username"] !== model.getUsername()) { + if (this.__userProfileData["userName"] !== model.getUsername()) { patchData["userName"] = model.getUsername(); } if (this.__userProfileData["first_name"] !== model.getFirstName()) { @@ -204,61 +223,83 @@ qx.Class.define("osparc.desktop.account.ProfilePage", { this.__setDataToProfile(Object.assign(this.__userProfileData, params.data)); osparc.auth.Manager.getInstance().updateProfile(this.__userProfileData); const msg = this.tr("Profile updated"); - osparc.FlashMessenger.getInstance().logAs(msg, "INFO"); + osparc.FlashMessenger.logAs(msg, "INFO"); }) .catch(err => { this.__resetUserData(); - const msg = err.message || this.tr("Failed to update profile"); - osparc.FlashMessenger.getInstance().logAs(msg, "ERROR"); - console.error(err); + osparc.FlashMessenger.logError(err, this.tr("Unsuccessful profile update")); }); } } }); + const profileFields = [ + username, + firstName, + lastName, + ] + const valueChanged = () => { + const anyChanged = + username.getValue() !== this.__userProfileData["userName"] || + firstName.getValue() !== this.__userProfileData["first_name"] || + lastName.getValue() !== this.__userProfileData["last_name"]; + updateProfileBtn.setEnabled(anyChanged); + }; + valueChanged(); + profileFields.forEach(privacyField => privacyField.addListener("changeValue", () => valueChanged())); + return box; }, __createPrivacySection: function() { + // binding to a model + const defaultModel = { + "hideUsername": false, + "hideFullname": true, + "hideEmail": true, + }; + + const privacyModel = this.__userPrivacyModel = qx.data.marshal.Json.createModel(defaultModel, true); + const box = osparc.ui.window.TabbedView.createSectionBox(this.tr("Privacy")); box.set({ alignX: "left", maxWidth: 500 }); - const label = osparc.ui.window.TabbedView.createHelpLabel(this.tr("For Privacy reasons, you might want to hide your Full Name and/or the email to other users")); + const label = osparc.ui.window.TabbedView.createHelpLabel(this.tr("For Privacy reasons, you might want to hide some personal data.")); box.add(label); + const hideUsername = new qx.ui.form.CheckBox().set({ + value: defaultModel.hideUsername + }); const hideFullname = new qx.ui.form.CheckBox().set({ - value: true + value: defaultModel.hideFullname }); const hideEmail = new qx.ui.form.CheckBox().set({ - value: true + value: defaultModel.hideEmail }); - const form = new qx.ui.form.Form(); - 
form.add(hideFullname, "Hide Full Name", null, "hideFullname"); - form.add(hideEmail, "Hide email", null, "hideEmail"); - box.add(new qx.ui.form.renderer.Single(form)); + const privacyForm = new qx.ui.form.Form(); + privacyForm.add(hideUsername, "Hide Username", null, "hideUsername"); + privacyForm.add(hideFullname, "Hide Full Name", null, "hideFullname"); + privacyForm.add(hideEmail, "Hide Email", null, "hideEmail"); + box.add(new qx.ui.form.renderer.Single(privacyForm)); - // binding to a model - const raw = { - "hideFullname": true, - "hideEmail": true, - }; + const privacyModelCtrl = new qx.data.controller.Object(privacyModel); + privacyModelCtrl.addTarget(hideUsername, "value", "hideUsername", true); + privacyModelCtrl.addTarget(hideFullname, "value", "hideFullname", true); + privacyModelCtrl.addTarget(hideEmail, "value", "hideEmail", true); - const model = this.__userPrivacyModel = qx.data.marshal.Json.createModel(raw); - const controller = new qx.data.controller.Object(model); - controller.addTarget(hideFullname, "value", "hideFullname", true); - controller.addTarget(hideEmail, "value", "hideEmail", true); - - const privacyBtn = new qx.ui.form.Button("Update Privacy").set({ + const updatePrivacyBtn = this.__updatePrivacyBtn = new qx.ui.form.Button().set({ + label: this.tr("Update Privacy"), appearance: "form-button", alignX: "right", - allowGrowX: false + allowGrowX: false, + enabled: false, }); - box.add(privacyBtn); - privacyBtn.addListener("execute", () => { + box.add(updatePrivacyBtn); + updatePrivacyBtn.addListener("execute", () => { if (!osparc.data.Permissions.getInstance().canDo("user.user.update", true)) { this.__resetPrivacyData(); return; @@ -266,11 +307,27 @@ qx.Class.define("osparc.desktop.account.ProfilePage", { const patchData = { "privacy": {} }; - if (this.__userPrivacyData["hideFullname"] !== model.getHideFullname()) { - patchData["privacy"]["hideFullname"] = model.getHideFullname(); + if (this.__userPrivacyData["hideUsername"] !== privacyModel.getHideUsername()) { + patchData["privacy"]["hideUsername"] = privacyModel.getHideUsername(); + } + if (this.__userPrivacyData["hideFullname"] !== privacyModel.getHideFullname()) { + patchData["privacy"]["hideFullname"] = privacyModel.getHideFullname(); } - if (this.__userPrivacyData["hideEmail"] !== model.getHideEmail()) { - patchData["privacy"]["hideEmail"] = model.getHideEmail(); + if (this.__userPrivacyData["hideEmail"] !== privacyModel.getHideEmail()) { + patchData["privacy"]["hideEmail"] = privacyModel.getHideEmail(); + } + + if ( + "hideFullname" in patchData["privacy"] && + patchData["privacy"]["hideFullname"] === false && + this.__userProfileData["first_name"] === null + ) { + this.__userProfileForm.getItem("firstName").set({ + invalidMessage: qx.locale.Manager.tr("Name is required"), + valid: false + }); + osparc.FlashMessenger.logAs(this.tr("Set the Name first"), "WARNING"); + return; } if (Object.keys(patchData["privacy"]).length) { @@ -281,22 +338,50 @@ qx.Class.define("osparc.desktop.account.ProfilePage", { .then(() => { this.__setDataToPrivacy(Object.assign(this.__userPrivacyData, params.data["privacy"])); const msg = this.tr("Privacy updated"); - osparc.FlashMessenger.getInstance().logAs(msg, "INFO"); + osparc.FlashMessenger.logAs(msg, "INFO"); }) .catch(err => { this.__resetPrivacyData(); - const msg = err.message || this.tr("Failed to update privacy"); - osparc.FlashMessenger.getInstance().logAs(msg, "ERROR"); - console.error(err); + osparc.FlashMessenger.logError(err, this.tr("Unsuccessful privacy 
update")); }); } }); + const optOutMessage = new qx.ui.basic.Atom().set({ + label: this.tr("If all searchable fields are hidden, you will not be discoverable."), + icon: "@FontAwesome5Solid/exclamation-triangle/14", + gap: 8, + allowGrowX: false, + }); + optOutMessage.getChildControl("icon").setTextColor("warning-yellow") + box.add(optOutMessage); + + const privacyFields = [ + hideUsername, + hideFullname, + hideEmail, + ] + const valueChanged = () => { + const anyChanged = + hideUsername.getValue() !== this.__userPrivacyData["hideUsername"] || + hideFullname.getValue() !== this.__userPrivacyData["hideFullname"] || + hideEmail.getValue() !== this.__userPrivacyData["hideEmail"]; + updatePrivacyBtn.setEnabled(anyChanged); + + if (privacyFields.every(privacyField => privacyField.getValue())) { + optOutMessage.show(); + } else { + optOutMessage.exclude(); + } + }; + valueChanged(); + privacyFields.forEach(privacyField => privacyField.addListener("changeValue", () => valueChanged())); + return box; }, __create2FASection: function() { - const box = osparc.ui.window.TabbedView.createSectionBox(this.tr("2 Factor Authentication")); + const box = osparc.ui.window.TabbedView.createSectionBox(this.tr("Two-Factor Authentication")); const label = osparc.ui.window.TabbedView.createHelpLabel(this.tr("Set your preferred method to use for two-factor authentication when signing in:")); box.add(label); @@ -334,7 +419,7 @@ qx.Class.define("osparc.desktop.account.ProfilePage", { if (selectedId === "DISABLED") { const discourageTitle = this.tr("You are about to disable the 2FA"); const discourageText = this.tr("\ - The 2 Factor Authentication is one more measure to prevent hackers from accessing your account with an additional layer of security. \ + The Two-Factor Authentication is one more measure to prevent hackers from accessing your account with an additional layer of security. 
\ When you sign in, 2FA helps make sure that your resources and personal information stays private, safe and secure.\ "); const win = new osparc.ui.window.Confirmation(discourageTitle).set({ @@ -431,8 +516,8 @@ qx.Class.define("osparc.desktop.account.ProfilePage", { }); }) .catch(err => { - console.error(err); - osparc.FlashMessenger.getInstance().logAs(this.tr("Failed to reset password"), "ERROR"); + const msg = this.tr("Unsuccessful password reset"); + osparc.FlashMessenger.logError(err, msg); [currentPassword, newPassword, confirm].forEach(item => { item.resetValue(); }); diff --git a/services/static-webserver/client/source/class/osparc/desktop/credits/AutoRecharge.js b/services/static-webserver/client/source/class/osparc/desktop/credits/AutoRecharge.js index 2265aff3e9a..c5609b789e0 100644 --- a/services/static-webserver/client/source/class/osparc/desktop/credits/AutoRecharge.js +++ b/services/static-webserver/client/source/class/osparc/desktop/credits/AutoRecharge.js @@ -290,7 +290,7 @@ qx.Class.define("osparc.desktop.credits.AutoRecharge", { .then(arData => { this.__populateForm(arData); wallet.setAutoRecharge(arData); - osparc.FlashMessenger.getInstance().logAs(successfulMsg, "INFO"); + osparc.FlashMessenger.logAs(successfulMsg, "INFO"); this.fireEvent("close"); }) .finally(() => fetchButton.setFetching(false)); diff --git a/services/static-webserver/client/source/class/osparc/desktop/credits/BillingCenter.js b/services/static-webserver/client/source/class/osparc/desktop/credits/BillingCenter.js index d27acc99fe0..3093df0cd7d 100644 --- a/services/static-webserver/client/source/class/osparc/desktop/credits/BillingCenter.js +++ b/services/static-webserver/client/source/class/osparc/desktop/credits/BillingCenter.js @@ -24,7 +24,7 @@ qx.Class.define("osparc.desktop.credits.BillingCenter", { const miniWallet = this.self().createMiniWalletView().set({ paddingRight: 10 }); - this.addWidgetOnTopOfTheTabs(miniWallet); + this.addWidgetToTabs(miniWallet); this.__walletsPage = this.__addWalletsPage(); this.__paymentMethodsPage = this.__addPaymentMethodsPage(); @@ -36,22 +36,33 @@ qx.Class.define("osparc.desktop.credits.BillingCenter", { if (osparc.product.Utils.showS4LStore()) { this.__addPurchasesPage(); - this.__addCheckoutsPage(); + // For now, do not add checkouts page + // this.__addCheckoutsPage(); } }, statics: { createMiniWalletView: function() { - const layout = new qx.ui.container.Composite(new qx.ui.layout.VBox(8)).set({ + const layout = new qx.ui.container.Composite(new qx.ui.layout.VBox(6)).set({ alignX: "center", minWidth: 120, maxWidth: 150 }); - const store = osparc.store.Store.getInstance(); + const walletName = new qx.ui.basic.Label().set({ + alignX: "center" + }); + layout.add(walletName); const creditsIndicator = new osparc.desktop.credits.CreditsIndicator(); - store.bind("contextWallet", creditsIndicator, "wallet"); layout.add(creditsIndicator); + const store = osparc.store.Store.getInstance(); + store.bind("contextWallet", walletName, "value", { + converter: wallet => wallet.getName() + }); + store.bind("contextWallet", walletName, "toolTipText", { + converter: wallet => wallet.getName() + }); + store.bind("contextWallet", creditsIndicator, "wallet"); layout.add(new qx.ui.core.Spacer(15, 15)); @@ -102,7 +113,7 @@ qx.Class.define("osparc.desktop.credits.BillingCenter", { __addPurchasesPage: function() { const title = this.tr("Purchases"); - const iconSrc = "@FontAwesome5Solid/list/22"; + const iconSrc = "@FontAwesome5Solid/shopping-bag/22"; const purchases = new 
osparc.desktop.credits.Purchases(); const page = this.addTab(title, iconSrc, purchases); return page; diff --git a/services/static-webserver/client/source/class/osparc/desktop/credits/BuyCreditsInput.js b/services/static-webserver/client/source/class/osparc/desktop/credits/BuyCreditsInput.js index 25a7086599d..3e8a3a30717 100644 --- a/services/static-webserver/client/source/class/osparc/desktop/credits/BuyCreditsInput.js +++ b/services/static-webserver/client/source/class/osparc/desktop/credits/BuyCreditsInput.js @@ -57,17 +57,18 @@ qx.Class.define("osparc.desktop.credits.BuyCreditsInput", { paddingRight: 0 }); this.__totalInput = totalInput; - amountInput.addListener("changeValue", e => { - const value = e.getData(); + const amountChanged = value => { totalInput.setValue(value ? 1 * (value * this.__pricePerCredit).toFixed(2) + this.__currencySymbol : "-"); this.fireDataEvent("input", this.getValues()); - }); + } + amountInput.getChildControl("textfield").addListener("input", e => amountChanged(e.getData())); + amountInput.addListener("changeValue", e => amountChanged(e.getData())); this._add(totalContainer); osparc.store.Store.getInstance().getMinimumAmount() .then(minimum => { amountInput.set({ - maximum: 10000, + maximum: 100000, minimum: Math.ceil(minimum/this.__pricePerCredit), value: Math.ceil(minimum/this.__pricePerCredit) }); @@ -81,7 +82,7 @@ qx.Class.define("osparc.desktop.credits.BuyCreditsInput", { const input = new qx.ui.form.TextField().set({ appearance: "appmotion-buy-credits-input", textAlign: "center", - width: 90, + width: 100, ...inputProps }); const label = new qx.ui.basic.Label(labelText); @@ -96,7 +97,7 @@ qx.Class.define("osparc.desktop.credits.BuyCreditsInput", { })); const input = new qx.ui.form.Spinner().set({ appearance: "appmotion-buy-credits-spinner", - width: 100, + width: 110, ...inputProps }); input.getChildControl("textfield").set({ diff --git a/services/static-webserver/client/source/class/osparc/desktop/credits/BuyCreditsStepper.js b/services/static-webserver/client/source/class/osparc/desktop/credits/BuyCreditsStepper.js index f569fb05b34..9789fd0fbc6 100644 --- a/services/static-webserver/client/source/class/osparc/desktop/credits/BuyCreditsStepper.js +++ b/services/static-webserver/client/source/class/osparc/desktop/credits/BuyCreditsStepper.js @@ -59,8 +59,7 @@ qx.Class.define("osparc.desktop.credits.BuyCreditsStepper", { }); }) .catch(err => { - console.error(err); - osparc.FlashMessenger.logAs(err.message, "ERROR"); + osparc.FlashMessenger.logError(err); this.__form.setFetching(false); }) } else { @@ -79,10 +78,7 @@ qx.Class.define("osparc.desktop.credits.BuyCreditsStepper", { } }); }) - .catch(err => { - console.error(err); - osparc.FlashMessenger.logAs(err.message, "ERROR"); - }) + .catch(err => osparc.FlashMessenger.logError(err)) .finally(() => this.__form.setFetching(false)); } }); @@ -95,14 +91,14 @@ qx.Class.define("osparc.desktop.credits.BuyCreditsStepper", { const msg = this.tr("Payment ") + osparc.utils.Utils.onlyFirstsUp(paymentData.completedStatus); switch (paymentData.completedStatus) { case "SUCCESS": - osparc.FlashMessenger.getInstance().logAs(msg, "INFO"); + osparc.FlashMessenger.logAs(msg, "INFO"); break; case "PENDING": - osparc.FlashMessenger.getInstance().logAs(msg, "WARNING"); + osparc.FlashMessenger.logAs(msg, "WARNING"); break; case "CANCELED": case "FAILED": - osparc.FlashMessenger.getInstance().logAs(msg, "ERROR"); + osparc.FlashMessenger.logError(msg); break; default: console.error("completedStatus unknown"); diff --git 
a/services/static-webserver/client/source/class/osparc/desktop/credits/CheckoutsTable.js b/services/static-webserver/client/source/class/osparc/desktop/credits/CheckoutsTable.js index a757d6e3f2f..b506beca86d 100644 --- a/services/static-webserver/client/source/class/osparc/desktop/credits/CheckoutsTable.js +++ b/services/static-webserver/client/source/class/osparc/desktop/credits/CheckoutsTable.js @@ -80,7 +80,7 @@ qx.Class.define("osparc.desktop.credits.CheckoutsTable", { id: "user", column: 6, label: qx.locale.Manager.tr("User"), - width: 100 + width: 150 }, } } diff --git a/services/static-webserver/client/source/class/osparc/desktop/credits/CheckoutsTableModel.js b/services/static-webserver/client/source/class/osparc/desktop/credits/CheckoutsTableModel.js index fe59f001d04..8e25d5740b7 100644 --- a/services/static-webserver/client/source/class/osparc/desktop/credits/CheckoutsTableModel.js +++ b/services/static-webserver/client/source/class/osparc/desktop/credits/CheckoutsTableModel.js @@ -122,19 +122,16 @@ qx.Class.define("osparc.desktop.credits.CheckoutsTableModel", { return Promise.all([ licensedItemsStore.getLicensedItems(), licensedItemsStore.getCheckedOutLicensedItems(walletId, urlParams), - licensedItemsStore.getVipModels(), ]) .then(values => { const licensedItems = values[0]; const checkoutsItems = values[1]; - const vipModels = values[2]; const data = []; const checkoutsCols = osparc.desktop.credits.CheckoutsTable.COLS; checkoutsItems.forEach(checkoutsItem => { const licensedItemId = checkoutsItem["licensedItemId"]; - const licensedItem = licensedItems.find(licItem => licItem["licensedItemId"] === licensedItemId); - const vipModel = vipModels.find(vipMdl => licensedItem && (vipMdl["modelId"] == licensedItem["name"])); + const licensedItem = licensedItems[licensedItemId]; let start = ""; let duration = ""; if (checkoutsItem["startedAt"]) { @@ -146,11 +143,11 @@ qx.Class.define("osparc.desktop.credits.CheckoutsTableModel", { data.push({ [checkoutsCols.CHECKOUT_ID.id]: checkoutsItem["licensedItemCheckoutId"], [checkoutsCols.ITEM_ID.id]: licensedItemId, - [checkoutsCols.ITEM_LABEL.id]: vipModel ? vipModel["name"] : "unknown model", + [checkoutsCols.ITEM_LABEL.id]: licensedItem ? 
licensedItem.getDisplayName() : "unknown model", [checkoutsCols.START.id]: start, [checkoutsCols.DURATION.id]: duration, [checkoutsCols.SEATS.id]: checkoutsItem["numOfSeats"], - [checkoutsCols.USER.id]: checkoutsItem["userId"], + [checkoutsCols.USER.id]: checkoutsItem["userEmail"], }); }); return data; diff --git a/services/static-webserver/client/source/class/osparc/desktop/credits/CreditsIndicatorButton.js b/services/static-webserver/client/source/class/osparc/desktop/credits/CreditsIndicatorButton.js index 37092f5fd47..25ae3083b79 100644 --- a/services/static-webserver/client/source/class/osparc/desktop/credits/CreditsIndicatorButton.js +++ b/services/static-webserver/client/source/class/osparc/desktop/credits/CreditsIndicatorButton.js @@ -84,11 +84,10 @@ qx.Class.define("osparc.desktop.credits.CreditsIndicatorButton", { }, __handleOutsideEvent: function(event) { - const offset = 30; - if ( - !osparc.utils.Utils.isMouseOnElement(this.__creditsContainer, event, offset) && - !osparc.utils.Utils.isMouseOnElement(this, event, offset) - ) { + const offset = 0; + const onContainer = osparc.utils.Utils.isMouseOnElement(this.__creditsContainer, event, offset); + const onButton = osparc.utils.Utils.isMouseOnElement(this, event, offset); + if (!onContainer && !onButton) { this.__hideCreditsContainer(); } }, diff --git a/services/static-webserver/client/source/class/osparc/desktop/credits/CreditsPerService.js b/services/static-webserver/client/source/class/osparc/desktop/credits/CreditsPerService.js index 182924d225b..e08659fa8f3 100644 --- a/services/static-webserver/client/source/class/osparc/desktop/credits/CreditsPerService.js +++ b/services/static-webserver/client/source/class/osparc/desktop/credits/CreditsPerService.js @@ -89,8 +89,9 @@ qx.Class.define("osparc.desktop.credits.CreditsPerService", { this._add(uiEntry); }); } else { - const nothingFound = new qx.ui.basic.Label(this.tr("No usage found")).set({ - font: "text-14" + const nothingFound = new qx.ui.basic.Label(this.tr("No usage records found")).set({ + font: "text-14", + padding: 20, }); this._add(nothingFound); } diff --git a/services/static-webserver/client/source/class/osparc/desktop/credits/CreditsServiceListItem.js b/services/static-webserver/client/source/class/osparc/desktop/credits/CreditsServiceListItem.js index b12da620b97..0e5181f5631 100644 --- a/services/static-webserver/client/source/class/osparc/desktop/credits/CreditsServiceListItem.js +++ b/services/static-webserver/client/source/class/osparc/desktop/credits/CreditsServiceListItem.js @@ -31,12 +31,13 @@ qx.Class.define("osparc.desktop.credits.CreditsServiceListItem", { const icon = this.getChildControl("icon"); const name = this.getChildControl("title"); - const serviceMetadata = osparc.service.Utils.getLatest(serviceKey); + const serviceMetadata = osparc.store.Services.getLatest(serviceKey); if (serviceMetadata) { - icon.setSource(serviceMetadata["thumbnail"] ? 
serviceMetadata["thumbnail"] : osparc.dashboard.CardBase.PRODUCT_ICON); + const source = osparc.utils.Utils.getIconFromResource(serviceMetadata); + icon.setSource(source); name.setValue(serviceMetadata["name"]); } else { - icon.setSource(osparc.dashboard.CardBase.PRODUCT_ICON); + icon.setSource(osparc.dashboard.CardBase.PRODUCT_THUMBNAIL); const serviceName = serviceKey.split("/").pop(); name.setValue(serviceName); } diff --git a/services/static-webserver/client/source/class/osparc/desktop/credits/CreditsSummary.js b/services/static-webserver/client/source/class/osparc/desktop/credits/CreditsSummary.js index 45461b39b9f..b6fa37b07ac 100644 --- a/services/static-webserver/client/source/class/osparc/desktop/credits/CreditsSummary.js +++ b/services/static-webserver/client/source/class/osparc/desktop/credits/CreditsSummary.js @@ -27,6 +27,7 @@ qx.Class.define("osparc.desktop.credits.CreditsSummary", { appearance: "floating-menu", padding: 8, maxWidth: this.self().WIDTH, + minHeight: 150, zIndex: osparc.utils.Utils.FLOATING_Z_INDEX, }); osparc.utils.Utils.setIdToWidget(this, "creditsSummary"); diff --git a/services/static-webserver/client/source/class/osparc/desktop/credits/PurchasesTable.js b/services/static-webserver/client/source/class/osparc/desktop/credits/PurchasesTable.js index 8cd31c0287c..8eeb4d7a28b 100644 --- a/services/static-webserver/client/source/class/osparc/desktop/credits/PurchasesTable.js +++ b/services/static-webserver/client/source/class/osparc/desktop/credits/PurchasesTable.js @@ -87,7 +87,7 @@ qx.Class.define("osparc.desktop.credits.PurchasesTable", { id: "user", column: 7, label: qx.locale.Manager.tr("User"), - width: 100 + width: 150 }, } } diff --git a/services/static-webserver/client/source/class/osparc/desktop/credits/PurchasesTableModel.js b/services/static-webserver/client/source/class/osparc/desktop/credits/PurchasesTableModel.js index b1b054071ce..97502433ff2 100644 --- a/services/static-webserver/client/source/class/osparc/desktop/credits/PurchasesTableModel.js +++ b/services/static-webserver/client/source/class/osparc/desktop/credits/PurchasesTableModel.js @@ -121,28 +121,25 @@ qx.Class.define("osparc.desktop.credits.PurchasesTableModel", { return Promise.all([ licensedItemsStore.getLicensedItems(), licensedItemsStore.getPurchasedLicensedItems(walletId, urlParams), - licensedItemsStore.getVipModels(), ]) .then(values => { const licensedItems = values[0]; const purchasesItems = values[1]; - const vipModels = values[2]; const data = []; const purchasesCols = osparc.desktop.credits.PurchasesTable.COLS; purchasesItems.forEach(purchasesItem => { const licensedItemId = purchasesItem["licensedItemId"]; - const licensedItem = licensedItems.find(licItem => licItem["licensedItemId"] === licensedItemId); - const vipModel = vipModels.find(vipMdl => licensedItem && (vipMdl["modelId"] == licensedItem["name"])); + const licensedItem = licensedItems[licensedItemId]; data.push({ [purchasesCols.PURCHASE_ID.id]: purchasesItem["licensedItemPurchaseId"], [purchasesCols.ITEM_ID.id]: licensedItemId, - [purchasesCols.ITEM_LABEL.id]: vipModel ? vipModel["name"] : "unknown model", + [purchasesCols.ITEM_LABEL.id]: licensedItem ? licensedItem.getDisplayName() : "unknown model", [purchasesCols.START.id]: osparc.utils.Utils.formatDateAndTime(new Date(purchasesItem["startAt"])), [purchasesCols.END.id]: osparc.utils.Utils.formatDateAndTime(new Date(purchasesItem["expireAt"])), [purchasesCols.SEATS.id]: purchasesItem["numOfSeats"], [purchasesCols.COST.id]: purchasesItem["pricingUnitCost"] ? 
("-" + parseFloat(purchasesItem["pricingUnitCost"]).toFixed(2)) : "", // show it negative - [purchasesCols.USER.id]: purchasesItem["purchasedByUser"], + [purchasesCols.USER.id]: purchasesItem["userEmail"], }); }); return data; diff --git a/services/static-webserver/client/source/class/osparc/desktop/organizations/MembersList.js b/services/static-webserver/client/source/class/osparc/desktop/organizations/MembersList.js index 2f0a016a8df..c7f8ca40578 100644 --- a/services/static-webserver/client/source/class/osparc/desktop/organizations/MembersList.js +++ b/services/static-webserver/client/source/class/osparc/desktop/organizations/MembersList.js @@ -243,8 +243,8 @@ qx.Class.define("osparc.desktop.organizations.MembersList", { const canIDelete = organization.getAccessRights()["delete"]; const introText = canIWrite ? - this.tr("You can add new members and change their roles.") : - this.tr("You can't add new members to this Organization. Please contact an Administrator or Manager."); + this.tr("You can add new members and assign roles.") : + this.tr("You cannot add new members to this Organization. Please contact an Administrator or Manager."); this.__introLabel.setValue(introText); this.__addMembersButton.set({ @@ -342,17 +342,13 @@ qx.Class.define("osparc.desktop.organizations.MembersList", { request .then(newMember => { const text = newMemberIdentifier + this.tr(" successfully added"); - osparc.FlashMessenger.getInstance().logAs(text); + osparc.FlashMessenger.logAs(text); this.__reloadOrgMembers(); // push 'NEW_ORGANIZATION' notification osparc.notification.Notifications.postNewOrganization(newMember.getUserId(), orgId); }) - .catch(err => { - const errorMessage = err["message"] || this.tr("Something went wrong adding the user"); - osparc.FlashMessenger.getInstance().logAs(errorMessage, "ERROR"); - console.error(err); - }); + .catch(err => osparc.FlashMessenger.logError(err, this.tr("Something went wrong while adding the user"))); }, __promoteToUser: function(listedMember) { @@ -364,12 +360,12 @@ qx.Class.define("osparc.desktop.organizations.MembersList", { const groupsStore = osparc.store.Groups.getInstance(); groupsStore.patchMember(this.__currentOrg.getGroupId(), listedMember["id"], newAccessRights) .then(() => { - osparc.FlashMessenger.getInstance().logAs(this.tr(`Successfully promoted to ${osparc.data.Roles.ORG[1].label}`)); + osparc.FlashMessenger.logAs(this.tr(`Successfully promoted to ${osparc.data.Roles.ORG[1].label}`)); this.__reloadOrgMembers(); }) .catch(err => { - osparc.FlashMessenger.getInstance().logAs(this.tr("Something went wrong promoting to ") + osparc.data.Roles.ORG[1].label, "ERROR"); - console.error(err); + const msg = this.tr("Something went wrong while promoting to ") + osparc.data.Roles.ORG[1].label; + osparc.FlashMessenger.logError(err, msg); }); }, @@ -387,12 +383,12 @@ qx.Class.define("osparc.desktop.organizations.MembersList", { if (msg === undefined) { msg = this.tr(`Successfully demoted to ${osparc.data.Roles.ORG[0].label}`); } - osparc.FlashMessenger.getInstance().logAs(msg); + osparc.FlashMessenger.logAs(msg); this.__reloadOrgMembers(); }) .catch(err => { - osparc.FlashMessenger.getInstance().logAs(this.tr("Something went wrong demoting to ") + osparc.data.Roles.ORG[0].label, "ERROR"); - console.error(err); + const errorMsg = this.tr("Something went wrong while demoting to ") + osparc.data.Roles.ORG[0].label; + osparc.FlashMessenger.logError(err, errorMsg); }); }, @@ -407,12 +403,12 @@ qx.Class.define("osparc.desktop.organizations.MembersList", { const 
groupsStore = osparc.store.Groups.getInstance(); groupsStore.patchAccessRights(orgId, userId, newAccessRights) .then(() => { - osparc.FlashMessenger.getInstance().logAs(this.tr(`Successfully promoted to ${osparc.data.Roles.ORG[2].label}`)); + osparc.FlashMessenger.logAs(this.tr(`Successfully promoted to ${osparc.data.Roles.ORG[2].label}`)); this.__reloadOrgMembers(); }) .catch(err => { - osparc.FlashMessenger.getInstance().logAs(this.tr("Something went wrong promoting to ") + osparc.data.Roles.ORG[2].label, "ERROR"); - console.error(err); + const msg = this.tr("Something went wrong while promoting to ") + osparc.data.Roles.ORG[2].label; + osparc.FlashMessenger.logError(err, msg); }); }, @@ -427,12 +423,12 @@ qx.Class.define("osparc.desktop.organizations.MembersList", { const groupsStore = osparc.store.Groups.getInstance(); groupsStore.patchAccessRights(orgId, userId, newAccessRights) .then(() => { - osparc.FlashMessenger.getInstance().logAs(this.tr(`Successfully promoted to ${osparc.data.Roles.ORG[3].label}`)); + osparc.FlashMessenger.logAs(this.tr(`Successfully promoted to ${osparc.data.Roles.ORG[3].label}`)); this.__reloadOrgMembers(); }) .catch(err => { - osparc.FlashMessenger.getInstance().logAs(this.tr("Something went wrong promoting to ") + osparc.data.Roles.ORG[3].label, "ERROR"); - console.error(err); + const msg = this.tr("Something went wrong while promoting to ") + osparc.data.Roles.ORG[3].label; + osparc.FlashMessenger.logError(err, msg); }); }, @@ -447,12 +443,12 @@ qx.Class.define("osparc.desktop.organizations.MembersList", { const groupsStore = osparc.store.Groups.getInstance(); groupsStore.patchAccessRights(orgId, userId, newAccessRights) .then(() => { - osparc.FlashMessenger.getInstance().logAs(this.tr(`Successfully demoted to ${osparc.data.Roles.ORG[1].label}`)); + osparc.FlashMessenger.logAs(this.tr(`Successfully demoted to ${osparc.data.Roles.ORG[1].label}`)); this.__reloadOrgMembers(); }) .catch(err => { - osparc.FlashMessenger.getInstance().logAs(this.tr("Something went wrong demoting to ") + osparc.data.Roles.ORG[1].label, "ERROR"); - console.error(err); + const msg = this.tr("Something went wrong while demoting to ") + osparc.data.Roles.ORG[1].label; + osparc.FlashMessenger.logError(err, msg); }); }, @@ -467,12 +463,12 @@ qx.Class.define("osparc.desktop.organizations.MembersList", { const groupsStore = osparc.store.Groups.getInstance(); groupsStore.patchAccessRights(orgId, userId, newAccessRights) .then(() => { - osparc.FlashMessenger.getInstance().logAs(this.tr(`Successfully demoted to ${osparc.data.Roles.ORG[3].label}`)); + osparc.FlashMessenger.logAs(this.tr(`Successfully demoted to ${osparc.data.Roles.ORG[3].label}`)); this.__reloadOrgMembers(); }) .catch(err => { - osparc.FlashMessenger.getInstance().logAs(this.tr("Something went wrong demoting to ") + osparc.data.Roles.ORG[3].label, "ERROR"); - console.error(err); + const msg = this.tr("Something went wrong while demoting to ") + osparc.data.Roles.ORG[3].label; + osparc.FlashMessenger.logError(err, msg); }); }, @@ -480,12 +476,12 @@ qx.Class.define("osparc.desktop.organizations.MembersList", { const groupsStore = osparc.store.Groups.getInstance(); return groupsStore.removeMember(this.__currentOrg.getGroupId(), listedMember["id"]) .then(() => { - osparc.FlashMessenger.getInstance().logAs(listedMember["name"] + this.tr(" successfully removed")); + osparc.FlashMessenger.logAs(listedMember["name"] + this.tr(" successfully removed")); this.__reloadOrgMembers(); }) .catch(err => { - 
osparc.FlashMessenger.getInstance().logAs(this.tr("Something went wrong removing ") + listedMember["name"], "ERROR"); - console.error(err); + const msg = this.tr("Something went wrong while removing ") + listedMember["name"]; + osparc.FlashMessenger.logError(err, msg); }); }, @@ -512,7 +508,7 @@ qx.Class.define("osparc.desktop.organizations.MembersList", { } else if (isThereAnyManager) { rUSure += `
There is no ${osparc.data.Roles.ORG[3].label} in this Organization.`; } - rUSure += "

" + this.tr("If you Leave, the page will be reloaded."); + rUSure += "

" + this.tr("If you leave, the page will reload."); const confirmationWin = new osparc.ui.window.Confirmation(rUSure).set({ caption: this.tr("Leave Organization"), confirmText: this.tr("Leave"), diff --git a/services/static-webserver/client/source/class/osparc/desktop/organizations/OrganizationDetails.js b/services/static-webserver/client/source/class/osparc/desktop/organizations/OrganizationDetails.js index 8b3781d7a12..bfc757d2880 100644 --- a/services/static-webserver/client/source/class/osparc/desktop/organizations/OrganizationDetails.js +++ b/services/static-webserver/client/source/class/osparc/desktop/organizations/OrganizationDetails.js @@ -71,7 +71,7 @@ qx.Class.define("osparc.desktop.organizations.OrganizationDetails", { const titleLayout = this.__titleLayout = new qx.ui.container.Composite(new qx.ui.layout.HBox(5)); const prevBtn = new qx.ui.form.Button().set({ - toolTipText: this.tr("Back to Organizations list"), + toolTipText: this.tr("Return to Organizations list"), icon: "@FontAwesome5Solid/arrow-left/20", backgroundColor: "transparent" }); @@ -115,14 +115,14 @@ qx.Class.define("osparc.desktop.organizations.OrganizationDetails", { const thumbnail = orgEditor.getThumbnail(); osparc.store.Groups.getInstance().patchOrganization(groupId, name, description, thumbnail) .then(() => { - osparc.FlashMessenger.getInstance().logAs(name + this.tr(" successfully edited")); + osparc.FlashMessenger.logAs(name + this.tr(" successfully edited")); button.setFetching(false); win.close(); }) .catch(err => { - osparc.FlashMessenger.getInstance().logAs(this.tr("Something went wrong editing ") + name, "ERROR"); + const msg = this.tr("Something went wrong while editing ") + name; + osparc.FlashMessenger.logError(err, msg); button.setFetching(false); - console.error(err); }); }, diff --git a/services/static-webserver/client/source/class/osparc/desktop/organizations/OrganizationsList.js b/services/static-webserver/client/source/class/osparc/desktop/organizations/OrganizationsList.js index c86917fed36..dd488f9b328 100644 --- a/services/static-webserver/client/source/class/osparc/desktop/organizations/OrganizationsList.js +++ b/services/static-webserver/client/source/class/osparc/desktop/organizations/OrganizationsList.js @@ -234,8 +234,8 @@ qx.Class.define("osparc.desktop.organizations.OrganizationsList", { this.reloadOrganizations(); }) .catch(err => { - osparc.FlashMessenger.getInstance().logAs(this.tr("Something went wrong deleting ") + name, "ERROR"); - console.error(err); + const errorMsg = this.tr("Something went wrong while deleting ") + name; + osparc.FlashMessenger.logError(err, errorMsg); }) .finally(() => { win.close(); @@ -251,18 +251,16 @@ qx.Class.define("osparc.desktop.organizations.OrganizationsList", { const groupsStore = osparc.store.Groups.getInstance(); groupsStore.postOrganization(name, description, thumbnail) .then(org => { - osparc.FlashMessenger.getInstance().logAs(name + this.tr(" successfully created")); - button.setFetching(false); + osparc.FlashMessenger.logAs(name + this.tr(" successfully created")); // open it this.reloadOrganizations(org.getGroupId()); }) .catch(err => { - const errorMessage = err["message"] || this.tr("Something went wrong creating ") + name; - osparc.FlashMessenger.getInstance().logAs(errorMessage, "ERROR"); - button.setFetching(false); - console.error(err); + const msg = this.tr("Something went wrong while creating ") + name; + osparc.FlashMessenger.logError(err, msg); }) .finally(() => { + button.setFetching(false); win.close(); }); }, @@ -274,14 
+272,14 @@ qx.Class.define("osparc.desktop.organizations.OrganizationsList", { const thumbnail = orgEditor.getThumbnail(); osparc.store.Groups.getInstance().patchOrganization(groupId, name, description, thumbnail) .then(() => { - osparc.FlashMessenger.getInstance().logAs(name + this.tr(" successfully edited")); + osparc.FlashMessenger.logAs(name + this.tr(" successfully edited")); button.setFetching(false); win.close(); }) .catch(err => { - osparc.FlashMessenger.getInstance().logAs(this.tr("Something went wrong editing ") + name, "ERROR"); + const msg = this.tr("Something went wrong while editing ") + name; + osparc.FlashMessenger.logError(err, msg); button.setFetching(false); - console.error(err); }); } } diff --git a/services/static-webserver/client/source/class/osparc/desktop/organizations/ServicesList.js b/services/static-webserver/client/source/class/osparc/desktop/organizations/ServicesList.js index fec8d6d85e6..2f0635eba58 100644 --- a/services/static-webserver/client/source/class/osparc/desktop/organizations/ServicesList.js +++ b/services/static-webserver/client/source/class/osparc/desktop/organizations/ServicesList.js @@ -128,7 +128,7 @@ qx.Class.define("osparc.desktop.organizations.ServicesList", { const orgServiceCopy = osparc.utils.Utils.deepCloneObject(orgService); orgServiceCopy["orgId"] = groupId; if (orgServiceCopy["thumbnail"] === null) { - orgServiceCopy["thumbnail"] = osparc.dashboard.CardBase.PRODUCT_ICON; + orgServiceCopy["thumbnail"] = osparc.dashboard.CardBase.PRODUCT_THUMBNAIL; } servicesModel.append(qx.data.marshal.Json.createModel(orgServiceCopy)); }); diff --git a/services/static-webserver/client/source/class/osparc/desktop/paymentMethods/PaymentMethodDetails.js b/services/static-webserver/client/source/class/osparc/desktop/paymentMethods/PaymentMethodDetails.js index 49dbab989ed..936f032db04 100644 --- a/services/static-webserver/client/source/class/osparc/desktop/paymentMethods/PaymentMethodDetails.js +++ b/services/static-webserver/client/source/class/osparc/desktop/paymentMethods/PaymentMethodDetails.js @@ -47,7 +47,7 @@ qx.Class.define("osparc.desktop.paymentMethods.PaymentMethodDetails", { members: { __buildLayout: function(paymentMethodData) { [ - [this.tr("Holder name"), paymentMethodData["cardHolderName"]], + [this.tr("Card Holder name"), paymentMethodData["cardHolderName"]], [this.tr("Type"), paymentMethodData["cardType"]], [this.tr("Number"), paymentMethodData["cardNumberMasked"]], [this.tr("Expiration date"), paymentMethodData["expirationMonth"] + "/" + paymentMethodData["expirationYear"]] diff --git a/services/static-webserver/client/source/class/osparc/desktop/paymentMethods/PaymentMethodListItem.js b/services/static-webserver/client/source/class/osparc/desktop/paymentMethods/PaymentMethodListItem.js index 9342f10f066..90c3348826e 100644 --- a/services/static-webserver/client/source/class/osparc/desktop/paymentMethods/PaymentMethodListItem.js +++ b/services/static-webserver/client/source/class/osparc/desktop/paymentMethods/PaymentMethodListItem.js @@ -182,7 +182,7 @@ qx.Class.define("osparc.desktop.paymentMethods.PaymentMethodListItem", { }, __deletePressed: function() { - const msg = this.tr("Are you sure you want to delete the Payment Method?"); + const msg = this.tr("Are you sure you want to delete this Payment Method?"); const win = new osparc.ui.window.Confirmation(msg).set({ caption: this.tr("Delete Payment Method"), confirmText: this.tr("Delete"), diff --git 
a/services/static-webserver/client/source/class/osparc/desktop/paymentMethods/PaymentMethods.js b/services/static-webserver/client/source/class/osparc/desktop/paymentMethods/PaymentMethods.js index fa90ceddbe2..14b64b3a8ea 100644 --- a/services/static-webserver/client/source/class/osparc/desktop/paymentMethods/PaymentMethods.js +++ b/services/static-webserver/client/source/class/osparc/desktop/paymentMethods/PaymentMethods.js @@ -118,7 +118,7 @@ qx.Class.define("osparc.desktop.paymentMethods.PaymentMethods", { __windowClosed: function(paymentMethodId) { const msg = this.tr("The window was closed. Try again and follow the instructions inside the opened window."); - osparc.FlashMessenger.getInstance().logAs(msg, "WARNING"); + osparc.FlashMessenger.logAs(msg, "WARNING"); this.__cancelPaymentMethod(paymentMethodId); }, @@ -165,11 +165,8 @@ qx.Class.define("osparc.desktop.paymentMethods.PaymentMethods", { }) .finally(() => this.__fetchingMsg.setVisibility("excluded")) .catch(err => { - console.error(err) - osparc.FlashMessenger.getInstance().logAs( - this.tr("We could not retrieve your saved payment methods. Please try again later."), - "ERROR" - ); + const msg = this.tr("Could not retrieve your saved payment methods. Please try again later."); + osparc.FlashMessenger.logError(err, msg); }); }, diff --git a/services/static-webserver/client/source/class/osparc/desktop/preferences/Preferences.js b/services/static-webserver/client/source/class/osparc/desktop/preferences/Preferences.js deleted file mode 100644 index 31f8de5590b..00000000000 --- a/services/static-webserver/client/source/class/osparc/desktop/preferences/Preferences.js +++ /dev/null @@ -1,73 +0,0 @@ -/* ************************************************************************ - - osparc - the simcore frontend - - https://osparc.io - - Copyright: - 2018 IT'IS Foundation, https://itis.swiss - - License: - MIT: https://opensource.org/licenses/MIT - - Authors: - * Odei Maiz (odeimaiz) - -************************************************************************ */ - -qx.Class.define("osparc.desktop.preferences.Preferences", { - extend: osparc.ui.window.TabbedView, - - construct: function() { - this.base(arguments); - - this.__addGeneralSettings(); - this.__addConfirmationSettings(); - if (osparc.product.Utils.showPreferencesTokens()) { - this.__addTokensPage(); - } - if (osparc.data.Permissions.getInstance().canDo("user.tag")) { - this.__addTagsPage(); - } - }, - - members: { - __tagsPage: null, - - __addGeneralSettings: function() { - const title = this.tr("General Settings"); - const iconSrc = "@FontAwesome5Solid/cogs/22"; - const generalPage = new osparc.desktop.preferences.pages.GeneralPage(); - this.addTab(title, iconSrc, generalPage); - }, - - __addConfirmationSettings: function() { - const title = this.tr("Confirmation Settings"); - const iconSrc = "@FontAwesome5Solid/question-circle/22"; - const confirmPage = new osparc.desktop.preferences.pages.ConfirmationsPage(); - this.addTab(title, iconSrc, confirmPage); - }, - - __addTokensPage: function() { - const title = this.tr("API Keys/Tokens"); - const iconSrc = "@FontAwesome5Solid/exchange-alt/22"; - const tokensPage = new osparc.desktop.preferences.pages.TokensPage(); - this.addTab(title, iconSrc, tokensPage); - }, - - __addTagsPage: function() { - const title = this.tr("Create/Edit Tags"); - const iconSrc = "@FontAwesome5Solid/tags/22"; - const tagsPage = new osparc.desktop.preferences.pages.TagsPage(); - const page = this.__tagsPage = this.addTab(title, iconSrc, tagsPage); - 
osparc.utils.Utils.setIdToWidget(page.getChildControl("button"), "preferencesTagsTabBtn"); - }, - - openTags: function() { - if (this.__tagsPage) { - this._openPage(this.__tagsPage); - return true; - } - }, - } -}); diff --git a/services/static-webserver/client/source/class/osparc/desktop/preferences/PreferencesWindow.js b/services/static-webserver/client/source/class/osparc/desktop/preferences/PreferencesWindow.js deleted file mode 100644 index 889a1e54c2e..00000000000 --- a/services/static-webserver/client/source/class/osparc/desktop/preferences/PreferencesWindow.js +++ /dev/null @@ -1,54 +0,0 @@ -/* ************************************************************************ - - osparc - the simcore frontend - - https://osparc.io - - Copyright: - 2018 IT'IS Foundation, https://itis.swiss - - License: - MIT: https://opensource.org/licenses/MIT - - Authors: - * Pedro Crespo (pcrespov) - -************************************************************************ */ - -qx.Class.define("osparc.desktop.preferences.PreferencesWindow", { - extend: osparc.ui.window.TabbedWindow, - - construct: function() { - this.base(arguments, "preferences", this.tr("Preferences")); - - const closeBtn = this.getChildControl("close-button"); - osparc.utils.Utils.setIdToWidget(closeBtn, "preferencesWindowCloseBtn"); - - const width = 750; - const height = 660; - this.set({ - width, - height - }); - - const preferences = this.__preferences = new osparc.desktop.preferences.Preferences(); - this._setTabbedView(preferences); - }, - - statics: { - openWindow: function() { - const preferencesWindow = new osparc.desktop.preferences.PreferencesWindow(); - preferencesWindow.center(); - preferencesWindow.open(); - return preferencesWindow; - } - }, - - members: { - __preferences: null, - - openTags: function() { - return this.__preferences.openTags(); - } - } -}); diff --git a/services/static-webserver/client/source/class/osparc/desktop/preferences/pages/BasePage.js b/services/static-webserver/client/source/class/osparc/desktop/preferences/pages/BasePage.js index a32ec216d70..449546236ca 100644 --- a/services/static-webserver/client/source/class/osparc/desktop/preferences/pages/BasePage.js +++ b/services/static-webserver/client/source/class/osparc/desktop/preferences/pages/BasePage.js @@ -30,7 +30,17 @@ qx.Class.define("osparc.desktop.preferences.pages.BasePage", { paddingLeft: 15 }); - this.__showLabelOnTab(title) + this.__showLabelOnTab(title); + + const tabButton = this.getChildControl("button"); + if (tabButton.getIcon() && tabButton.getIcon().includes(".svg")) { + tabButton.getChildControl("icon").set({ + minWidth: 24, + minHeight: 24, + scale: true, + }); + osparc.ui.basic.SVGImage.setColorToImage(tabButton.getChildControl("icon"), "text"); + } }, members: { diff --git a/services/static-webserver/client/source/class/osparc/desktop/preferences/pages/ConfirmationsPage.js b/services/static-webserver/client/source/class/osparc/desktop/preferences/pages/ConfirmationsPage.js index 7b9dc943e63..c883c6bd37f 100644 --- a/services/static-webserver/client/source/class/osparc/desktop/preferences/pages/ConfirmationsPage.js +++ b/services/static-webserver/client/source/class/osparc/desktop/preferences/pages/ConfirmationsPage.js @@ -114,7 +114,7 @@ qx.Class.define("osparc.desktop.preferences.pages.ConfirmationsPage", { cbConfirmStopNode.addListener("changeValue", e => osparc.Preferences.patchPreferenceField("confirmStopNode", cbConfirmStopNode, e.getData())); box.add(cbConfirmStopNode); - const cbSnapNodeToGrid = new 
qx.ui.form.CheckBox(this.tr("Snap Node to grid")); + const cbSnapNodeToGrid = new qx.ui.form.CheckBox(this.tr("Snap Node to Grid")); preferencesSettings.bind("snapNodeToGrid", cbSnapNodeToGrid, "value"); cbSnapNodeToGrid.addListener("changeValue", e => osparc.Preferences.patchPreferenceField("snapNodeToGrid", cbSnapNodeToGrid, e.getData())); box.add(cbSnapNodeToGrid); diff --git a/services/static-webserver/client/source/class/osparc/desktop/preferences/pages/GeneralPage.js b/services/static-webserver/client/source/class/osparc/desktop/preferences/pages/GeneralPage.js index 70095507e49..6f02c5a65b9 100644 --- a/services/static-webserver/client/source/class/osparc/desktop/preferences/pages/GeneralPage.js +++ b/services/static-webserver/client/source/class/osparc/desktop/preferences/pages/GeneralPage.js @@ -137,7 +137,7 @@ qx.Class.define("osparc.desktop.preferences.pages.GeneralPage", { __addLowDiskSpaceSetting: function() { const box = osparc.ui.window.TabbedView.createSectionBox(this.tr("Low Disk Space Threshold")); - const label = osparc.ui.window.TabbedView.createHelpLabel(this.tr("Set the warning Threshold for low Disk Space availability."), "text-13-italic"); + const label = osparc.ui.window.TabbedView.createHelpLabel(this.tr("Set the warning Threshold for Low Disk Space availability"), "text-13-italic"); box.add(label); const form = new qx.ui.form.Form(); const diskUsageSpinner = new qx.ui.form.Spinner().set({ diff --git a/services/static-webserver/client/source/class/osparc/desktop/preferences/pages/TagsPage.js b/services/static-webserver/client/source/class/osparc/desktop/preferences/pages/TagsPage.js index f69ae38e4ee..6ef62565d05 100644 --- a/services/static-webserver/client/source/class/osparc/desktop/preferences/pages/TagsPage.js +++ b/services/static-webserver/client/source/class/osparc/desktop/preferences/pages/TagsPage.js @@ -26,64 +26,63 @@ qx.Class.define("osparc.desktop.preferences.pages.TagsPage", { this._add(new qx.ui.core.Spacer(null, 10)); - this.__container = new qx.ui.container.Composite(new qx.ui.layout.VBox(10)); - this.__container.set({ - paddingLeft: 10 - }); - const scroll = new qx.ui.container.Scroll(this.__container); - this._add(scroll); - - this.__createComponents(); + this.__renderLayout(); }, members: { - __container: null, - __addTagButton: null, - __tagItems: null, + __tagsContainer: null, + + __renderLayout: async function() { + // Tags + this.__tagsContainer = new qx.ui.container.Composite(new qx.ui.layout.VBox(10)); + this.__tagsContainer.set({ + paddingLeft: 10 + }); + const tagContainerScroll = new qx.ui.container.Scroll(this.__tagsContainer); + this._add(tagContainerScroll, { + flex: 1 + }); - __createComponents: function() { - this.__addTagButton = new qx.ui.form.Button().set({ + const tags = osparc.store.Tags.getInstance().getTags(); + for (const tag of tags) { + await osparc.store.Tags.getInstance().fetchAccessRights(tag); + } + const tagItems = tags.map(tag => new osparc.form.tag.TagItem().set({tag})); + tagItems.forEach(tagItem => { + this.__tagsContainer.add(tagItem); + this.__attachTagItemEvents(tagItem); + }); + + // New tag Button + const addTagButton = new qx.ui.form.Button().set({ appearance: "form-button-outlined", label: this.tr("New Tag"), icon: "@FontAwesome5Solid/plus/14" }); - osparc.utils.Utils.setIdToWidget(this.__addTagButton, "addTagBtn"); - const tags = osparc.store.Tags.getInstance().getTags(); - this.__tagItems = tags.map(tag => new osparc.form.tag.TagItem().set({tag})); - this.__renderLayout(); - 
this.__attachEventHandlers(); - }, - - __renderLayout: function() { - this.__container.removeAll(); + osparc.utils.Utils.setIdToWidget(addTagButton, "addTagBtn"); + addTagButton.addListener("execute", () => { + const newItem = new osparc.form.tag.TagItem().set({ + mode: osparc.form.tag.TagItem.modes.EDIT + }); + this.__tagsContainer.add(newItem); + this.__attachTagItemEvents(newItem); - // Print tag items - this.__tagItems.forEach(tagItem => this.__container.add(tagItem)); + // scroll down + const height = tagContainerScroll.getSizeHint().height; + tagContainerScroll.scrollToY(height); + }); // New tag button const buttonContainer = new qx.ui.container.Composite(new qx.ui.layout.HBox().set({ alignX: "center" })); - buttonContainer.add(new qx.ui.core.Spacer(null, 10)); - buttonContainer.add(this.__addTagButton); - this.__container.add(buttonContainer); - }, - - __attachEventHandlers: function() { - this.__addTagButton.addListener("execute", () => { - const itemCount = this.__container.getChildren().length; - const newItem = new osparc.form.tag.TagItem().set({ - mode: osparc.form.tag.TagItem.modes.EDIT - }); - this.__attachTagItemEvents(newItem); - this.__container.addAt(newItem, Math.max(0, itemCount - 1)); - }); - this.__tagItems.forEach(tagItem => this.__attachTagItemEvents(tagItem)); + buttonContainer.add(addTagButton); + this._add(buttonContainer); }, __attachTagItemEvents: function(tagItem) { - tagItem.addListener("cancelNewTag", e => this.__container.remove(e.getTarget()), this); - tagItem.addListener("deleteTag", e => this.__container.remove(e.getTarget())); + tagItem.addListener("cancelNewTag", e => this.__tagsContainer.remove(e.getTarget()), this); + tagItem.addListener("deleteTag", e => this.__tagsContainer.remove(e.getTarget())); } } }); diff --git a/services/static-webserver/client/source/class/osparc/desktop/preferences/pages/TokensPage.js b/services/static-webserver/client/source/class/osparc/desktop/preferences/pages/TokensPage.js index 6d5ae0e5258..b22542d6d4d 100644 --- a/services/static-webserver/client/source/class/osparc/desktop/preferences/pages/TokensPage.js +++ b/services/static-webserver/client/source/class/osparc/desktop/preferences/pages/TokensPage.js @@ -97,11 +97,7 @@ qx.Class.define("osparc.desktop.preferences.pages.TokensPage", { showAPIKeyWindow.center(); showAPIKeyWindow.open(); }) - .catch(err => { - const errorMsg = err.message || this.tr("Cannot create API Key"); - osparc.FlashMessenger.getInstance().logAs(errorMsg, "ERROR"); - console.error(err); - }) + .catch(err => osparc.FlashMessenger.logError(err, this.tr("Cannot create API Key"))) .finally(() => this.__requestAPIKeyBtn.setFetching(false)); }, this); createAPIKeyWindow.open(); @@ -160,11 +156,7 @@ qx.Class.define("osparc.desktop.preferences.pages.TokensPage", { }; osparc.data.Resources.fetch("apiKeys", "delete", params) .then(() => this.__rebuildAPIKeysList()) - .catch(err => { - const errorMsg = err.message || this.tr("Cannot delete API Key"); - osparc.FlashMessenger.getInstance().logAs(errorMsg, "ERROR"); - console.error(err) - }); + .catch(err => osparc.FlashMessenger.logError(err, this.tr("Cannot delete API Key"))); } }, this); }, @@ -181,12 +173,12 @@ qx.Class.define("osparc.desktop.preferences.pages.TokensPage", { __createTokensSection: function() { // layout - const box = osparc.ui.window.TabbedView.createSectionBox(this.tr("External Service Tokens")); + const box = osparc.ui.window.TabbedView.createSectionBox(this.tr("API Tokens for External Services")); - const label = 
osparc.ui.window.TabbedView.createHelpLabel(this.tr("Enter the API tokens to access external services.")); + const label = osparc.ui.window.TabbedView.createHelpLabel(this.tr("Provide the API tokens needed to access external services.")); box.add(label); - const validTokensGB = this.__validTokensGB = osparc.ui.window.TabbedView.createSectionBox(this.tr("Existing Tokens")); + const validTokensGB = this.__validTokensGB = osparc.ui.window.TabbedView.createSectionBox(this.tr("Current Tokens")); box.add(validTokensGB); const supportedExternalsGB = this.__supportedExternalsGB = osparc.ui.window.TabbedView.createSectionBox(this.tr("Supported services")).set({ @@ -288,7 +280,7 @@ qx.Class.define("osparc.desktop.preferences.pages.TokensPage", { return; } - const msg = this.tr("Do you want to delete the Token?"); + const msg = this.tr("Are you sure you want to delete this token?"); const win = new osparc.ui.window.Confirmation(msg).set({ caption: this.tr("Delete Token"), confirmText: this.tr("Delete"), @@ -334,13 +326,13 @@ qx.Class.define("osparc.desktop.preferences.pages.TokensPage", { const newTokenKey = new qx.ui.form.TextField(); newTokenKey.set({ - placeholder: this.tr("Input your token key") + placeholder: this.tr("Enter your token key") }); form.add(newTokenKey, this.tr("Key")); const newTokenSecret = new qx.ui.form.TextField(); newTokenSecret.set({ - placeholder: this.tr("Input your token secret") + placeholder: this.tr("Enter your token secret") }); form.add(newTokenSecret, this.tr("Secret")); diff --git a/services/static-webserver/client/source/class/osparc/desktop/preferences/window/CreateAPIKey.js b/services/static-webserver/client/source/class/osparc/desktop/preferences/window/CreateAPIKey.js index 3540f23b518..ca5ba07cd70 100644 --- a/services/static-webserver/client/source/class/osparc/desktop/preferences/window/CreateAPIKey.js +++ b/services/static-webserver/client/source/class/osparc/desktop/preferences/window/CreateAPIKey.js @@ -17,7 +17,7 @@ qx.Class.define("osparc.desktop.preferences.window.CreateAPIKey", { extend: osparc.desktop.preferences.window.APIKeyBase, construct: function() { - const caption = this.tr("Create API Key"); + const caption = this.tr("Generate API Key"); const infoText = this.tr("Key names must be unique."); this.base(arguments, caption, infoText); @@ -48,8 +48,8 @@ qx.Class.define("osparc.desktop.preferences.window.CreateAPIKey", { if (date) { // allow only future dates if (new Date() > new Date(date)) { - const msg = this.tr("Choose a future date"); - osparc.FlashMessenger.getInstance().logAs(msg, "WARNING"); + const msg = this.tr("Select a future date"); + osparc.FlashMessenger.logAs(msg, "WARNING"); expirationDate.resetValue(); } else { expirationDate.setDateFormat(dateFormat); diff --git a/services/static-webserver/client/source/class/osparc/desktop/preferences/window/ShowAPIKey.js b/services/static-webserver/client/source/class/osparc/desktop/preferences/window/ShowAPIKey.js index 6c7209d3ecc..8154c1f6296 100644 --- a/services/static-webserver/client/source/class/osparc/desktop/preferences/window/ShowAPIKey.js +++ b/services/static-webserver/client/source/class/osparc/desktop/preferences/window/ShowAPIKey.js @@ -18,7 +18,7 @@ qx.Class.define("osparc.desktop.preferences.window.ShowAPIKey", { construct: function(key, secret, baseUrl) { const caption = this.tr("API Key"); - const infoText = this.tr("For your protection, store your access keys securely and do not share them. 
You will not be able to access the key again once this window is closed."); + const infoText = this.tr("For your security, store your access keys safely. You will not be able to access them again after closing this window."); this.base(arguments, caption, infoText); this.set({ diff --git a/services/static-webserver/client/source/class/osparc/desktop/wallets/MemberListItem.js b/services/static-webserver/client/source/class/osparc/desktop/wallets/MemberListItem.js index 963126fe3f8..90fa5fbfb69 100644 --- a/services/static-webserver/client/source/class/osparc/desktop/wallets/MemberListItem.js +++ b/services/static-webserver/client/source/class/osparc/desktop/wallets/MemberListItem.js @@ -40,7 +40,7 @@ qx.Class.define("osparc.desktop.wallets.MemberListItem", { // highlight me const email = osparc.auth.Data.getInstance().getEmail(); - if (email === value) { + if (value && value.includes(email)) { this.addState("selected"); } }, diff --git a/services/static-webserver/client/source/class/osparc/desktop/wallets/WalletDetails.js b/services/static-webserver/client/source/class/osparc/desktop/wallets/WalletDetails.js index a1dde9756de..d55f6ebd35a 100644 --- a/services/static-webserver/client/source/class/osparc/desktop/wallets/WalletDetails.js +++ b/services/static-webserver/client/source/class/osparc/desktop/wallets/WalletDetails.js @@ -66,7 +66,7 @@ qx.Class.define("osparc.desktop.wallets.WalletDetails", { const titleLayout = this.__titleLayout = new qx.ui.container.Composite(new qx.ui.layout.HBox(5)); const prevBtn = new qx.ui.form.Button().set({ - toolTipText: this.tr("Back to Credit Accounts list"), + toolTipText: this.tr("Return to Credit Accounts list"), icon: "@FontAwesome5Solid/arrow-left/20", backgroundColor: "transparent" }); @@ -120,14 +120,13 @@ qx.Class.define("osparc.desktop.wallets.WalletDetails", { }; osparc.data.Resources.fetch("wallets", "put", params) .then(() => { - osparc.FlashMessenger.getInstance().logAs(name + this.tr(" successfully edited")); + osparc.FlashMessenger.logAs(name + this.tr(" successfully edited")); const wallet = osparc.desktop.credits.Utils.getWallet(walletId); wallet.set(params.data); }) .catch(err => { - console.error(err); - const msg = err.message || (this.tr("Something went wrong editing ") + name); - osparc.FlashMessenger.getInstance().logAs(msg, "ERROR"); + const msg = this.tr("Something went wrong while editing ") + name; + osparc.FlashMessenger.logError(err, msg); }) .finally(() => { button.setFetching(false); diff --git a/services/static-webserver/client/source/class/osparc/desktop/wallets/WalletListItem.js b/services/static-webserver/client/source/class/osparc/desktop/wallets/WalletListItem.js index 2c2a70a7f07..837de69ffe3 100644 --- a/services/static-webserver/client/source/class/osparc/desktop/wallets/WalletListItem.js +++ b/services/static-webserver/client/source/class/osparc/desktop/wallets/WalletListItem.js @@ -81,6 +81,7 @@ qx.Class.define("osparc.desktop.wallets.WalletListItem", { control = new qx.ui.basic.Label().set({ font: "text-14" }); + control.bind("value", control, "toolTipText"); this._add(control, { row: 0, column: 0, @@ -140,9 +141,8 @@ qx.Class.define("osparc.desktop.wallets.WalletListItem", { osparc.data.Resources.fetch("wallets", "put", params) .then(() => found.setStatus(newStatus)) .catch(err => { - console.error(err); - const msg = err.message || (this.tr("Something went wrong updating the state")); - osparc.FlashMessenger.getInstance().logAs(msg, "ERROR"); + const msg = this.tr("Something went wrong while updating the 
state"); + osparc.FlashMessenger.logError(err, msg); }); } }, this); diff --git a/services/static-webserver/client/source/class/osparc/desktop/wallets/WalletsList.js b/services/static-webserver/client/source/class/osparc/desktop/wallets/WalletsList.js index c8f1198eabd..5ed1afa1372 100644 --- a/services/static-webserver/client/source/class/osparc/desktop/wallets/WalletsList.js +++ b/services/static-webserver/client/source/class/osparc/desktop/wallets/WalletsList.js @@ -199,14 +199,13 @@ qx.Class.define("osparc.desktop.wallets.WalletsList", { }; osparc.data.Resources.fetch("wallets", "put", params) .then(() => { - osparc.FlashMessenger.getInstance().logAs(name + this.tr(" successfully edited")); + osparc.FlashMessenger.logAs(name + this.tr(" successfully edited")); const wallet = osparc.desktop.credits.Utils.getWallet(walletId); wallet.set(params.data); }) .catch(err => { - console.error(err); - const msg = err.message || this.tr("Something went wrong updating the Credit Account"); - osparc.FlashMessenger.getInstance().logAs(msg, "ERROR"); + const msg = this.tr("Something went wrong while updating the Credit Account"); + osparc.FlashMessenger.logError(err, msg); }) .finally(() => { walletEditor.setIsFetching(false); diff --git a/services/static-webserver/client/source/class/osparc/editor/MarkdownEditor.js b/services/static-webserver/client/source/class/osparc/editor/MarkdownEditor.js index a8785cebc8a..2d08dc0bcf6 100644 --- a/services/static-webserver/client/source/class/osparc/editor/MarkdownEditor.js +++ b/services/static-webserver/client/source/class/osparc/editor/MarkdownEditor.js @@ -31,7 +31,7 @@ qx.Class.define("osparc.editor.MarkdownEditor", { this.getChildControl("preview-markdown"); this.getChildControl("subtitle").set({ - value: this.tr("Supports Markdown") + value: this.tr("Markdown supported") }); }, diff --git a/services/static-webserver/client/source/class/osparc/editor/ThumbnailEditor.js b/services/static-webserver/client/source/class/osparc/editor/ThumbnailEditor.js index 227ecfe61ff..c345b376263 100644 --- a/services/static-webserver/client/source/class/osparc/editor/ThumbnailEditor.js +++ b/services/static-webserver/client/source/class/osparc/editor/ThumbnailEditor.js @@ -65,7 +65,7 @@ qx.Class.define("osparc.editor.ThumbnailEditor", { sanitizeUrl: function(dirty) { const clean = osparc.wrapper.DOMPurify.getInstance().sanitize(dirty); if ((dirty && dirty !== clean) || (clean !== "" && !osparc.utils.Utils.isValidHttpUrl(clean))) { - osparc.FlashMessenger.getInstance().logAs(qx.locale.Manager.tr("Error checking link"), "WARNING"); + osparc.FlashMessenger.logAs(qx.locale.Manager.tr("Error checking link"), "WARNING"); return null; } return clean; diff --git a/services/static-webserver/client/source/class/osparc/editor/ThumbnailSuggestions.js b/services/static-webserver/client/source/class/osparc/editor/ThumbnailSuggestions.js index 188485676ac..c51dace40c5 100644 --- a/services/static-webserver/client/source/class/osparc/editor/ThumbnailSuggestions.js +++ b/services/static-webserver/client/source/class/osparc/editor/ThumbnailSuggestions.js @@ -209,15 +209,18 @@ qx.Class.define("osparc.editor.ThumbnailSuggestions", { this.setSuggestions(this.__thumbnails); }, - thumbnailTapped: function(thumbnail) { + __thumbnailTapped: function(thumbnail) { + // reset decoration + const unselectedBorderColor = qx.theme.manager.Color.getInstance().resolve("text"); + const unselectedBGColor = qx.theme.manager.Color.getInstance().resolve("fab-background"); 
this.getChildren().forEach(thumbnailImg => { - osparc.utils.Utils.updateBorderColor(thumbnailImg, qx.theme.manager.Color.getInstance().resolve("box-shadow")); - osparc.utils.Utils.addBackground(thumbnailImg, qx.theme.manager.Color.getInstance().resolve("fab-background")); + osparc.utils.Utils.updateBorderColor(thumbnailImg, unselectedBorderColor); + osparc.utils.Utils.addBackground(thumbnailImg, unselectedBGColor); }); - const color = qx.theme.manager.Color.getInstance().resolve("background-selected-dark"); - const bgColor = qx.theme.manager.Color.getInstance().resolve("background-selected"); - osparc.utils.Utils.updateBorderColor(thumbnail, color); - osparc.utils.Utils.addBackground(thumbnail, bgColor); + const selectedBorderColor = qx.theme.manager.Color.getInstance().resolve("strong-main"); + const selectedBGColor = qx.theme.manager.Color.getInstance().resolve("background-selected"); + osparc.utils.Utils.updateBorderColor(thumbnail, selectedBorderColor); + osparc.utils.Utils.addBackground(thumbnail, selectedBGColor); this.fireDataEvent("thumbnailTapped", { type: thumbnail.thumbnailType || "templateThumbnail", source: thumbnail.thumbnailFileUrl || thumbnail.getSource() @@ -238,9 +241,7 @@ qx.Class.define("osparc.editor.ThumbnailSuggestions", { thumbnail.thumbnailFileUrl = suggestion.fileUrl || suggestion; thumbnail.addListener("mouseover", () => thumbnail.set({decorator: "thumbnail-selected"}), this); thumbnail.addListener("mouseout", () => thumbnail.set({decorator: "thumbnail"}), this); - thumbnail.addListener("tap", () => { - this.thumbnailTapped(thumbnail); - }, this); + thumbnail.addListener("tap", () => this.__thumbnailTapped(thumbnail), this); this.add(thumbnail); }); } diff --git a/services/static-webserver/client/source/class/osparc/editor/WorkspaceEditor.js b/services/static-webserver/client/source/class/osparc/editor/WorkspaceEditor.js index 4e151d4e433..ff557ddc59e 100644 --- a/services/static-webserver/client/source/class/osparc/editor/WorkspaceEditor.js +++ b/services/static-webserver/client/source/class/osparc/editor/WorkspaceEditor.js @@ -221,10 +221,7 @@ qx.Class.define("osparc.editor.WorkspaceEditor", { }; osparc.store.Workspaces.getInstance().putWorkspace(this.getWorkspace().getWorkspaceId(), updateData) .then(() => this.fireEvent("workspaceUpdated")) - .catch(err => { - console.error(err); - osparc.FlashMessenger.logAs(err.message, "ERROR"); - }) + .catch(err => osparc.FlashMessenger.logError(err)) .finally(() => editButton.setFetching(false)); } }, @@ -233,10 +230,7 @@ qx.Class.define("osparc.editor.WorkspaceEditor", { if (this.__creatingWorkspace) { osparc.store.Workspaces.getInstance().deleteWorkspace(this.getWorkspace().getWorkspaceId()) .then(() => this.fireEvent("workspaceDeleted")) - .catch(err => { - console.error(err); - osparc.FlashMessenger.logAs(err.message, "ERROR"); - }); + .catch(err => osparc.FlashMessenger.logError(err)); } this.fireEvent("cancel"); }, diff --git a/services/static-webserver/client/source/class/osparc/file/FileDownloadLink.js b/services/static-webserver/client/source/class/osparc/file/FileDownloadLink.js index bbb10785079..637257b0c1e 100644 --- a/services/static-webserver/client/source/class/osparc/file/FileDownloadLink.js +++ b/services/static-webserver/client/source/class/osparc/file/FileDownloadLink.js @@ -45,7 +45,7 @@ qx.Class.define("osparc.file.FileDownloadLink", { this.fireDataEvent("fileLinkAdded", downloadLink); } else { downloadLinkField.resetValue(); - osparc.FlashMessenger.getInstance().logAs(this.tr("Error checking 
link"), "WARNING"); + osparc.FlashMessenger.logAs(this.tr("An issue occurred while checking link"), "WARNING"); } }, this); }, diff --git a/services/static-webserver/client/source/class/osparc/file/FileDrop.js b/services/static-webserver/client/source/class/osparc/file/FileDrop.js index 106a6c66b51..92b4f15206c 100644 --- a/services/static-webserver/client/source/class/osparc/file/FileDrop.js +++ b/services/static-webserver/client/source/class/osparc/file/FileDrop.js @@ -39,7 +39,7 @@ qx.Class.define("osparc.file.FileDrop", { let msg = "
"; const options = [ this.tr("Upload file"), - this.tr("Drop file from explorer"), + this.tr("Drop file from File Explorer"), this.tr("Drop file from tree"), this.tr("Provide Link") ]; @@ -104,7 +104,7 @@ qx.Class.define("osparc.file.FileDrop", { return files; }, - ONE_FILE_ONLY: qx.locale.Manager.tr("Only one file at a time is accepted.") + "
" + qx.locale.Manager.tr("Please zip all files together."), + ONE_FILE_ONLY: qx.locale.Manager.tr("Only one file can be uploaded at a time.") + "
" + qx.locale.Manager.tr("Please compress all files into a single zip file."), }, events: { @@ -301,10 +301,10 @@ qx.Class.define("osparc.file.FileDrop", { pos: this.__pointerFileEventToScreenPos(e) }); } else { - osparc.FlashMessenger.getInstance().logAs(osparc.file.FileDrop.ONE_FILE_ONLY, "ERROR"); + osparc.FlashMessenger.logError(osparc.file.FileDrop.ONE_FILE_ONLY); } } else { - osparc.FlashMessenger.getInstance().logAs(this.tr("Folders are not accepted. You might want to upload a zip file."), "ERROR"); + osparc.FlashMessenger.logError(this.tr("Folders are not accepted. Please upload a zip file instead.")); } } }, diff --git a/services/static-webserver/client/source/class/osparc/file/FileLabelWithActions.js b/services/static-webserver/client/source/class/osparc/file/FileLabelWithActions.js index 43113f880e2..cdd8c0c0e5f 100644 --- a/services/static-webserver/client/source/class/osparc/file/FileLabelWithActions.js +++ b/services/static-webserver/client/source/class/osparc/file/FileLabelWithActions.js @@ -108,23 +108,11 @@ qx.Class.define("osparc.file.FileLabelWithActions", { setItemSelected: function(selectedItem) { if (selectedItem) { + this.__selection = [selectedItem]; const isFile = osparc.file.FilesTree.isFile(selectedItem); this.getChildControl("download-button").setEnabled(isFile); - this.getChildControl("delete-button").setEnabled(isFile); - const selectedLabel = this.getChildControl("selected-label"); - if (isFile) { - this.__selection = [selectedItem]; - selectedLabel.set({ - value: selectedItem.getLabel(), - toolTipText: selectedItem.getFileId() - }); - } else { - this.__selection = []; - selectedLabel.set({ - value: "", - toolTipText: "" - }); - } + this.getChildControl("delete-button").setEnabled(true); // folders can also be deleted + this.getChildControl("selected-label").setValue(selectedItem.getLabel()); } else { this.resetSelection(); } @@ -138,7 +126,7 @@ qx.Class.define("osparc.file.FileLabelWithActions", { } else { const selectedLabel = this.getChildControl("selected-label"); selectedLabel.set({ - value: multiSelectionData.length + " files" + value: multiSelectionData.length + " items" }); } } else { @@ -168,60 +156,87 @@ qx.Class.define("osparc.file.FileLabelWithActions", { } }, + __retrieveURLAndDownloadFile: function(file) { + const fileId = file.getFileId(); + const locationId = file.getLocation(); + osparc.utils.Utils.retrieveURLAndDownload(locationId, fileId) + .then(data => { + if (data) { + osparc.DownloadLinkTracker.getInstance().downloadLinkUnattended(data.link, data.fileName); + } + }); + }, + __deleteSelected: function() { + const toBeDeleted = []; + let isFolderSelected = false; if (this.isMultiSelect()) { - const requests = []; this.__selection.forEach(selection => { - if (selection && osparc.file.FilesTree.isFile(selection)) { - const request = this.__deleteFile(selection); - if (request) { - requests.push(request); + if (selection) { + toBeDeleted.push(selection); + if (osparc.file.FilesTree.isDir(selection)) { + isFolderSelected = true; } } }); - Promise.all(requests) - .then(datas => { - if (datas.length) { - this.fireDataEvent("fileDeleted", datas[0]); - osparc.FlashMessenger.getInstance().logAs(this.tr("Files successfully deleted"), "INFO"); - } - }); - requests } else if (this.__selection.length) { const selection = this.__selection[0]; - if (selection && osparc.file.FilesTree.isFile(selection)) { - const request = this.__deleteFile(selection); - if (request) { - request - .then(data => { - this.fireDataEvent("fileDeleted", data); - 
osparc.FlashMessenger.getInstance().logAs(this.tr("File successfully deleted"), "INFO"); - }); + if (selection) { + toBeDeleted.push(selection); + if (osparc.file.FilesTree.isDir(selection)) { + isFolderSelected = true; } } } + + let msg = this.tr("This action cannot be undone."); + msg += isFolderSelected ? ("
"+this.tr("All contents within the folders will be deleted.")) : ""; + msg += "
" + this.tr("Do you want to proceed?"); + const confirmationWin = new osparc.ui.window.Confirmation(msg).set({ + caption: this.tr("Delete"), + confirmText: this.tr("Delete"), + confirmAction: "delete" + }); + confirmationWin.center(); + confirmationWin.open(); + confirmationWin.addListener("close", () => { + if (confirmationWin.getConfirmed()) { + this.__doDeleteSelected(toBeDeleted); + } + }, this); }, - __retrieveURLAndDownloadFile: function(file) { - const fileId = file.getFileId(); - const locationId = file.getLocation(); - osparc.utils.Utils.retrieveURLAndDownload(locationId, fileId) - .then(data => { - if (data) { - osparc.DownloadLinkTracker.getInstance().downloadLinkUnattended(data.link, data.fileName); + __doDeleteSelected: function(toBeDeleted) { + const requests = []; + toBeDeleted.forEach(selection => { + if (selection) { + let request = null; + if (osparc.file.FilesTree.isFile(selection)) { + request = this.__deleteItem(selection.getFileId(), selection.getLocation()); + } else { + request = this.__deleteItem(selection.getPath(), selection.getLocation()); + } + if (request) { + requests.push(request); + } + } + }); + Promise.all(requests) + .then(datas => { + if (datas.length) { + this.fireDataEvent("fileDeleted", datas[0]); + osparc.FlashMessenger.logAs(this.tr("Items successfully deleted"), "INFO"); } }); }, - __deleteFile: function(file) { - const fileId = file.getFileId(); - const locationId = file.getLocation(); + __deleteItem: function(itemId, locationId) { if (locationId !== 0 && locationId !== "0") { - osparc.FlashMessenger.getInstance().logAs(this.tr("Only files in simcore.s3 can be deleted")); + osparc.FlashMessenger.logAs(this.tr("Externally managed items cannot be deleted")); return null; } const dataStore = osparc.store.Data.getInstance(); - return dataStore.deleteFile(locationId, fileId); + return dataStore.deleteFile(locationId, itemId); }, } }); diff --git a/services/static-webserver/client/source/class/osparc/file/FilePicker.js b/services/static-webserver/client/source/class/osparc/file/FilePicker.js index fdce6e4aec9..65687d4f60a 100644 --- a/services/static-webserver/client/source/class/osparc/file/FilePicker.js +++ b/services/static-webserver/client/source/class/osparc/file/FilePicker.js @@ -39,28 +39,22 @@ qx.Class.define("osparc.file.FilePicker", { /** * @param node {osparc.data.model.Node} Node owning the widget */ - construct: function(node, pageContext = "workbench") { + construct: function(node, viewContext = "workbench") { this.base(arguments); this._setLayout(new qx.ui.layout.VBox(20)); this.set({ node, - pageContext }); - this.__buildLayout(); + this.__buildLayout(viewContext); }, properties: { node: { check: "osparc.data.model.Node" }, - - pageContext: { - check: ["workbench", "guided", "app"], - nullable: false - } }, events: { @@ -117,7 +111,7 @@ qx.Class.define("osparc.file.FilePicker", { return osparc.file.FilePicker.isOutputFromStore(outputs) || osparc.file.FilePicker.isOutputDownloadLink(outputs); }, - setOutputValue: function(node, outputValue) { + __setOutputValue: function(node, outputValue) { node.setOutputData({ "outFile": outputValue }); @@ -133,7 +127,7 @@ qx.Class.define("osparc.file.FilePicker", { setOutputValueFromStore: function(node, store, dataset, path, label) { if (store !== undefined && path) { - osparc.file.FilePicker.setOutputValue(node, { + this.__setOutputValue(node, { store, dataset, path, @@ -144,7 +138,7 @@ qx.Class.define("osparc.file.FilePicker", { setOutputValueFromLink: function(node, downloadLink, label) { if 
(downloadLink) { - osparc.file.FilePicker.setOutputValue(node, { + this.__setOutputValue(node, { downloadLink, label: label ? label : "" }); @@ -152,7 +146,7 @@ qx.Class.define("osparc.file.FilePicker", { }, resetOutputValue: function(node) { - osparc.file.FilePicker.setOutputValue(node, null); + this.__setOutputValue(node, null); }, getOutputFileMetadata: function(node) { @@ -161,17 +155,19 @@ qx.Class.define("osparc.file.FilePicker", { const params = { url: { locationId: outValue.store, - datasetId: outValue.dataset + path: outValue.path } }; - osparc.data.Resources.fetch("storageFiles", "getByLocationAndDataset", params) - .then(files => { - const fileMetadata = files.find(file => file.file_id === outValue.path); - if (fileMetadata) { - resolve(fileMetadata); - } else { - reject(); + osparc.data.Resources.fetch("storagePaths", "getPaths", params) + .then(pagResp => { + if (pagResp["items"]) { + const file = pagResp["items"].find(item => item.path === outValue.path); + if (file) { + resolve(file["file_meta_data"]); + return; + } } + reject(); }) .catch(() => reject()); }); @@ -280,19 +276,18 @@ qx.Class.define("osparc.file.FilePicker", { if (this.__filesTree) { this.__selectedFileFound = false; this.__filesTree.resetCache(); - this.__filesTree.populateTree(); + this.__filesTree.populateLocations(); } }, - __buildLayout: function() { + __buildLayout: function(viewContext) { this._removeAll(); const hasOutput = osparc.file.FilePicker.hasOutputAssigned(this.getNode().getOutputs()); if (hasOutput) { this.__buildInfoLayout(); } else { this.__addProgressBar(); - const isWorkbenchContext = this.getPageContext() === "workbench"; - if (isWorkbenchContext) { + if (viewContext === "workbench") { this.__buildWorkbenchLayout(); } else { this.setMargin(10); @@ -421,14 +416,25 @@ qx.Class.define("osparc.file.FilePicker", { if (files.length === 1) { const fileUploader = new osparc.file.FileUploader(this.getNode()); fileUploader.addListener("uploadAborted", () => this.__resetOutput()); - fileUploader.addListener("fileUploaded", () => { + fileUploader.addListener("fileUploaded", e => { + const fileMetadata = e.getData(); + if ( + "location" in fileMetadata && + "dataset" in fileMetadata && + "path" in fileMetadata && + "name" in fileMetadata + ) { + osparc.file.FilePicker.setOutputValueFromStore(this.getNode(), fileMetadata["location"], fileMetadata["dataset"], fileMetadata["path"], fileMetadata["name"]); + } else { + console.error("metadata info missing", fileMetadata); + } this.fireEvent("fileUploaded"); this.getNode().fireEvent("fileUploaded"); }, this); fileUploader.retrieveUrlAndUpload(files[0]); return true; } - osparc.FlashMessenger.getInstance().logAs(osparc.file.FileDrop.ONE_FILE_ONLY, "ERROR"); + osparc.FlashMessenger.logError(osparc.file.FileDrop.ONE_FILE_ONLY); } return false; }, @@ -545,8 +551,8 @@ qx.Class.define("osparc.file.FilePicker", { flex: 1 }); treeFolderLayout.add(treeLayout, 0); - const allowMultiselection = false; - const folderViewer = new osparc.file.FolderViewer(allowMultiselection); + const allowMultiSelection = false; + const folderViewer = new osparc.file.FolderViewer(allowMultiSelection); treeFolderLayout.add(folderViewer, 1); filesTree.addListener("selectionChanged", () => { @@ -581,9 +587,9 @@ qx.Class.define("osparc.file.FilePicker", { folderViewer.setFolder(parent); } }, this); - folderViewer.addListener("requestDatasetFiles", e => { + folderViewer.addListener("requestPathItems", e => { const data = e.getData(); - filesTree.requestDatasetFiles(data.locationId, 
data.datasetId); + filesTree.requestPathItems(data.locationId, data.path); }, this); const selectBtn = this.__selectButton = new qx.ui.form.Button(this.tr("Select")).set({ diff --git a/services/static-webserver/client/source/class/osparc/file/FileTreeItem.js b/services/static-webserver/client/source/class/osparc/file/FileTreeItem.js index 5e7a4a02236..7ef50a49ed4 100644 --- a/services/static-webserver/client/source/class/osparc/file/FileTreeItem.js +++ b/services/static-webserver/client/source/class/osparc/file/FileTreeItem.js @@ -48,19 +48,21 @@ qx.Class.define("osparc.file.FileTreeItem", { this.set({ indent: 12, // defaults to 19, decorator: "rounded", + alignY: "middle", }); - - // create a date format like "Oct. 19, 2018 11:31 AM" - this._dateFormat = new qx.util.format.DateFormat( - qx.locale.Date.getDateFormat("medium") + " " + - qx.locale.Date.getTimeFormat("short") - ); }, properties: { + loaded: { + check: "Boolean", + event: "changeLoaded", + init: true, + nullable: false + }, + location: { check: "String", - event: "changePath", + event: "changeLocation", nullable: true }, @@ -70,6 +72,12 @@ qx.Class.define("osparc.file.FileTreeItem", { nullable: true }, + displayPath: { + check: "String", + event: "changeDisplayPath", + nullable: true + }, + pathLabel: { check: "Array", event: "changePathLabel", @@ -79,30 +87,16 @@ qx.Class.define("osparc.file.FileTreeItem", { itemId: { check: "String", event: "changeItemId", - apply: "_applyItemId", + apply: "__applyItemId", nullable: true }, - isDataset: { - check: "Boolean", - event: "changeIsDataset", - init: false, - nullable: false - }, - datasetId: { check: "String", event: "changeDatasetId", nullable: true }, - loaded: { - check: "Boolean", - event: "changeLoaded", - init: true, - nullable: false - }, - fileId: { check: "String", event: "changeFileId", @@ -119,12 +113,17 @@ qx.Class.define("osparc.file.FileTreeItem", { check: "String", event: "changeSize", nullable: true - } - }, + }, - members: { // eslint-disable-line qx-rules/no-refs-in-members - _dateFormat: null, + type: { + check: ["folder", "file", "loading"], + event: "changeType", + init: null, + nullable: false, + }, + }, + members: { // overridden _addWidgets: function() { // Here's our indentation and tree-lines @@ -144,70 +143,36 @@ qx.Class.define("osparc.file.FileTreeItem", { // Add lastModified const lastModifiedWidget = new qx.ui.basic.Label().set({ - width: 140, maxWidth: 140, - textAlign: "right" + textAlign: "right", + alignY: "middle", + paddingLeft: 10, }); - let that = this; this.bind("lastModified", lastModifiedWidget, "value", { - converter: function(value) { - if (value === null) { - return ""; - } - const date = new Date(value); - return that._dateFormat.format(date); // eslint-disable-line no-underscore-dangle - } + converter: value => value ? osparc.utils.Utils.formatDateAndTime(new Date(value)) : "" }); this.addWidget(lastModifiedWidget); // Add size const sizeWidget = new qx.ui.basic.Label().set({ - width: 70, - maxWidth: 70, - textAlign: "right" + maxWidth: 90, + textAlign: "right", + alignY: "middle", + paddingLeft: 10, }); this.bind("size", sizeWidget, "value", { - converter: function(value) { - if (value === null) { - return ""; - } - return osparc.utils.Utils.bytesToSize(value); - } + converter: value => value ? 
osparc.utils.Utils.bytesToSize(value) : "" }); this.addWidget(sizeWidget); - - - const permissions = osparc.data.Permissions.getInstance(); - // Add Path - const pathWidget = new qx.ui.basic.Label().set({ - width: 300, - maxWidth: 300, - textAlign: "right" - }); - this.bind("path", pathWidget, "value"); - this.addWidget(pathWidget); - permissions.bind("role", pathWidget, "visibility", { - converter: () => permissions.canDo("study.nodestree.uuid.read") ? "visible" : "excluded" - }); - - // Add NodeId - const fileIdWidget = new qx.ui.basic.Label().set({ - width: 300, - maxWidth: 300, - textAlign: "right" - }); - this.bind("fileId", fileIdWidget, "value"); - this.addWidget(fileIdWidget); - permissions.bind("role", fileIdWidget, "visibility", { - converter: () => permissions.canDo("study.nodestree.uuid.read") ? "visible" : "excluded" - }); }, - // override - _applyItemId: function(value, old) { - osparc.utils.Utils.setIdToWidget(this, "fileTreeItem_" + value); + __applyItemId: function(value, old) { + if (value) { + osparc.utils.Utils.setIdToWidget(this, "fileTreeItem_" + value); + } }, + // override _applyIcon: function(value, old) { this.base(arguments, value, old); const icon = this.getChildControl("icon", true); @@ -222,9 +187,4 @@ qx.Class.define("osparc.file.FileTreeItem", { } } }, - - destruct: function() { - this._dateFormat.dispose(); - this._dateFormat = null; - } }); diff --git a/services/static-webserver/client/source/class/osparc/file/FileUploader.js b/services/static-webserver/client/source/class/osparc/file/FileUploader.js index 415d8c3ecdb..a93e94b70ba 100644 --- a/services/static-webserver/client/source/class/osparc/file/FileUploader.js +++ b/services/static-webserver/client/source/class/osparc/file/FileUploader.js @@ -39,7 +39,7 @@ qx.Class.define("osparc.file.FileUploader", { events: { "uploadAborted": "qx.event.type.Event", - "fileUploaded": "qx.event.type.Event" + "fileUploaded": "qx.event.type.Data", }, statics: { @@ -60,6 +60,7 @@ qx.Class.define("osparc.file.FileUploader", { members: { __presignedLinkData: null, __uploadedParts: null, + __fileMetadata: null, // Request to the server an upload URL. 
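// Upload flow, as wired below: the presigned link data is requested first and the
// file's metadata ({location, dataset: studyId, path: fileUuid, name}) is stashed in
// __fileMetadata; the parts are then uploaded, completion is polled via links.state,
// and finally "fileUploaded" is fired as a data event carrying that metadata so the
// listener (FilePicker) can set the node's output value.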
retrieveUrlAndUpload: function(file) { @@ -80,6 +81,14 @@ qx.Class.define("osparc.file.FileUploader", { .then(presignedLinkData => { if (presignedLinkData.resp.urls) { this.__presignedLinkData = presignedLinkData; + + this.__fileMetadata = { + location: presignedLinkData.locationId, + dataset: studyId, + path: presignedLinkData.fileUuid, + name: file.name + }; + try { this.__uploadFile(file); } catch (error) { @@ -124,7 +133,7 @@ qx.Class.define("osparc.file.FileUploader", { const nProgress = Math.min(Math.max(100*progress-min, min), max); this.getNode().getStatus().setProgress(nProgress); if (this.__uploadedParts.every(uploadedPart => uploadedPart["e_tag"] !== null)) { - this.__checkCompleteUpload(file); + this.__checkCompleteUpload(); } } } catch (err) { @@ -153,7 +162,7 @@ qx.Class.define("osparc.file.FileUploader", { }, // Use XMLHttpRequest to complete the upload to S3 - __checkCompleteUpload: function(file) { + __checkCompleteUpload: function() { if (this.getNode()["fileUploadAbortRequested"]) { this.__abortUpload(); return; @@ -162,29 +171,21 @@ qx.Class.define("osparc.file.FileUploader", { const presignedLinkData = this.__presignedLinkData; this.getNode().getStatus().setProgress(this.self().PROGRESS_VALUES.COMPLETING); const completeUrl = presignedLinkData.resp.links.complete_upload; - const location = presignedLinkData.locationId; - const path = presignedLinkData.fileUuid; const xhr = new XMLHttpRequest(); xhr.onloadend = () => { - const fileMetadata = { - location, - dataset: this.getNode().getStudy().getUuid(), - path, - name: file.name - }; const resp = JSON.parse(xhr.responseText); if ("error" in resp && resp["error"]) { console.error(resp["error"]); this.__abortUpload(); } else if ("data" in resp) { if (xhr.status == 202) { - console.log("waiting for completion", file.name); + console.log("waiting for completion", this.__fileMetadata.name); // @odeimaiz: we need to poll the received new location in the response // we do have links.state -> poll that link until it says ok // right now this kind of work if files are small and this happens fast - this.__pollFileUploadState(resp["data"]["links"]["state"], fileMetadata); + this.__pollFileUploadState(resp["data"]["links"]["state"]); } else if (xhr.status == 200) { - this.__completeUpload(fileMetadata); + this.__completeUpload(); } } }; @@ -196,30 +197,27 @@ qx.Class.define("osparc.file.FileUploader", { xhr.send(JSON.stringify(body)); }, - __pollFileUploadState: function(stateLink, fileMetadata) { + __pollFileUploadState: function(stateLink) { const xhr = new XMLHttpRequest(); xhr.open("POST", stateLink, true); xhr.setRequestHeader("Content-Type", "application/json"); xhr.onloadend = () => { const resp = JSON.parse(xhr.responseText); if ("data" in resp && resp["data"] && resp["data"]["state"] === "ok") { - this.__completeUpload(fileMetadata); + this.__completeUpload(); } else { const interval = 2000; - qx.event.Timer.once(() => this.__pollFileUploadState(stateLink, fileMetadata), this, interval); + qx.event.Timer.once(() => this.__pollFileUploadState(stateLink), this, interval); } }; xhr.send(); }, - __completeUpload: function(fileMetadata) { + __completeUpload: function() { this.getNode()["fileUploadAbortRequested"] = false; - if ("location" in fileMetadata && "dataset" in fileMetadata && "path" in fileMetadata && "name" in fileMetadata) { - osparc.file.FilePicker.setOutputValueFromStore(this.getNode(), fileMetadata["location"], fileMetadata["dataset"], fileMetadata["path"], fileMetadata["name"]); - } this.__presignedLinkData = 
null; - this.fireEvent("fileUploaded"); + this.fireDataEvent("fileUploaded", this.__fileMetadata); }, __abortUpload: function() { diff --git a/services/static-webserver/client/source/class/osparc/file/FilesTree.js b/services/static-webserver/client/source/class/osparc/file/FilesTree.js index a682bbca1c2..9b8a13a8265 100644 --- a/services/static-webserver/client/source/class/osparc/file/FilesTree.js +++ b/services/static-webserver/client/source/class/osparc/file/FilesTree.js @@ -43,23 +43,14 @@ qx.Class.define("osparc.file.FilesTree", { this.base(arguments, null, "label", "children"); this.set({ - openMode: "none", + openMode: "dbltap", decorator: "no-border", font: "text-14", }); - this.resetChecks(); + this.__resetChecks(); this.addListener("tap", this.__selectionChanged, this); - - // Listen to "Enter" key - this.addListener("keypress", keyEvent => { - if (keyEvent.getKeyIdentifier() === "Enter") { - this.__itemSelected(); - } - }, this); - - this.__loadPaths = {}; }, properties: { @@ -75,149 +66,122 @@ qx.Class.define("osparc.file.FilesTree", { }, events: { - "selectionChanged": "qx.event.type.Event", // tap - "itemSelected": "qx.event.type.Event", // dbltap + "selectionChanged": "qx.event.type.Event", "fileCopied": "qx.event.type.Data", "filesAddedToTree": "qx.event.type.Event" }, statics: { isDir: function(item) { - let isDir = false; - if (item["get"+qx.lang.String.firstUp("path")]) { - if (item.getPath() !== null) { - isDir = true; - } - } - return isDir; + return item.getType() === "folder"; }, isFile: function(item) { - let isFile = false; - if (item["set"+qx.lang.String.firstUp("fileId")]) { - isFile = true; - } - return isFile; + return item.getType() === "file"; }, addLoadingChild: function(parent) { - const loadingModel = qx.data.marshal.Json.createModel({ - label: "Loading...", - location: null, - path: null, - children: [], - icon: "@FontAwesome5Solid/circle-notch/12" - }, true); + const loadingData = osparc.data.Converters.createLoadingEntry(); + const loadingModel = qx.data.marshal.Json.createModel(loadingData, true); parent.getChildren().append(loadingModel); }, removeLoadingChild: function(parent) { for (let i = parent.getChildren().length - 1; i >= 0; i--) { - if (parent.getChildren().toArray()[i].getLabel() === "Loading...") { - parent.getChildren().toArray() - .splice(i, 1); + if (parent.getChildren().toArray()[i].getType() === "loading") { + parent.getChildren().toArray().splice(i, 1); } } }, - - attachPathLabel: function(srcPathLabel, data) { - data["pathLabel"] = srcPathLabel.concat(data["label"]); - if ("children" in data) { - data.children.forEach(child => this.self().attachPathLabel(data["pathLabel"], child)); - } - } }, members: { __locations: null, - __datasets: null, + __pathModels: null, __loadPaths: null, - resetChecks: function() { - this.__locations = new Set(); - this.__datasets = new Set(); - }, - resetCache: function() { - this.resetChecks(); + this.__resetChecks(); const dataStore = osparc.store.Data.getInstance(); dataStore.resetCache(); }, + populateLocations: function() { + this.__resetChecks(); + + const treeName = "My Data"; + this.__resetTree(treeName); + const rootModel = this.getModel(); + rootModel.getChildren().removeAll(); + this.self().addLoadingChild(rootModel); + + this.set({ + hideRoot: true + }); + const dataStore = osparc.store.Data.getInstance(); + return dataStore.getLocations() + .then(locations => { + const datasetPromises = []; + if (this.__locations.size === 0) { + this.__resetChecks(); + this.__locationsToRoot(locations); + for 
(let i=0; i { - const { - files - } = data; - - if (files.length && "project_name" in files[0]) { - this.__resetTree(files[0]["project_name"]); + const locationId = 0; + const path = studyId; + return dataStore.getItemsByLocationAndPath(locationId, path) + .then(items => { + if (items.length) { + const studyName = osparc.data.Converters.displayPathToLabel(items[0]["display_path"], { first: true }); + studyModel.setLabel(studyName); } - studyModel = this.getModel(); - this.__filesToDataset("0", studyId, files, studyModel); + this.__itemsToTree(locationId, path, items, studyModel); + + this.setSelection(new qx.data.Array([studyModel])); + this.__selectionChanged(); }); }, - populateNodeTree(nodeId) { + populateNodeTree(studyId, nodeId) { const treeName = "Node Files"; this.__resetTree(treeName); - const rootModel = this.getModel(); - this.self().addLoadingChild(rootModel); + const nodeModel = this.getModel(); + this.self().addLoadingChild(nodeModel); const dataStore = osparc.store.Data.getInstance(); - return dataStore.getNodeFiles(nodeId) - .then(files => { - const newChildren = osparc.data.Converters.fromDSMToVirtualTreeModel(null, files); - if (newChildren.length && // location - newChildren[0].children.length && // study - newChildren[0].children[0].children.length) { // node - const nodeData = newChildren[0].children[0].children[0]; - const nodeTreeName = nodeData.label; - this.__resetTree(nodeTreeName, nodeId); - const rootNodeModel = this.getModel(); - if (nodeData.children.length) { - const nodeItemsOnly = nodeData.children; - this.__itemsToNode(nodeItemsOnly); - } - this.openNode(rootNodeModel); + const locationId = 0; + const path = encodeURIComponent(studyId) + "/" + encodeURIComponent(nodeId); + return dataStore.getItemsByLocationAndPath(locationId, path) + .then(items => { + this.__itemsToTree(0, path, items, nodeModel); - const selected = new qx.data.Array([rootNodeModel]); - this.setSelection(selected); - this.__selectionChanged(); - } else { - rootModel.getChildren().removeAll(); - } + this.setSelection(new qx.data.Array([nodeModel])); + this.__selectionChanged(); }); }, - populateTree: function() { - return this.__populateLocations(); - }, - loadFilePath: function(outFileVal) { const locationId = outFileVal.store; + const path = outFileVal.path; let datasetId = "dataset" in outFileVal ? 
outFileVal.dataset : null; - const pathId = outFileVal.path; if (datasetId === null) { - const splitted = pathId.split("/"); - if (splitted.length === 3) { - // simcore.s3 - datasetId = splitted[0]; - } + datasetId = osparc.data.Converters.pathToDatasetId(path); } - this.__addToLoadFilePath(locationId, datasetId, pathId); - this.__populateLocations(); - }, - - __addToLoadFilePath: function(locationId, datasetId, pathId) { if (datasetId) { if (!(locationId in this.__loadPaths)) { this.__loadPaths[locationId] = {}; @@ -225,32 +189,24 @@ qx.Class.define("osparc.file.FilesTree", { if (!(datasetId in this.__loadPaths[locationId])) { this.__loadPaths[locationId][datasetId] = new Set(); } - this.__loadPaths[locationId][datasetId].add(pathId); + this.__loadPaths[locationId][datasetId].add(path); } - }, - __hasLocationNeedToBeLoaded: function(locationId) { - return (locationId in this.__loadPaths) && (Object.keys(this.__loadPaths[locationId]).length > 0); + this.populateLocations(); }, - __hasDatasetNeedToBeLoaded: function(locationId, datasetId) { - return (locationId in this.__loadPaths) && (datasetId in this.__loadPaths[locationId]) && (this.__loadPaths[locationId][datasetId].size > 0); + requestPathItems: function(locationId, path) { + const dataStore = osparc.store.Data.getInstance(); + return dataStore.getItemsByLocationAndPath(locationId, path) + .then(items => { + return this.__itemsToTree(locationId, path, items); + }); }, - __filesReceived: function(locationId, datasetId, files) { - if (this.__hasDatasetNeedToBeLoaded(locationId, datasetId)) { - const paths = Array.from(this.__loadPaths[locationId][datasetId]); - for (let i=0; i { c.bindDefaultProperties(item, id); c.bindProperty("itemId", "itemId", null, item, id); + c.bindProperty("displayPath", "displayPath", null, item, id); c.bindProperty("fileId", "fileId", null, item, id); c.bindProperty("location", "location", null, item, id); - c.bindProperty("isDataset", "isDataset", null, item, id); c.bindProperty("datasetId", "datasetId", null, item, id); c.bindProperty("loaded", "loaded", null, item, id); c.bindProperty("path", "path", null, item, id); @@ -282,64 +239,35 @@ qx.Class.define("osparc.file.FilesTree", { c.bindProperty("lastModified", "lastModified", null, item, id); c.bindProperty("size", "size", null, item, id); c.bindProperty("icon", "icon", null, item, id); + c.bindProperty("type", "type", null, item, id); }, configureItem: item => { - const openButton = item.getChildControl("open"); - openButton.addListener("tap", () => { - if (item.isOpen() && !item.getLoaded() && item.getIsDataset()) { + item.addListener("changeOpen", e => { + if (e.getData() && !item.getLoaded()) { item.setLoaded(true); const locationId = item.getLocation(); - const datasetId = item.getPath(); - this.requestDatasetFiles(locationId, datasetId); + const path = item.getPath(); + this.requestPathItems(locationId, path); } }, this); - item.addListener("dbltap", () => this.__itemSelected(), this); this.__addDragAndDropMechanisms(item); } }); }, - __populateLocations: function() { - this.resetChecks(); - - const treeName = "My Data"; - this.__resetTree(treeName); - const rootModel = this.getModel(); - rootModel.getChildren().removeAll(); - this.self().addLoadingChild(rootModel); - - this.set({ - hideRoot: true - }); - const dataStore = osparc.store.Data.getInstance(); - return dataStore.getLocations() - .then(locations => { - const datasetPromises = []; - if (this.__locations.size === 0) { - this.resetChecks(); - this.__locationsToRoot(locations); - for (let i=0; 
i 0); + }, + + __hasDatasetNeedToBeLoaded: function(locationId, datasetId) { + return (locationId in this.__loadPaths) && (datasetId in this.__loadPaths[locationId]) && (this.__loadPaths[locationId][datasetId].size > 0); + }, + + __filesReceived: function(locationId, datasetId, files) { + if (this.__hasDatasetNeedToBeLoaded(locationId, datasetId)) { + const paths = Array.from(this.__loadPaths[locationId][datasetId]); + for (let i=0; i { const { location, - datasets + items, } = data; if (location === locationId && !this.__locations.has(locationId)) { - this.__datasetsToLocation(location, datasets); + this.__itemsToLocation(location, items); } }); }, - requestDatasetFiles: function(locationId, datasetId) { - if (this.__datasets.has(datasetId)) { - return null; - } - - const dataStore = osparc.store.Data.getInstance(); - return dataStore.getFilesByLocationAndDataset(locationId, datasetId) - .then(data => { - const { - location, - dataset, - files - } = data; - this.__filesToDataset(location, dataset, files); - }); - }, - __getLocationModel: function(locationId) { const rootModel = this.getModel(); const locationModels = rootModel.getChildren(); @@ -401,34 +336,77 @@ qx.Class.define("osparc.file.FilesTree", { return null; }, - __getDatasetModel: function(locationId, datasetId) { - const locationModel = this.__getLocationModel(locationId); - const datasetModels = locationModel.getChildren(); - for (let i=0; i entry["locationId"] == locationId && entry["path"] === path); + if (modelFound) { + return modelFound["model"]; } return null; }, - __itemsToNode: function(files) { - const currentModel = this.getModel(); - this.self().removeLoadingChild(currentModel); + __createModel: function(locationId, path, data) { + const model = qx.data.marshal.Json.createModel(data, true); + this.__pathModels.push({ + locationId, + path, + model, + }); + return model; + }, - files.forEach(file => this.self().attachPathLabel(currentModel.getPathLabel(), file)); - const newModelToAdd = qx.data.marshal.Json.createModel(files, true); - currentModel.getChildren().append(newModelToAdd); - this.setModel(currentModel); - this.fireEvent("filesAddedToTree"); + __itemsToTree: function(locationId, path, items, parentModel) { + if (!parentModel) { + parentModel = this.__getModelFromPath(locationId, path); + } + if (parentModel) { + if ("setLoaded" in parentModel) { + parentModel.setLoaded(true); + } + parentModel.getChildren().removeAll(); + const itemModels = []; + items.forEach(item => { + if (item["file_meta_data"]) { + const datasetId = osparc.data.Converters.pathToDatasetId(path); + const data = osparc.data.Converters.createFileEntry( + item["display_path"], + locationId, + item["path"], + datasetId, + item["file_meta_data"], + ); + const model = this.__createModel(locationId, item["path"], data); + itemModels.push(model); + } else { + const data = osparc.data.Converters.createFolderEntry( + item["display_path"], + locationId, + item["path"] + ); + data.loaded = false; + const model = this.__createModel(locationId, item["path"], data); + itemModels.push(model); + this.__pathModels.push({ + locationId, + path: item["path"], + model, + }); + this.self().addLoadingChild(model); + } + }); + parentModel.getChildren().append(itemModels); + // sort files + osparc.data.Converters.sortModelByLabel(parentModel); - return newModelToAdd; - }, + this.__rerender(parentModel); - __datasetsToLocation: function(locationId, datasets) { - const dataStore = osparc.store.Data.getInstance(); + this.fireEvent("filesAddedToTree"); + } + 
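// Once the received items are in the tree, __filesReceived (re-added above) checks
// the __loadPaths registry (paths queued by loadFilePath) so that an output file
// requested before its branch existed can still be located and selected.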
this.__filesReceived(locationId, path, items); + return parentModel || null; + }, + + __itemsToLocation: function(locationId, items) { const locationModel = this.__getLocationModel(locationId); if (!locationModel) { return; @@ -436,71 +414,38 @@ qx.Class.define("osparc.file.FilesTree", { this.__locations.add(locationId); locationModel.getChildren().removeAll(); let openThis = null; - datasets.forEach(dataset => { - const datasetData = osparc.data.Converters.createDirEntry( - dataset.display_name, + const datasetItems = []; + items.forEach(item => { + const datasetData = osparc.data.Converters.createFolderEntry( + item["display_path"], locationId, - dataset.dataset_id + item["path"] ); - datasetData.isDataset = true; datasetData.loaded = false; datasetData["pathLabel"] = locationModel.getPathLabel().concat(datasetData["label"]); - const datasetModel = qx.data.marshal.Json.createModel(datasetData, true); + const datasetModel = this.__createModel(locationId, item["path"], datasetData); + datasetItems.push(datasetModel); this.self().addLoadingChild(datasetModel); - locationModel.getChildren().append(datasetModel); // add cached files - const datasetId = dataset.dataset_id; - const cachedData = dataStore.getFilesByLocationAndDatasetCached(locationId, datasetId); - if (cachedData) { - this.__filesToDataset(cachedData.location, cachedData.dataset, cachedData.files); - } - - if (this.__hasDatasetNeedToBeLoaded(locationId, datasetId)) { + const path = item["path"]; + if (this.__hasDatasetNeedToBeLoaded(locationId, path)) { openThis = datasetModel; } }); + locationModel.getChildren().append(datasetItems); // sort datasets osparc.data.Converters.sortModelByLabel(locationModel); this.__rerender(locationModel); if (openThis) { - const datasetId = openThis.getItemId(); + const path = openThis.getItemId(); this.openNodeAndParents(openThis); - this.requestDatasetFiles(locationId, datasetId); + this.requestPathItems(locationId, path); } }, - __filesToDataset: function(locationId, datasetId, files, model) { - if (this.__datasets.has(datasetId)) { - return; - } - - const datasetModel = model ? model : this.__getDatasetModel(locationId, datasetId); - if (datasetModel) { - datasetModel.getChildren().removeAll(); - if (files.length) { - const locationData = osparc.data.Converters.fromDSMToVirtualTreeModel(datasetId, files); - const datasetData = locationData[0].children; - datasetData[0].children.forEach(data => { - this.self().attachPathLabel(datasetModel.getPathLabel(), data); - const filesModel = qx.data.marshal.Json.createModel(data, true); - datasetModel.getChildren().append(filesModel); - }); - } - // sort files - osparc.data.Converters.sortModelByLabel(datasetModel); - - this.__rerender(datasetModel); - - this.__datasets.add(datasetId); - this.fireEvent("filesAddedToTree"); - } - - this.__filesReceived(locationId, datasetId, files); - }, - __rerender: function(item) { // Hack to trigger a rebuild of the item. 
// Without this sometimes the arrow giving access to the children is not rendered @@ -514,7 +459,7 @@ qx.Class.define("osparc.file.FilesTree", { const root = this.getModel(); const list = []; this.__getItemsInTree(root, list); - return list.find(element => element.getChildren && element.getChildren().contains(childItem)); + return list.find(element => element.getChildren && childItem.getPath && element.getChildren().toArray().find(child => child.getPath() === childItem.getPath())); }, findItemId: function(itemId) { @@ -590,13 +535,6 @@ qx.Class.define("osparc.file.FilesTree", { } }, - __itemSelected: function() { - let selectedItem = this.getSelectedItem(); - if (selectedItem) { - this.fireEvent("itemSelected"); - } - }, - __addDragAndDropMechanisms: function(item) { if (this.isDragMechanism()) { this.__createDragMechanism(item); diff --git a/services/static-webserver/client/source/class/osparc/file/FolderContent.js b/services/static-webserver/client/source/class/osparc/file/FolderContent.js index badadbadcf2..98cd8d94080 100644 --- a/services/static-webserver/client/source/class/osparc/file/FolderContent.js +++ b/services/static-webserver/client/source/class/osparc/file/FolderContent.js @@ -22,7 +22,8 @@ qx.Class.define("osparc.file.FolderContent", { this.base(arguments); this.getChildControl("icons-layout"); - this.getChildControl("table"); + const table = this.getChildControl("table"); + this.__attachListenersToTable(table); }, properties: { @@ -36,7 +37,7 @@ qx.Class.define("osparc.file.FolderContent", { mode: { check: ["list", "icons"], - init: "icons", + init: "list", nullable: false, event: "changeMode", apply: "__reloadFolderContent" @@ -55,7 +56,7 @@ qx.Class.define("osparc.file.FolderContent", { "selectionChanged": "qx.event.type.Data", // tap "multiSelectionChanged": "qx.event.type.Data", // tap "openItemSelected": "qx.event.type.Data", // dbltap - "requestDatasetFiles": "qx.event.type.Data", + "requestPathItems": "qx.event.type.Data", }, statics: { @@ -77,10 +78,14 @@ qx.Class.define("osparc.file.FolderContent", { return item; }, + getIcon: function(entry) { + return osparc.file.FilesTree.isDir(entry) ? "@MaterialIcons/folder" : "@MaterialIcons/insert_drive_file"; + }, + T_POS: { TYPE: 0, NAME: 1, - DATE: 2, + MODIFIED_DATE: 2, SIZE: 3, ID: 4 } @@ -105,9 +110,9 @@ qx.Class.define("osparc.file.FolderContent", { }); control.getTableColumnModel().setDataCellRenderer(this.self().T_POS.TYPE, new qx.ui.table.cellrenderer.Image()); control.setColumnWidth(this.self().T_POS.TYPE, 30); - control.setColumnWidth(this.self().T_POS.NAME, 360); - control.setColumnWidth(this.self().T_POS.DATE, 170); - control.setColumnWidth(this.self().T_POS.SIZE, 70); + control.setColumnWidth(this.self().T_POS.NAME, 250); + control.setColumnWidth(this.self().T_POS.MODIFIED_DATE, 125); + control.setColumnWidth(this.self().T_POS.SIZE, 80); this.bind("mode", control, "visibility", { converter: mode => mode === "list" ? "visible" : "excluded" }); @@ -131,19 +136,21 @@ qx.Class.define("osparc.file.FolderContent", { return control || this.base(arguments, id); }, - __convertEntries: function(content) { + __convertChildren: function(children) { const datas = []; - content.forEach(entry => { + children.forEach(child => { const data = { - icon: entry.getIcon ? entry.getIcon() : this.__getIcon(entry), - label: entry.getLabel(), - lastModified: entry.getLastModified ? osparc.utils.Utils.formatDateAndTime(new Date(entry.getLastModified())) : "", - size: entry.getSize ? 
osparc.utils.Utils.bytesToSize(entry.getSize()) : "", - itemId: entry.getItemId ? entry.getItemId() : null, - entry: entry, + icon: child.getIcon ? child.getIcon() : this.self().getIcon(child), + label: child.getLabel(), + lastModified: child.getLastModified ? osparc.utils.Utils.formatDateAndTime(new Date(child.getLastModified())) : "", + size: child.getSize ? osparc.utils.Utils.bytesToSize(child.getSize()) : "", + itemId: child.getItemId ? child.getItemId() : null, + entry: child, }; datas.push(data); }); + // folders first + datas.sort((a, b) => osparc.file.FilesTree.isFile(a.entry) - osparc.file.FilesTree.isFile(b.entry)); const items = []; if (this.getMode() === "list") { datas.forEach(data => { @@ -189,14 +196,10 @@ qx.Class.define("osparc.file.FolderContent", { return items; }, - __getIcon: function(entry) { - return osparc.file.FilesTree.isDir(entry) ? "@MaterialIcons/folder" : "@MaterialIcons/insert_drive_file"; - }, - __getEntries: function() { if (this.getFolder()) { const children = this.getFolder().getChildren().toArray(); - return this.__convertEntries(children); + return this.__convertChildren(children); } return []; }, @@ -204,9 +207,9 @@ qx.Class.define("osparc.file.FolderContent", { __applyFolder: function(folder) { if (folder) { if (folder.getLoaded && !folder.getLoaded()) { - this.fireDataEvent("requestDatasetFiles", { + this.fireDataEvent("requestPathItems", { locationId: folder.getLocation(), - datasetId: folder.getPath() + path: folder.getPath() }); } @@ -223,7 +226,6 @@ qx.Class.define("osparc.file.FolderContent", { if (this.getMode() === "list") { const table = this.getChildControl("table"); table.setData(entries); - this.__attachListenersToTableItem(table); } else if (this.getMode() === "icons") { const iconsLayout = this.getChildControl("icons-layout"); iconsLayout.removeAll(); @@ -278,14 +280,14 @@ qx.Class.define("osparc.file.FolderContent", { } if (this.isMultiSelect()) { // pass all buttons that are selected - const selectedFiles = []; + const selectedItems = []; const iconsLayout = this.getChildControl("icons-layout"); iconsLayout.getChildren().forEach(btn => { - if (osparc.file.FilesTree.isFile(btn.entry) && btn.getValue()) { - selectedFiles.push(btn.entry); + if (btn.getValue() && "entry" in btn) { + selectedItems.push(btn.entry); } }); - this.__selectionChanged(selectedFiles); + this.__selectionChanged(selectedItems); } else { // unselect the other items const iconsLayout = this.getChildControl("icons-layout"); @@ -304,28 +306,28 @@ qx.Class.define("osparc.file.FolderContent", { }, this); }, - __attachListenersToTableItem: function(table) { + __attachListenersToTable: function(table) { table.addListener("cellTap", e => { if (e.getNativeEvent().ctrlKey) { this.setMultiSelect(true); } - const selectedFiles = []; + const selectedItems = []; const selectionRanges = table.getSelectionModel().getSelectedRanges(); selectionRanges.forEach(range => { for (let i=range.minIndex; i<=range.maxIndex; i++) { const row = table.getTableModel().getRowData(i); - if (osparc.file.FilesTree.isFile(row.entry)) { - selectedFiles.push(row.entry); + if (row && "entry" in row) { + selectedItems.push(row.entry); } } }); - this.__selectionChanged(selectedFiles); + this.__selectionChanged(selectedItems); }, this); table.addListener("cellDbltap", e => { const selectedRow = e.getRow(); - const rowData = table.getTableModel().getRowData(selectedRow); - if ("entry" in rowData) { - this.__itemDblTapped(rowData.entry); + const row = table.getTableModel().getRowData(selectedRow); + if (row && 
"entry" in row) { + this.__itemDblTapped(row.entry); } }, this); } diff --git a/services/static-webserver/client/source/class/osparc/file/FolderViewer.js b/services/static-webserver/client/source/class/osparc/file/FolderViewer.js index af2ca15fb79..932c2ed9381 100644 --- a/services/static-webserver/client/source/class/osparc/file/FolderViewer.js +++ b/services/static-webserver/client/source/class/osparc/file/FolderViewer.js @@ -22,7 +22,7 @@ qx.Class.define("osparc.file.FolderViewer", { extend: qx.ui.core.Widget, - construct: function(allowMultiselection = true) { + construct: function(allowMultiSelection = true) { this.base(arguments); this._setLayout(new qx.ui.layout.VBox(10)); @@ -33,25 +33,25 @@ qx.Class.define("osparc.file.FolderViewer", { folderUpBtn.addListener("execute", () => this.fireDataEvent("folderUp", this.getFolder()), this); this.getChildControl("folder-path"); let multiSelectButton = null; - if (allowMultiselection) { + if (allowMultiSelection) { multiSelectButton = this.getChildControl("multi-select-button"); } - const gridViewButton = this.getChildControl("view-options-icons"); const listViewButton = this.getChildControl("view-options-list"); + const gridViewButton = this.getChildControl("view-options-icons"); const folderContent = this.getChildControl("folder-content"); const selectedFileLayout = this.getChildControl("selected-file-layout"); this.bind("folder", this.getChildControl("folder-up"), "enabled", { - converter: folder => Boolean(folder && folder.getPathLabel && folder.getPathLabel().length > 1) + converter: folder => Boolean(folder && folder.getDisplayPath && folder.getDisplayPath()) }); this.bind("folder", this.getChildControl("folder-path"), "value", { - converter: folder => folder ? folder.getPathLabel().join(" / ") : this.tr("Select folder") + converter: folder => folder && folder.getDisplayPath ? 
folder.getDisplayPath() : this.tr("Select folder") }); this.bind("folder", folderContent, "folder"); - if (allowMultiselection) { + if (allowMultiSelection) { multiSelectButton.bind("value", folderContent, "multiSelect"); folderContent.bind("multiSelect", multiSelectButton, "value"); multiSelectButton.addListener("changeValue", e => { @@ -69,7 +69,7 @@ qx.Class.define("osparc.file.FolderViewer", { multiSelectButton.setValue(false); }); - folderContent.addListener("requestDatasetFiles", e => this.fireDataEvent("requestDatasetFiles", e.getData())); + folderContent.addListener("requestPathItems", e => this.fireDataEvent("requestPathItems", e.getData())); folderContent.addListener("selectionChanged", e => { const selectionData = e.getData(); selectedFileLayout.setItemSelected(selectionData); @@ -81,9 +81,6 @@ qx.Class.define("osparc.file.FolderViewer", { folderContent.addListener("openItemSelected", e => { const entry = e.getData(); this.fireDataEvent("openItemSelected", entry); - if (osparc.file.FilesTree.isDir(entry)) { - this.setFolder(entry); - } }); }, @@ -100,7 +97,7 @@ qx.Class.define("osparc.file.FolderViewer", { events: { "openItemSelected": "qx.event.type.Data", // dbltap "folderUp": "qx.event.type.Data", - "requestDatasetFiles": "qx.event.type.Data" + "requestPathItems": "qx.event.type.Data" }, members: { @@ -128,6 +125,7 @@ qx.Class.define("osparc.file.FolderViewer", { marginLeft: 10, marginRight: 10 }); + control.bind("value", control, "toolTipText"); header.addAt(control, 1, { flex: 1 }); @@ -142,12 +140,13 @@ qx.Class.define("osparc.file.FolderViewer", { header.addAt(control, 2); break; } - case "view-options-rgroup": + case "view-options-radio-group": control = new qx.ui.form.RadioGroup(); break; case "view-options-icons": { control = new qx.ui.form.ToggleButton(null, "@MaterialIcons/apps/18"); - const group = this.getChildControl("view-options-rgroup"); + osparc.utils.Utils.setIdToWidget(control, "folderGridView"); + const group = this.getChildControl("view-options-radio-group"); group.add(control); const header = this.getChildControl("header"); header.addAt(control, 3); @@ -155,7 +154,7 @@ qx.Class.define("osparc.file.FolderViewer", { } case "view-options-list": { control = new qx.ui.form.ToggleButton(null, "@MaterialIcons/reorder/18"); - const group = this.getChildControl("view-options-rgroup"); + const group = this.getChildControl("view-options-radio-group"); group.add(control); const header = this.getChildControl("header"); header.addAt(control, 4); diff --git a/services/static-webserver/client/source/class/osparc/file/StorageAsyncJob.js b/services/static-webserver/client/source/class/osparc/file/StorageAsyncJob.js new file mode 100644 index 00000000000..ac45eab9c24 --- /dev/null +++ b/services/static-webserver/client/source/class/osparc/file/StorageAsyncJob.js @@ -0,0 +1,98 @@ +/* ************************************************************************ + + osparc - the simcore frontend + + https://osparc.io + + Copyright: + 2025 IT'IS Foundation, https://itis.swiss + + License: + MIT: https://opensource.org/licenses/MIT + + Authors: + * Odei Maiz (odeimaiz) + +************************************************************************ */ + +qx.Class.define("osparc.file.StorageAsyncJob", { + extend: qx.core.Object, + + construct: function(jobId, interval = 1000) { + this.base(arguments); + + this.setPollInterval(interval); + + this.setJobId(jobId); + }, + + events: { + "resultReceived": "qx.event.type.Data", + "taskAborted": "qx.event.type.Event", + "pollingError": 
"qx.event.type.Data", + }, + + properties: { + pollInterval: { + check: "Number", + nullable: false, + init: 1000 + }, + + jobId: { + check: "String", + nullable: false, + apply: "fetchStatus", + }, + }, + + members: { + __retries: null, + __aborting: null, + + fetchStatus: function() { + const jobId = this.getJobId(); + osparc.data.Resources.fetch("storageAsyncJobs", "jobStatus", { url: { jobId } }) + .then(status => { + if (this.__aborting) { + return; + } + if (status["done"]) { + this.__fetchResults(); + } else { + setTimeout(() => this.fetchStatus(), this.getPollInterval()); + } + }) + .catch(err => { + if (this.__retries > 0) { + this.__retries--; + this.fetchStatus(); + return; + } + this.fireDataEvent("pollingError", err); + }); + }, + + __fetchResults: function() { + const jobId = this.getJobId(); + osparc.data.Resources.fetch("storageAsyncJobs", "jobResult", { url: { jobId } }) + .then(resp => { + this.fireDataEvent("resultReceived", resp["result"]); + }) + .catch(err => { + console.error(err); + this.fireDataEvent("pollingError", err); + }); + }, + + abortRequested: function() { + this.__aborting = true; + const jobId = this.getJobId(); + osparc.data.Resources.fetch("storageAsyncJobs", "result", { url: { jobId } }) + .then(() => this.fireEvent("taskAborted")) + .catch(err => { + throw err; + }); + } + } +}); diff --git a/services/static-webserver/client/source/class/osparc/file/TreeFolderView.js b/services/static-webserver/client/source/class/osparc/file/TreeFolderView.js index d85836d5ed7..a0fb8bc88ee 100644 --- a/services/static-webserver/client/source/class/osparc/file/TreeFolderView.js +++ b/services/static-webserver/client/source/class/osparc/file/TreeFolderView.js @@ -40,6 +40,12 @@ qx.Class.define("osparc.file.TreeFolderView", { _createChildControlImpl: function(id) { let control; switch (id) { + case "header-layout": + control = new qx.ui.container.Composite(new qx.ui.layout.HBox()).set({ + marginLeft: 8 + }); + this._addAt(control, 0); + break; case "reload-button": control = new qx.ui.form.Button().set({ label: this.tr("Reload"), @@ -47,7 +53,16 @@ qx.Class.define("osparc.file.TreeFolderView", { icon: "@FontAwesome5Solid/sync-alt/14", allowGrowX: false }); - this._add(control); + this.getChildControl("header-layout").add(control); + break; + case "total-size-label": + control = new qx.ui.basic.Atom().set({ + label: this.tr("Calculating Size"), + font: "text-14", + icon: "@FontAwesome5Solid/spinner/14", + allowGrowX: false + }); + this.getChildControl("header-layout").add(control); break; case "tree-folder-layout": control = new qx.ui.splitpane.Pane("horizontal"); @@ -80,22 +95,35 @@ qx.Class.define("osparc.file.TreeFolderView", { }, __buildLayout: function() { - this.getChildControl("reload-button"); const folderTree = this.getChildControl("folder-tree"); const folderViewer = this.getChildControl("folder-viewer"); - // Connect elements folderTree.addListener("selectionChanged", () => { - const selectedFolder = folderTree.getSelectedItem(); - if (selectedFolder && (osparc.file.FilesTree.isDir(selectedFolder) || (selectedFolder.getChildren && selectedFolder.getChildren().length))) { - folderViewer.setFolder(selectedFolder); + const selectedModel = folderTree.getSelectedItem(); + if (selectedModel) { + if (osparc.file.FilesTree.isDir(selectedModel)) { + folderViewer.setFolder(selectedModel); + } + if (selectedModel.getPath() && !selectedModel.getLoaded()) { + selectedModel.setLoaded(true); + folderTree.requestPathItems(selectedModel.getLocation(), 
selectedModel.getPath()); + } } }, this); folderViewer.addListener("openItemSelected", e => { - const data = e.getData(); - folderTree.openNodeAndParents(data); - folderTree.setSelection(new qx.data.Array([data])); + const selectedModel = e.getData(); + if (selectedModel) { + if (osparc.file.FilesTree.isDir(selectedModel)) { + folderViewer.setFolder(selectedModel); + } + folderTree.openNodeAndParents(selectedModel); + folderTree.setSelection(new qx.data.Array([selectedModel])); + if (selectedModel.getPath() && !selectedModel.getLoaded()) { + selectedModel.setLoaded(true); + folderTree.requestPathItems(selectedModel.getLocation(), selectedModel.getPath()); + } + } }, this); folderViewer.addListener("folderUp", e => { @@ -103,14 +131,11 @@ qx.Class.define("osparc.file.TreeFolderView", { const parent = folderTree.getParent(currentFolder); if (parent) { folderTree.setSelection(new qx.data.Array([parent])); - folderViewer.setFolder(parent); + if (osparc.file.FilesTree.isDir(parent)) { + folderViewer.setFolder(parent); + } } }, this); - - folderViewer.addListener("requestDatasetFiles", e => { - const data = e.getData(); - folderTree.requestDatasetFiles(data.locationId, data.datasetId); - }, this); }, openPath: function(path) { @@ -129,6 +154,33 @@ qx.Class.define("osparc.file.TreeFolderView", { } else { folderViewer.resetFolder(); } + }, + + requestSize: function(pathId) { + const totalSize = this.getChildControl("total-size-label"); + totalSize.getChildControl("icon").getContentElement().addClass("rotate"); + + osparc.data.Resources.fetch("storagePaths", "requestSize", { url: { pathId } }) + .then(resp => { + const jobId = resp["job_id"]; + if (jobId) { + const asyncJob = new osparc.file.StorageAsyncJob(jobId); + asyncJob.addListener("resultReceived", e => { + const size = e.getData(); + totalSize.set({ + icon: null, + label: this.tr("Total size: ") + osparc.utils.Utils.bytesToSize(size), + }); + }); + asyncJob.addListener("pollingError", e => { + totalSize.hide(); + }); + } + }) + .catch(err => { + console.error(err); + totalSize.hide(); + }); } } }); diff --git a/services/static-webserver/client/source/class/osparc/form/json/JsonSchemaForm.js b/services/static-webserver/client/source/class/osparc/form/json/JsonSchemaForm.js index 23985546320..8bee13a4f6c 100644 --- a/services/static-webserver/client/source/class/osparc/form/json/JsonSchemaForm.js +++ b/services/static-webserver/client/source/class/osparc/form/json/JsonSchemaForm.js @@ -90,7 +90,7 @@ qx.Class.define("osparc.form.json.JsonSchemaForm", { } else { // Validation failed this._add(new qx.ui.basic.Label().set({ - value: this.tr("There was an error generating the form or one or more schemas failed to validate. Check your Javascript console for more details."), + value: this.tr("There was an issue generating the form or one or more schemas failed to validate. 
Check your JavaScript console for more details."), font: "title-16", textColor: "service-window-hint", rich: true, @@ -114,8 +114,8 @@ if (errors) { console.error(errors); if (showMessage) { - let message = `${errors[0].dataPath} ${errors[0].message}`; - osparc.FlashMessenger.logAs(message, "ERROR"); + const message = `${errors[0].dataPath} ${errors[0].message}`; + osparc.FlashMessenger.logError(message); } return false; } diff --git a/services/static-webserver/client/source/class/osparc/form/renderer/PropForm.js index ccd205a92e0..b62fc32be29 100644 --- a/services/static-webserver/client/source/class/osparc/form/renderer/PropForm.js +++ b/services/static-webserver/client/source/class/osparc/form/renderer/PropForm.js @@ -84,7 +84,7 @@ qx.Class.define("osparc.form.renderer.PropForm", { isFieldParametrizable: function(field) { const supportedTypes = []; - const paramsMD = osparc.service.Utils.getParametersMetadata(); + const paramsMD = osparc.store.Services.getParametersMetadata(); paramsMD.forEach(paramMD => { supportedTypes.push(osparc.node.ParameterEditor.getParameterOutputTypeFromMD(paramMD)); }); @@ -771,9 +771,7 @@ qx.Class.define("osparc.form.renderer.PropForm", { }); this.__highlightCompatibles(compatiblePorts); }) - .catch(err => { - console.error(err); - }); + .catch(err => console.error(err)); e.preventDefault(); } diff --git a/services/static-webserver/client/source/class/osparc/form/tag/TagManager.js index 111c020fda6..04b6c06c393 100644 --- a/services/static-webserver/client/source/class/osparc/form/tag/TagManager.js +++ b/services/static-webserver/client/source/class/osparc/form/tag/TagManager.js @@ -70,7 +70,9 @@ qx.Class.define("osparc.form.tag.TagManager", { this._add(filter); const tagsContainer = this.__tagsContainer = new qx.ui.container.Composite(new qx.ui.layout.VBox()); - this._add(tagsContainer, { + const scrollTags = new qx.ui.container.Scroll(); + scrollTags.add(tagsContainer); + this._add(scrollTags, { flex: 1 }); @@ -90,6 +92,10 @@ qx.Class.define("osparc.form.tag.TagManager", { newItem.addListener("cancelNewTag", e => tagsContainer.remove(e.getTarget()), this); newItem.addListener("deleteTag", e => tagsContainer.remove(e.getTarget()), this); tagsContainer.add(newItem); + + // scroll down + const height = scrollTags.getSizeHint().height; + scrollTags.scrollToY(height); }); this._add(addTagButton); @@ -120,7 +126,11 @@ qx.Class.define("osparc.form.tag.TagManager", { __repopulateTags: function() { this.__tagsContainer.removeAll(); const tags = osparc.store.Tags.getInstance().getTags(); - tags.forEach(tag => this.__tagsContainer.add(this.__tagButton(tag))); + const tagButtons = []; + tags.forEach(tag => tagButtons.push(this.__tagButton(tag))); + // list the selected tags first + tagButtons.sort((a, b) => b.getValue() - a.getValue()); + tagButtons.forEach(tagButton => this.__tagsContainer.add(tagButton)); }, __tagButton: function(tag) { diff --git a/services/static-webserver/client/source/class/osparc/info/CommentAdd.js index 53b26c23bad..43a8aa86edc 100644 --- a/services/static-webserver/client/source/class/osparc/info/CommentAdd.js +++ b/services/static-webserver/client/source/class/osparc/info/CommentAdd.js @@ -125,10 +125,7 @@
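// From here on, the PR collapses the repeated `console.error(err)` plus
// `FlashMessenger.logAs(err.message, "ERROR")` pairs into a single
// osparc.FlashMessenger.logError(err[, userMsg]) call. The helper itself is not part
// of this diff; a rough sketch of the assumed behavior (names hypothetical):
//
//   logError: function(err, friendlyMsg) {
//     console.error(err); // keep the full trace in the console
//     const msg = friendlyMsg || (err && err.message) || qx.locale.Manager.tr("Something went wrong");
//     return this.logAs(msg, "ERROR"); // and surface a flash message to the user
//   },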
this.fireEvent("commentAdded"); commentField.getChildControl("text-area").setValue(""); }) - .catch(err => { - console.error(err); - osparc.FlashMessenger.logAs(err.message, "ERROR"); - }); + .catch(err => osparc.FlashMessenger.logError(err)); } }); } diff --git a/services/static-webserver/client/source/class/osparc/info/MergedLarge.js b/services/static-webserver/client/source/class/osparc/info/MergedLarge.js index d85ffef353d..ff95c2ea350 100644 --- a/services/static-webserver/client/source/class/osparc/info/MergedLarge.js +++ b/services/static-webserver/client/source/class/osparc/info/MergedLarge.js @@ -415,9 +415,8 @@ qx.Class.define("osparc.info.MergedLarge", { qx.event.message.Bus.getInstance().dispatchByName("updateStudy", studyData); }) .catch(err => { - console.error(err); - const msg = err.message || this.tr("There was an error while updating the information."); - osparc.FlashMessenger.getInstance().logAs(msg, "ERROR"); + const msg = this.tr("An issue occurred while updating the information."); + osparc.FlashMessenger.logError(err, msg); }); } } diff --git a/services/static-webserver/client/source/class/osparc/info/ServiceLarge.js b/services/static-webserver/client/source/class/osparc/info/ServiceLarge.js index 217c13e58e9..d0ab2cf36b1 100644 --- a/services/static-webserver/client/source/class/osparc/info/ServiceLarge.js +++ b/services/static-webserver/client/source/class/osparc/info/ServiceLarge.js @@ -143,9 +143,14 @@ qx.Class.define("osparc.info.ServiceLarge", { // Show description only vBox.add(description.getChildren()[1]); } else { - const title = this.__createTitle(); + const hBox = new qx.ui.container.Composite(new qx.ui.layout.HBox(10)); + const icon = this.__createIcon(); + const iconLayout = this.__createViewWithEdit(icon, this.__openIconEditor); + hBox.add(iconLayout); + const title = this.__createName(); const titleLayout = this.__createViewWithEdit(title, this.__openTitleEditor); - vBox.add(titleLayout); + hBox.add(titleLayout); + vBox.add(hBox); const extraInfo = this.__extraInfo(); const extraInfoLayout = this.__createExtraInfo(extraInfo); @@ -218,7 +223,16 @@ qx.Class.define("osparc.info.ServiceLarge", { return null; }, - __createTitle: function() { + __createIcon: function() { + const serviceIcon = this.getService()["icon"] || "osparc/no_photography_black_24dp.svg"; + const icon = osparc.dashboard.CardBase.createCardIcon().set({ + source: serviceIcon, + }); + osparc.utils.Utils.setAltToImage(icon.getChildControl("image"), "card-icon"); + return icon; + }, + + __createName: function() { const serviceName = this.getService()["name"]; let text = ""; if (this.getInstanceLabel()) { @@ -443,9 +457,19 @@ qx.Class.define("osparc.info.ServiceLarge", { return container; }, + __openIconEditor: function() { + const iconEditor = new osparc.widget.Renamer(this.getService()["icon"], null, this.tr("Edit Icon")); + iconEditor.addListener("labelChanged", e => { + iconEditor.close(); + const newIcon = e.getData()["newLabel"]; + this.__patchService("icon", newIcon); + }, this); + iconEditor.center(); + iconEditor.open(); + }, + __openTitleEditor: function() { - const title = this.tr("Edit Title"); - const titleEditor = new osparc.widget.Renamer(this.getService()["name"], null, title); + const titleEditor = new osparc.widget.Renamer(this.getService()["name"], null, this.tr("Edit Name")); titleEditor.addListener("labelChanged", e => { titleEditor.close(); const newLabel = e.getData()["newLabel"]; @@ -551,9 +575,8 @@ qx.Class.define("osparc.info.ServiceLarge", { 
this.fireDataEvent("updateService", this.getService()); }) .catch(err => { - console.error(err); - const msg = err.message || this.tr("There was an error while updating the information."); - osparc.FlashMessenger.getInstance().logAs(msg, "ERROR"); + const msg = this.tr("An issue occurred while updating the information."); + osparc.FlashMessenger.logError(err, msg); }) .finally(() => this.setEnabled(true)); } diff --git a/services/static-webserver/client/source/class/osparc/info/ServiceUtils.js b/services/static-webserver/client/source/class/osparc/info/ServiceUtils.js index 3944e488890..7e4710bcbe2 100644 --- a/services/static-webserver/client/source/class/osparc/info/ServiceUtils.js +++ b/services/static-webserver/client/source/class/osparc/info/ServiceUtils.js @@ -64,14 +64,14 @@ qx.Class.define("osparc.info.ServiceUtils", { }, createVersionDisplay: function(key, version) { - const versionDisplay = osparc.service.Utils.getVersionDisplay(key, version); + const versionDisplay = osparc.store.Services.getVersionDisplay(key, version); const label = new qx.ui.basic.Label(versionDisplay); osparc.utils.Utils.setIdToWidget(label, "serviceVersion"); return label; }, createReleasedDate: function(key, version) { - const releasedDate = osparc.service.Utils.getReleasedDate(key, version); + const releasedDate = osparc.store.Services.getReleasedDate(key, version); if (releasedDate) { const label = new qx.ui.basic.Label(); label.set({ @@ -181,7 +181,7 @@ qx.Class.define("osparc.info.ServiceUtils", { // display markdown link content if that's the case if ( osparc.utils.Utils.isValidHttpUrl(serviceData["description"]) && - serviceData["description"].slice(-3) === ".md" + serviceData["description"].endsWith(".md") ) { // if it's a link, fetch the content fetch(serviceData["description"]) diff --git a/services/static-webserver/client/source/class/osparc/info/StudyLarge.js b/services/static-webserver/client/source/class/osparc/info/StudyLarge.js index 6512e4a459d..e0b66e41047 100644 --- a/services/static-webserver/client/source/class/osparc/info/StudyLarge.js +++ b/services/static-webserver/client/source/class/osparc/info/StudyLarge.js @@ -62,19 +62,9 @@ qx.Class.define("osparc.info.StudyLarge", { const vBox = new qx.ui.container.Composite(new qx.ui.layout.VBox(10)); - const mainHBox = new qx.ui.container.Composite(new qx.ui.layout.VBox(5)); - - const leftVBox = new qx.ui.container.Composite(new qx.ui.layout.VBox(5)); - mainHBox.add(leftVBox, { - flex: 1 - }); - - vBox.add(mainHBox); - - const extraInfo = this.__extraInfo(); - const extraInfoLayout = this.__createExtraInfo(extraInfo); - - leftVBox.add(extraInfoLayout); + const infoElements = this.__infoElements(); + const infoLayout = osparc.info.StudyUtils.infoElementsToLayout(infoElements); + vBox.add(infoLayout); let text = osparc.product.Utils.getStudyAlias({firstUpperCase: true}) + " Id"; if (this.__isTemplate) { @@ -87,7 +77,7 @@ qx.Class.define("osparc.info.StudyLarge", { allowGrowX: false }); copyIdButton.addListener("execute", () => osparc.utils.Utils.copyTextToClipboard(this.getStudy().getUuid())); - leftVBox.add(copyIdButton); + vBox.add(copyIdButton); const scrollContainer = new qx.ui.container.Scroll(); scrollContainer.add(vBox); @@ -97,8 +87,8 @@ qx.Class.define("osparc.info.StudyLarge", { }); }, - __extraInfo: function() { - const extraInfo = { + __infoElements: function() { + const infoLayout = { "TITLE": { label: this.tr("Title:"), view: osparc.info.StudyUtils.createTitle(this.getStudy()), @@ -166,7 +156,7 @@ 
qx.Class.define("osparc.info.StudyLarge", { this.getStudy().getQuality() && osparc.metadata.Quality.isEnabled(this.getStudy().getQuality()) ) { - extraInfo["QUALITY"] = { + infoLayout["QUALITY"] = { label: this.tr("Quality:"), view: osparc.info.StudyUtils.createQuality(this.getStudy()), action: { @@ -178,7 +168,7 @@ qx.Class.define("osparc.info.StudyLarge", { } if (osparc.product.Utils.showClassifiers()) { - extraInfo["CLASSIFIERS"] = { + infoLayout["CLASSIFIERS"] = { label: this.tr("Classifiers:"), view: osparc.info.StudyUtils.createClassifiers(this.getStudy()), action: (this.getStudy().getClassifiers().length || this.__canIWrite()) ? { @@ -192,18 +182,14 @@ qx.Class.define("osparc.info.StudyLarge", { if (!this.__isTemplate) { const pathLabel = new qx.ui.basic.Label(); pathLabel.setValue(this.getStudy().getLocationString()); - extraInfo["LOCATION"] = { + infoLayout["LOCATION"] = { label: this.tr("Location:"), view: pathLabel, action: null }; } - return extraInfo; - }, - - __createExtraInfo: function(extraInfo) { - return osparc.info.StudyUtils.createExtraInfoGrid(extraInfo); + return infoLayout; }, __createStudyId: function() { @@ -330,9 +316,8 @@ qx.Class.define("osparc.info.StudyLarge", { } }) .catch(err => { - console.error(err); - const msg = err.message || this.tr("There was an error while updating the information."); - osparc.FlashMessenger.getInstance().logAs(msg, "ERROR"); + const msg = this.tr("An issue occurred while updating the information."); + osparc.FlashMessenger.logError(err, msg); }); } } diff --git a/services/static-webserver/client/source/class/osparc/info/StudyUtils.js b/services/static-webserver/client/source/class/osparc/info/StudyUtils.js index b143f127a05..386a3958d18 100644 --- a/services/static-webserver/client/source/class/osparc/info/StudyUtils.js +++ b/services/static-webserver/client/source/class/osparc/info/StudyUtils.js @@ -205,7 +205,9 @@ qx.Class.define("osparc.info.StudyUtils", { * @param study {osparc.data.model.Study} Study Model */ createTags: function(study) { - const tagsContainer = new qx.ui.container.Composite(new qx.ui.layout.HBox(5)); + const tagsContainer = new qx.ui.container.Composite(new qx.ui.layout.Flow(5, 5)).set({ + maxWidth: 420 + }); const addTags = model => { tagsContainer.removeAll(); @@ -252,7 +254,7 @@ qx.Class.define("osparc.info.StudyUtils", { return titleLayout; }, - createExtraInfoGrid: function(extraInfos) { + infoElementsToLayout: function(extraInfos) { const positions = { TITLE: { column: 0, @@ -308,17 +310,14 @@ qx.Class.define("osparc.info.StudyUtils", { }, }; - const grid = new qx.ui.layout.Grid(15, 5); - const grid2 = new qx.ui.layout.Grid(15, 5); - grid.setColumnAlign(0, "left", "top"); - const container = new qx.ui.container.Composite(new qx.ui.layout.VBox()); - const moreInfo = new qx.ui.container.Composite(grid); - const otherInfo = new qx.ui.container.Composite(grid2); - grid.setColumnFlex(0, 1); - grid2.setColumnFlex(0, 1); + const mainInfoGrid = new qx.ui.layout.Grid(15, 5); + mainInfoGrid.setColumnAlign(0, "left", "top"); + mainInfoGrid.setColumnFlex(0, 1); + const mainInfoLayout = new qx.ui.container.Composite(mainInfoGrid); - const box = this.__createSectionBox(qx.locale.Manager.tr("Details")); - const box2 = this.__createSectionBox(qx.locale.Manager.tr("Meta details")); + const extraInfoGrid = new qx.ui.layout.Grid(15, 5); + const extraInfoLayout = new qx.ui.container.Composite(extraInfoGrid); + extraInfoGrid.setColumnFlex(0, 1); let row = 0; let row2 = 0; @@ -334,35 +333,42 @@ 
qx.Class.define("osparc.info.StudyUtils", { marginRight: 15 }); } - titleLayout.add(extraInfo.view); - otherInfo.add(titleLayout, { + titleLayout.add(extraInfo.view, { + flex: 1 + }); + extraInfoLayout.add(titleLayout, { row: row2, column: gridInfo.column }); row2++; - grid2.setRowHeight(row2, 5); // spacer + extraInfoGrid.setRowHeight(row2, 5); // spacer row2++; } else { const titleLayout = this.__titleWithEditLayout(extraInfo); - moreInfo.add(titleLayout, { + mainInfoLayout.add(titleLayout, { row, column: gridInfo.column }); row++; - moreInfo.add(extraInfo.view, { + mainInfoLayout.add(extraInfo.view, { row, column: gridInfo.column }); row++; - grid.setRowHeight(row, 5); // spacer + mainInfoGrid.setRowHeight(row, 5); // spacer row++; } } }); - box.add(moreInfo); - box2.add(otherInfo); - container.addAt(box, 0); + + const container = new qx.ui.container.Composite(new qx.ui.layout.VBox()); + const box1 = this.__createSectionBox(qx.locale.Manager.tr("Details")); + box1.add(mainInfoLayout); + container.addAt(box1, 0); + + const box2 = this.__createSectionBox(qx.locale.Manager.tr("Meta details")); + box2.add(extraInfoLayout); container.addAt(box2, 1); return container; @@ -403,114 +409,5 @@ qx.Class.define("osparc.info.StudyUtils", { box.setLayout(new qx.ui.layout.VBox(10)); return box; }, - - patchStudyData: function(studyData, fieldKey, value) { - if (osparc.data.model.Study.OwnPatch.includes(fieldKey)) { - console.error(fieldKey, "has it's own PATCH path"); - return null; - } - - const patchData = {}; - patchData[fieldKey] = value; - const params = { - url: { - "studyId": studyData["uuid"] - }, - data: patchData - }; - return osparc.data.Resources.fetch("studies", "patch", params) - .then(() => { - studyData[fieldKey] = value; - // A bit hacky, but it's not sent back to the backend - studyData["lastChangeDate"] = new Date().toISOString(); - }); - }, - - patchNodeData: function(studyData, nodeId, patchData) { - const params = { - url: { - "studyId": studyData["uuid"], - "nodeId": nodeId - }, - data: patchData - }; - return osparc.data.Resources.fetch("studies", "patchNode", params) - .then(() => { - Object.keys(patchData).forEach(key => { - studyData["workbench"][nodeId][key] = patchData[key]; - }); - // A bit hacky, but it's not sent back to the backend - studyData["lastChangeDate"] = new Date().toISOString(); - }); - }, - - addCollaborator: function(studyData, gid, permissions) { - const params = { - url: { - "studyId": studyData["uuid"], - "gId": gid - }, - data: permissions - }; - return osparc.data.Resources.fetch("studies", "postAccessRights", params) - .then(() => { - studyData["accessRights"][gid] = permissions; - studyData["lastChangeDate"] = new Date().toISOString(); - }) - .catch(err => osparc.FlashMessenger.logAs(err.message, "ERROR")); - }, - - addCollaborators: function(studyData, newCollaborators) { - const promises = []; - Object.keys(newCollaborators).forEach(gid => { - const params = { - url: { - "studyId": studyData["uuid"], - "gId": gid - }, - data: newCollaborators[gid] - }; - promises.push(osparc.data.Resources.fetch("studies", "postAccessRights", params)); - }); - return Promise.all(promises) - .then(() => { - Object.keys(newCollaborators).forEach(gid => { - studyData["accessRights"][gid] = newCollaborators[gid]; - }); - studyData["lastChangeDate"] = new Date().toISOString(); - }) - .catch(err => osparc.FlashMessenger.logAs(err.message, "ERROR")); - }, - - removeCollaborator: function(studyData, gid) { - const params = { - url: { - "studyId": studyData["uuid"], - 
"gId": gid - } - }; - return osparc.data.Resources.fetch("studies", "deleteAccessRights", params) - .then(() => { - delete studyData["accessRights"][gid]; - studyData["lastChangeDate"] = new Date().toISOString(); - }) - .catch(err => osparc.FlashMessenger.logAs(err.message, "ERROR")); - }, - - updateCollaborator: function(studyData, gid, newPermissions) { - const params = { - url: { - "studyId": studyData["uuid"], - "gId": gid - }, - data: newPermissions - }; - return osparc.data.Resources.fetch("studies", "putAccessRights", params) - .then(() => { - studyData["accessRights"][gid] = newPermissions; - studyData["lastChangeDate"] = new Date().toISOString(); - }) - .catch(err => osparc.FlashMessenger.logAs(err.message, "ERROR")); - }, } }); diff --git a/services/static-webserver/client/source/class/osparc/jobs/JobInfo.js b/services/static-webserver/client/source/class/osparc/jobs/JobInfo.js new file mode 100644 index 00000000000..b05bca077df --- /dev/null +++ b/services/static-webserver/client/source/class/osparc/jobs/JobInfo.js @@ -0,0 +1,59 @@ +/* ************************************************************************ + + osparc - the simcore frontend + + https://osparc.io + + Copyright: + 2025 IT'IS Foundation, https://itis.swiss + + License: + MIT: https://opensource.org/licenses/MIT + + Authors: + * Odei Maiz (odeimaiz) + +************************************************************************ */ + + +qx.Class.define("osparc.jobs.JobInfo", { + extend: qx.ui.core.Widget, + + construct(jobId) { + this.base(arguments); + + this._setLayout(new qx.ui.layout.VBox()); + + const jobInfoViewer = this.getChildControl("job-info-viewer"); + osparc.store.Jobs.getInstance().fetchJobInfo(jobId) + .then(info => { + jobInfoViewer.setJson(info); + }); + }, + + statics: { + popUpInWindow: function(jobInfo) { + const title = qx.locale.Manager.tr("Job Info"); + const win = osparc.ui.window.Window.popUpInWindow(jobInfo, title, 600, 400); + win.open(); + return win; + } + }, + + members: { + _createChildControlImpl: function(id) { + let control; + switch (id) { + case "job-info-viewer": { + control = new osparc.ui.basic.JsonTreeWidget(); + const container = new qx.ui.container.Scroll(); + container.add(control); + this._add(container); + break; + } + } + + return control || this.base(arguments, id); + }, + } +}) diff --git a/services/static-webserver/client/source/class/osparc/jobs/JobsBrowser.js b/services/static-webserver/client/source/class/osparc/jobs/JobsBrowser.js new file mode 100644 index 00000000000..bab800f08d6 --- /dev/null +++ b/services/static-webserver/client/source/class/osparc/jobs/JobsBrowser.js @@ -0,0 +1,85 @@ +/* ************************************************************************ + + osparc - the simcore frontend + + https://osparc.io + + Copyright: + 2025 IT'IS Foundation, https://itis.swiss + + License: + MIT: https://opensource.org/licenses/MIT + + Authors: + * Odei Maiz (odeimaiz) + +************************************************************************ */ + + +qx.Class.define("osparc.jobs.JobsBrowser", { + extend: qx.ui.core.Widget, + + construct() { + this.base(arguments); + + this._setLayout(new qx.ui.layout.VBox(10)); + + const jobsFilter = this.getChildControl("jobs-filter"); + this.getChildControl("jobs-ongoing"); + const jobsTable = this.getChildControl("jobs-table"); + + jobsFilter.getChildControl("textfield").addListener("input", e => { + const filterText = e.getData(); + jobsTable.getTableModel().setFilters({ + text: filterText, + }); + }); + }, + + statics: { + 
popUpInWindow: function(jobsBrowser) { + if (!jobsBrowser) { + jobsBrowser = new osparc.jobs.JobsBrowser(); + } + const title = qx.locale.Manager.tr("Jobs"); + const win = osparc.ui.window.Window.popUpInWindow(jobsBrowser, title, 1100, 500); + win.open(); + return win; + } + }, + + members: { + _createChildControlImpl: function(id) { + let control; + switch (id) { + case "header-filter": + control = new qx.ui.container.Composite(new qx.ui.layout.HBox(5)); + this._add(control); + break; + case "jobs-filter": + control = new osparc.filter.TextFilter("text", "jobsList").set({ + allowStretchX: true, + margin: 0 + }); + this.getChildControl("header-filter").add(control, { + flex: 1 + }); + break; + case "jobs-ongoing": + control = new qx.ui.form.CheckBox().set({ + label: "Hide finished jobs", + value: true, + enabled: false, + }); + this.getChildControl("header-filter").add(control); + break; + case "jobs-table": + control = new osparc.jobs.JobsTable(); + this._add(control); + break; + } + + return control || this.base(arguments, id); + }, + } +}) diff --git a/services/static-webserver/client/source/class/osparc/jobs/JobsButton.js b/services/static-webserver/client/source/class/osparc/jobs/JobsButton.js new file mode 100644 index 00000000000..0cb4379bab3 --- /dev/null +++ b/services/static-webserver/client/source/class/osparc/jobs/JobsButton.js @@ -0,0 +1,84 @@ +/* ************************************************************************ + + osparc - the simcore frontend + + https://osparc.io + + Copyright: + 2025 IT'IS Foundation, https://itis.swiss + + License: + MIT: https://opensource.org/licenses/MIT + + Authors: + * Odei Maiz (odeimaiz) + +************************************************************************ */ + +qx.Class.define("osparc.jobs.JobsButton", { + extend: qx.ui.core.Widget, + + construct: function() { + this.base(arguments); + + this._setLayout(new qx.ui.layout.Canvas()); + + this.set({ + width: 30, + alignX: "center", + cursor: "pointer", + visibility: "excluded", + toolTipText: this.tr("Jobs"), + }); + + const jobsStore = osparc.store.Jobs.getInstance(); + jobsStore.addListener("changeJobs", e => this.__updateJobsButton(), this); + this.addListener("tap", () => osparc.jobs.JobsBrowser.popUpInWindow(), this); + }, + + members: { + _createChildControlImpl: function(id) { + let control; + switch (id) { + case "icon": { + control = new qx.ui.basic.Image("@FontAwesome5Solid/cog/22"); + osparc.utils.Utils.addClass(control.getContentElement(), "rotateSlow"); + + const logoContainer = new qx.ui.container.Composite(new qx.ui.layout.HBox().set({ + alignY: "middle" + })); + logoContainer.add(control); + + this._add(logoContainer, { + height: "100%" + }); + break; + } + case "number": + control = new qx.ui.basic.Label().set({ + backgroundColor: "background-main-1", + font: "text-12" + }); + control.getContentElement().setStyles({ + "border-radius": "4px" + }); + this._add(control, { + bottom: 8, + right: 4 + }); + break; + } + return control || this.base(arguments, id); + }, + + __updateJobsButton: function() { + this._createChildControlImpl("icon"); + const number = this.getChildControl("number"); + + const jobsStore = osparc.store.Jobs.getInstance(); + const nJobs = jobsStore.getJobs().length; + number.setValue(nJobs.toString()); + nJobs ? 
this.show() : this.exclude(); + }, + } +}); diff --git a/services/static-webserver/client/source/class/osparc/jobs/JobsTable.js b/services/static-webserver/client/source/class/osparc/jobs/JobsTable.js new file mode 100644 index 00000000000..615e305706e --- /dev/null +++ b/services/static-webserver/client/source/class/osparc/jobs/JobsTable.js @@ -0,0 +1,163 @@ +/* ************************************************************************ + + osparc - the simcore frontend + + https://osparc.io + + Copyright: + 2025 IT'IS Foundation, https://itis.swiss + + License: + MIT: https://opensource.org/licenses/MIT + + Authors: + * Odei Maiz (odeimaiz) + +************************************************************************ */ + + +qx.Class.define("osparc.jobs.JobsTable", { + extend: qx.ui.table.Table, + + construct: function(filters) { + this.base(arguments); + + const model = new osparc.jobs.JobsTableModel(filters); + this.setTableModel(model); + + this.set({ + statusBarVisible: false, + headerCellHeight: 26, + rowHeight: 26, + }); + + const columnModel = this.getTableColumnModel(); + columnModel.setColumnVisible(this.self().COLS.JOB_ID.column, true); + + Object.values(this.self().COLS).forEach(col => columnModel.setColumnWidth(col.column, col.width)); + + const iconPathInfo = "osparc/circle-info-text.svg"; + const fontButtonRendererInfo = new osparc.ui.table.cellrenderer.ImageButtonRenderer("info", iconPathInfo); + columnModel.setDataCellRenderer(this.self().COLS.INFO.column, fontButtonRendererInfo); + + const iconPathStop = "osparc/circle-stop-text.svg"; + const fontButtonRendererStop = new osparc.ui.table.cellrenderer.ImageButtonRenderer("stop", iconPathStop); + columnModel.setDataCellRenderer(this.self().COLS.ACTION_STOP.column, fontButtonRendererStop); + + const iconPathDelete = "osparc/trash-text.svg"; + const fontButtonRendererDelete = new osparc.ui.table.cellrenderer.ImageButtonRenderer("delete", iconPathDelete); + columnModel.setDataCellRenderer(this.self().COLS.ACTION_DELETE.column, fontButtonRendererDelete); + + const iconPathLogs = "osparc/logs-text.svg"; + const fontButtonRendererLogs = new osparc.ui.table.cellrenderer.ImageButtonRenderer("logs", iconPathLogs); + columnModel.setDataCellRenderer(this.self().COLS.ACTION_LOGS.column, fontButtonRendererLogs); + + this.__attachHandlers(); + }, + + statics: { + COLS: { + JOB_ID: { + id: "jobId", + column: 0, + label: qx.locale.Manager.tr("Job Id"), + width: 170 + }, + SOLVER: { + id: "solver", + column: 1, + label: qx.locale.Manager.tr("Solver"), + width: 100 + }, + STATUS: { + id: "status", + column: 2, + label: qx.locale.Manager.tr("Status"), + width: 170 + }, + PROGRESS: { + id: "progress", + column: 3, + label: qx.locale.Manager.tr("Progress"), + width: 80 + }, + SUBMIT: { + id: "submit", + column: 4, + label: qx.locale.Manager.tr("Submitted"), + width: 130 + }, + START: { + id: "start", + column: 5, + label: qx.locale.Manager.tr("Started"), + width: 130 + }, + INFO: { + id: "info", + column: 6, + label: qx.locale.Manager.tr("Info"), + width: 40 + }, + INSTANCE: { + id: "instance", + column: 7, + label: qx.locale.Manager.tr("Instance"), + width: 90 + }, + ACTION_STOP: { + id: "info", + column: 8, + label: "", + width: 40 + }, + ACTION_DELETE: { + id: "info", + column: 9, + label: "", + width: 40 + }, + ACTION_LOGS: { + id: "info", + column: 10, + label: "", + width: 40 + }, + } + }, + + members: { + __attachHandlers: function() { + this.addListener("cellTap", e => { + const row = e.getRow(); + const target = e.getOriginalTarget(); + if 
(target.closest(".qx-material-button") && (target.tagName === "IMG" || target.tagName === "DIV")) { + const action = target.closest(".qx-material-button").getAttribute("data-action"); + if (action) { + this.__handleButtonClick(action, row); + } + } + }); + }, + + __handleButtonClick: function(action, row) { + const rowData = this.getTableModel().getRowData(row); + switch (action) { + case "info": { + const jobInfo = new osparc.jobs.JobInfo(rowData["jobId"]); + osparc.jobs.JobInfo.popUpInWindow(jobInfo); + break; + } + case "stop": + case "delete": + case "logs": { + const msg = `I wish I could ${action} the job ${rowData["jobId"]}`; + osparc.FlashMessenger.logAs(msg, "WARNING"); + break; + } + default: + console.warn(`Unknown action: ${action}`); + } + }, + } +}); diff --git a/services/static-webserver/client/source/class/osparc/jobs/JobsTableModel.js b/services/static-webserver/client/source/class/osparc/jobs/JobsTableModel.js new file mode 100644 index 00000000000..bfac55fd45e --- /dev/null +++ b/services/static-webserver/client/source/class/osparc/jobs/JobsTableModel.js @@ -0,0 +1,194 @@ +/* ************************************************************************ + + osparc - the simcore frontend + + https://osparc.io + + Copyright: + 2025 IT'IS Foundation, https://itis.swiss + + License: + MIT: https://opensource.org/licenses/MIT + + Authors: + * Odei Maiz (odeimaiz) + +************************************************************************ */ + + +qx.Class.define("osparc.jobs.JobsTableModel", { + extend: qx.ui.table.model.Remote, + + construct(filters) { + this.base(arguments); + + const jobsCols = osparc.jobs.JobsTable.COLS; + const colLabels = Object.values(jobsCols).map(col => col.label); + const colIDs = Object.values(jobsCols).map(col => col.id); + this.setColumns(colLabels, colIDs); + + if (filters) { + this.setFilters(filters); + } + + this.setSortColumnIndexWithoutSortingData(jobsCols.SUBMIT.column); + this.setSortAscendingWithoutSortingData(false); + this.setColumnSortable(jobsCols.INFO.column, false); + this.setColumnSortable(jobsCols.ACTION_STOP.column, false); + this.setColumnSortable(jobsCols.ACTION_DELETE.column, false); + this.setColumnSortable(jobsCols.ACTION_LOGS.column, false); + }, + + properties: { + isFetching: { + check: "Boolean", + init: false, + event: "changeFetching" + }, + + filters: { + check: "Object", + init: null, + apply: "reloadData", // force reload + }, + + orderBy: { + check: "Object", + init: { + field: "started_at", + direction: "desc" + } + }, + }, + + statics: { + SERVER_MAX_LIMIT: 49, + COLUMN_ID_TO_DB_COLUMN_MAP: { + 0: "started_at", + }, + }, + + members: { + // this should be done by the backend + __filterJobs: function(jobs) { + const filters = this.getFilters(); + return jobs.filter(job => { + if (filters) { + let match = false; + [ + "jobId", + "solver", + "status", + "instance", + ].forEach(filterableField => { + const getter = "get" + qx.lang.String.firstUp(filterableField); + const value = job[getter](); + // lowercase both + if (!match && value && value.toLowerCase().includes(filters.text.toLowerCase())) { + match = true; + } + }); + return match; + } + return true; + }); + }, + + // overridden + sortByColumn(columnIndex, ascending) { + this.setOrderBy({ + field: this.self().COLUMN_ID_TO_DB_COLUMN_MAP[columnIndex], + direction: ascending ? "asc" : "desc" + }) + this.base(arguments, columnIndex, ascending); + }, + + // overridden + _loadRowCount() { + const urlParams = { + offset: 0, + limit: 1, + filters: this.getFilters() ? 
+ JSON.stringify({ + "started_at": this.getFilters() + }) : + null, + orderBy: JSON.stringify(this.getOrderBy()), + }; + const options = { + resolveWResponse: true + }; + osparc.store.Jobs.getInstance().fetchJobs(urlParams, options) + .then(jobs => { + const filteredJobs = this.__filterJobs(jobs); + this._onRowCountLoaded(filteredJobs.length); + }) + .catch(() => this._onRowCountLoaded(null)); + }, + + // overridden + _loadRowData(firstRow, qxLastRow) { + this.setIsFetching(true); + + const lastRow = Math.min(qxLastRow, this._rowCount - 1); + // Returns a request promise with given offset and limit + const getFetchPromise = (offset, limit=this.self().SERVER_MAX_LIMIT) => { + const urlParams = { + limit, + offset, + filters: this.getFilters() ? + JSON.stringify({ + "started_at": this.getFilters() + }) : + null, + orderBy: JSON.stringify(this.getOrderBy()) + }; + return osparc.store.Jobs.getInstance().fetchJobs(urlParams) + .then(jobs => { + const filteredJobs = this.__filterJobs(jobs); + const data = []; + const jobsCols = osparc.jobs.JobsTable.COLS; + filteredJobs.forEach(job => { + data.push({ + [jobsCols.JOB_ID.id]: job.getJobId(), + [jobsCols.SOLVER.id]: job.getSolver(), + [jobsCols.STATUS.id]: job.getStatus(), + [jobsCols.PROGRESS.id]: job.getProgress() ? (job.getProgress() + "%") : "-", + [jobsCols.SUBMIT.id]: job.getSubmittedAt() ? osparc.utils.Utils.formatDateAndTime(job.getSubmittedAt()) : "-", + [jobsCols.START.id]: job.getStartedAt() ? osparc.utils.Utils.formatDateAndTime(job.getStartedAt()) : "-", + [jobsCols.INSTANCE.id]: job.getInstance(), + }); + }); + return data; + }); + }; + + // Divides the model row request into several server requests to comply with the number of rows server limit + const reqLimit = lastRow - firstRow + 1; // Number of requested rows + const nRequests = Math.ceil(reqLimit / this.self().SERVER_MAX_LIMIT); + if (nRequests > 1) { + const requests = []; + for (let i=firstRow; i <= lastRow; i += this.self().SERVER_MAX_LIMIT) { + requests.push(getFetchPromise(i, i > lastRow - this.self().SERVER_MAX_LIMIT + 1 ? 
reqLimit % this.self().SERVER_MAX_LIMIT : this.self().SERVER_MAX_LIMIT)) + } + Promise.all(requests) + .then(responses => this._onRowDataLoaded(responses.flat())) + .catch(err => { + console.error(err); + this._onRowDataLoaded(null); + }) + .finally(() => this.setIsFetching(false)); + } else { + getFetchPromise(firstRow, reqLimit) + .then(data => { + this._onRowDataLoaded(data); + }) + .catch(err => { + console.error(err) + this._onRowDataLoaded(null); + }) + .finally(() => this.setIsFetching(false)); + } + } + } +}) diff --git a/services/static-webserver/client/source/class/osparc/metadata/ClassifiersEditor.js b/services/static-webserver/client/source/class/osparc/metadata/ClassifiersEditor.js index 3f66e39d7ec..e1337f16f38 100644 --- a/services/static-webserver/client/source/class/osparc/metadata/ClassifiersEditor.js +++ b/services/static-webserver/client/source/class/osparc/metadata/ClassifiersEditor.js @@ -130,11 +130,11 @@ qx.Class.define("osparc.metadata.ClassifiersEditor", { btn.setFetching(true); osparc.data.Resources.fetch("classifiers", "postRRID", params) .then(() => { - osparc.FlashMessenger.getInstance().logAs(this.tr("RRID classifier successfully added"), "INFO"); + osparc.FlashMessenger.logAs(this.tr("RRID classifier successfully added"), "INFO"); osparc.store.Store.getInstance().getAllClassifiers(true); }) .catch(err => { - osparc.FlashMessenger.getInstance().logAs(err, "ERROR"); + osparc.FlashMessenger.logError(err); }) .finally(() => { btn.setFetching(false); @@ -146,28 +146,22 @@ qx.Class.define("osparc.metadata.ClassifiersEditor", { const newClassifiers = this.__classifiersTree.getCheckedClassifierIDs(); if (osparc.utils.Resources.isStudy(this.__resourceData) || osparc.utils.Resources.isTemplate(this.__resourceData)) { - osparc.info.StudyUtils.patchStudyData(this.__resourceData, "classifiers", newClassifiers) + osparc.store.Study.patchStudyData(this.__resourceData, "classifiers", newClassifiers) .then(() => { - osparc.FlashMessenger.getInstance().logAs(this.tr("Classifiers successfully edited")); + osparc.FlashMessenger.logAs(this.tr("Classifiers successfully edited")); saveBtn.setFetching(false); this.fireDataEvent("updateClassifiers", this.__resourceData); }) - .catch(err => { - console.error(err); - osparc.FlashMessenger.getInstance().logAs(this.tr("Something went wrong editing Classifiers"), "ERROR"); - }); + .catch(err => osparc.FlashMessenger.logError(err, this.tr("Something went wrong while editing classifiers"))); } else { const serviceDataCopy = osparc.utils.Utils.deepCloneObject(this.__resourceData); osparc.store.Services.patchServiceData(serviceDataCopy, "classifiers", newClassifiers) .then(() => { - osparc.FlashMessenger.getInstance().logAs(this.tr("Classifiers successfully edited")); + osparc.FlashMessenger.logAs(this.tr("Classifiers successfully edited")); saveBtn.setFetching(false); this.fireDataEvent("updateClassifiers", serviceDataCopy); }) - .catch(err => { - console.error(err); - osparc.FlashMessenger.getInstance().logAs(this.tr("Something went wrong editing Classifiers"), "ERROR"); - }); + .catch(err => osparc.FlashMessenger.logError(err, this.tr("Something went wrong while editing classifiers"))); } } } diff --git a/services/static-webserver/client/source/class/osparc/metadata/QualityEditor.js b/services/static-webserver/client/source/class/osparc/metadata/QualityEditor.js index c2d7d59752e..2178057ae2d 100644 --- a/services/static-webserver/client/source/class/osparc/metadata/QualityEditor.js +++ 
b/services/static-webserver/client/source/class/osparc/metadata/QualityEditor.js @@ -73,7 +73,7 @@ qx.Class.define("osparc.metadata.QualityEditor", { __initResourceData: function(resourceData) { if (!("quality" in resourceData)) { - osparc.FlashMessenger.logAs(this.tr("Quality Assessment data not found"), "ERROR"); + osparc.FlashMessenger.logError(this.tr("Quality Assessment data not found")); return; } @@ -127,8 +127,8 @@ qx.Class.define("osparc.metadata.QualityEditor", { if (errors) { console.error(errors); if (showMessage) { - let message = `${errors[0].dataPath} ${errors[0].message}`; - osparc.FlashMessenger.logAs(message, "ERROR"); + const message = `${errors[0].dataPath} ${errors[0].message}`; + osparc.FlashMessenger.logError(message); } return false; } @@ -154,7 +154,7 @@ qx.Class.define("osparc.metadata.QualityEditor", { this.__populateForms(); } else { - osparc.FlashMessenger.logAs(this.tr("There was an error validating the metadata."), "ERROR"); + osparc.FlashMessenger.logError(this.tr("There was an issue validating the metadata.")); } }, @@ -466,21 +466,15 @@ qx.Class.define("osparc.metadata.QualityEditor", { this.__initResourceData(serviceDataCopy); this.fireDataEvent("updateQuality", serviceDataCopy); }) - .catch(err => { - console.error(err); - osparc.FlashMessenger.getInstance().logAs(this.tr("There was an error while updating the Quality Assessment."), "ERROR"); - }) + .catch(err => osparc.FlashMessenger.logError(err, this.tr("There was an issue while updating the Quality Assessment."))) .finally(() => btn.setFetching(false)); } else { - osparc.info.StudyUtils.patchStudyData(this.__resourceData, "quality", newQuality) + osparc.store.Study.patchStudyData(this.__resourceData, "quality", newQuality) .then(() => { this.__initResourceData(this.__resourceData); this.fireDataEvent("updateQuality", this.__resourceData); }) - .catch(err => { - console.error(err); - osparc.FlashMessenger.getInstance().logAs(this.tr("There was an error while updating the Quality Assessment."), "ERROR"); - }) + .catch(err => osparc.FlashMessenger.logError(err, this.tr("There was an issue while updating the Quality Assessment."))) .finally(() => btn.setFetching(false)); } } diff --git a/services/static-webserver/client/source/class/osparc/metadata/ServicesInStudy.js b/services/static-webserver/client/source/class/osparc/metadata/ServicesInStudy.js index eae2df3f1b9..6b3b54b0994 100644 --- a/services/static-webserver/client/source/class/osparc/metadata/ServicesInStudy.js +++ b/services/static-webserver/client/source/class/osparc/metadata/ServicesInStudy.js @@ -42,9 +42,10 @@ qx.Class.define("osparc.metadata.ServicesInStudy", { this._studyData = osparc.data.model.Study.deepCloneStudyObject(studyData); - const servicesInStudy = osparc.study.Utils.extractServices(this._studyData["workbench"]); + const servicesInStudy = osparc.study.Utils.extractUniqueServices(this._studyData["workbench"]); if (servicesInStudy.length) { const promises = []; + // the following calls make sure the history of each service is there servicesInStudy.forEach(srv => promises.push(osparc.store.Services.getService(srv.key, srv.version))); Promise.all(promises) .then(() => this._populateLayout()); @@ -76,17 +77,13 @@ qx.Class.define("osparc.metadata.ServicesInStudy", { } this.setEnabled(false); - osparc.info.StudyUtils.patchNodeData(this._studyData, nodeId, patchData) + osparc.store.Study.patchNodeData(this._studyData, nodeId, patchData) .then(() => { this.fireDataEvent("updateService", this._studyData); this._populateLayout(); }) 
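// failures anywhere in the PATCH round-trip are funneled into the single user-facing flash message below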
.catch(err => { - if ("message" in err) { - osparc.FlashMessenger.getInstance().logAs(err.message, "ERROR"); - } else { - osparc.FlashMessenger.getInstance().logAs(this.tr("Something went wrong updating the Service"), "ERROR"); - } + osparc.FlashMessenger.logError(err, this.tr("Something went wrong while updating the service")); }) .finally(() => { if (fetchButton) { @@ -139,7 +136,7 @@ qx.Class.define("osparc.metadata.ServicesInStudy", { infoButton.addListener("execute", () => { const metadata = osparc.store.Services.getMetadata(node["key"], node["version"]); if (metadata === null) { - osparc.FlashMessenger.logAs(this.tr("Service information could not be retrieved"), "WARNING"); + osparc.FlashMessenger.logAs(this.tr("Could not retrieve service information"), "WARNING"); return; } const serviceDetails = new osparc.info.ServiceLarge(metadata, { @@ -166,7 +163,7 @@ qx.Class.define("osparc.metadata.ServicesInStudy", { const nodeMetadata = osparc.store.Services.getMetadata(node["key"], node["version"]); if (nodeMetadata === null) { - osparc.FlashMessenger.logAs(this.tr("Some service information could not be retrieved"), "WARNING"); + osparc.FlashMessenger.logAs(this.tr("Could not retrieve some service information"), "WARNING"); break; } } diff --git a/services/static-webserver/client/source/class/osparc/metadata/ServicesInStudyBootOpts.js b/services/static-webserver/client/source/class/osparc/metadata/ServicesInStudyBootOpts.js index 72ad2d4530e..62e507c6a79 100644 --- a/services/static-webserver/client/source/class/osparc/metadata/ServicesInStudyBootOpts.js +++ b/services/static-webserver/client/source/class/osparc/metadata/ServicesInStudyBootOpts.js @@ -76,7 +76,7 @@ qx.Class.define("osparc.metadata.ServicesInStudyBootOpts", { const node = workbench[nodeId]; const nodeMetadata = osparc.store.Services.getMetadata(node["key"], node["version"]); if (nodeMetadata === null) { - osparc.FlashMessenger.logAs(this.tr("Some service information could not be retrieved"), "WARNING"); + osparc.FlashMessenger.logAs(this.tr("Could not retrieve some service information"), "WARNING"); break; } const canIWrite = osparc.data.model.Study.canIWrite(this._studyData["accessRights"]); diff --git a/services/static-webserver/client/source/class/osparc/metadata/ServicesInStudyUpdate.js b/services/static-webserver/client/source/class/osparc/metadata/ServicesInStudyUpdate.js index bbd9685bfec..3ab80d4fb7d 100644 --- a/services/static-webserver/client/source/class/osparc/metadata/ServicesInStudyUpdate.js +++ b/services/static-webserver/client/source/class/osparc/metadata/ServicesInStudyUpdate.js @@ -27,52 +27,6 @@ qx.Class.define("osparc.metadata.ServicesInStudyUpdate", { UPDATE_BUTTON: Object.keys(osparc.metadata.ServicesInStudy.GRID_POS).length+2 }, - anyServiceDeprecated: function(studyData) { - if ("workbench" in studyData) { - return osparc.study.Utils.isWorkbenchDeprecated(studyData["workbench"]); - } - return false; - }, - - anyServiceRetired: function(studyData) { - if ("workbench" in studyData) { - return osparc.study.Utils.isWorkbenchRetired(studyData["workbench"]); - } - return false; - }, - - anyServiceInaccessible: function(studyData) { - if ("workbench" in studyData) { - const inaccessibles = osparc.study.Utils.getInaccessibleServices(studyData["workbench"]); - return inaccessibles.length; - } - return false; - }, - - updatableNodeIds: function(workbench) { - const nodeIds = []; - for (const nodeId in workbench) { - const node = workbench[nodeId]; - if (osparc.service.Utils.isUpdatable(node)) { - 
nodeIds.push(nodeId); - } - } - return nodeIds; - }, - - getLatestVersion: function(studyData, nodeId) { - if (nodeId in studyData["workbench"]) { - const node = studyData["workbench"][nodeId]; - if (osparc.service.Utils.isUpdatable(node)) { - const latestCompatible = osparc.service.Utils.getLatestCompatible(node["key"], node["version"]); - if (latestCompatible["version"] !== node["version"]) { - return latestCompatible["version"]; - } - } - } - return null; - }, - colorVersionLabel: function(versionLabel, metadata) { const isDeprecated = osparc.service.Utils.isDeprecated(metadata); const isRetired = osparc.service.Utils.isRetired(metadata); @@ -80,13 +34,13 @@ qx.Class.define("osparc.metadata.ServicesInStudyUpdate", { versionLabel.set({ textColor: "text-on-warning", // because the background is always yellow backgroundColor: osparc.service.StatusUI.getColor("deprecated"), - toolTipText: qx.locale.Manager.tr("Service deprecated, please update") + toolTipText: qx.locale.Manager.tr("This service is deprecated. Please update.") }); } else if (isRetired) { versionLabel.set({ textColor: "text-on-warning", // because the background is always red backgroundColor: osparc.service.StatusUI.getColor("retired"), - toolTipText: qx.locale.Manager.tr("Service retired, please update") + toolTipText: qx.locale.Manager.tr("This service has been retired. Please update.") }); } } @@ -97,57 +51,58 @@ qx.Class.define("osparc.metadata.ServicesInStudyUpdate", { _populateIntroText: async function() { const canIWrite = osparc.data.model.Study.canIWrite(this._studyData["accessRights"]); - const labels = []; - if (this.self().anyServiceInaccessible(this._studyData)) { - const inaccessibleText = this.tr("Some services' information is not accessible. Please contact service owner:"); - const inaccessibleLabel = new qx.ui.basic.Label(inaccessibleText); - labels.push(inaccessibleLabel); - } - if (this.self().anyServiceDeprecated(this._studyData)) { - let deprecatedText = this.tr("Services marked in yellow are deprecated, they will be retired soon."); - if (canIWrite) { - deprecatedText += " " + this.tr("They can be updated by pressing the Update button."); - } - const deprecatedLabel = new qx.ui.basic.Label(deprecatedText); - labels.push(deprecatedLabel); - } - if (this.self().anyServiceRetired(this._studyData)) { - let retiredText = this.tr("Services marked in red are retired: you cannot use them anymore."); - if (canIWrite) { - retiredText += "<br>" + this.tr("If the Update button is disabled, they might require manual intervention to be updated:"); - retiredText += "<br>- " + this.tr("Open the study"); - retiredText += "<br>- " + this.tr("Click on the retired service, download the data"); - retiredText += "<br>- " + this.tr("Upload the data to an updated version"); + const introText = new qx.ui.basic.Label().set({ + font: "text-14", + rich: true + }); + this._introText.add(introText); + let msg = ""; + const params = { + url: { + studyId: this._studyData["uuid"] } - const retiredLabel = new qx.ui.basic.Label(retiredText); - labels.push(retiredLabel); - } - const updatableServices = this.self().updatableNodeIds(this._studyData["workbench"]); - if (updatableServices.length === 0) { - const upToDateText = this.tr("All services are up to date to their latest compatible version."); - const upToDateLabel = new qx.ui.basic.Label(upToDateText); - labels.push(upToDateLabel); - } else if (canIWrite) { - const useUpdateButtonText = this.tr("Use the Update buttons to bring the services to their latest compatible version."); - const useUpdateButtonLabel = new qx.ui.basic.Label(useUpdateButtonText); - labels.push(useUpdateButtonLabel); - } else { - const notUpToDateText = this.tr("Some services are not up to date."); - const notUpToDateLabel = new qx.ui.basic.Label(notUpToDateText); - labels.push(notUpToDateLabel); - } + }; + osparc.data.Resources.fetch("studies", "getServices", params) + .then(resp => { + const services = resp["services"]; + if (osparc.study.Utils.getCantExecuteServices(services).length) { + msg += this.tr("Some services are inaccessible. Please contact the service owner:"); + msg += "<br><br>"; + } + if (osparc.study.Utils.anyServiceRetired(services)) { + msg += this.tr("Services marked in red are retired and can no longer be used."); + if (canIWrite) { + msg += "<br>" + this.tr("If the Update button is disabled, they might require manual intervention to be updated:"); + msg += "<br>- " + this.tr("Open the study"); + msg += "<br>- " + this.tr("Click on the retired service, download the data"); + msg += "<br>- " + this.tr("Upload the data to a newer version"); + } + msg += "<br><br>"; + } + if (osparc.study.Utils.anyServiceDeprecated(services)) { + msg += this.tr("Services marked in yellow are deprecated, they will be retired soon."); + if (canIWrite) { + msg += " " + this.tr("They can be updated by pressing the Update button."); + } + msg += "<br><br>"; + } + const anyServiceUpdatable = osparc.study.Utils.anyServiceUpdatable(services); + if (anyServiceUpdatable === false && msg === "") { + msg += this.tr("All services are up to date to their latest compatible version."); + msg += "<br>"; + } else if (canIWrite) { + msg += this.tr("Click Update to upgrade services to the latest compatible version."); + msg += "<br>"; + } else { + msg += this.tr("Some services are not up to date."); + } - labels.forEach(label => { - label.set({ - font: "text-14", - rich: true + introText.setValue(msg); }); - this._introText.add(label); - }); }, __updateService: async function(nodeId, key, version, button) { - const latestCompatible = osparc.service.Utils.getLatestCompatible(key, version); + const latestCompatible = osparc.store.Services.getLatestCompatible(key, version); const patchData = {}; if (key !== latestCompatible["key"]) { patchData["key"] = latestCompatible["key"]; @@ -201,6 +156,7 @@ qx.Class.define("osparc.metadata.ServicesInStudyUpdate", { const canIWrite = osparc.data.model.Study.canIWrite(this._studyData["accessRights"]); let i = 0; + const updatableServices = []; const workbench = this._studyData["workbench"]; for (const nodeId in workbench) { i++; @@ -218,7 +174,7 @@ qx.Class.define("osparc.metadata.ServicesInStudyUpdate", { const compatibleVersionLabel = new qx.ui.basic.Label().set({ font: "text-14" }); - const latestCompatible = osparc.service.Utils.getLatestCompatible(node["key"], node["version"]); + const latestCompatible = osparc.store.Services.getLatestCompatible(node["key"], node["version"]); if (latestCompatible) { // updatable osparc.store.Services.getService(latestCompatible["key"], latestCompatible["version"]) @@ -241,9 +197,9 @@ qx.Class.define("osparc.metadata.ServicesInStudyUpdate", { column: this.self().GRID_POS.COMPATIBLE_VERSION }); - const isUpdatable = osparc.service.Utils.isUpdatable(node); if (latestCompatible && canIWrite) { const updateButton = new osparc.ui.form.FetchButton(null, "@MaterialIcons/update/14"); + const isUpdatable = osparc.service.Utils.isUpdatable(metadata); updateButton.set({ enabled: isUpdatable }); @@ -257,6 +213,7 @@ qx.Class.define("osparc.metadata.ServicesInStudyUpdate", { label: this.tr("Update"), center: true }); + updatableServices.push(nodeId); } updateButton.addListener("execute", () => this.__updateService(nodeId, node["key"], node["version"], updateButton), this); this._servicesGrid.add(updateButton, { @@ -266,7 +223,6 @@ qx.Class.define("osparc.metadata.ServicesInStudyUpdate", { } } - const updatableServices = osparc.metadata.ServicesInStudyUpdate.updatableNodeIds(workbench); if (updatableServices.length && canIWrite) { const updateAllButton = this.__updateAllButton; updateAllButton.show();
diff --git a/services/static-webserver/client/source/class/osparc/navigation/BreadcrumbsSlideshow.js b/services/static-webserver/client/source/class/osparc/navigation/BreadcrumbsSlideshow.js index 21089318d18..e69e8b66ded 100644 --- a/services/static-webserver/client/source/class/osparc/navigation/BreadcrumbsSlideshow.js +++ b/services/static-webserver/client/source/class/osparc/navigation/BreadcrumbsSlideshow.js @@ -46,7 +46,7 @@ qx.Class.define("osparc.navigation.BreadcrumbsSlideshow", { if (study.isPipelineEmpty()) { label.setValue(this.tr("Pipeline is empty")); } else { - label.setValue(this.tr("There are no visible nodes, enable some by editing the App Mode")); + label.setValue(this.tr("No visible nodes.<br>Enable some by adjusting the app mode.")); } this._add(label); }
diff --git a/services/static-webserver/client/source/class/osparc/navigation/LogoOnOff.js b/services/static-webserver/client/source/class/osparc/navigation/LogoOnOff.js index af2f54164ef..7399efe698d 100644 --- a/services/static-webserver/client/source/class/osparc/navigation/LogoOnOff.js +++ b/services/static-webserver/client/source/class/osparc/navigation/LogoOnOff.js @@ -46,7 +46,7 @@ qx.Class.define("osparc.navigation.LogoOnOff", { check: "Boolean", init: false, nullable: false, - apply: "_applyOnline" + apply: "__applyOnline" } }, @@ -64,6 +64,7 @@ qx.Class.define("osparc.navigation.LogoOnOff", { } case "off-logo": { control = new qx.ui.basic.Image("osparc/offline.svg"); + osparc.utils.Utils.setAltToImage(control, "offline"); const container = this.getChildControl("off-logo-container"); container.add(control, { flex: 1 @@ -80,7 +81,7 @@ qx.Class.define("osparc.navigation.LogoOnOff", { return control || this.base(arguments, id); }, - _applyOnline: function(value) { + __applyOnline: function(value) { this.setSelection([this.getSelectables()[value ? 1 : 0]]); } }
diff --git a/services/static-webserver/client/source/class/osparc/navigation/NavigationBar.js b/services/static-webserver/client/source/class/osparc/navigation/NavigationBar.js index e8d252a5f82..f9f046dd0b3 100644 --- a/services/static-webserver/client/source/class/osparc/navigation/NavigationBar.js +++ b/services/static-webserver/client/source/class/osparc/navigation/NavigationBar.js @@ -50,6 +50,7 @@ qx.Class.define("osparc.navigation.NavigationBar", { paddingLeft: 10, paddingRight: 10, height: this.self().HEIGHT, + backgroundColor: "background-main-1", }); osparc.utils.Utils.setIdToWidget(this, "navigationBar"); @@ -57,7 +58,7 @@ events: { "backToDashboardPressed": "qx.event.type.Event", - "downloadStudyLogs": "qx.event.type.Event" + "openLogger": "qx.event.type.Event" }, properties: { @@ -80,12 +81,6 @@ qx.Class.define("osparc.navigation.NavigationBar", { minWidth: 30, minHeight: 30 }, - - PAGE_CONTEXT: { - 0: "dashboard", - 1: "workbench", - 2: "app" - } }, members: { @@ -97,11 +92,6 @@ qx.Class.define("osparc.navigation.NavigationBar", { }, __buildLayout: function() { - const colorStr = qx.theme.manager.Color.getInstance().resolve("background-main-1"); - const color = qx.util.ColorUtil.stringToRgb(colorStr); - this.getContentElement().setStyles({ - "background": `linear-gradient(0deg, rgba(1, 18, 26, 0.1) 0%, ${qx.util.ColorUtil.rgbToRgbString(color)} 4%)` - }); this.getChildControl("left-items"); this.getChildControl("center-items"); this.getChildControl("right-items"); @@ -127,6 +117,7 @@ qx.Class.define("osparc.navigation.NavigationBar", { // right-items this.getChildControl("tasks-button"); + this.getChildControl("jobs-button"); this.getChildControl("notifications-button"); this.getChildControl("expiration-icon"); this.getChildControl("help"); @@ -206,7 +197,7 @@ qx.Class.define("osparc.navigation.NavigationBar", { break; case "study-title-options": control = new osparc.navigation.StudyTitleWOptions(); - control.addListener("downloadStudyLogs", () => this.fireEvent("downloadStudyLogs")); + control.addListener("openLogger", () => this.fireEvent("openLogger")); this.getChildControl("left-items").add(control); break; case "read-only-info": { @@ -228,14 +219,14 @@ qx.Class.define("osparc.navigation.NavigationBar", { this.getChildControl("center-items").add(control); break; } - case "credits-button": -
control = new osparc.desktop.credits.CreditsIndicatorButton(); - this.getChildControl("right-items").add(control); - break; case "tasks-button": control = new osparc.task.TasksButton(); this.getChildControl("right-items").add(control); break; + case "jobs-button": + control = new osparc.jobs.JobsButton(); + this.getChildControl("right-items").add(control); + break; case "notifications-button": control = new osparc.notification.NotificationsButton(); this.getChildControl("right-items").add(control); @@ -272,6 +263,10 @@ qx.Class.define("osparc.navigation.NavigationBar", { osparc.utils.Utils.setIdToWidget(control, "helpNavigationBtn"); this.getChildControl("right-items").add(control); break; + case "credits-button": + control = new osparc.desktop.credits.CreditsIndicatorButton(); + this.getChildControl("right-items").add(control); + break; case "log-in-button": { control = this.__createLoginBtn().set({ visibility: "excluded" @@ -302,7 +297,8 @@ qx.Class.define("osparc.navigation.NavigationBar", { __createHelpMenuBtn: function() { const menu = new qx.ui.menu.Menu().set({ - position: "top-right" + position: "top-right", + appearance: "menu-wider", }); const menuButton = new qx.ui.form.MenuButton(null, "@FontAwesome5Regular/question-circle/22", menu).set({ backgroundColor: "transparent" @@ -310,7 +306,7 @@ qx.Class.define("osparc.navigation.NavigationBar", { osparc.utils.Utils.setIdToWidget(menu, "helpNavigationMenu"); - // menus + // quick starts and manuals osparc.store.Support.addQuickStartToMenu(menu); osparc.store.Support.addGuidedToursToMenu(menu); osparc.store.Support.addManualButtonsToMenu(menu, menuButton); @@ -318,8 +314,7 @@ qx.Class.define("osparc.navigation.NavigationBar", { // feedback osparc.store.Support.addSupportButtonsToMenu(menu, menuButton); - - osparc.utils.Utils.prettifyMenu(menu); + osparc.store.Support.addReleaseNotesToMenu(menu); return menuButton; }, @@ -330,6 +325,15 @@ qx.Class.define("osparc.navigation.NavigationBar", { return registerButton; }, + addDashboardTabButtons: function(tabButtons) { + this.__tabButtons = tabButtons; + this.getChildControl("center-items").add(tabButtons); + this.bind("study", this.__tabButtons, "visibility", { + converter: s => s ? 
"excluded" : "visible" + }); + this.__navBarResized(); + }, + __applyStudy: function(study) { const readOnlyInfo = this.getChildControl("read-only-info") if (study) { diff --git a/services/static-webserver/client/source/class/osparc/navigation/StudyTitleWOptions.js b/services/static-webserver/client/source/class/osparc/navigation/StudyTitleWOptions.js index 342f0de8cee..4c9c4cff32e 100644 --- a/services/static-webserver/client/source/class/osparc/navigation/StudyTitleWOptions.js +++ b/services/static-webserver/client/source/class/osparc/navigation/StudyTitleWOptions.js @@ -31,7 +31,7 @@ qx.Class.define("osparc.navigation.StudyTitleWOptions", { }, events: { - "downloadStudyLogs": "qx.event.type.Event" + "openLogger": "qx.event.type.Event" }, properties: { @@ -68,22 +68,62 @@ qx.Class.define("osparc.navigation.StudyTitleWOptions", { }); }); break; - case "study-menu-download-logs": + case "study-menu-reload": control = new qx.ui.menu.Button().set({ - label: this.tr("Download logs"), + label: this.tr("Reload"), + icon: "@FontAwesome5Solid/redo-alt/12", + }); + control.addListener("execute", () => this.__reloadIFrame(), this); + break; + case "study-menu-convert-to-pipeline": + control = new qx.ui.menu.Button().set({ + label: this.tr("Convert to Pipeline"), + icon: null, + }); + control.addListener("execute", () => { + this.getStudy().getUi().setMode("workbench"); + }); + break; + case "study-menu-convert-to-standalone": + control = new qx.ui.menu.Button().set({ + label: this.tr("Convert to Standalone"), + icon: null, + }); + control.addListener("execute", () => { + this.getStudy().getUi().setMode("standalone"); + }); + break; + case "study-menu-restore": + control = new qx.ui.menu.Button().set({ + label: this.tr("Restore"), + icon: osparc.theme.common.Image.URLS["window-restore"] + "/20", + }); + control.addListener("execute", () => { + this.getStudy().getUi().setMode("workbench"); + }); + break; + case "study-menu-open-logger": + control = new qx.ui.menu.Button().set({ + label: this.tr("Platform Logs..."), icon: "@FontAwesome5Solid/download/14" }); - control.addListener("execute", () => this.fireEvent("downloadStudyLogs")); + control.addListener("execute", () => this.fireEvent("openLogger")); break; case "study-menu-button": { const optionsMenu = new qx.ui.menu.Menu(); + optionsMenu.setAppearance("menu-wider"); optionsMenu.add(this.getChildControl("study-menu-info")); - optionsMenu.add(this.getChildControl("study-menu-download-logs")); + optionsMenu.add(this.getChildControl("study-menu-reload")); + optionsMenu.add(this.getChildControl("study-menu-convert-to-pipeline")); + optionsMenu.add(this.getChildControl("study-menu-convert-to-standalone")); + optionsMenu.add(this.getChildControl("study-menu-restore")); + optionsMenu.add(this.getChildControl("study-menu-open-logger")); control = new qx.ui.form.MenuButton().set({ appearance: "fab-button", menu: optionsMenu, icon: "@FontAwesome5Solid/ellipsis-v/14", - allowGrowY: false + allowGrowY: false, + width: 24, }); this._add(control); break; @@ -104,9 +144,50 @@ qx.Class.define("osparc.navigation.StudyTitleWOptions", { return control || this.base(arguments, id); }, + __reloadIFrame: function() { + const nodes = this.getStudy().getWorkbench().getNodes(); + if (Object.keys(nodes).length === 1) { + Object.values(nodes)[0].getIframeHandler().restartIFrame(); + } + }, + __applyStudy: function(study) { if (study) { - study.bind("name", this.getChildControl("edit-title-label"), "value"); + const editTitle = this.getChildControl("edit-title-label"); + 
study.bind("name", editTitle, "value"); + + const reloadButton = this.getChildControl("study-menu-reload"); + study.getUi().bind("mode", reloadButton, "visibility", { + converter: mode => mode === "standalone" ? "visible" : "excluded" + }); + + const convertToPipelineButton = this.getChildControl("study-menu-convert-to-pipeline"); + const convertToStandaloneButton = this.getChildControl("study-menu-convert-to-standalone"); + if (osparc.product.Utils.hasConvertToPipelineEnabled()) { + study.getUi().bind("mode", convertToPipelineButton, "visibility", { + converter: mode => mode === "standalone" ? "visible" : "excluded" + }); + + const evaluateConvertToStandaloneButton = () => { + // exclude until we have the export to standalone backend functionality + convertToStandaloneButton.exclude(); + }; + study.getWorkbench().addListener("pipelineChanged", () => evaluateConvertToStandaloneButton()); + study.getUi().addListener("changeMode", () => evaluateConvertToStandaloneButton()); + } else { + convertToPipelineButton.exclude(); + convertToStandaloneButton.exclude(); + } + + const restoreButton = this.getChildControl("study-menu-restore"); + study.getUi().bind("mode", restoreButton, "visibility", { + converter: mode => mode === "standalone" ? "visible" : "excluded" + }); + + const loggerButton = this.getChildControl("study-menu-open-logger"); + study.getUi().bind("mode", loggerButton, "visibility", { + converter: mode => mode === "standalone" ? "visible" : "excluded" + }); } else { this.exclude(); } diff --git a/services/static-webserver/client/source/class/osparc/navigation/UserMenu.js b/services/static-webserver/client/source/class/osparc/navigation/UserMenu.js index b96841de7d9..dfe8898e0e5 100644 --- a/services/static-webserver/client/source/class/osparc/navigation/UserMenu.js +++ b/services/static-webserver/client/source/class/osparc/navigation/UserMenu.js @@ -22,7 +22,7 @@ qx.Class.define("osparc.navigation.UserMenu", { this.base(arguments); this.set({ - font: "text-14" + appearance: "menu-wider", }); }, @@ -77,12 +77,6 @@ qx.Class.define("osparc.navigation.UserMenu", { }, this); this.add(control); break; - case "preferences": - control = new qx.ui.menu.Button(this.tr("Preferences")); - control.addListener("execute", () => osparc.navigation.UserMenuButton.openPreferences(), this); - osparc.utils.Utils.setIdToWidget(control, "userMenuPreferencesBtn"); - this.add(control); - break; case "organizations": control = new qx.ui.menu.Button(this.tr("Organizations")).set({ visibility: osparc.data.Permissions.getInstance().canDo("user.organizations.create") ? 
"visible" :"excluded" @@ -92,7 +86,7 @@ qx.Class.define("osparc.navigation.UserMenu", { this.add(control); break; case "market": - control = new qx.ui.menu.Button(this.tr("Market")); + control = new qx.ui.menu.Button(this.tr("The Shop")); control.addListener("execute", () => osparc.vipMarket.MarketWindow.openWindow()); this.add(control); break; @@ -162,7 +156,6 @@ qx.Class.define("osparc.navigation.UserMenu", { if (osparc.desktop.credits.Utils.areWalletsEnabled()) { this.getChildControl("billing-center"); } - this.getChildControl("preferences"); this.getChildControl("organizations"); } this.addSeparator(); @@ -187,8 +180,6 @@ qx.Class.define("osparc.navigation.UserMenu", { this.addSeparator(); this.getChildControl("log-out"); - - osparc.utils.Utils.prettifyMenu(this); }, __addAnnouncements: function() { @@ -217,21 +208,19 @@ qx.Class.define("osparc.navigation.UserMenu", { if (osparc.desktop.credits.Utils.areWalletsEnabled()) { this.getChildControl("billing-center"); } - this.getChildControl("preferences"); this.getChildControl("organizations"); } this.addSeparator(); - // quick starts + // quick starts and manuals osparc.store.Support.addQuickStartToMenu(this); osparc.store.Support.addGuidedToursToMenu(this); - - // manuals osparc.store.Support.addManualButtonsToMenu(this); this.addSeparator(); // feedbacks osparc.store.Support.addSupportButtonsToMenu(this); + osparc.store.Support.addReleaseNotesToMenu(this); this.addSeparator(); this.getChildControl("theme-switcher"); @@ -253,8 +242,6 @@ qx.Class.define("osparc.navigation.UserMenu", { } this.addSeparator(); this.getChildControl("log-out"); - - osparc.utils.Utils.prettifyMenu(this); } } }); diff --git a/services/static-webserver/client/source/class/osparc/navigation/UserMenuButton.js b/services/static-webserver/client/source/class/osparc/navigation/UserMenuButton.js index e1726533215..777834d2739 100644 --- a/services/static-webserver/client/source/class/osparc/navigation/UserMenuButton.js +++ b/services/static-webserver/client/source/class/osparc/navigation/UserMenuButton.js @@ -70,13 +70,6 @@ qx.Class.define("osparc.navigation.UserMenuButton", { }); }, - statics: { - openPreferences: function() { - const preferencesWindow = osparc.desktop.preferences.PreferencesWindow.openWindow(); - return preferencesWindow; - } - }, - members: { __forceNullColor: null, diff --git a/services/static-webserver/client/source/class/osparc/node/LifeCycleView.js b/services/static-webserver/client/source/class/osparc/node/LifeCycleView.js index 5f810b18799..ea6ed0fbb28 100644 --- a/services/static-webserver/client/source/class/osparc/node/LifeCycleView.js +++ b/services/static-webserver/client/source/class/osparc/node/LifeCycleView.js @@ -110,7 +110,7 @@ qx.Class.define("osparc.node.LifeCycleView", { }); updateButton.addListener("execute", () => { updateButton.setFetching(true); - const latestCompatible = osparc.service.Utils.getLatestCompatible(node.getKey(), node.getVersion()); + const latestCompatible = osparc.store.Services.getLatestCompatible(node.getKey(), node.getVersion()); if (node.getKey() !== latestCompatible["key"]) { node.setKey(latestCompatible["key"]); } diff --git a/services/static-webserver/client/source/class/osparc/node/TierSelectionView.js b/services/static-webserver/client/source/class/osparc/node/TierSelectionView.js index f23b6077499..b4432cedecd 100644 --- a/services/static-webserver/client/source/class/osparc/node/TierSelectionView.js +++ b/services/static-webserver/client/source/class/osparc/node/TierSelectionView.js @@ -46,12 +46,11 
@@ qx.Class.define("osparc.node.TierSelectionView", { tiersLayout.add(tierBox); const node = this.getNode(); - const pricingStore = osparc.store.Pricing.getInstance(); - pricingStore.fetchPricingPlansService(node.getKey(), node.getVersion()) + osparc.store.Pricing.getInstance().fetchPricingPlansService(node.getKey(), node.getVersion()) .then(pricingPlans => { if (pricingPlans && "pricingUnits" in pricingPlans && pricingPlans["pricingUnits"].length) { - const pricingUnits = pricingPlans["pricingUnits"].map(princingUnitData => { - const pricingUnit = new osparc.data.model.PricingUnit(princingUnitData); + const pricingUnits = pricingPlans["pricingUnits"].map(pricingUnitData => { + const pricingUnit = new osparc.data.model.PricingUnit(pricingUnitData); return pricingUnit; }); pricingUnits.forEach(pricingUnit => { diff --git a/services/static-webserver/client/source/class/osparc/node/UpdateResourceLimitsView.js b/services/static-webserver/client/source/class/osparc/node/UpdateResourceLimitsView.js index 3c75815c296..507eef686dd 100644 --- a/services/static-webserver/client/source/class/osparc/node/UpdateResourceLimitsView.js +++ b/services/static-webserver/client/source/class/osparc/node/UpdateResourceLimitsView.js @@ -154,13 +154,9 @@ qx.Class.define("osparc.node.UpdateResourceLimitsView", { }; osparc.data.Resources.fetch("nodesInStudyResources", "put", params) .then(() => { - osparc.FlashMessenger.getInstance().logAs(this.tr("Limits successfully updated")); - }) - .catch(err => { - console.error(err); - const msg = err.message || this.tr("Something went wrong updating the limits"); - osparc.FlashMessenger.getInstance().logAs(msg, "ERROR"); + osparc.FlashMessenger.logAs(this.tr("Limits have been successfully updated")); }) + .catch(err => osparc.FlashMessenger.logError(err, this.tr("Something went wrong while updating the limits"))) .finally(() => { this.__saveBtn.setFetching(false); this.__populateLayout(); diff --git a/services/static-webserver/client/source/class/osparc/node/slideshow/NodeView.js b/services/static-webserver/client/source/class/osparc/node/slideshow/NodeView.js index 4aeb8b0b4a6..1d9cd803c8b 100644 --- a/services/static-webserver/client/source/class/osparc/node/slideshow/NodeView.js +++ b/services/static-webserver/client/source/class/osparc/node/slideshow/NodeView.js @@ -38,13 +38,6 @@ qx.Class.define("osparc.node.slideshow.NodeView", { statics: { LOGGER_HEIGHT: 28, - - isPropsFormShowable: function(node) { - if (node && ("getPropsForm" in node) && node.getPropsForm()) { - return node.getPropsForm().hasVisibleInputs(); - } - return false; - } }, members: { @@ -135,8 +128,8 @@ qx.Class.define("osparc.node.slideshow.NodeView", { }, isSettingsGroupShowable: function() { - const node = this.getNode(); - return this.self().isPropsFormShowable(node); + // do not show Settings in App Mode + return false; }, __iFrameChanged: function() { diff --git a/services/static-webserver/client/source/class/osparc/notification/NotificationUI.js b/services/static-webserver/client/source/class/osparc/notification/NotificationUI.js index f339a68bf06..5b0ecd6cdae 100644 --- a/services/static-webserver/client/source/class/osparc/notification/NotificationUI.js +++ b/services/static-webserver/client/source/class/osparc/notification/NotificationUI.js @@ -150,7 +150,7 @@ qx.Class.define("osparc.notification.NotificationUI", { "studyId": resourceId } }; - osparc.data.Resources.getOne("studies", params) + osparc.data.Resources.fetch("studies", "getOne", params) .then(study => { const studyAlias = 
osparc.product.Utils.getStudyAlias({ firstUpperCase: true @@ -194,7 +194,7 @@ qx.Class.define("osparc.notification.NotificationUI", { "studyId": resourceId } }; - osparc.data.Resources.getOne("studies", params) + osparc.data.Resources.fetch("studies", "getOne", params) .then(study => titleLabel.setValue(`Note added in '${study["name"]}'`)) .catch(() => this.setEnabled(false)); } @@ -273,7 +273,7 @@ qx.Class.define("osparc.notification.NotificationUI", { orgsWindow.openOrganizationDetails(orgId); } else { const msg = this.tr("You don't have access anymore"); - osparc.FlashMessenger.getInstance().logAs(msg, "WARNING"); + osparc.FlashMessenger.logAs(msg, "WARNING"); } }, @@ -283,7 +283,7 @@ qx.Class.define("osparc.notification.NotificationUI", { "studyId": studyId } }; - osparc.data.Resources.getOne("studies", params) + osparc.data.Resources.fetch("studies", "getOne", params) .then(studyData => { if (studyData) { const studyDataCopy = osparc.data.model.Study.deepCloneStudyObject(studyData); @@ -299,9 +299,9 @@ qx.Class.define("osparc.notification.NotificationUI", { } }) .catch(err => { - console.error(err); + console.warn(err); const msg = this.tr("You don't have access anymore"); - osparc.FlashMessenger.getInstance().logAs(msg, "WARNING"); + osparc.FlashMessenger.logAs(msg, "WARNING"); }); }, @@ -326,7 +326,7 @@ qx.Class.define("osparc.notification.NotificationUI", { } } else { const msg = this.tr("You don't have access anymore"); - osparc.FlashMessenger.getInstance().logAs(msg, "WARNING"); + osparc.FlashMessenger.logAs(msg, "WARNING"); } } } diff --git a/services/static-webserver/client/source/class/osparc/notification/NotificationsButton.js b/services/static-webserver/client/source/class/osparc/notification/NotificationsButton.js index dd8d4b543d4..3c1cfd12152 100644 --- a/services/static-webserver/client/source/class/osparc/notification/NotificationsButton.js +++ b/services/static-webserver/client/source/class/osparc/notification/NotificationsButton.js @@ -53,8 +53,10 @@ qx.Class.define("osparc.notification.NotificationsButton", { case "icon": { control = new qx.ui.basic.Image(); const iconContainer = new qx.ui.container.Composite(new qx.ui.layout.HBox().set({ - alignY: "middle" - })); + alignY: "middle", + })).set({ + paddingLeft: 5, + }); iconContainer.add(control); this._add(iconContainer, { height: "100%" diff --git a/services/static-webserver/client/source/class/osparc/po/Invitations.js b/services/static-webserver/client/source/class/osparc/po/Invitations.js index 0b1493e35f7..0fb58f8aee3 100644 --- a/services/static-webserver/client/source/class/osparc/po/Invitations.js +++ b/services/static-webserver/client/source/class/osparc/po/Invitations.js @@ -116,10 +116,7 @@ qx.Class.define("osparc.po.Invitations", { .then(data => { this.__populateInvitationLayout(data); }) - .catch(err => { - console.error(err); - osparc.FlashMessenger.logAs(err.message, "ERROR"); - }) + .catch(err => osparc.FlashMessenger.logError(err)) .finally(() => generateInvitationBtn.setFetching(false)); } }, this); diff --git a/services/static-webserver/client/source/class/osparc/po/MessageTemplates.js b/services/static-webserver/client/source/class/osparc/po/MessageTemplates.js index 8443a7fa788..6a6bdadd674 100644 --- a/services/static-webserver/client/source/class/osparc/po/MessageTemplates.js +++ b/services/static-webserver/client/source/class/osparc/po/MessageTemplates.js @@ -92,10 +92,7 @@ qx.Class.define("osparc.po.MessageTemplates", { }; osparc.data.Resources.fetch("productMetadata", "updateEmailTemplate", 
params) .then(() => osparc.FlashMessenger.logAs(this.tr("Template updated"), "INFO")) - .catch(err => { - console.error(err); - osparc.FlashMessenger.logAs(err.message, "ERROR"); - }) + .catch(err => osparc.FlashMessenger.logError(err)) .finally(() => this._buildLayout()); } } diff --git a/services/static-webserver/client/source/class/osparc/po/POCenter.js b/services/static-webserver/client/source/class/osparc/po/POCenter.js index 6141d5ffaf2..39a10d9afdb 100644 --- a/services/static-webserver/client/source/class/osparc/po/POCenter.js +++ b/services/static-webserver/client/source/class/osparc/po/POCenter.js @@ -24,7 +24,7 @@ qx.Class.define("osparc.po.POCenter", { const miniProfile = osparc.desktop.account.MyAccount.createMiniProfileView().set({ paddingRight: 10 }); - this.addWidgetOnTopOfTheTabs(miniProfile); + this.addWidgetToTabs(miniProfile); this.__addUsersPage(); this.__addPreRegistrationPage(); @@ -42,7 +42,7 @@ qx.Class.define("osparc.po.POCenter", { }, __addPreRegistrationPage: function() { - const title = this.tr("PreRegistration"); + const title = this.tr("Pre-Registration"); const iconSrc = "@FontAwesome5Solid/address-card/22"; const preRegistration = new osparc.po.PreRegistration(); this.addTab(title, iconSrc, preRegistration); diff --git a/services/static-webserver/client/source/class/osparc/po/PreRegistration.js b/services/static-webserver/client/source/class/osparc/po/PreRegistration.js index 8a1f0e767df..b1d6b80088a 100644 --- a/services/static-webserver/client/source/class/osparc/po/PreRegistration.js +++ b/services/static-webserver/client/source/class/osparc/po/PreRegistration.js @@ -79,7 +79,7 @@ qx.Class.define("osparc.po.PreRegistration", { if (form.validate()) { submitBtn.setFetching(true); - const flashErrorMsg = this.tr("Pre-Registration Failed. See details below"); + const flashErrorMsg = this.tr("Unsuccessful Pre-Registration. See details below"); const findingStatus = this.getChildControl("finding-status"); findingStatus.setValue(this.tr("Searching Pre-Registered users...")); @@ -89,12 +89,9 @@ qx.Class.define("osparc.po.PreRegistration", { data: JSON.parse(requestAccountData.getValue()) }; } catch (err) { - console.error(err); - const detailErrorMsg = `Error parsing Request Form JSON. 
${err}`; findingStatus.setValue(detailErrorMsg); - - osparc.FlashMessenger.logAs(flashErrorMsg, "ERROR"); + osparc.FlashMessenger.logError(err, flashErrorMsg); submitBtn.setFetching(false); return } @@ -109,10 +106,9 @@ qx.Class.define("osparc.po.PreRegistration", { this.__populatePreRegistrationLayout(data); }) .catch(err => { - const detailErrorMsg = this.tr(`Error during Pre-Registeristration: ${err.message}`) + const detailErrorMsg = this.tr(`Error during Pre-Registration: ${err.message}`) findingStatus.setValue(detailErrorMsg); - console.error(err); - osparc.FlashMessenger.logAs(flashErrorMsg, "ERROR"); + osparc.FlashMessenger.logError(err, flashErrorMsg); }) .finally(() => submitBtn.setFetching(false)); } diff --git a/services/static-webserver/client/source/class/osparc/po/Users.js b/services/static-webserver/client/source/class/osparc/po/Users.js index eb011712b42..de8164957dd 100644 --- a/services/static-webserver/client/source/class/osparc/po/Users.js +++ b/services/static-webserver/client/source/class/osparc/po/Users.js @@ -90,8 +90,7 @@ qx.Class.define("osparc.po.Users", { }) .catch(err => { findingStatus.setValue(this.tr("Error searching users")); - console.error(err); - osparc.FlashMessenger.logAs(err.message, "ERROR"); + osparc.FlashMessenger.logError(err); }) .finally(() => searchBtn.setFetching(false)); } diff --git a/services/static-webserver/client/source/class/osparc/pricing/PlanDetails.js b/services/static-webserver/client/source/class/osparc/pricing/PlanDetails.js index 22c79cd9741..a745dbf1509 100644 --- a/services/static-webserver/client/source/class/osparc/pricing/PlanDetails.js +++ b/services/static-webserver/client/source/class/osparc/pricing/PlanDetails.js @@ -33,7 +33,27 @@ qx.Class.define("osparc.pricing.PlanDetails", { "backToPricingPlans": "qx.event.type.Event" }, + statics: { + createTabPage: function(label, icon) { + const tabPage = new qx.ui.tabview.Page().set({ + layout: new qx.ui.layout.VBox() + }); + if (label) { + tabPage.setLabel(label); + } + if (icon) { + tabPage.setIcon(icon); + } + tabPage.getChildControl("button").set({ + font: "text-13" + }); + return tabPage; + } + }, + members: { + __servicesPage: null, + _createChildControlImpl: function(id) { let control; let layout; @@ -44,7 +64,7 @@ qx.Class.define("osparc.pricing.PlanDetails", { break; case "back-to-pp-button": control = new qx.ui.form.Button().set({ - toolTipText: this.tr("Back to Pricing Plans"), + toolTipText: this.tr("Return to Pricing Plans"), icon: "@FontAwesome5Solid/arrow-left/20", backgroundColor: "transparent" }); @@ -68,7 +88,7 @@ qx.Class.define("osparc.pricing.PlanDetails", { break; case "pricing-units": { control = new osparc.pricing.UnitsList(); - const tabPage = this.__createTabPage(this.tr("Pricing Units"), "@FontAwesome5Solid/paw/14"); + const tabPage = this.self().createTabPage(this.tr("Pricing Units"), "@FontAwesome5Solid/paw/14"); tabPage.add(control, { flex: 1 }); @@ -78,7 +98,7 @@ qx.Class.define("osparc.pricing.PlanDetails", { } case "service-list": { control = new osparc.pricing.ServicesList(); - const tabPage = this.__createTabPage(this.tr("Services"), "@FontAwesome5Solid/cogs/14"); + const tabPage = this.__servicesPage = this.self().createTabPage(this.tr("Services"), "@FontAwesome5Solid/cogs/14"); tabPage.add(control, { flex: 1 }); @@ -90,38 +110,27 @@ qx.Class.define("osparc.pricing.PlanDetails", { return control || this.base(arguments, id); }, - setCurrentPricingPlan: function(pricingPlanModel) { - if (pricingPlanModel === null) { + setCurrentPricingPlan: 
function(pricingPlan) { + if (pricingPlan === null) { return; } const pricingPlanListItem = this.getChildControl("pricing-plan-details"); - pricingPlanModel.bind("model", pricingPlanListItem, "model"); - pricingPlanModel.bind("ppId", pricingPlanListItem, "ppId"); - pricingPlanModel.bind("ppKey", pricingPlanListItem, "ppKey"); - pricingPlanModel.bind("title", pricingPlanListItem, "title"); - pricingPlanModel.bind("description", pricingPlanListItem, "description"); - pricingPlanModel.bind("isActive", pricingPlanListItem, "isActive"); + pricingPlan.bind("model", pricingPlanListItem, "model"); + pricingPlan.bind("ppId", pricingPlanListItem, "ppId"); + pricingPlan.bind("ppKey", pricingPlanListItem, "ppKey"); + pricingPlan.bind("title", pricingPlanListItem, "title"); + pricingPlan.bind("description", pricingPlanListItem, "description"); + pricingPlan.bind("isActive", pricingPlanListItem, "isActive"); // set PricingPlanId to the tab views - this.getChildControl("pricing-units").setPricingPlanId(pricingPlanModel.getModel()); - this.getChildControl("service-list").setPricingPlanId(pricingPlanModel.getModel()); - }, + this.getChildControl("pricing-units").setPricingPlanId(pricingPlan.getModel()); + this.getChildControl("service-list").setPricingPlanId(pricingPlan.getModel()); - __createTabPage: function(label, icon) { - const tabPage = new qx.ui.tabview.Page().set({ - layout: new qx.ui.layout.VBox() - }); - if (label) { - tabPage.setLabel(label); - } - if (icon) { - tabPage.setIcon(icon); - } - tabPage.getChildControl("button").set({ - font: "text-13" + // show services only if it's a TIER pricing plan + this.__servicesPage.getChildControl("button").set({ + visibility: pricingPlan.getClassification() === "TIER" ? "visible" : "excluded" }); - return tabPage; - } + }, } }); diff --git a/services/static-webserver/client/source/class/osparc/pricing/PlanEditor.js b/services/static-webserver/client/source/class/osparc/pricing/PlanEditor.js index 50543ee77e5..bcaa379e69e 100644 --- a/services/static-webserver/client/source/class/osparc/pricing/PlanEditor.js +++ b/services/static-webserver/client/source/class/osparc/pricing/PlanEditor.js @@ -217,13 +217,12 @@ qx.Class.define("osparc.pricing.PlanEditor", { }; osparc.store.Pricing.getInstance().postPricingPlan(newPricingPlanData) .then(() => { - osparc.FlashMessenger.getInstance().logAs(name + this.tr(" successfully created")); + osparc.FlashMessenger.logAs(name + this.tr(" successfully created")); this.fireEvent("done"); }) .catch(err => { - const errorMsg = err.message || this.tr("Something went wrong creating ") + name; - osparc.FlashMessenger.getInstance().logAs(errorMsg, "ERROR"); - console.error(err); + const errorMsg = this.tr("Something went wrong while creating ") + name; + osparc.FlashMessenger.logError(err, errorMsg); }) .finally(() => this.getChildControl("create").setFetching(false)); }, @@ -238,14 +237,10 @@ qx.Class.define("osparc.pricing.PlanEditor", { }; osparc.store.Pricing.getInstance().putPricingPlan(this.__pricingPlan["pricingPlanId"], updateData) .then(() => { - osparc.FlashMessenger.getInstance().logAs(this.tr("Successfully updated")); + osparc.FlashMessenger.logAs(this.tr("Successfully updated")); this.fireEvent("done"); }) - .catch(err => { - const errorMsg = err.message || this.tr("Something went wrong"); - osparc.FlashMessenger.getInstance().logAs(errorMsg, "ERROR"); - console.error(err); - }) + .catch(err => osparc.FlashMessenger.logError(err)) .finally(() => this.getChildControl("save").setFetching(false)); } } diff --git 
a/services/static-webserver/client/source/class/osparc/pricing/PlanListItem.js b/services/static-webserver/client/source/class/osparc/pricing/PlanListItem.js index a76527e35fd..9906b10ab3a 100644 --- a/services/static-webserver/client/source/class/osparc/pricing/PlanListItem.js +++ b/services/static-webserver/client/source/class/osparc/pricing/PlanListItem.js @@ -120,7 +120,6 @@ qx.Class.define("osparc.pricing.PlanListItem", { control = new qx.ui.basic.Label().set({ font: "text-14", alignY: "middle", - width: 35, }); this._add(control, { row: 0, @@ -132,7 +131,6 @@ qx.Class.define("osparc.pricing.PlanListItem", { control = new qx.ui.basic.Label().set({ font: "text-14", alignY: "middle", - width: 80, }); this._add(control, { row: 0, @@ -164,7 +162,6 @@ qx.Class.define("osparc.pricing.PlanListItem", { control = new qx.ui.basic.Label().set({ font: "text-14", alignY: "middle", - width: 60, }); this.bind("classification", control, "value"); this._add(control, { diff --git a/services/static-webserver/client/source/class/osparc/pricing/ServicesList.js b/services/static-webserver/client/source/class/osparc/pricing/ServicesList.js index eb215da3c76..9c90f83b28e 100644 --- a/services/static-webserver/client/source/class/osparc/pricing/ServicesList.js +++ b/services/static-webserver/client/source/class/osparc/pricing/ServicesList.js @@ -77,29 +77,42 @@ qx.Class.define("osparc.pricing.ServicesList", { .then(data => this.__populateList(data)); }, - __populateList: function(services) { - // before accessing the metadata in a sync way, we need to bring them to the cache - const metadataPromises = []; - services.forEach(service => { + __populateList: async function(services) { + const failedServices = []; + const servicePromises = services.map(async service => { const key = service["serviceKey"]; const version = service["serviceVersion"]; - metadataPromises.push(osparc.store.Services.getService(key, version)); - }); - Promise.all(metadataPromises) - .catch(err => console.error(err)) - .finally(() => { - const sList = []; - services.forEach(service => { - const key = service["serviceKey"]; - const version = service["serviceVersion"]; - const serviceMetadata = osparc.store.Services.getMetadata(key, version); - if (serviceMetadata) { - sList.push(new osparc.data.model.Service(serviceMetadata)); - } + try { + return await osparc.store.Services.getService(key, version); + } catch (err) { + console.error(err); + failedServices.push({ + key: service["serviceKey"], + version: service["serviceVersion"], }); - const servicesList = this.getChildControl("services-list"); - servicesList.setModel(new qx.data.Array(sList)); - }) + return null; // Return null to maintain array structure + } + }); + + const serviceModels = new qx.data.Array(); + // ensure that even if one request fails, the rest continue executing + const results = await Promise.allSettled(servicePromises); + results.forEach(result => { + if (result.status === "fulfilled" && result.value) { + const serviceMetadata = result.value; + serviceModels.push(new osparc.data.model.Service(serviceMetadata)); + } + }); + const servicesList = this.getChildControl("services-list"); + servicesList.setModel(serviceModels); + + if (failedServices.length) { + let msg = "Could not retrieve data from some services:
"; + failedServices.forEach(failedService => { + msg+= `- ${failedService.key}:${failedService.version}
`; + }); + osparc.FlashMessenger.logAs(msg, "WARNING"); + } }, __openAddServiceToPlan: function() { diff --git a/services/static-webserver/client/source/class/osparc/pricing/UnitEditor.js b/services/static-webserver/client/source/class/osparc/pricing/UnitEditor.js index 38f9022172e..671d4a53419 100644 --- a/services/static-webserver/client/source/class/osparc/pricing/UnitEditor.js +++ b/services/static-webserver/client/source/class/osparc/pricing/UnitEditor.js @@ -18,45 +18,29 @@ qx.Class.define("osparc.pricing.UnitEditor", { extend: qx.ui.core.Widget, - construct: function(pricingUnit) { + construct: function(pricingPlanId, pricingUnit) { this.base(arguments); this._setLayout(new qx.ui.layout.VBox(10)); - const unitName = this.getChildControl("unit-name"); - const costPerUnit = this.getChildControl("cost-per-unit"); - this.getChildControl("comment"); - const specificInfo = this.getChildControl("specific-info"); - const unitExtraInfoCPU = this.getChildControl("unit-extra-info-cpu"); - const unitExtraInfoRAM = this.getChildControl("unit-extra-info-ram"); - const unitExtraInfoVRAM = this.getChildControl("unit-extra-info-vram"); - const unitExtraInfo = this.getChildControl("unit-extra-info"); - this.getChildControl("is-default"); - - const manager = this.__validator = new qx.ui.form.validation.Manager(); - unitName.setRequired(true); - costPerUnit.setRequired(true); - unitExtraInfoCPU.setRequired(true); - unitExtraInfoRAM.setRequired(true); - unitExtraInfoVRAM.setRequired(true); - unitExtraInfo.setRequired(true); - manager.add(unitName); - manager.add(costPerUnit); - manager.add(specificInfo); - manager.add(unitExtraInfo); + this.__validator = new qx.ui.form.validation.Manager(); + + this.set({ + pricingPlanId + }); if (pricingUnit) { this.set({ pricingUnitId: pricingUnit.getPricingUnitId(), unitName: pricingUnit.getName(), costPerUnit: pricingUnit.getCost(), + default: pricingUnit.getIsDefault(), }); + const extraInfo = osparc.utils.Utils.deepCloneObject(pricingUnit.getExtraInfo()); if (pricingUnit.getClassification() === "TIER") { this.set({ specificInfo: pricingUnit.getSpecificInfo() && pricingUnit.getSpecificInfo()["aws_ec2_instances"] ? 
pricingUnit.getSpecificInfo()["aws_ec2_instances"].toString() : "", - default: pricingUnit.getIsDefault(), }); - const extraInfo = osparc.utils.Utils.deepCloneObject(pricingUnit.getExtraInfo()); // extract the required fields from the unitExtraInfo this.set({ unitExtraInfoCPU: extraInfo["CPU"], @@ -69,6 +53,11 @@ qx.Class.define("osparc.pricing.UnitEditor", { this.set({ unitExtraInfo: extraInfo }); + } else if (pricingUnit.getClassification() === "LICENSE") { + // extract the required fields from the unitExtraInfo + this.set({ + unitExtraInfoNSeats: extraInfo["num_of_seats"], + }); } this.getChildControl("save"); } else { @@ -81,6 +70,7 @@ qx.Class.define("osparc.pricing.UnitEditor", { check: "Number", init: null, nullable: false, + apply: "__applyPricingPlanId" }, pricingUnitId: { @@ -146,6 +136,13 @@ qx.Class.define("osparc.pricing.UnitEditor", { event: "changeUnitExtraInfo" }, + unitExtraInfoNSeats: { + check: "Number", + init: 1, + nullable: false, + event: "changeUnitExtraInfoNSeats" + }, + default: { check: "Boolean", init: true, @@ -175,6 +172,8 @@ qx.Class.define("osparc.pricing.UnitEditor", { control = new qx.ui.form.TextField().set({ font: "text-14" }); + control.setRequired(true); + this.__validator.add(control); this.bind("unitName", control, "value"); control.bind("value", this, "unitName"); this.getChildControl("unit-form").add(control, this.tr("Unit Name")); @@ -184,6 +183,8 @@ qx.Class.define("osparc.pricing.UnitEditor", { minimum: 0, maximum: 10000 }); + control.setRequired(true); + this.__validator.add(control); this.bind("costPerUnit", control, "value"); control.bind("value", this, "costPerUnit"); this.getChildControl("unit-form").add(control, this.tr("Cost per unit")); @@ -200,6 +201,7 @@ qx.Class.define("osparc.pricing.UnitEditor", { control = new qx.ui.form.TextArea().set({ font: "text-14" }); + this.__validator.add(control); this.bind("specificInfo", control, "value"); control.bind("value", this, "specificInfo"); this.getChildControl("unit-form").add(control, this.tr("Specific info")); @@ -210,6 +212,7 @@ qx.Class.define("osparc.pricing.UnitEditor", { minimum: 0, maximum: 10000 }); + control.setRequired(true); this.bind("unitExtraInfoCPU", control, "value"); control.bind("value", this, "unitExtraInfoCPU"); this.getChildControl("unit-form").add(control, this.tr("CPU")); @@ -220,6 +223,7 @@ qx.Class.define("osparc.pricing.UnitEditor", { minimum: 0, maximum: 10000 }); + control.setRequired(true); this.bind("unitExtraInfoRAM", control, "value"); control.bind("value", this, "unitExtraInfoRAM"); this.getChildControl("unit-form").add(control, this.tr("RAM")); @@ -230,6 +234,7 @@ qx.Class.define("osparc.pricing.UnitEditor", { minimum: 0, maximum: 10000 }); + control.setRequired(true); this.bind("unitExtraInfoVRAM", control, "value"); control.bind("value", this, "unitExtraInfoVRAM"); this.getChildControl("unit-form").add(control, this.tr("VRAM")); @@ -239,6 +244,8 @@ qx.Class.define("osparc.pricing.UnitEditor", { control = new qx.ui.form.TextField().set({ font: "text-14" }); + control.setRequired(true); + this.__validator.add(control); this.bind("unitExtraInfo", control, "value", { converter: v => JSON.stringify(v) }); @@ -248,6 +255,18 @@ qx.Class.define("osparc.pricing.UnitEditor", { this.getChildControl("unit-form").add(control, this.tr("More Extra Info")); break; } + case "unit-extra-info-n-seats": { + control = new qx.ui.form.Spinner().set({ + minimum: 1, + maximum: 10000 + }); + control.setRequired(true); + this.__validator.add(control); + 
this.bind("unitExtraInfoNSeats", control, "value"); + control.bind("value", this, "unitExtraInfoNSeats"); + this.getChildControl("unit-form").add(control, this.tr("Number of Seats")); + break; + } case "is-default": { control = new qx.ui.form.CheckBox().set({ value: true @@ -302,87 +321,111 @@ qx.Class.define("osparc.pricing.UnitEditor", { return control || this.base(arguments, id); }, - __createPricingUnit: function() { - const unitName = this.getUnitName(); - const costPerUnit = this.getCostPerUnit(); - const comment = this.getComment(); - const awsEc2Instances = []; - const specificInfo = this.getSpecificInfo(); - if (specificInfo) { - awsEc2Instances.push(specificInfo); + __applyPricingPlanId: function(pricingPlanId) { + const pricingPlan = osparc.store.Pricing.getInstance().getPricingPlan(pricingPlanId); + if (pricingPlan) { + this.getChildControl("unit-name"); + this.getChildControl("cost-per-unit"); + this.getChildControl("comment"); + if (pricingPlan.getClassification() === "TIER") { + this.getChildControl("specific-info"); + this.getChildControl("unit-extra-info-cpu"); + this.getChildControl("unit-extra-info-ram"); + this.getChildControl("unit-extra-info-vram"); + this.getChildControl("unit-extra-info"); + } else if (pricingPlan.getClassification() === "LICENSE") { + this.getChildControl("unit-extra-info-n-seats"); + } + this.getChildControl("is-default"); } - const extraInfo = {}; - extraInfo["CPU"] = this.getUnitExtraInfoCPU(); - extraInfo["RAM"] = this.getUnitExtraInfoRAM(); - extraInfo["VRAM"] = this.getUnitExtraInfoVRAM(); - Object.assign(extraInfo, this.getUnitExtraInfo()); - const isDefault = this.getDefault(); - const params = { - url: { - "pricingPlanId": this.getPricingPlanId() - }, - data: { - "unitName": unitName, - "costPerUnit": costPerUnit, - "comment": comment, - "specificInfo": { + }, + + __createPricingUnit: function() { + const data = { + "unitName": this.getUnitName(), + "costPerUnit": this.getCostPerUnit(), + "comment": this.getComment(), + "default": this.getDefault(), + }; + + const pricingPlan = osparc.store.Pricing.getInstance().getPricingPlan(this.getPricingPlanId()); + if (pricingPlan) { + if (pricingPlan.getClassification() === "TIER") { + const awsEc2Instances = []; + const specificInfo = this.getSpecificInfo(); + if (specificInfo) { + awsEc2Instances.push(specificInfo); + } + data["specificInfo"] = { "aws_ec2_instances": awsEc2Instances - }, - "unitExtraInfo": extraInfo, - "default": isDefault + }; + const extraInfo = { + "CPU": this.getUnitExtraInfoCPU(), + "RAM": this.getUnitExtraInfoRAM(), + "VRAM": this.getUnitExtraInfoVRAM(), + }; + Object.assign(extraInfo, this.getUnitExtraInfo()); + data["unitExtraInfo"] = extraInfo; + } else if (pricingPlan.getClassification() === "LICENSE") { + data["specificInfo"] = { + "aws_ec2_instances": [], + }; + data["unitExtraInfo"] = { + "num_of_seats": this.getUnitExtraInfoNSeats(), + }; } - }; - osparc.data.Resources.fetch("pricingUnits", "post", params) + } + + osparc.store.Pricing.getInstance().createPricingUnit(this.getPricingPlanId(), data) .then(() => { - osparc.FlashMessenger.getInstance().logAs(this.tr("Successfully created")); + osparc.FlashMessenger.logAs(this.tr("Successfully created")); this.fireEvent("done"); }) - .catch(err => { - osparc.FlashMessenger.getInstance().logAs(this.tr("Something went wrong"), "ERROR"); - console.error(err); - }) + .catch(err => osparc.FlashMessenger.logError(err)) .finally(() => this.getChildControl("create").setFetching(false)); }, __updatePricingUnit: function() { - const 
unitName = this.getUnitName(); - const costPerUnit = this.getCostPerUnit(); - const comment = this.getComment(); - const specificInfo = this.getSpecificInfo(); - const extraInfo = {}; - extraInfo["CPU"] = this.getUnitExtraInfoCPU(); - extraInfo["RAM"] = this.getUnitExtraInfoRAM(); - extraInfo["VRAM"] = this.getUnitExtraInfoVRAM(); - Object.assign(extraInfo, this.getUnitExtraInfo()); - const isDefault = this.getDefault(); - - const params = { - url: { - "pricingPlanId": this.getPricingPlanId(), - "pricingUnitId": this.getPricingUnitId() + const data = { + "unitName": this.getUnitName(), + "pricingUnitCostUpdate": { + "costPerUnit": this.getCostPerUnit(), + "comment": this.getComment(), }, - data: { - "unitName": unitName, - "pricingUnitCostUpdate": { - "cost_per_unit": costPerUnit, - "comment": comment - }, - "specificInfo": { + "default": this.getDefault(), + }; + + const pricingPlan = osparc.store.Pricing.getInstance().getPricingPlan(this.getPricingPlanId()); + if (pricingPlan) { + if (pricingPlan.getClassification() === "TIER") { + const specificInfo = this.getSpecificInfo(); + data["specificInfo"] = { + "aws_ec2_instances": [specificInfo] + }; + const extraInfo = { + "CPU": this.getUnitExtraInfoCPU(), + "RAM": this.getUnitExtraInfoRAM(), + "VRAM": this.getUnitExtraInfoVRAM(), + }; + Object.assign(extraInfo, this.getUnitExtraInfo()); + data["unitExtraInfo"] = extraInfo; + } else if (pricingPlan.getClassification() === "LICENSE") { + const specificInfo = this.getSpecificInfo(); + data["specificInfo"] = { "aws_ec2_instances": [specificInfo] - }, - "unitExtraInfo": extraInfo, - "default": isDefault + }; + data["unitExtraInfo"] = { + "num_of_seats": this.getUnitExtraInfoNSeats(), + }; } - }; - osparc.data.Resources.fetch("pricingUnits", "update", params) + } + + osparc.store.Pricing.getInstance().updatePricingUnit(this.getPricingPlanId(), this.getPricingUnitId(), data) .then(() => { - osparc.FlashMessenger.getInstance().logAs(this.tr("Successfully updated")); + osparc.FlashMessenger.logAs(this.tr("Successfully updated")); this.fireEvent("done"); }) - .catch(err => { - osparc.FlashMessenger.getInstance().logAs(this.tr("Something went wrong"), "ERROR"); - console.error(err); - }) + .catch(err => osparc.FlashMessenger.logError(err)) .finally(() => this.getChildControl("save").setFetching(false)); } } diff --git a/services/static-webserver/client/source/class/osparc/pricing/UnitsList.js b/services/static-webserver/client/source/class/osparc/pricing/UnitsList.js index 054d4e4f11b..b5563c3d75c 100644 --- a/services/static-webserver/client/source/class/osparc/pricing/UnitsList.js +++ b/services/static-webserver/client/source/class/osparc/pricing/UnitsList.js @@ -99,9 +99,7 @@ qx.Class.define("osparc.pricing.UnitsList", { }, __openCreatePricingUnit: function() { - const puCreator = new osparc.pricing.UnitEditor().set({ - pricingPlanId: this.getPricingPlanId() - }); + const puCreator = new osparc.pricing.UnitEditor(this.getPricingPlanId(), null); const title = this.tr("Pricing Unit Creator"); const win = osparc.ui.window.Window.popUpInWindow(puCreator, title, 400, 250); puCreator.addListener("done", () => { @@ -112,9 +110,7 @@ qx.Class.define("osparc.pricing.UnitsList", { }, __openUpdatePricingUnit: function(pricingUnit) { - const puEditor = new osparc.pricing.UnitEditor(pricingUnit).set({ - pricingPlanId: this.getPricingPlanId() - }); + const puEditor = new osparc.pricing.UnitEditor(this.getPricingPlanId(), pricingUnit); const title = this.tr("Pricing Unit Editor"); const win = 
osparc.ui.window.Window.popUpInWindow(puEditor, title, 400, 250); puEditor.addListener("done", () => { diff --git a/services/static-webserver/client/source/class/osparc/product/AboutProduct.js b/services/static-webserver/client/source/class/osparc/product/AboutProduct.js index 507c8c9ae7c..2736f96425a 100644 --- a/services/static-webserver/client/source/class/osparc/product/AboutProduct.js +++ b/services/static-webserver/client/source/class/osparc/product/AboutProduct.js @@ -49,10 +49,8 @@ qx.Class.define("osparc.product.AboutProduct", { __buildLayout: function() { switch (osparc.product.Utils.getProductName()) { case "s4l": - this.__buildS4LLayout(); - break; case "s4lacad": - this.__buildS4LAcademicLayout(); + this.__buildS4LLayout(); break; case "s4llite": this.__buildS4LLiteLayout(); @@ -62,7 +60,7 @@ qx.Class.define("osparc.product.AboutProduct", { this.__buildTIPLayout(); break; default: { - const noInfoText = this.tr("Information not available"); + const noInfoText = this.tr("Information is unavailable"); const noInfoLabel = osparc.product.quickStart.Utils.createLabel(noInfoText); this.add(noInfoLabel); break; @@ -73,11 +71,11 @@ qx.Class.define("osparc.product.AboutProduct", { __buildS4LLayout: function() { const licenseUrl = osparc.store.Support.getLicenseURL(); const text = this.tr(` - sim4life.io is a native implementation of the most advanced simulation platform, Sim4Life, in the cloud. \ + Sim4Life.web is a native implementation of the most advanced simulation platform, Sim4Life, in the cloud. \ The platform empowers users to simulate, analyze, and predict complex, multifaceted, and dynamic biological interactions within the full anatomical complexity of the human body. \ It provides the ability to set up and run complex simulations directly within any browser, utilizing cloud technology.
<br><br>
- sim4life.io makes use of technologies developed by our research partner for the o2S2PARC platform, the IT’IS Foundation, and co-funded by the U.S. National Institutes of Health’s SPARC initiative.\ + Sim4Life.web makes use of technologies developed by our research partner for the o2S2PARC platform, the IT’IS Foundation, and co-funded by the U.S. National Institutes of Health’s SPARC initiative.\
<br><br>
For more information about Sim4Life, please visit ${osparc.utils.Utils.createHTMLLink("sim4life.swiss", "https://sim4life.swiss/")}.
<br><br>
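
(Editor's aside, not part of the patch.) The __populateList rewrite in osparc.pricing.ServicesList earlier in this diff replaces a Promise.all-based flow with Promise.allSettled, so one failed metadata request no longer aborts the whole batch. A minimal standalone sketch of that pattern, under assumed names (fetchAll and fetchOne are illustrative, not from the diff; fetchOne stands in for a call like osparc.store.Services.getService(key, version)):

    // Fetch several resources concurrently; keep the successes, collect the failures.
    async function fetchAll(keys, fetchOne) {
      const results = await Promise.allSettled(keys.map(key => fetchOne(key)));
      const ok = [];
      const failed = [];
      results.forEach((result, i) => {
        if (result.status === "fulfilled" && result.value) {
          ok.push(result.value); // fulfilled entries expose the resolved value
        } else {
          failed.push(keys[i]); // rejected entries expose a `reason` instead
        }
      });
      return { ok, failed };
    }

    // Usage: resolves to { ok: ["A"], failed: ["b"] } without throwing.
    fetchAll(["a", "b"], key => key === "a" ? Promise.resolve("A") : Promise.reject(new Error("boom")))
      .then(({ ok, failed }) => console.log(ok, failed));

Unlike Promise.all, Promise.allSettled never rejects: every entry settles to {status: "fulfilled", value} or {status: "rejected", reason}. In the patch, each mapped promise additionally catches its own error and records the failing key/version in failedServices, so allSettled mainly guarantees the whole batch completes before the list model and the warning message are built.
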
@@ -90,26 +88,6 @@ qx.Class.define("osparc.product.AboutProduct", { this.add(label); }, - __buildS4LAcademicLayout: function() { - const licenseUrl = osparc.store.Support.getLicenseURL(); - const text = this.tr(` - sim4life.science is a native implementation of the most advanced simulation platform, Sim4Life, in the cloud. \ - The platform empowers users to simulate, analyze, and predict complex, multifaceted, and dynamic biological interactions within the full anatomical complexity of the human body. \ - It provides the ability to set up and run complex simulations directly within any browser, utilizing cloud technology. -
<br><br>
- sim4life.science makes use of technologies developed by our research partner for the o2S2PARC platform, the IT’IS Foundation, and co-funded by the U.S. National Institutes of Health’s SPARC initiative.\ -
<br><br>
- For more information about Sim4Life, please visit ${osparc.utils.Utils.createHTMLLink("sim4life.swiss", "href='https://sim4life.swiss/")}. -
<br><br>
- To review license agreements, click ${osparc.utils.Utils.createHTMLLink("here", licenseUrl)}. -
<br><br>
- Send us an email ${this.__getMailTo()} - `); - - const label = osparc.product.quickStart.Utils.createLabel(text); - this.add(label); - }, - __buildS4LLiteLayout: function() { // https://zurichmedtech.github.io/s4l-lite-manual/#/docs/what_is_s4l_lite const introText = "Sim4Life.lite is a powerful web-based simulation platform that allows you to model and analyze real-world phenomena and to design complex technical devices in a validated environment. With its intuitive interface and advanced tools, Sim4Life.lite makes it easy to develop your simulation project, wherever you are."; diff --git a/services/static-webserver/client/source/class/osparc/product/Utils.js b/services/static-webserver/client/source/class/osparc/product/Utils.js index 4c77b84e0e1..656ce42654e 100644 --- a/services/static-webserver/client/source/class/osparc/product/Utils.js +++ b/services/static-webserver/client/source/class/osparc/product/Utils.js @@ -188,6 +188,16 @@ qx.Class.define("osparc.product.Utils", { return "REGISTER"; }, + hasConvertToPipelineEnabled: function() { + return false; + }, + + // oSPARC only + hasExportCMisEnabled: function() { + const product = this.getProductName(); + return product === "osparc"; + }, + // All products except oSPARC hasIdlingTrackerEnabled: function() { const product = this.getProductName(); @@ -256,7 +266,25 @@ qx.Class.define("osparc.product.Utils", { return this.isS4LProduct() && licensesEnabled; }, - getProductThumbUrl: function(asset = "Default.png") { + getIconUrl: function(asset = "Default.png") { + const base = "https://raw.githubusercontent.com/ZurichMedTech/s4l-assets/main/app/icons" + let url; + switch (osparc.product.Utils.getProductName()) { + case "osparc": + url = `${base}/osparc/${asset}`; + break; + case "tis": + case "tiplite": + url = `${base}/tip/${asset}`; + break; + default: + url = `${base}/s4l/${asset}`; + break; + } + return url; + }, + + getThumbnailUrl: function(asset = "Default.png") { const base = "https://raw.githubusercontent.com/ZurichMedTech/s4l-assets/main/app/full/project_thumbnails" let url; switch (osparc.product.Utils.getProductName()) { @@ -274,7 +302,7 @@ qx.Class.define("osparc.product.Utils", { return url; }, - getProductBackgroundUrl: function(asset = "Thumbnail-01.png") { + getBackgroundUrl: function(asset = "Thumbnail-01.png") { const base = "https://raw.githubusercontent.com/ZurichMedTech/s4l-assets/main/app/full/background-images" let url; switch (osparc.product.Utils.getProductName()) { @@ -293,14 +321,7 @@ qx.Class.define("osparc.product.Utils", { }, hasNewPlusButton: function() { - return [ - "osparc", - "s4l", - "s4lacad", - "s4llite", - // "tis", - // "tiplite", - ].includes(osparc.product.Utils.getProductName()); + return Boolean(osparc.store.Products.getInstance().getPlusButtonUiConfig()); }, } }); diff --git a/services/static-webserver/client/source/class/osparc/product/tours/Tours.js b/services/static-webserver/client/source/class/osparc/product/tours/Tours.js index 5216ec483b1..8f9e061817a 100644 --- a/services/static-webserver/client/source/class/osparc/product/tours/Tours.js +++ b/services/static-webserver/client/source/class/osparc/product/tours/Tours.js @@ -26,12 +26,18 @@ qx.Class.define("osparc.product.tours.Tours", { statics: { TOURS: { - "s4llite": { - fetchTours: () => osparc.product.tours.Tours.fetchTours("/resource/osparc/tours/s4llite_tours.json") + "osparc": { + fetchTours: () => osparc.product.tours.Tours.fetchTours("/resource/osparc/tours/osparc_tours.json") }, "s4l": { fetchTours: () => 
osparc.product.tours.Tours.fetchTours("/resource/osparc/tours/s4l_tours.json") }, + "s4lacad": { + fetchTours: () => osparc.product.tours.Tours.fetchTours("/resource/osparc/tours/s4l_tours.json") + }, + "s4llite": { + fetchTours: () => osparc.product.tours.Tours.fetchTours("/resource/osparc/tours/s4llite_tours.json") + }, "tis": { fetchTours: () => osparc.product.tours.Tours.fetchTours("/resource/osparc/tours/tis_tours.json") }, diff --git a/services/static-webserver/client/source/class/osparc/service/PricingUnitsList.js b/services/static-webserver/client/source/class/osparc/service/PricingUnitsList.js index 215b17d935b..9850b098455 100644 --- a/services/static-webserver/client/source/class/osparc/service/PricingUnitsList.js +++ b/services/static-webserver/client/source/class/osparc/service/PricingUnitsList.js @@ -47,8 +47,7 @@ qx.Class.define("osparc.service.PricingUnitsList", { }, __fetchUnits: function() { - const pricingStore = osparc.store.Pricing.getInstance(); - pricingStore.fetchPricingPlansService(this.__serviceMetadata["key"], this.__serviceMetadata["version"]) + osparc.store.Pricing.getInstance().fetchPricingPlansService(this.__serviceMetadata["key"], this.__serviceMetadata["version"]) .then(data => this.__populateList(data["pricingUnits"])) .catch(err => { console.error(err); diff --git a/services/static-webserver/client/source/class/osparc/service/ServiceListItem.js b/services/static-webserver/client/source/class/osparc/service/ServiceListItem.js index ce84ade1dcb..6c007907944 100644 --- a/services/static-webserver/client/source/class/osparc/service/ServiceListItem.js +++ b/services/static-webserver/client/source/class/osparc/service/ServiceListItem.js @@ -24,8 +24,7 @@ qx.Class.define("osparc.service.ServiceListItem", { this.set({ width: this.self().ITEM_WIDTH, height: this.self().ITEM_HEIGHT, - paddingTop: 0, - paddingBottom: 0, + padding: this.self().PADDING, allowGrowX: true, focusable: true, }); @@ -53,8 +52,9 @@ qx.Class.define("osparc.service.ServiceListItem", { statics: { LATEST: "latest", ITEM_WIDTH: 550, - ITEM_HEIGHT: 35, - SERVICE_ICON: osparc.product.Utils.getProductThumbUrl() + ITEM_HEIGHT: 32 + 2*4, // thumbnail + 2*PADDING + PADDING: 4, + SERVICE_THUMBNAIL: osparc.product.Utils.getThumbnailUrl() }, members: { @@ -66,7 +66,7 @@ qx.Class.define("osparc.service.ServiceListItem", { if (service.getThumbnail()) { this.getChildControl("icon").setSource(service.getThumbnail()); } else { - this.getChildControl("icon").setSource(this.self().SERVICE_ICON); + this.getChildControl("icon").setSource(this.self().SERVICE_THUMBNAIL); } service.bind("name", this.getChildControl("title"), "value"); diff --git a/services/static-webserver/client/source/class/osparc/service/StatusUI.js b/services/static-webserver/client/source/class/osparc/service/StatusUI.js index 7fab5f32b70..2203da004c4 100644 --- a/services/static-webserver/client/source/class/osparc/service/StatusUI.js +++ b/services/static-webserver/client/source/class/osparc/service/StatusUI.js @@ -124,6 +124,8 @@ qx.Class.define("osparc.service.StatusUI", { return qx.locale.Manager.tr("Idle"); case "WAITING_FOR_RESOURCES": return qx.locale.Manager.tr("Waiting for resources"); + case "FAILED": + return qx.locale.Manager.tr("Unsuccessful"); // dynamics case "idle": @@ -131,7 +133,7 @@ qx.Class.define("osparc.service.StatusUI", { case "ready": return qx.locale.Manager.tr("Ready"); case "failed": - return qx.locale.Manager.tr("Failed"); + return qx.locale.Manager.tr("Unsuccessful"); case "deprecated": return 
qx.locale.Manager.tr("Deprecated"); case "retired": @@ -235,7 +237,7 @@ qx.Class.define("osparc.service.StatusUI", { const chip = new osparc.ui.basic.Chip().set({ label: osparc.service.Utils.DEPRECATED_SERVICE_TEXT, icon: osparc.service.StatusUI.getIconSource("deprecated"), - textColor: "contrasted-text-dark", + textColor: "text-on-warning", backgroundColor: osparc.service.StatusUI.getColor("deprecated"), allowGrowX: false }); @@ -246,7 +248,7 @@ qx.Class.define("osparc.service.StatusUI", { const chip = new osparc.ui.basic.Chip().set({ label: osparc.service.Utils.RETIRED_SERVICE_TEXT, icon: osparc.service.StatusUI.getIconSource("retired"), - textColor: "contrasted-text-dark", + textColor: "text-on-warning", backgroundColor: osparc.service.StatusUI.getColor("retired"), allowGrowX: false }); diff --git a/services/static-webserver/client/source/class/osparc/service/Utils.js b/services/static-webserver/client/source/class/osparc/service/Utils.js index 48639568506..ab1ee841c74 100644 --- a/services/static-webserver/client/source/class/osparc/service/Utils.js +++ b/services/static-webserver/client/source/class/osparc/service/Utils.js @@ -24,7 +24,7 @@ * Here is a little example of how to use the widget. * *
<pre class='javascript'>
- *   let latestSrv = osparc.service.Utils.getLatest(key);
+ *   let latestSrv = osparc.store.Services.getLatest(key);
* </pre>
*/ @@ -130,86 +130,11 @@ qx.Class.define("osparc.service.Utils", { return services; }, - getVersions: function(key, filterDeprecated = true) { - const services = osparc.store.Services.servicesCached; - let versions = []; - if (key in services) { - const serviceVersions = services[key]; - versions = versions.concat(Object.keys(serviceVersions)); - if (filterDeprecated) { - versions = versions.filter(version => { - if (services[key][version]["retired"]) { - return false; - } - return true; - }); - } - versions.sort(osparc.utils.Utils.compareVersionNumbers); - } - return versions.reverse(); - }, - - getLatest: function(key) { - const services = osparc.store.Services.servicesCached; - if (key in services) { - const versions = this.getVersions(key, true); - if (versions.length) { - return services[key][versions[0]]; - } - } - return null; - }, - - getLatestCompatible: function(key, version) { - const services = osparc.store.Services.servicesCached; - if (key in services && version in services[key]) { - const serviceMD = services[key][version]; - if (serviceMD["compatibility"] && serviceMD["compatibility"]["canUpdateTo"]) { - const canUpdateTo = serviceMD["compatibility"]["canUpdateTo"]; - return { - key: "key" in canUpdateTo ? canUpdateTo["key"] : key, // key is optional - version: canUpdateTo["version"] - } - } - // the provided key/version itself is the latest compatible - return { - key, - version - } - } - return null; - }, - - getVersionDisplay: function(key, version) { - const services = osparc.store.Services.servicesCached; - if (key in services && version in services[key]) { - return this.extractVersionDisplay(services[key][version]); - } - return null; - }, - extractVersionDisplay: function(metadata) { - return metadata["versionDisplay"] ? metadata["versionDisplay"] : metadata["version"]; - }, - - getReleasedDate: function(key, version) { - const services = osparc.store.Services.servicesCached; - if ( - key in services && - version in services[key] && - "released" in services[key][version] - ) { - return services[key][version]["released"]; + if (metadata) { + return metadata["versionDisplay"] ? 
metadata["versionDisplay"] : metadata["version"]; } - return null; - }, - - versionToListItem: function(key, version) { - const versionDisplay = this.getVersionDisplay(key, version); - const listItem = new qx.ui.form.ListItem(versionDisplay); - osparc.utils.Utils.setIdToWidget(listItem, "serviceVersionItem_" + versionDisplay); - listItem.version = version; - return listItem; + return ""; }, canIWrite: function(serviceAccessRights) { @@ -228,14 +153,38 @@ qx.Class.define("osparc.service.Utils", { DEPRECATED_AUTOUPDATABLE_INSTRUCTIONS: qx.locale.Manager.tr("Please Stop the Service and then Update it"), RETIRED_AUTOUPDATABLE_INSTRUCTIONS: qx.locale.Manager.tr("Please Update the Service"), + extractVersionFromHistory: function(metadata) { + if (metadata["history"]) { + const found = metadata["history"].find(historyEntry => historyEntry["version"] === metadata["version"]); + return found; + } + return null; + }, + isUpdatable: function(metadata) { - const latestCompatible = this.getLatestCompatible(metadata["key"], metadata["version"]); - return latestCompatible && (metadata["key"] !== latestCompatible["key"] || metadata["version"] !== latestCompatible["version"]); + const historyEntry = this.extractVersionFromHistory(metadata); + if (historyEntry && historyEntry["compatibility"] && historyEntry["compatibility"]["canUpdateTo"]) { + const latestCompatible = historyEntry["compatibility"]["canUpdateTo"]; + return latestCompatible && (metadata["key"] !== latestCompatible["key"] || metadata["version"] !== latestCompatible["version"]); + } + return false; + }, + + __extractRetiredDate: function(metadata) { + if ("release" in metadata && metadata["release"]["retired"]) { + // this works for service latest + return new Date(metadata["release"]["retired"]); + } + const historyEntry = this.extractVersionFromHistory(metadata); + if (historyEntry && "retired" in historyEntry && historyEntry["retired"]) { + return new Date(historyEntry["retired"]); + } + return null; }, isDeprecated: function(metadata) { - if (metadata && "deprecated" in metadata && ![null, undefined].includes(metadata["deprecated"])) { - const deprecationTime = new Date(metadata["deprecated"]); + const deprecationTime = this.__extractRetiredDate(metadata); + if (deprecationTime) { const now = new Date(); return deprecationTime.getTime() > now.getTime(); } @@ -243,8 +192,8 @@ qx.Class.define("osparc.service.Utils", { }, isRetired: function(metadata) { - if (metadata && "deprecated" in metadata && ![null, undefined].includes(metadata["deprecated"])) { - const deprecationTime = new Date(metadata["deprecated"]); + const deprecationTime = this.__extractRetiredDate(metadata); + if (deprecationTime) { const now = new Date(); return deprecationTime.getTime() < now.getTime(); } @@ -252,34 +201,11 @@ qx.Class.define("osparc.service.Utils", { }, getDeprecationDateText: function(metadata) { - const deprecationTime = new Date(metadata["deprecated"]); - return qx.locale.Manager.tr("It will be Retired: ") + osparc.utils.Utils.formatDate(deprecationTime); - }, - - getFilePicker: function() { - return this.self().getLatest("simcore/services/frontend/file-picker"); - }, - - getParametersMetadata: function() { - const parametersMetadata = []; - const services = osparc.store.Services.servicesCached; - for (const key in services) { - if (key.includes("simcore/services/frontend/parameter/")) { - const latest = this.self().getLatest(key); - if (latest) { - parametersMetadata.push(latest); - } - } + if (this.isDeprecated(metadata) || this.isRetired(metadata)) { + 
const deprecationTime = this.__extractRetiredDate(metadata); + return qx.locale.Manager.tr("It will be Retired: ") + osparc.utils.Utils.formatDate(deprecationTime); } - return parametersMetadata; - }, - - getParameterMetadata: function(type) { - return this.self().getLatest("simcore/services/frontend/parameter/"+type); - }, - - getProbeMetadata: function(type) { - return this.self().getLatest("simcore/services/frontend/iterator-consumer/probe/"+type); + return ""; }, removeFileToKeyMap: function(service) { diff --git a/services/static-webserver/client/source/class/osparc/share/Collaborators.js b/services/static-webserver/client/source/class/osparc/share/Collaborators.js index 004666fcbab..ea819493cab 100644 --- a/services/static-webserver/client/source/class/osparc/share/Collaborators.js +++ b/services/static-webserver/client/source/class/osparc/share/Collaborators.js @@ -335,6 +335,21 @@ qx.Class.define("osparc.share.Collaborators", { }); item.addListener("removeMember", e => { const orgMember = e.getData(); + if ( + ["study", "template"].includes(this._resourceType) && + !osparc.share.CollaboratorsStudy.canCollaboratorBeRemoved(this._serializedDataCopy, orgMember["gid"]) + ) { + let msg = this.tr("Collaborator can't be removed:"); + msg += this._serializedDataCopy["name"] + this.tr(" needs at least one owner."); + if ( + Object.keys(this._serializedDataCopy["accessRights"]).length === 1 && + Object.values(this._serializedDataCopy["accessRights"])[0]["delete"] + ) { + msg += "
" + this.tr("You might want to delete it instead."); + } + osparc.FlashMessenger.logError(msg); + return; + } this._deleteMember(orgMember, item); }); } @@ -349,11 +364,8 @@ qx.Class.define("osparc.share.Collaborators", { __getLeaveStudyButton: function() { const myGid = osparc.auth.Data.getInstance().getGroupId(); if ( - (this._resourceType === "study") && - // check if I'm part of the access rights (not through an organization) - Object.keys(this._serializedDataCopy["accessRights"]).includes(myGid.toString()) && - // check also user is not "prjOwner". Backend will silently not let the frontend remove that user. - (this._serializedDataCopy["prjOwner"] !== osparc.auth.Data.getInstance().getEmail()) + ["study", "template"].includes(this._resourceType) && + osparc.share.CollaboratorsStudy.canCollaboratorBeRemoved(this._serializedDataCopy, myGid) ) { const leaveText = this.tr("Leave") + " " + osparc.product.Utils.getStudyAlias({ firstUpperCase: true @@ -363,29 +375,14 @@ qx.Class.define("osparc.share.Collaborators", { visibility: Object.keys(this._serializedDataCopy["accessRights"]).includes(myGid.toString()) ? "visible" : "excluded" }); leaveButton.addListener("execute", () => { - let msg = `"${this._serializedDataCopy["name"]}" ` + this.tr("will no longer be listed."); - if (!osparc.share.CollaboratorsStudy.checkRemoveCollaborator(this._serializedDataCopy, myGid)) { - msg += "
"; - msg += this.tr("If you remove yourself, there won't be any other Owners."); + const collaborator = { + gid: myGid, + name: osparc.store.Groups.getInstance().getGroupMe().getLabel(), } - const win = new osparc.ui.window.Confirmation(msg).set({ - caption: leaveText, - confirmText: this.tr("Leave"), - confirmAction: "delete" - }); - win.open(); - win.addListener("close", () => { - if (win.getConfirmed()) { - const collaborator = { - gid: myGid, - name: osparc.store.Groups.getInstance().getGroupMe().getLabel(), - } - this._deleteMember(collaborator) - .then(() => { - qx.event.message.Bus.dispatchByName("reloadStudies"); - }); - } - }, this); + this._deleteMember(collaborator) + .then(() => { + qx.event.message.Bus.dispatchByName("reloadStudies"); + }); }, this); return leaveButton; } @@ -431,7 +428,7 @@ qx.Class.define("osparc.share.Collaborators", { "description": collab.getDescription(), }; if (!("getUserId" in collab)) { - // orgnanization + // organization if (everyoneGIds.includes(parseInt(gid))) { collaborator["thumbnail"] = "@FontAwesome5Solid/globe/32"; } else if (!collaborator["thumbnail"]) { diff --git a/services/static-webserver/client/source/class/osparc/share/CollaboratorsService.js b/services/static-webserver/client/source/class/osparc/share/CollaboratorsService.js index bd16086cb27..5ca0e1717d7 100644 --- a/services/static-webserver/client/source/class/osparc/share/CollaboratorsService.js +++ b/services/static-webserver/client/source/class/osparc/share/CollaboratorsService.js @@ -76,13 +76,10 @@ qx.Class.define("osparc.share.CollaboratorsService", { .then(() => { this.fireDataEvent("updateAccessRights", this._serializedDataCopy); const text = this.tr("Service successfully shared"); - osparc.FlashMessenger.getInstance().logAs(text); + osparc.FlashMessenger.logAs(text); this._reloadCollaboratorsList(); }) - .catch(err => { - console.error(err); - osparc.FlashMessenger.getInstance().logAs(this.tr("Something went wrong sharing the Service"), "ERROR"); - }); + .catch(err => osparc.FlashMessenger.logError(err, this.tr("Something went wrong while sharing the service"))); }, _deleteMember: function(collaborator, item) { @@ -92,7 +89,7 @@ qx.Class.define("osparc.share.CollaboratorsService", { const success = delete this._serializedDataCopy["accessRights"][collaborator["gid"]]; if (!success) { - osparc.FlashMessenger.getInstance().logAs(this.tr("Something went wrong removing Member"), "ERROR"); + osparc.FlashMessenger.logError(this.tr("Something went wrong while removing member")); if (item) { item.setEnabled(true); } @@ -102,13 +99,10 @@ qx.Class.define("osparc.share.CollaboratorsService", { osparc.store.Services.patchServiceData(this._serializedDataCopy, "accessRights", this._serializedDataCopy["accessRights"]) .then(() => { this.fireDataEvent("updateAccessRights", this._serializedDataCopy); - osparc.FlashMessenger.getInstance().logAs(collaborator["name"] + this.tr(" successfully removed")); + osparc.FlashMessenger.logAs(collaborator["name"] + this.tr(" successfully removed")); this._reloadCollaboratorsList(); }) - .catch(err => { - console.error(err); - osparc.FlashMessenger.getInstance().logAs(this.tr("Something went wrong removing ") + collaborator["name"], "ERROR"); - }) + .catch(err => osparc.FlashMessenger.logError(err, this.tr("Something went wrong while removing ") + collaborator["name"])) .finally(() => { if (item) { item.setEnabled(true); @@ -122,13 +116,10 @@ qx.Class.define("osparc.share.CollaboratorsService", { 
osparc.store.Services.patchServiceData(this._serializedDataCopy, "accessRights", this._serializedDataCopy["accessRights"]) .then(() => { this.fireDataEvent("updateAccessRights", this._serializedDataCopy); - osparc.FlashMessenger.getInstance().logAs(successMsg); + osparc.FlashMessenger.logAs(successMsg); this._reloadCollaboratorsList(); }) - .catch(err => { - console.error(err); - osparc.FlashMessenger.getInstance().logAs(failureMsg, "ERROR"); - }) + .catch(err => osparc.FlashMessenger.logError(err, failureMsg)) .finally(() => item.setEnabled(true)); }, @@ -137,13 +128,13 @@ qx.Class.define("osparc.share.CollaboratorsService", { collaborator["gid"], this.self().getOwnerAccessRight(), this.tr(`Successfully promoted to ${osparc.data.Roles.SERVICE[2].label}`), - this.tr(`Something went wrong promoting to ${osparc.data.Roles.SERVICE[2].label}`), + this.tr(`Something went wrong while promoting to ${osparc.data.Roles.SERVICE[2].label}`), item ); }, _promoteToOwner: function(collaborator, item) { - osparc.FlashMessenger.getInstance().logAs(this.tr("Operation not available"), "WARNING"); + osparc.FlashMessenger.logAs(this.tr("Operation not available"), "WARNING"); }, _demoteToUser: function(collaborator, item) { @@ -151,13 +142,13 @@ qx.Class.define("osparc.share.CollaboratorsService", { collaborator["gid"], this.self().getCollaboratorAccessRight(), this.tr(`Successfully demoted to ${osparc.data.Roles.SERVICE[1].label}`), - this.tr(`Something went wrong demoting ${osparc.data.Roles.SERVICE[1].label}`), + this.tr(`Something went wrong while demoting ${osparc.data.Roles.SERVICE[1].label}`), item ); }, _demoteToEditor: function(collaborator, item) { - osparc.FlashMessenger.getInstance().logAs(this.tr("Operation not available"), "WARNING"); + osparc.FlashMessenger.logAs(this.tr("Operation not available"), "WARNING"); } } }); diff --git a/services/static-webserver/client/source/class/osparc/share/CollaboratorsStudy.js b/services/static-webserver/client/source/class/osparc/share/CollaboratorsStudy.js index 771a4284331..a1fa2fb1ef8 100644 --- a/services/static-webserver/client/source/class/osparc/share/CollaboratorsStudy.js +++ b/services/static-webserver/client/source/class/osparc/share/CollaboratorsStudy.js @@ -92,7 +92,7 @@ qx.Class.define("osparc.share.CollaboratorsStudy", { }, // checks that if the user to remove is an owner, there will still be another owner - checkRemoveCollaborator: function(studyData, gid) { + canCollaboratorBeRemoved: function(studyData, gid) { const ownerGids = this.__getDeleters(studyData); if (ownerGids.includes(gid.toString())) { return ownerGids.length > 1; @@ -114,20 +114,17 @@ qx.Class.define("osparc.share.CollaboratorsStudy", { gids.forEach(gid => { newCollaborators[gid] = this._resourceType === "study" ? 
this.self().getCollaboratorAccessRight() : this.self().getViewerAccessRight(); }); - osparc.info.StudyUtils.addCollaborators(this._serializedDataCopy, newCollaborators) + osparc.store.Study.addCollaborators(this._serializedDataCopy, newCollaborators) .then(() => { const text = resourceAlias + this.tr(" successfully shared"); - osparc.FlashMessenger.getInstance().logAs(text); + osparc.FlashMessenger.logAs(text); this.fireDataEvent("updateAccessRights", this._serializedDataCopy); this._reloadCollaboratorsList(); this.__pushNotifications(gids); this.__checkShareePermissions(gids); }) - .catch(err => { - console.error(err); - osparc.FlashMessenger.getInstance().logAs(this.tr("Something went wrong sharing the ") + resourceAlias, "ERROR"); - }); + .catch(err => osparc.FlashMessenger.logError(err, this.tr("Something went wrong while sharing the ") + resourceAlias)); }, _deleteMember: function(collaborator, item) { @@ -135,16 +132,13 @@ qx.Class.define("osparc.share.CollaboratorsStudy", { item.setEnabled(false); } - return osparc.info.StudyUtils.removeCollaborator(this._serializedDataCopy, collaborator["gid"]) + return osparc.store.Study.removeCollaborator(this._serializedDataCopy, collaborator["gid"]) .then(() => { this.fireDataEvent("updateAccessRights", this._serializedDataCopy); - osparc.FlashMessenger.getInstance().logAs(collaborator["name"] + this.tr(" successfully removed")); + osparc.FlashMessenger.logAs(collaborator["name"] + this.tr(" successfully removed")); this._reloadCollaboratorsList(); }) - .catch(err => { - console.error(err); - osparc.FlashMessenger.getInstance().logAs(this.tr("Something went wrong removing ") + collaborator["name"], "ERROR"); - }) + .catch(err => osparc.FlashMessenger.logError(err, this.tr("Something went wrong while removing ") + collaborator["name"])) .finally(() => { if (item) { item.setEnabled(true); @@ -155,16 +149,13 @@ qx.Class.define("osparc.share.CollaboratorsStudy", { __make: function(collaboratorGId, newAccessRights, successMsg, failureMsg, item) { item.setEnabled(false); - osparc.info.StudyUtils.updateCollaborator(this._serializedDataCopy, collaboratorGId, newAccessRights) + osparc.store.Study.updateCollaborator(this._serializedDataCopy, collaboratorGId, newAccessRights) .then(() => { this.fireDataEvent("updateAccessRights", this._serializedDataCopy); - osparc.FlashMessenger.getInstance().logAs(successMsg); + osparc.FlashMessenger.logAs(successMsg); this._reloadCollaboratorsList(); }) - .catch(err => { - console.error(err); - osparc.FlashMessenger.getInstance().logAs(failureMsg, "ERROR"); - }) + .catch(err => osparc.FlashMessenger.logError(err, failureMsg)) .finally(() => { if (item) { item.setEnabled(true); @@ -177,7 +168,7 @@ qx.Class.define("osparc.share.CollaboratorsStudy", { collaborator["gid"], this.self().getCollaboratorAccessRight(), this.tr(`Successfully promoted to ${osparc.data.Roles.STUDY[2].label}`), - this.tr(`Something went wrong promoting to ${osparc.data.Roles.STUDY[2].label}`), + this.tr(`Something went wrong while promoting to ${osparc.data.Roles.STUDY[2].label}`), item ); }, @@ -187,7 +178,7 @@ qx.Class.define("osparc.share.CollaboratorsStudy", { collaborator["gid"], this.self().getOwnerAccessRight(), this.tr(`Successfully promoted to ${osparc.data.Roles.STUDY[3].label}`), - this.tr(`Something went wrong promoting to ${osparc.data.Roles.STUDY[3].label}`), + this.tr(`Something went wrong while promoting to ${osparc.data.Roles.STUDY[3].label}`), item ); }, @@ -199,7 +190,7 @@ qx.Class.define("osparc.share.CollaboratorsStudy", { gid, 
this.self().getViewerAccessRight(), this.tr(`Successfully demoted to ${osparc.data.Roles.STUDY[1].label}`), - this.tr(`Something went wrong demoting to ${osparc.data.Roles.STUDY[1].label}`), + this.tr(`Something went wrong while demoting to ${osparc.data.Roles.STUDY[1].label}`), itm ); }; @@ -229,7 +220,7 @@ qx.Class.define("osparc.share.CollaboratorsStudy", { collaborator["gid"], this.self().getCollaboratorAccessRight(), this.tr(`Successfully demoted to ${osparc.data.Roles.STUDY[2].label}`), - this.tr(`Something went wrong demoting to ${osparc.data.Roles.STUDY[2].label}`), + this.tr(`Something went wrong while demoting to ${osparc.data.Roles.STUDY[2].label}`), item ); }, diff --git a/services/static-webserver/client/source/class/osparc/share/CollaboratorsTag.js b/services/static-webserver/client/source/class/osparc/share/CollaboratorsTag.js index 0327bf29589..2f2dd4f5b32 100644 --- a/services/static-webserver/client/source/class/osparc/share/CollaboratorsTag.js +++ b/services/static-webserver/client/source/class/osparc/share/CollaboratorsTag.js @@ -77,14 +77,11 @@ qx.Class.define("osparc.share.CollaboratorsTag", { osparc.store.Tags.getInstance().addCollaborators(this.__tag.getTagId(), newCollaborators) .then(() => { const text = this.tr("Tag successfully shared"); - osparc.FlashMessenger.getInstance().logAs(text); + osparc.FlashMessenger.logAs(text); this.fireDataEvent("updateAccessRights", this.__tag.serialize()); this._reloadCollaboratorsList(); }) - .catch(err => { - console.error(err); - osparc.FlashMessenger.getInstance().logAs(this.tr("Something went wrong sharing the Tag"), "ERROR"); - }); + .catch(err => osparc.FlashMessenger.logError(err, this.tr("Something went wrong while sharing the tag"))); }, _deleteMember: function(collaborator, item) { @@ -95,13 +92,10 @@ qx.Class.define("osparc.share.CollaboratorsTag", { osparc.store.Tags.getInstance().removeCollaborator(this.__tag.getTagId(), collaborator["gid"]) .then(() => { this.fireDataEvent("updateAccessRights", this.__tag.serialize()); - osparc.FlashMessenger.getInstance().logAs(collaborator["name"] + this.tr(" successfully removed")); + osparc.FlashMessenger.logAs(collaborator["name"] + this.tr(" successfully removed")); this._reloadCollaboratorsList(); }) - .catch(err => { - console.error(err); - osparc.FlashMessenger.getInstance().logAs(this.tr("Something went wrong removing ") + collaborator["name"], "ERROR"); - }) + .catch(err => osparc.FlashMessenger.logError(err, this.tr("Something went wrong while removing ") + collaborator["name"])) .finally(() => { if (item) { item.setEnabled(true); @@ -115,13 +109,10 @@ qx.Class.define("osparc.share.CollaboratorsTag", { osparc.store.Tags.getInstance().updateCollaborator(this.__tag.getTagId(), collaboratorGId, newAccessRights) .then(() => { this.fireDataEvent("updateAccessRights", this.__tag.serialize()); - osparc.FlashMessenger.getInstance().logAs(successMsg); + osparc.FlashMessenger.logAs(successMsg); this._reloadCollaboratorsList(); }) - .catch(err => { - console.error(err); - osparc.FlashMessenger.getInstance().logAs(failureMsg, "ERROR"); - }) + .catch(err => osparc.FlashMessenger.logError(err, failureMsg)) .finally(() => { if (item) { item.setEnabled(true); @@ -134,7 +125,7 @@ qx.Class.define("osparc.share.CollaboratorsTag", { collaborator["gid"], this.self().getCollaboratorAccessRight(), this.tr(`Successfully promoted to ${osparc.data.Roles.STUDY[2].label}`), - this.tr(`Something went wrong promoting to ${osparc.data.Roles.STUDY[2].label}`), + this.tr(`Something went wrong while 
promoting to ${osparc.data.Roles.STUDY[2].label}`), item ); }, @@ -144,7 +135,7 @@ qx.Class.define("osparc.share.CollaboratorsTag", { collaborator["gid"], this.self().getOwnerAccessRight(), this.tr(`Successfully promoted to ${osparc.data.Roles.STUDY[3].label}`), - this.tr(`Something went wrong promoting to ${osparc.data.Roles.STUDY[3].label}`), + this.tr(`Something went wrong while promoting to ${osparc.data.Roles.STUDY[3].label}`), item ); }, @@ -154,7 +145,7 @@ qx.Class.define("osparc.share.CollaboratorsTag", { collaborator["gid"], this.self().getViewerAccessRight(), this.tr(`Successfully demoted to ${osparc.data.Roles.STUDY[1].label}`), - this.tr(`Something went wrong demoting to ${osparc.data.Roles.STUDY[1].label}`), + this.tr(`Something went wrong while demoting to ${osparc.data.Roles.STUDY[1].label}`), item ); }, @@ -164,7 +155,7 @@ qx.Class.define("osparc.share.CollaboratorsTag", { collaborator["gid"], this.self().getCollaboratorAccessRight(), this.tr(`Successfully demoted to ${osparc.data.Roles.STUDY[2].label}`), - this.tr(`Something went wrong demoting to ${osparc.data.Roles.STUDY[2].label}`), + this.tr(`Something went wrong while demoting to ${osparc.data.Roles.STUDY[2].label}`), item ); } diff --git a/services/static-webserver/client/source/class/osparc/share/CollaboratorsWorkspace.js b/services/static-webserver/client/source/class/osparc/share/CollaboratorsWorkspace.js index 97dd58bc573..9accc310f84 100644 --- a/services/static-webserver/client/source/class/osparc/share/CollaboratorsWorkspace.js +++ b/services/static-webserver/client/source/class/osparc/share/CollaboratorsWorkspace.js @@ -73,14 +73,11 @@ qx.Class.define("osparc.share.CollaboratorsWorkspace", { osparc.store.Workspaces.getInstance().addCollaborators(this.__workspace.getWorkspaceId(), newCollaborators) .then(() => { const text = this.tr("Workspace successfully shared"); - osparc.FlashMessenger.getInstance().logAs(text); + osparc.FlashMessenger.logAs(text); this.fireDataEvent("updateAccessRights", this.__workspace.serialize()); this._reloadCollaboratorsList(); }) - .catch(err => { - console.error(err); - osparc.FlashMessenger.getInstance().logAs(this.tr("Something went wrong sharing the Workspace"), "ERROR"); - }); + .catch(err => osparc.FlashMessenger.logError(err, this.tr("Something went wrong while sharing the workspace"))); }, _deleteMember: function(collaborator, item) { @@ -91,13 +88,10 @@ qx.Class.define("osparc.share.CollaboratorsWorkspace", { osparc.store.Workspaces.getInstance().removeCollaborator(this.__workspace.getWorkspaceId(), collaborator["gid"]) .then(() => { this.fireDataEvent("updateAccessRights", this.__workspace.serialize()); - osparc.FlashMessenger.getInstance().logAs(collaborator["name"] + this.tr(" successfully removed")); + osparc.FlashMessenger.logAs(collaborator["name"] + this.tr(" successfully removed")); this._reloadCollaboratorsList(); }) - .catch(err => { - console.error(err); - osparc.FlashMessenger.getInstance().logAs(this.tr("Something went wrong removing ") + collaborator["name"], "ERROR"); - }) + .catch(err => osparc.FlashMessenger.logError(err, this.tr("Something went wrong while removing ") + collaborator["name"])) .finally(() => { if (item) { item.setEnabled(true); @@ -111,13 +105,10 @@ qx.Class.define("osparc.share.CollaboratorsWorkspace", { osparc.store.Workspaces.getInstance().updateCollaborator(this.__workspace.getWorkspaceId(), collaboratorGId, newAccessRights) .then(() => { this.fireDataEvent("updateAccessRights", this.__workspace.serialize()); - 
osparc.FlashMessenger.getInstance().logAs(successMsg); + osparc.FlashMessenger.logAs(successMsg); this._reloadCollaboratorsList(); }) - .catch(err => { - console.error(err); - osparc.FlashMessenger.getInstance().logAs(failureMsg, "ERROR"); - }) + .catch(err => osparc.FlashMessenger.logError(err, failureMsg)) .finally(() => { if (item) { item.setEnabled(true); @@ -130,7 +121,7 @@ qx.Class.define("osparc.share.CollaboratorsWorkspace", { collaborator["gid"], this.self().getCollaboratorAccessRight(), this.tr(`Successfully promoted to ${osparc.data.Roles.WORKSPACE[2].label}`), - this.tr(`Something went wrong promoting to ${osparc.data.Roles.WORKSPACE[2].label}`), + this.tr(`Something went wrong while promoting to ${osparc.data.Roles.WORKSPACE[2].label}`), item ); }, @@ -140,7 +131,7 @@ qx.Class.define("osparc.share.CollaboratorsWorkspace", { collaborator["gid"], this.self().getOwnerAccessRight(), this.tr(`Successfully promoted to ${osparc.data.Roles.WORKSPACE[3].label}`), - this.tr(`Something went wrong promoting to ${osparc.data.Roles.WORKSPACE[3].label}`), + this.tr(`Something went wrong while promoting to ${osparc.data.Roles.WORKSPACE[3].label}`), item ); }, @@ -152,7 +143,7 @@ qx.Class.define("osparc.share.CollaboratorsWorkspace", { gid, this.self().getViewerAccessRight(), this.tr(`Successfully demoted to ${osparc.data.Roles.WORKSPACE[1].label}`), - this.tr(`Something went wrong demoting to ${osparc.data.Roles.WORKSPACE[1].label}`), + this.tr(`Something went wrong while demoting to ${osparc.data.Roles.WORKSPACE[1].label}`), itm ); }; @@ -182,7 +173,7 @@ qx.Class.define("osparc.share.CollaboratorsWorkspace", { collaborator["gid"], this.self().getCollaboratorAccessRight(), this.tr(`Successfully demoted to ${osparc.data.Roles.WORKSPACE[2].label}`), - this.tr(`Something went wrong demoting to ${osparc.data.Roles.WORKSPACE[2].label}`), + this.tr(`Something went wrong while demoting to ${osparc.data.Roles.WORKSPACE[2].label}`), item ); } diff --git a/services/static-webserver/client/source/class/osparc/share/NewCollaboratorsManager.js b/services/static-webserver/client/source/class/osparc/share/NewCollaboratorsManager.js index 7f757b831a3..3008c818c99 100644 --- a/services/static-webserver/client/source/class/osparc/share/NewCollaboratorsManager.js +++ b/services/static-webserver/client/source/class/osparc/share/NewCollaboratorsManager.js @@ -124,10 +124,7 @@ qx.Class.define("osparc.share.NewCollaboratorsManager", { users.forEach(user => user["collabType"] = 2); this.__addPotentialCollaborators(users); }) - .catch(err => { - console.error(err); - osparc.FlashMessenger.getInstance().logAs(err.message, "ERROR"); - }) + .catch(err => osparc.FlashMessenger.logError(err)) .finally(() => this.__searchingCollaborators.exclude()); }, diff --git a/services/static-webserver/client/source/class/osparc/snapshots/IterationsView.js b/services/static-webserver/client/source/class/osparc/snapshots/IterationsView.js index c9b1557f111..fd2c8eaa384 100644 --- a/services/static-webserver/client/source/class/osparc/snapshots/IterationsView.js +++ b/services/static-webserver/client/source/class/osparc/snapshots/IterationsView.js @@ -76,7 +76,7 @@ qx.Class.define("osparc.snapshots.IterationsView", { "studyId": iteration["workcopy_project_id"] } }; - iterationPromises.push(osparc.data.Resources.getOne("studies", params)); + iterationPromises.push(osparc.data.Resources.fetch("studies", "getOne", params)); }); Promise.all(iterationPromises) .then(values => { @@ -204,7 +204,7 @@ 
qx.Class.define("osparc.snapshots.IterationsView", { "studyId": iterationId } }; - osparc.data.Resources.getOne("studies", params) + osparc.data.Resources.fetch("studies", "getOne", params) .then(data => { const studyData = this.__study.serialize(); studyData["workbench"] = data["workbench"]; @@ -275,7 +275,7 @@ qx.Class.define("osparc.snapshots.IterationsView", { .then(() => { this.__rebuildSnapshots(); }) - .catch(err => osparc.FlashMessenger.getInstance().logAs(err.message, "ERROR")); + .catch(err => osparc.FlashMessenger.logError(err)); win.close(); }, this); editSnapshotView.addListener("cancel", () => { diff --git a/services/static-webserver/client/source/class/osparc/snapshots/SnapshotsView.js b/services/static-webserver/client/source/class/osparc/snapshots/SnapshotsView.js index 6cbef9261a0..25ebd5d6550 100644 --- a/services/static-webserver/client/source/class/osparc/snapshots/SnapshotsView.js +++ b/services/static-webserver/client/source/class/osparc/snapshots/SnapshotsView.js @@ -179,7 +179,7 @@ qx.Class.define("osparc.snapshots.SnapshotsView", { .then(() => { this.__rebuildSnapshots(); }) - .catch(err => osparc.FlashMessenger.getInstance().logAs(err.message, "ERROR")); + .catch(err => osparc.FlashMessenger.logError(err)); win.close(); }, this); editSnapshotView.addListener("cancel", () => { diff --git a/services/static-webserver/client/source/class/osparc/store/Data.js b/services/static-webserver/client/source/class/osparc/store/Data.js index 322dc1313bd..202a3b4788d 100644 --- a/services/static-webserver/client/source/class/osparc/store/Data.js +++ b/services/static-webserver/client/source/class/osparc/store/Data.js @@ -35,17 +35,52 @@ qx.Class.define("osparc.store.Data", { "fileCopied": "qx.event.type.Data", }, + statics: { + getAllItems: async function(locationId, path, cursor, allItems = []) { + if (allItems.length >= 10000) { + const msg = qx.locale.Manager.tr("Oops... more than 10.000 items to be listed here. Maybe it's time to make a folder :)."); + osparc.FlashMessenger.logAs(msg, "WARNING"); + return allItems; + } + + const params = { + url: { + locationId, + path: path || null, + cursor: cursor || null, + } + }; + let pagResp = null; + if (path) { + pagResp = await osparc.data.Resources.fetch("storagePaths", cursor ? "getPathsPage" : "getPaths", params); + } else { + pagResp = await osparc.data.Resources.fetch("storagePaths", cursor ? 
"getDatasetsPage" : "getDatasets", params); + } + + let nextCursor = null; + if (pagResp) { + if (pagResp["items"]) { + allItems.push(...pagResp["items"]); + } + if (pagResp["next_page"]) { + nextCursor = pagResp["next_page"]; + } + } + + if (nextCursor) { + return this.getAllItems(locationId, path, nextCursor, allItems); + } + return allItems; + }, + }, + members: { __locationsCached: null, __datasetsByLocationCached: null, - __filesByLocationAndDatasetCached: null, resetCache: function() { this.__locationsCached = []; this.__datasetsByLocationCached = {}; - this.__filesByLocationAndDatasetCached = {}; - - osparc.store.Store.getInstance().reset("storageLocations"); }, getLocationsCached: function() { @@ -62,8 +97,7 @@ qx.Class.define("osparc.store.Data", { if (cachedData) { resolve(cachedData); } else { - // Get available storage locations - osparc.data.Resources.get("storageLocations") + osparc.data.Resources.fetch("storageLocations", "getLocations") .then(locations => { // Add them to cache this.__locationsCached = locations; @@ -82,132 +116,51 @@ qx.Class.define("osparc.store.Data", { if (locationId in cache && cache[locationId] && cache[locationId].length) { const data = { location: locationId, - datasets: cache[locationId] + items: cache[locationId] }; return data; } return null; }, - getDatasetsByLocation: function(locationId) { - const emptyData = { + getDatasetsByLocation: async function(locationId) { + const data = { location: locationId, - datasets: [] + items: [] }; - return new Promise((resolve, reject) => { - // Get list of datasets - if (locationId === 1 && !osparc.data.Permissions.getInstance().canDo("storage.datcore.read")) { - reject(emptyData); - } + if (locationId === 1 && !osparc.data.Permissions.getInstance().canDo("storage.datcore.read")) { + return data; + } - const cachedData = this.getDatasetsByLocationCached(locationId); - if (cachedData) { - resolve(cachedData); - } else { - const params = { - url: { - locationId - } - }; - osparc.data.Resources.fetch("storageDatasets", "getByLocation", params) - .then(datasets => { - const data = { - location: locationId, - datasets: [] - }; - if (datasets && datasets.length>0) { - data.datasets = datasets; - } - // Add it to cache - this.__datasetsByLocationCached[locationId] = data.datasets; - resolve(data); - }) - .catch(err => { - console.error(err); - reject(emptyData); - }); - } - }); - }, + const cachedData = this.getDatasetsByLocationCached(locationId); + if (cachedData) { + return cachedData; + } - getFilesByLocationAndDatasetCached: function(locationId, datasetId) { - const cache = this.__filesByLocationAndDatasetCached; - if (locationId in cache && datasetId in cache[locationId]) { - const data = { - location: locationId, - dataset: datasetId, - files: cache[locationId][datasetId] - }; + try { + const allItems = await this.self().getAllItems(locationId); + this.__datasetsByLocationCached[locationId] = allItems; + data["items"] = allItems; + return data; + } catch (err) { + console.error(err); return data; } - return null; }, - getFilesByLocationAndDataset: function(locationId, datasetId) { - const emptyData = { - location: locationId, - dataset: datasetId, - files: [] - }; - return new Promise((resolve, reject) => { - // Get list of file meta data - if (locationId === 1 && !osparc.data.Permissions.getInstance().canDo("storage.datcore.read")) { - reject(emptyData); - } - - const cachedData = this.getFilesByLocationAndDatasetCached(locationId, datasetId); - if (cachedData) { - resolve(cachedData); - } else { - const 
params = { - url: { - locationId, - datasetId - } - }; - osparc.data.Resources.fetch("storageFiles", "getByLocationAndDataset", params) - .then(files => { - const data = { - location: locationId, - dataset: datasetId, - files: files && files.length>0 ? files : [] - }; - // Add it to cache - if (!(locationId in this.__filesByLocationAndDatasetCached)) { - this.__filesByLocationAndDatasetCached[locationId] = {}; - } - this.__filesByLocationAndDatasetCached[locationId][datasetId] = data.files; - resolve(data); - }) - .catch(err => { - console.error(err); - reject(emptyData); - }); - } - }); - }, + getItemsByLocationAndPath: async function(locationId, path) { + // Get list of file meta data + if (locationId === 1 && !osparc.data.Permissions.getInstance().canDo("storage.datcore.read")) { + return []; + } - getNodeFiles: function(nodeId) { - return new Promise((resolve, reject) => { - const params = { - url: { - nodeId: encodeURIComponent(nodeId) - } - }; - osparc.data.Resources.fetch("storageFiles", "getByNode", params) - .then(files => { - console.log("Node Files", files); - if (files && files.length>0) { - resolve(files); - } else { - resolve([]); - } - }) - .catch(err => { - console.error(err); - reject([]); - }); - }); + try { + const allItems = await this.self().getAllItems(locationId, path); + return allItems; + } catch (err) { + console.error(err); + return []; + } }, getPresignedLink: function(download = true, locationId, fileUuid, fileSize) { @@ -264,7 +217,7 @@ qx.Class.define("osparc.store.Data", { fileUuid: encodeURIComponent(fileUuid) } }; - osparc.data.Resources.fetch("storageFiles", "put", params) + osparc.data.Resources.fetch("storageFiles", "copy", params) .then(files => { const data = { data: files, @@ -274,15 +227,14 @@ qx.Class.define("osparc.store.Data", { this.fireDataEvent("fileCopied", data); }) .catch(err => { - console.error(err); - console.error("Failed copying file", fileUuid, "to", pathId); - osparc.FlashMessenger.getInstance().logAs(this.tr("Failed copying file"), "ERROR"); + osparc.FlashMessenger.logError(err, this.tr("Unsuccessful file copy")); this.fireDataEvent("fileCopied", null); }); return true; }, + // if a folder path is provided as fileUuid, it can also be deleted deleteFile: function(locationId, fileUuid) { if (!osparc.data.Permissions.getInstance().canDo("study.node.data.delete", true)) { return null; @@ -304,10 +256,7 @@ qx.Class.define("osparc.store.Data", { }; return data; }) - .catch(err => { - console.error(err); - osparc.FlashMessenger.getInstance().logAs(this.tr("Failed deleting file"), "ERROR"); - }); + .catch(err => osparc.FlashMessenger.logError(err, this.tr("Unsuccessful file deletion"))); } } }); diff --git a/services/static-webserver/client/source/class/osparc/store/Jobs.js b/services/static-webserver/client/source/class/osparc/store/Jobs.js new file mode 100644 index 00000000000..155a9759755 --- /dev/null +++ b/services/static-webserver/client/source/class/osparc/store/Jobs.js @@ -0,0 +1,78 @@ +/* ************************************************************************ + + osparc - the simcore frontend + + https://osparc.io + + Copyright: + 2025 IT'IS Foundation, https://itis.swiss + + License: + MIT: https://opensource.org/licenses/MIT + + Authors: + * Odei Maiz (odeimaiz) + +************************************************************************ */ + +/** + * @asset(osparc/mock_jobs.json) + */ + +qx.Class.define("osparc.store.Jobs", { + extend: qx.core.Object, + type: "singleton", + + properties: { + jobs: { + check: "Array", + init: [],
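+ // NOTE: qooxdoo fires the "changeJobs" property event only when the array is reassigned, not on in-place mutation; addJob/removeJobs below therefore call fireEvent("changeJobs") manually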
+ nullable: true, + event: "changeJobs" + } + }, + + members: { + fetchJobs: function() { + return osparc.utils.Utils.fetchJSON("/resource/osparc/mock_jobs.json") + .then(jobsData => { + if ("jobs" in jobsData) { + jobsData["jobs"].forEach(jobData => { + this.addJob(jobData); + }); + } + return this.getJobs(); + }) + .catch(err => console.error(err)); + }, + + fetchJobInfo: function(jobId) { + return osparc.utils.Utils.fetchJSON("/resource/osparc/mock_jobs.json") + .then(jobsData => { + if ("jobs_info" in jobsData && jobId in jobsData["jobs_info"]) { + return jobsData["jobs_info"][jobId]; + } + return null; + }) + .catch(err => console.error(err)); + }, + + addJob: function(jobData) { + const jobs = this.getJobs(); + const index = jobs.findIndex(t => t.getJobId() === jobData["job_id"]); + if (index === -1) { + const job = new osparc.data.Job(jobData); + jobs.push(job); + this.fireEvent("changeJobs"); + return job; + } + return null; + }, + + removeJobs: function() { + const jobs = this.getJobs(); + jobs.forEach(job => job.dispose()); + this.fireEvent("changeJobs"); + }, + } +}); diff --git a/services/static-webserver/client/source/class/osparc/store/LicensedItems.js b/services/static-webserver/client/source/class/osparc/store/LicensedItems.js index 7e86d835d98..5fc436bfdb5 100644 --- a/services/static-webserver/client/source/class/osparc/store/LicensedItems.js +++ b/services/static-webserver/client/source/class/osparc/store/LicensedItems.js @@ -22,60 +22,53 @@ qx.Class.define("osparc.store.LicensedItems", { construct: function() { this.base(arguments); - this.__licensedItems = null; - this.__modelsCache = {}; + this.__licensedItems = {}; }, statics: { - VIP_MODELS: { - HUMAN_BODY: "https://itis.swiss/PD_DirectDownload/getDownloadableItems/HumanWholeBody", - HUMAN_BODY_REGION: "https://itis.swiss/PD_DirectDownload/getDownloadableItems/HumanBodyRegion", - ANIMAL: "https://itis.swiss/PD_DirectDownload/getDownloadableItems/AnimalWholeBody", - PHANTOM: "https://speag.swiss/PD_DirectDownload/getDownloadableItems/ComputationalPhantom", + getLowerLicensedItems: function(licensedItems, key, version) { + const lowerLicensedItems = []; + licensedItems.forEach(licensedItem => { + if (licensedItem["key"] === key && licensedItem["version"] < version) { + lowerLicensedItems.push(licensedItem); + } + }); + return lowerLicensedItems; }, - curateAnatomicalModels: function(anatomicalModelsRaw) { - const anatomicalModels = []; - const models = anatomicalModelsRaw["availableDownloads"]; - models.forEach(model => { - const curatedModel = {}; - Object.keys(model).forEach(key => { - if (key === "Features") { - let featuresRaw = model["Features"]; - featuresRaw = featuresRaw.substring(1, featuresRaw.length-1); // remove brackets - featuresRaw = featuresRaw.split(","); // split the string by commas - const features = {}; - featuresRaw.forEach(pair => { // each pair is "key: value" - const keyValue = pair.split(":"); - features[keyValue[0].trim()] = keyValue[1].trim() - }); - curatedModel["Features"] = features; - } else { - curatedModel[key] = model[key]; - } - }); - anatomicalModels.push(curatedModel); + seatsToNSeats: function(seats) { + let nSeats = 0; + seats.forEach(seat => { + if ("numOfSeats" in seat) { + nSeats += seat["numOfSeats"]; + } else if ("getNumOfSeats" in seat) { + nSeats += seat.getNumOfSeats(); + } }); - return anatomicalModels; + return nSeats; }, }, members: { __licensedItems: null, - __modelsCache: null, getLicensedItems: function() { - if (this.__licensedItems) { + if 
(Object.keys(this.__licensedItems).length) { return new Promise(resolve => resolve(this.__licensedItems)); } return osparc.data.Resources.getInstance().getAllPages("licensedItems") - .then(licensedItems => { - this.__licensedItems = licensedItems; + .then(licensedItemsData => { + licensedItemsData.forEach(licensedItemData => this.__addLicensedItemsToCache(licensedItemData)); return this.__licensedItems; }); }, + __addLicensedItemsToCache: function(licensedItemData) { + const licensedItem = new osparc.data.model.LicensedItem(licensedItemData); + this.__licensedItems[licensedItem.getLicensedItemId()] = licensedItem; + }, + getPurchasedLicensedItems: function(walletId, urlParams, options = {}) { let purchasesParams = { url: { @@ -90,7 +83,7 @@ qx.Class.define("osparc.store.LicensedItems", { return osparc.data.Resources.fetch("licensedItems", "purchases", purchasesParams, options); }, - purchaseLicensedItem: function(licensedItemId, walletId, pricingPlanId, pricingUnitId, numberOfSeats) { + purchaseLicensedItem: function(licensedItemId, walletId, pricingPlanId, pricingUnitId, numOfSeats) { const params = { url: { licensedItemId @@ -99,7 +92,7 @@ qx.Class.define("osparc.store.LicensedItems", { "wallet_id": walletId, "pricing_plan_id": pricingPlanId, "pricing_unit_id": pricingUnitId, - "num_of_seats": numberOfSeats, // this should go away + "num_of_seats": numOfSeats, // this should go away }, } return osparc.data.Resources.fetch("licensedItems", "purchase", params); @@ -118,48 +111,5 @@ qx.Class.define("osparc.store.LicensedItems", { } return osparc.data.Resources.fetch("licensedItems", "checkouts", purchasesParams, options); }, - - __fetchVipModels: async function(vipSubset) { - if (!(vipSubset in this.self().VIP_MODELS)) { - return []; - } - - if (vipSubset in this.__modelsCache) { - return this.__modelsCache[vipSubset]; - } - - return await fetch(this.self().VIP_MODELS[vipSubset], { - method:"POST" - }) - .then(resp => resp.json()) - .then(anatomicalModelsRaw => { - const allAnatomicalModels = this.self().curateAnatomicalModels(anatomicalModelsRaw); - const anatomicalModels = []; - allAnatomicalModels.forEach(model => { - const anatomicalModel = {}; - anatomicalModel["modelId"] = model["ID"]; - anatomicalModel["thumbnail"] = model["Thumbnail"]; - anatomicalModel["name"] = model["Features"]["name"] + " " + model["Features"]["version"]; - anatomicalModel["description"] = model["Description"]; - anatomicalModel["features"] = model["Features"]; - anatomicalModel["date"] = model["Features"]["date"]; - anatomicalModel["DOI"] = model["DOI"]; - anatomicalModels.push(anatomicalModel); - }); - this.__modelsCache[vipSubset] = anatomicalModels; - return anatomicalModels; - }); - }, - - getVipModels: async function(vipSubset) { - const vipModels = this.self().VIP_MODELS; - if (vipSubset && vipSubset in vipModels) { - return await this.__fetchVipModels(vipSubset); - } - const promises = []; - Object.keys(vipModels).forEach(sbs => promises.push(this.__fetchVipModels(sbs))); - return await Promise.all(promises) - .then(values => values.flat()); - }, } }); diff --git a/services/static-webserver/client/source/class/osparc/data/PollTasks.js b/services/static-webserver/client/source/class/osparc/store/PollTasks.js similarity index 61% rename from services/static-webserver/client/source/class/osparc/data/PollTasks.js rename to services/static-webserver/client/source/class/osparc/store/PollTasks.js index 33c06d0e3ba..625b1838451 100644 --- a/services/static-webserver/client/source/class/osparc/data/PollTasks.js 
+++ b/services/static-webserver/client/source/class/osparc/store/PollTasks.js @@ -15,14 +15,10 @@ ************************************************************************ */ -qx.Class.define("osparc.data.PollTasks", { +qx.Class.define("osparc.store.PollTasks", { extend: qx.core.Object, type: "singleton", - construct: function() { - this.initTasks(); - }, - properties: { tasks: { check: "Array", @@ -33,15 +29,15 @@ qx.Class.define("osparc.data.PollTasks", { }, members: { - addTask: function(taskData, interval) { - const tasks = this.getTasks(); - const index = tasks.findIndex(t => t.getTaskId() === taskData["task_id"]); - if (index === -1) { - const task = new osparc.data.PollTask(taskData, interval); - tasks.push(task); - return task; - } - return null; + fetchTasks: function() { + return osparc.data.Resources.get("tasks") + .then(tasksData => { + tasksData.forEach(taskData => { + const interval = 1000; + this.__addTask(taskData, interval); + }); + }) + .catch(err => console.error(err)); }, createPollingTask: function(fetchPromise, interval) { @@ -49,7 +45,7 @@ qx.Class.define("osparc.data.PollTasks", { fetchPromise .then(taskData => { if ("status_href" in taskData) { - const task = this.addTask(taskData, interval); + const task = this.__addTask(taskData, interval); resolve(task); } else { throw Error("Status missing"); @@ -59,9 +55,28 @@ qx.Class.define("osparc.data.PollTasks", { }); }, + __addTask: function(taskData, interval = 1000) { + const tasks = this.getTasks(); + const index = tasks.findIndex(t => t.getTaskId() === taskData["task_id"]); + if (index === -1) { + const task = new osparc.data.PollTask(taskData, interval); + tasks.push(task); + return task; + } + return null; + }, + + getDuplicateStudyTasks: function() { + return this.getTasks().filter(task => task.getTaskId().includes("from_study") && !task.getTaskId().includes("as_template")); + }, + + getPublishTemplateTasks: function() { + return this.getTasks().filter(task => task.getTaskId().includes("from_study") && task.getTaskId().includes("as_template")); + }, + removeTasks: function() { const tasks = this.getTasks(); tasks.forEach(task => task.dispose()); - } + }, } }); diff --git a/services/static-webserver/client/source/class/osparc/store/Pricing.js b/services/static-webserver/client/source/class/osparc/store/Pricing.js index b43ec07f806..46f7629d8a8 100644 --- a/services/static-webserver/client/source/class/osparc/store/Pricing.js +++ b/services/static-webserver/client/source/class/osparc/store/Pricing.js @@ -82,6 +82,9 @@ qx.Class.define("osparc.store.Pricing", { }, fetchPricingUnits: function(pricingPlanId) { + if (this.getPricingPlan(pricingPlanId) && this.getPricingPlan(pricingPlanId).getPricingUnits().length !== 0) { + return new Promise(resolve => resolve(this.getPricingPlan(pricingPlanId).getPricingUnits())); + } const params = { url: { pricingPlanId, @@ -100,6 +103,38 @@ qx.Class.define("osparc.store.Pricing", { }); }, + createPricingUnit: function(pricingPlanId, pricingUnitData) { + const params = { + url: { + "pricingPlanId": pricingPlanId + }, + data: pricingUnitData + }; + return osparc.data.Resources.fetch("pricingUnits", "post", params) + .then(newPricingUnitData => { + const pricingPlan = this.getPricingPlan(pricingPlanId); + this.__addPricingUnitToCache(pricingPlan, newPricingUnitData); + return pricingPlan; + }) + }, + + updatePricingUnit: function(pricingPlanId, pricingUnitId, pricingUnitData) { + const params = { + url: { + "pricingPlanId": pricingPlanId, + "pricingUnitId": pricingUnitId, + }, + data: 
pricingUnitData + }; + return osparc.data.Resources.fetch("pricingUnits", "post", params) + .then(() => { + const pricingPlan = this.getPricingPlan(pricingPlanId); + // TODO (OM): do not add but replace the cached pricing unit + this.__addPricingUnitToCache(pricingPlan, pricingUnitData); + return pricingPlan; + }) + }, + getPricingPlans: function() { return this.pricingPlansCached; }, diff --git a/services/static-webserver/client/source/class/osparc/store/Products.js b/services/static-webserver/client/source/class/osparc/store/Products.js index 4728bded609..38ef8ad2e6f 100644 --- a/services/static-webserver/client/source/class/osparc/store/Products.js +++ b/services/static-webserver/client/source/class/osparc/store/Products.js @@ -15,34 +15,82 @@ ************************************************************************ */ +/** + * @asset(osparc/ui_config.json) + * @asset(schemas/product-ui.json) + * @asset(object-path/object-path-0-11-4.min.js) + * @asset(ajv/ajv-6-11-0.min.js) + * @ignore(Ajv) + */ + qx.Class.define("osparc.store.Products", { extend: qx.core.Object, type: "singleton", members: { - __newStudyConfig: null, - - fetchNewStudyConfig: function() { - return osparc.utils.Utils.fetchJSON("/resource/osparc/new_studies.json") - .then(newStudiesData => { - const product = osparc.product.Utils.getProductName() - if (product in newStudiesData) { - this.__newStudyConfig = newStudiesData[product]; - return this.__newStudyConfig; - } - return {}; - }) - .catch(console.error); - }, + __uiConfig: null, - getNewStudyConfig: function() { + fetchUiConfig: function() { return new Promise(resolve => { - if (this.__newStudyConfig) { - resolve(this.__newStudyConfig); - } else { - resolve(this.fetchNewStudyConfig()) + if (osparc.auth.Data.getInstance().isGuest()) { + this.__uiConfig = {}; + resolve(this.__uiConfig); + return; } + + Promise.all([ + osparc.data.Resources.fetch("productMetadata", "getUiConfig"), + osparc.utils.Utils.fetchJSON("/resource/osparc/ui_config.json"), + osparc.utils.Utils.fetchJSON("/resource/schemas/product-ui.json"), + ]) + .then(values => { + let uiConfig = {}; + const beUiConfig = values[0]; + const feUiConfig = values[1]; + const schema = values[2]; + if (beUiConfig && beUiConfig["ui"] && Object.keys(beUiConfig["ui"]).length) { + uiConfig = beUiConfig["ui"]; + } else { + const product = osparc.product.Utils.getProductName(); + if (feUiConfig && product in feUiConfig) { + uiConfig = feUiConfig[product]; + } + } + const ajvLoader = new qx.util.DynamicScriptLoader([ + "/resource/ajv/ajv-6-11-0.min.js", + "/resource/object-path/object-path-0-11-4.min.js" + ]); + ajvLoader.addListener("ready", () => { + const ajv = new Ajv({ + allErrors: true, + strictDefaults: true, + useDefaults: true, + strictTypes: true, + }); + const validate = ajv.compile(schema); + const valid = validate(uiConfig); + if (valid) { + this.__uiConfig = uiConfig; + resolve(this.__uiConfig); + } else { + osparc.FlashMessenger.logError("Wrong product.ui config"); + validate.errors.forEach(err => { + console.error(`Error at ${err.dataPath}: ${err.message}`); + }); + } + }); + ajvLoader.addListener("failed", console.error, this); + ajvLoader.start(); + }) + .catch(console.error); }); }, + + getPlusButtonUiConfig: function() { + return this.__uiConfig["plusButton"]; + }, + + getNewStudiesUiConfig: function() { + return this.__uiConfig["newStudies"]; + }, } }); diff --git a/services/static-webserver/client/source/class/osparc/store/Services.js b/services/static-webserver/client/source/class/osparc/store/Services.js index c2abeed32ec..a01d4cf91a0 100644
--- a/services/static-webserver/client/source/class/osparc/store/Services.js +++ b/services/static-webserver/client/source/class/osparc/store/Services.js @@ -19,11 +19,11 @@ qx.Class.define("osparc.store.Services", { type: "static", statics: { - servicesCached: {}, + __servicesCached: {}, getServicesLatest: function(useCache = true) { return new Promise(resolve => { - if (useCache && Object.keys(this.servicesCached)) { + if (useCache && Object.keys(this.__servicesCached)) { // return latest only const latest = this.__getLatestCached(); resolve(latest); @@ -37,21 +37,123 @@ qx.Class.define("osparc.store.Services", { this.__addTSRInfos(servicesObj); this.__addExtraTypeInfos(servicesObj); - // use response to populate servicesCached Object.values(servicesObj).forEach(serviceKey => { - Object.values(serviceKey).forEach(srv => this.__addToCache(srv)); + Object.values(serviceKey).forEach(service => this.__addToCache(service)); }); resolve(servicesObj); }) + .catch(err => osparc.FlashMessenger.logError(err, qx.locale.Manager.tr("Unable to fetch Services"))); + }); + }, + + getLatest: function(key) { + const services = this.__servicesCached; + if (key in services) { + const latestMetadata = Object.values(services[key])[0]; + if (!osparc.service.Utils.isRetired(latestMetadata)) { + return latestMetadata; + } + } + return null; + }, + + getLatestCompatible: function(key, version) { + const services = this.__servicesCached; + if (key in services && version in services[key]) { + const historyEntry = osparc.service.Utils.extractVersionFromHistory(services[key][version]); + if (historyEntry["compatibility"] && historyEntry["compatibility"]["canUpdateTo"]) { + const canUpdateTo = historyEntry["compatibility"]["canUpdateTo"]; + return { + key: "key" in canUpdateTo ? 
canUpdateTo["key"] : key, // key is optional + version: canUpdateTo["version"] + }; + } + // the provided key/version itself is the latest compatible + return { + key, + version + }; + } + return null; + }, + + getVersionDisplay: function(key, version) { + const services = this.__servicesCached; + if (key in services && version in services[key]) { + return osparc.service.Utils.extractVersionDisplay(services[key][version]); + } + return null; + }, + + getReleasedDate: function(key, version) { + const services = this.__servicesCached; + if ( + key in services && + version in services[key] && + "released" in services[key][version] + ) { + return services[key][version]["released"]; + } + return null; + }, + + getService: function(key, version, useCache = true) { + return new Promise((resolve, reject) => { + if ( + useCache && + this.__isInCache(key, version) && + "history" in this.__servicesCached[key][version] + ) { + resolve(this.__servicesCached[key][version]); + return; + } + + const params = { + url: osparc.data.Resources.getServiceUrl(key, version) + }; + osparc.data.Resources.fetch("servicesV2", "getOne", params) + .then(service => { + this.__addHit(service); + this.__addTSRInfo(service); + this.__addExtraTypeInfo(service); + this.__addToCache(service) + resolve(service); + }) .catch(err => { - const msg = err.message || qx.locale.Manager.tr("Unable to fetch Services"); - osparc.FlashMessenger.getInstance().logAs(msg, "ERROR"); console.error(err); + reject(); }); }); }, + __getAllVersions: function(key) { + const services = this.__servicesCached; + let versions = []; + if (key in services) { + const serviceVersions = services[key]; + versions = versions.concat(Object.keys(serviceVersions)); + versions.sort(osparc.utils.Utils.compareVersionNumbers); + } + return versions.reverse(); + }, + + populateVersionsSelectBox: function(key, selectBox) { + const versions = this.__getAllVersions(key); + return this.getService(key, versions[0]) + .then(latestMetadata => { + latestMetadata["history"].forEach(historyEntry => { + if (!historyEntry["retired"]) { + const versionDisplay = osparc.service.Utils.extractVersionDisplay(historyEntry); + const listItem = new qx.ui.form.ListItem(versionDisplay); + osparc.utils.Utils.setIdToWidget(listItem, "serviceVersionItem_" + versionDisplay); + listItem.version = historyEntry["version"]; + selectBox.add(listItem); + } + }); + }); + }, + getServicesLatestList: function(excludeFrontend = true, excludeDeprecated = true) { return new Promise(resolve => { const servicesList = []; @@ -65,19 +167,25 @@ qx.Class.define("osparc.store.Services", { // do not add frontend services continue; } - if (excludeDeprecated && serviceLatest["retired"]) { - // first check if a previous version of this service isn't retired - let versions = Object.keys(this.servicesCached[key]); - versions = versions.sort(osparc.utils.Utils.compareVersionNumbers).reverse(); - for (let j=0; j { - console.error(err); - }) + .catch(err => console.error(err)) .finally(() => resolve(servicesList)); }); }, - getService: function(key, version, useCache = true) { - return new Promise(resolve => { - if (useCache && this.__isInCache(key, version)) { - resolve(this.servicesCached[key][version]); - return; - } - - const params = { - url: osparc.data.Resources.getServiceUrl(key, version) - }; - osparc.data.Resources.getOne("servicesV2", params) - .then(service => { - this.__addHit(service); - this.__addTSRInfo(service); - this.__addExtraTypeInfo(service); - this.__addToCache(service) - resolve(service); - }) - 
.catch(console.error); - }); - }, - getResources: function(key, version) { return new Promise(resolve => { if ( this.__isInCache(key, version) && - "resources" in this.servicesCached[key][version] + "resources" in this.__servicesCached[key][version] ) { - resolve(this.servicesCached[key][version]["resources"]); + resolve(this.__servicesCached[key][version]["resources"]); return; } @@ -129,7 +213,7 @@ qx.Class.define("osparc.store.Services", { }; osparc.data.Resources.get("serviceResources", params) .then(resources => { - this.servicesCached[key][version]["resources"] = resources; + this.__servicesCached[key][version]["resources"] = resources; resolve(resources); }); }); @@ -137,7 +221,7 @@ qx.Class.define("osparc.store.Services", { getMetadata: function(key, version) { if (this.__isInCache(key, version)) { - return this.servicesCached[key][version]; + return this.__servicesCached[key][version]; } return null; }, @@ -153,50 +237,97 @@ qx.Class.define("osparc.store.Services", { }; return osparc.data.Resources.fetch("servicesV2", "patch", params) .then(() => { - this.servicesCached[key][version][fieldKey] = value; + this.__servicesCached[key][version][fieldKey] = value; serviceData[fieldKey] = value; }); }, + getStudyServicesMetadata: function(studyData) { + const wbServices = osparc.study.Utils.extractUniqueServices(studyData["workbench"]); + const promises = []; + wbServices.forEach(srv => { + promises.push(this.getService(srv["key"], srv["version"])); + }); + return Promise.all(promises); + }, + + getInaccessibleServices: function(workbench) { + const allServices = this.__servicesCached; + const unaccessibleServices = []; + const wbServices = osparc.study.Utils.extractUniqueServices(workbench); + wbServices.forEach(srv => { + if (srv.key in allServices && srv.version in allServices[srv.key]) { + return; + } + const idx = unaccessibleServices.findIndex(unSrv => unSrv.key === srv.key && unSrv.version === srv.version); + if (idx === -1) { + unaccessibleServices.push(srv); + } + }); + return unaccessibleServices; + }, + + getInaccessibleServicesMsg: function(inaccessibleServices, workbench) { + let msg = qx.locale.Manager.tr("Some services are inaccessible:
<br>
"); + Object.values(workbench).forEach(node => { + const inaccessibleService = inaccessibleServices.find(srv => srv.key === node.key && srv.version === node.version); + if (inaccessibleService) { + const n = inaccessibleService.key.lastIndexOf("/"); + const friendlyKey = inaccessibleService.key.substring(n + 1); + msg += `- ${node.label} (${friendlyKey}:${inaccessibleService.version})
`; + } + }); + return msg; + }, + + getFilePicker: function() { + return this.getLatest("simcore/services/frontend/file-picker"); + }, + + getParametersMetadata: function() { + const parametersMetadata = []; + const services = this.__servicesCached; + for (const key in services) { + if (key.includes("simcore/services/frontend/parameter/")) { + const latest = this.getLatest(key); + if (latest) { + parametersMetadata.push(latest); + } + } + } + return parametersMetadata; + }, + + getParameterMetadata: function(type) { + return this.getLatest("simcore/services/frontend/parameter/"+type); + }, + + getProbeMetadata: function(type) { + return this.getLatest("simcore/services/frontend/iterator-consumer/probe/"+type); + }, + __addToCache: function(service) { const key = service.key; const version = service.version; - if (!(key in this.servicesCached)) { - this.servicesCached[key] = {}; - } - this.servicesCached[key][version] = service; - this.servicesCached[key][version]["cached"] = true; - - if ("history" in service) { - service["history"].forEach(historyEntry => { - const hVersion = historyEntry.version; - if (!(hVersion in this.servicesCached[key])) { - this.servicesCached[key][hVersion] = {}; - this.servicesCached[key][hVersion]["cached"] = false; - } - // merge history data into current metadata - this.servicesCached[key][hVersion] = { - ...this.servicesCached[key][hVersion], - ...historyEntry - }; - }); + if (!(key in this.__servicesCached)) { + this.__servicesCached[key] = {}; } + this.__servicesCached[key][version] = service; }, __isInCache: function(key, version) { return ( - key in this.servicesCached && - version in this.servicesCached[key] && - this.servicesCached[key][version]["cached"] + key in this.__servicesCached && + version in this.__servicesCached[key] ); }, __getLatestCached: function() { const latestServices = {}; - for (const key in this.servicesCached) { - let versions = Object.keys(this.servicesCached[key]); + for (const key in this.__servicesCached) { + let versions = Object.keys(this.__servicesCached[key]); versions = versions.sort(osparc.utils.Utils.compareVersionNumbers).reverse(); - const latest = this.servicesCached[key][versions[0]]; + const latest = this.__servicesCached[key][versions[0]]; latestServices[key] = osparc.utils.Utils.deepCloneObject(latest); } return latestServices; diff --git a/services/static-webserver/client/source/class/osparc/store/StaticInfo.js b/services/static-webserver/client/source/class/osparc/store/StaticInfo.js index 201966cb7fa..341827779eb 100644 --- a/services/static-webserver/client/source/class/osparc/store/StaticInfo.js +++ b/services/static-webserver/client/source/class/osparc/store/StaticInfo.js @@ -70,9 +70,8 @@ qx.Class.define("osparc.store.StaticInfo", { }, areLicensesEnabled: function() { - const staticKey = "webserverLicenses"; - const licensesEnabled = this.getValue(staticKey); - return Boolean(licensesEnabled); + const isDisabled = osparc.utils.DisabledPlugins.isLicensesDisabled(); + return !isDisabled; }, getTrashRetentionDays: function() { diff --git a/services/static-webserver/client/source/class/osparc/store/Study.js b/services/static-webserver/client/source/class/osparc/store/Study.js new file mode 100644 index 00000000000..9f57f5281ac --- /dev/null +++ b/services/static-webserver/client/source/class/osparc/store/Study.js @@ -0,0 +1,115 @@ +/* ************************************************************************ + + osparc - the simcore frontend + + https://osparc.io + + Copyright: + 2025 IT'IS Foundation, 
https://itis.swiss + + License: + MIT: https://opensource.org/licenses/MIT + + Authors: + * Odei Maiz (odeimaiz) + +************************************************************************ */ + +qx.Class.define("osparc.store.Study", { + type: "static", + + statics: { + patchStudyData: function(studyData, fieldKey, value) { + if (osparc.data.model.Study.OwnPatch.includes(fieldKey)) { + console.error(fieldKey, "has its own PATCH path"); + return null; + } + + const patchData = {}; + patchData[fieldKey] = value; + const params = { + url: { + "studyId": studyData["uuid"] + }, + data: patchData + }; + return osparc.data.Resources.fetch("studies", "patch", params) + .then(() => { + studyData[fieldKey] = value; + // A bit hacky, but it's not sent back to the backend + studyData["lastChangeDate"] = new Date().toISOString(); + }); + }, + + patchNodeData: function(studyData, nodeId, patchData) { + const params = { + url: { + "studyId": studyData["uuid"], + "nodeId": nodeId + }, + data: patchData + }; + return osparc.data.Resources.fetch("studies", "patchNode", params) + .then(() => { + Object.keys(patchData).forEach(key => { + studyData["workbench"][nodeId][key] = patchData[key]; + }); + // A bit hacky, but it's not sent back to the backend + studyData["lastChangeDate"] = new Date().toISOString(); + }); + }, + + addCollaborators: function(studyData, newCollaborators) { + const promises = []; + Object.keys(newCollaborators).forEach(gid => { + const params = { + url: { + "studyId": studyData["uuid"], + "gId": gid + }, + data: newCollaborators[gid] + }; + promises.push(osparc.data.Resources.fetch("studies", "postAccessRights", params)); + }); + return Promise.all(promises) + .then(() => { + Object.keys(newCollaborators).forEach(gid => { + studyData["accessRights"][gid] = newCollaborators[gid]; + }); + studyData["lastChangeDate"] = new Date().toISOString(); + }) + .catch(err => osparc.FlashMessenger.logError(err)); + }, + + removeCollaborator: function(studyData, gid) { + const params = { + url: { + "studyId": studyData["uuid"], + "gId": gid + } + }; + return osparc.data.Resources.fetch("studies", "deleteAccessRights", params) + .then(() => { + delete studyData["accessRights"][gid]; + studyData["lastChangeDate"] = new Date().toISOString(); + }) + .catch(err => osparc.FlashMessenger.logError(err)); + }, + + updateCollaborator: function(studyData, gid, newPermissions) { + const params = { + url: { + "studyId": studyData["uuid"], + "gId": gid + }, + data: newPermissions + }; + return osparc.data.Resources.fetch("studies", "putAccessRights", params) + .then(() => { + studyData["accessRights"][gid] = newPermissions; + studyData["lastChangeDate"] = new Date().toISOString(); + }) + .catch(err => osparc.FlashMessenger.logError(err)); + }, + } }); diff --git a/services/static-webserver/client/source/class/osparc/store/Support.js b/services/static-webserver/client/source/class/osparc/store/Support.js index 65e5174929d..281127d06ff 100644 --- a/services/static-webserver/client/source/class/osparc/store/Support.js +++ b/services/static-webserver/client/source/class/osparc/store/Support.js @@ -132,6 +132,14 @@ qx.Class.define("osparc.store.Support", { }, + addReleaseNotesToMenu: function(menu) { + const releaseTag = osparc.utils.Utils.getReleaseTag(); + const releaseLink = osparc.utils.Utils.getReleaseLink(); + const releaseBtn = new qx.ui.menu.Button(qx.locale.Manager.tr("Release Notes") + " " + releaseTag, "@FontAwesome5Solid/book/14"); + releaseBtn.addListener("execute", () => window.open(releaseLink),
this); + menu.add(releaseBtn); + }, + mailToLink: function(email, subject, centered = true) { const color = qx.theme.manager.Color.getInstance().resolve("text"); let textLink = `${email}`; @@ -181,6 +189,6 @@ qx.Class.define("osparc.store.Support", { } createAccountWindow.center(); createAccountWindow.open(); - } + }, } }); diff --git a/services/static-webserver/client/source/class/osparc/store/Tags.js b/services/static-webserver/client/source/class/osparc/store/Tags.js index c2830262cd9..d672722f516 100644 --- a/services/static-webserver/client/source/class/osparc/store/Tags.js +++ b/services/static-webserver/client/source/class/osparc/store/Tags.js @@ -45,7 +45,6 @@ qx.Class.define("osparc.store.Tags", { tagsData.forEach(tagData => { const tag = this.__addToCache(tagData); tags.push(tag); - this.fetchAccessRights(tag); }); return tags; }); @@ -106,7 +105,11 @@ qx.Class.define("osparc.store.Tags", { .catch(console.error); }, - fetchAccessRights: function(tag) { + fetchAccessRights: function(tag, reload = false) { + if (reload === false && tag.getAccessRights() !== null) { + return new Promise(resolve => resolve()); + } + const params = { url: { "tagId": tag.getTagId() diff --git a/services/static-webserver/client/source/class/osparc/study/BillingSettings.js b/services/static-webserver/client/source/class/osparc/study/BillingSettings.js index 623fc9ae0f3..85a4b83cdcb 100644 --- a/services/static-webserver/client/source/class/osparc/study/BillingSettings.js +++ b/services/static-webserver/client/source/class/osparc/study/BillingSettings.js @@ -192,7 +192,7 @@ qx.Class.define("osparc.study.BillingSettings", { if (myWallets.find(wllt => wllt === wallet)) { // It's my wallet this._createChildControlImpl("debt-explanation").set({ - value: this.tr("Top up the Credit Account:
<br>
Purchase additional credits to bring the Credit Account balance back to a positive value.") + value: this.tr("Top up the Credit Account:
<br>
Purchase additional credits to restore a positive balance.") }); const buyCreditsButton = this._createChildControlImpl("buy-credits-button"); buyCreditsButton.addListener("execute", () => this.__openBuyCreditsWindow(), this); @@ -258,10 +258,7 @@ qx.Class.define("osparc.study.BillingSettings", { // also switch the study's wallet to this one this.__switchWallet(wallet.getWalletId()); }) - .catch(err => { - console.error(err); - osparc.FlashMessenger.logAs(err.message, "ERROR"); - }); + .catch(err => osparc.FlashMessenger.logError(err)); }, __debtPayed: function() { @@ -287,12 +284,9 @@ qx.Class.define("osparc.study.BillingSettings", { .then(() => { this.__studyWalletId = walletId; const msg = this.tr("Credit Account saved"); - osparc.FlashMessenger.getInstance().logAs(msg, "INFO"); - }) - .catch(err => { - console.error(err); - osparc.FlashMessenger.logAs(err.message, "ERROR"); + osparc.FlashMessenger.logAs(msg, "INFO"); }) + .catch(err => osparc.FlashMessenger.logError(err)) .finally(() => { creditAccountBox.setEnabled(true); }); diff --git a/services/static-webserver/client/source/class/osparc/study/Import.js b/services/static-webserver/client/source/class/osparc/study/Import.js index 87b2e58bbfb..fd63aaaef6c 100644 --- a/services/static-webserver/client/source/class/osparc/study/Import.js +++ b/services/static-webserver/client/source/class/osparc/study/Import.js @@ -49,7 +49,7 @@ qx.Class.define("osparc.study.Import", { const size = file.size; const maxSize = 10 * 1000 * 1000 * 1000; // 10 GB if (size > maxSize) { - osparc.FlashMessenger.logAs(`The file is too big. Maximum size is ${maxSize}MB. Please provide with a smaller file or a repository URL.`, "ERROR"); + osparc.FlashMessenger.logError(`The file is too big. Maximum size is ${maxSize / (1000 * 1000 * 1000)} GB. Please provide a smaller file or a repository URL.`); return; } this.fireDataEvent("fileReady", file); diff --git a/services/static-webserver/client/source/class/osparc/study/NodePricingUnits.js b/services/static-webserver/client/source/class/osparc/study/NodePricingUnits.js index f6bc409fb39..dae4d7370c4 100644 --- a/services/static-webserver/client/source/class/osparc/study/NodePricingUnits.js +++ b/services/static-webserver/client/source/class/osparc/study/NodePricingUnits.js @@ -100,8 +100,7 @@ qx.Class.define("osparc.study.NodePricingUnits", { const studyId = this.getStudyId(); const nodeId = this.getNodeId(); - const pricingStore = osparc.store.Pricing.getInstance(); - pricingStore.fetchPricingPlansService(nodeKey, nodeVersion) + osparc.store.Pricing.getInstance().fetchPricingPlansService(nodeKey, nodeVersion) .then(pricingPlanData => { if (pricingPlanData) { const unitParams = { @@ -132,10 +131,7 @@ qx.Class.define("osparc.study.NodePricingUnits", { const pricingPlanId = this.getPricingPlanId(); this.self().patchPricingUnitSelection(studyId, nodeId, pricingPlanId, selectedPricingUnitId) .then(() => pricingUnitTiers.setSelectedUnitId(selectedPricingUnitId)) - .catch(err => { - const msg = err.message || this.tr("Cannot change Tier"); - osparc.FlashMessenger.getInstance().logAs(msg, "ERROR"); - }) + .catch(err => osparc.FlashMessenger.logError(err, this.tr("Cannot change Tier"))) .finally(() => pricingUnitTiers.setEnabled(true)); } }); diff --git a/services/static-webserver/client/source/class/osparc/study/PricingUnitLicense.js b/services/static-webserver/client/source/class/osparc/study/PricingUnitLicense.js index 47335fef924..0563af22af4 100644 --- a/services/static-webserver/client/source/class/osparc/study/PricingUnitLicense.js +++
b/services/static-webserver/client/source/class/osparc/study/PricingUnitLicense.js @@ -74,7 +74,7 @@ qx.Class.define("osparc.study.PricingUnitLicense", { // add price info const price = this.getChildControl("price"); pricingUnit.bind("cost", price, "value", { - converter: v => this.tr("Credits") + ": " + v + converter: v => this.tr("Credits") + ": " + osparc.utils.Utils.addWhiteSpaces(v) }); // add edit button @@ -85,12 +85,17 @@ qx.Class.define("osparc.study.PricingUnitLicense", { }, __rentUnit: function() { + const nSeats = parseInt(this.getUnitData().getExtraInfo()["num_of_seats"]); + const nCredits = this.getUnitData().getCost(); const expirationDate = osparc.study.PricingUnitLicense.getExpirationDate(); - const msg = this.getUnitData().getName() + this.tr(" will be available until ") + osparc.utils.Utils.formatDate(expirationDate); + let msg = nSeats + " seat" + (nSeats > 1 ? "s " : " ") + this.tr("will be available until ") + osparc.utils.Utils.formatDate(expirationDate); + msg += `
<br>
The rental will cost ${nCredits} credits`; + msg += `
<br>
I hereby accept the Terms and Conditions`; const confirmationWin = new osparc.ui.window.Confirmation(msg).set({ caption: this.tr("Rent"), confirmText: this.tr("Rent"), }); + confirmationWin.open(); confirmationWin.addListener("close", () => { if (confirmationWin.getConfirmed()) { diff --git a/services/static-webserver/client/source/class/osparc/study/StudyOptions.js b/services/static-webserver/client/source/class/osparc/study/StudyOptions.js index 1ed9a4cd961..37cd905c33c 100644 --- a/services/static-webserver/client/source/class/osparc/study/StudyOptions.js +++ b/services/static-webserver/client/source/class/osparc/study/StudyOptions.js @@ -91,12 +91,8 @@ qx.Class.define("osparc.study.StudyOptions", { }, updateName: function(studyData, name) { - return osparc.info.StudyUtils.patchStudyData(studyData, "name", name) - .catch(err => { - console.error(err); - const msg = err.message || qx.locale.Manager.tr("Something went wrong Renaming"); - osparc.FlashMessenger.logAs(msg, "ERROR"); - }); + return osparc.store.Study.patchStudyData(studyData, "name", name) + .catch(err => osparc.FlashMessenger.logError(err, qx.locale.Manager.tr("Something went wrong while renaming"))); }, updateWallet: function(studyId, walletId) { @@ -107,11 +103,7 @@ qx.Class.define("osparc.study.StudyOptions", { } }; return osparc.data.Resources.fetch("studies", "selectWallet", params) - .catch(err => { - console.error(err); - const msg = err.message || qx.locale.Manager.tr("Error selecting Credit Account"); - osparc.FlashMessenger.getInstance().logAs(msg, "ERROR"); - }); + .catch(err => osparc.FlashMessenger.logError(err, qx.locale.Manager.tr("An issue occurred while selecting Credit Account"))); }, }, @@ -245,7 +237,7 @@ qx.Class.define("osparc.study.StudyOptions", { } }; Promise.all([ - osparc.data.Resources.getOne("studies", params), + osparc.data.Resources.fetch("studies", "getOne", params), osparc.data.Resources.fetch("studies", "getWallet", params) ]) .then(values => { diff --git a/services/static-webserver/client/source/class/osparc/study/StudyPreview.js b/services/static-webserver/client/source/class/osparc/study/StudyPreview.js index 62d5d01470f..c1d6af633be 100644 --- a/services/static-webserver/client/source/class/osparc/study/StudyPreview.js +++ b/services/static-webserver/client/source/class/osparc/study/StudyPreview.js @@ -37,7 +37,7 @@ qx.Class.define("osparc.study.StudyPreview", { __buildPreview: function() { const study = this.__study; const uiMode = study.getUi().getMode(); - if (uiMode !== "app" && !study.isPipelineEmpty()) { + if (uiMode === "workbench" && !study.isPipelineEmpty()) { const workbenchUIPreview = new osparc.workbench.WorkbenchUIPreview(); workbenchUIPreview.setStudy(study); workbenchUIPreview.loadModel(study.getWorkbench()); diff --git a/services/static-webserver/client/source/class/osparc/study/StudyPricingUnits.js b/services/static-webserver/client/source/class/osparc/study/StudyPricingUnits.js index e3e8514fbaf..ccc9a673ccb 100644 --- a/services/static-webserver/client/source/class/osparc/study/StudyPricingUnits.js +++ b/services/static-webserver/client/source/class/osparc/study/StudyPricingUnits.js @@ -38,8 +38,8 @@ qx.Class.define("osparc.study.StudyPricingUnits", { }, statics: { - includeInList: function(node) { - return !osparc.data.model.Node.isFrontend(node); + includeInList: function(nodeData) { + return !osparc.data.model.Node.isFrontend(nodeData); }, }, @@ -61,9 +61,9 @@ qx.Class.define("osparc.study.StudyPricingUnits", { if ("workbench" in this.__studyData) { const workbench 
= this.__studyData["workbench"]; Object.keys(workbench).forEach(nodeId => { - const node = workbench[nodeId]; - if (this.self().includeInList(node)) { - const nodePricingUnits = new osparc.study.NodePricingUnits(this.__studyData["uuid"], nodeId, node); + const nodeData = workbench[nodeId]; + if (this.self().includeInList(nodeData)) { + const nodePricingUnits = new osparc.study.NodePricingUnits(this.__studyData["uuid"], nodeId, nodeData); this.__nodePricingUnits.push(nodePricingUnits); this._add(nodePricingUnits); promises.push(nodePricingUnits.showPricingUnits()); diff --git a/services/static-webserver/client/source/class/osparc/study/Utils.js b/services/static-webserver/client/source/class/osparc/study/Utils.js index 445b2f82c31..f548d08b85a 100644 --- a/services/static-webserver/client/source/class/osparc/study/Utils.js +++ b/services/static-webserver/client/source/class/osparc/study/Utils.js @@ -23,88 +23,79 @@ qx.Class.define("osparc.study.Utils", { type: "static", statics: { - extractServices: function(workbench) { - const services = []; + __isAnyLinkedNodeMissing: function(studyData) { + const existingNodeIds = Object.keys(studyData["workbench"]); + const linkedNodeIds = osparc.data.model.Workbench.getLinkedNodeIds(studyData["workbench"]); + const allExist = linkedNodeIds.every(linkedNodeId => existingNodeIds.includes(linkedNodeId)); + return !allExist; + }, + + isCorrupt: function(studyData) { + return this.__isAnyLinkedNodeMissing(studyData); + }, + + extractUniqueServices: function(workbench) { + const services = new Set([]); Object.values(workbench).forEach(srv => { - services.push({ + services.add({ key: srv.key, version: srv.version }); }); - return services; + return Array.from(services); }, - getInaccessibleServices: function(workbench) { - const allServices = osparc.store.Services.servicesCached; - const unaccessibleServices = []; - const wbServices = new Set(this.extractServices(workbench)); - wbServices.forEach(srv => { - if (srv.key in allServices && srv.version in allServices[srv.key]) { - return; - } - const idx = unaccessibleServices.findIndex(unSrv => unSrv.key === srv.key && unSrv.version === srv.version); - if (idx === -1) { - unaccessibleServices.push(srv); - } - }); - return unaccessibleServices; + getCantExecuteServices: function(studyServices = []) { + return studyServices.filter(studyService => studyService["myAccessRights"]["execute"] === false); }, - getInaccessibleServicesMsg: function(inaccessibleServices, workbench) { - let msg = qx.locale.Manager.tr("Service(s) not accessible:
"); - Object.values(workbench).forEach(node => { - const inaccessibleService = inaccessibleServices.find(srv => srv.key === node.key && srv.version === node.version); - if (inaccessibleService) { - const n = inaccessibleService.key.lastIndexOf("/"); - const friendlyKey = inaccessibleService.key.substring(n + 1); - msg += `- ${node.label} (${friendlyKey}:${inaccessibleService.version})
`; + anyServiceRetired: function(studyServices) { + const isRetired = studyServices.some(studyService => { + if (studyService["release"] && studyService["release"]["retired"]) { + const retirementDate = new Date(studyService["release"]["retired"]); + const currentDate = new Date(); + return retirementDate < currentDate; } + return false; }); - return msg; - }, - - isWorkbenchUpdatable: function(workbench) { - const services = new Set(this.extractServices(workbench)); - const isUpdatable = Array.from(services).some(srv => osparc.service.Utils.isUpdatable(srv)); - return isUpdatable; + return isRetired; }, - isWorkbenchRetired: function(workbench) { - const allServices = osparc.store.Services.servicesCached; - const services = new Set(this.extractServices(workbench)); - const isRetired = Array.from(services).some(srv => { - if (srv.key in allServices && srv.version in allServices[srv.key]) { - const serviceMD = allServices[srv.key][srv.version]; - if (serviceMD["retired"]) { - const retirementDate = new Date(serviceMD["retired"]); - const currentDate = new Date(); - return retirementDate < currentDate; - } - return false; + anyServiceDeprecated: function(studyServices) { + const isDeprecated = studyServices.some(studyService => { + if (studyService["release"] && studyService["release"]["retired"]) { + const retirementDate = new Date(studyService["release"]["retired"]); + const currentDate = new Date(); + return retirementDate > currentDate; } return false; }); - return isRetired; + return isDeprecated; }, - isWorkbenchDeprecated: function(workbench) { - const allServices = osparc.store.Services.servicesCached; - const services = new Set(this.extractServices(workbench)); - const isRetired = Array.from(services).some(srv => { - if (srv.key in allServices && srv.version in allServices[srv.key]) { - const serviceMD = allServices[srv.key][srv.version]; - if ("retired" in serviceMD && serviceMD["retired"]) { - const retirementDate = new Date(serviceMD["retired"]); - const currentDate = new Date(); - return retirementDate > currentDate; - } - return false; + anyServiceUpdatable: function(studyServices) { + const isUpdatable = studyServices.some(studyService => { + if (studyService["release"] && studyService["release"]["compatibility"]) { + return Boolean(studyService["release"]["compatibility"]); } return false; }); - return isRetired; + return isUpdatable; + }, + + updatableNodeIds: function(workbench, studyServices) { + const nodeIds = []; + for (const nodeId in workbench) { + const node = workbench[nodeId]; + const studyServiceFound = studyServices.find(studyService => studyService["key"] === node["key"] && studyService["release"]["version"] === node["version"]); + if (studyServiceFound && studyServiceFound["release"] && studyServiceFound["release"]["compatibility"]) { + nodeIds.push(nodeId); + } + } + return nodeIds; }, + createStudyFromService: function(key, version, existingStudies, newStudyLabel, contextProps = {}) { return new Promise((resolve, reject) => { osparc.store.Services.getService(key, version) @@ -142,9 +133,13 @@ qx.Class.define("osparc.study.Utils", { "y": 100 } }; - const inaccessibleServices = this.getInaccessibleServices(minStudyData["workbench"]) + // maybe check it's dynamic + if (!("mode" in minStudyData["ui"])) { + minStudyData["ui"]["mode"] = "standalone"; + } + const inaccessibleServices = osparc.store.Services.getInaccessibleServices(minStudyData["workbench"]) if (inaccessibleServices.length) { - const msg = this.getInaccessibleServicesMsg(inaccessibleServices, 
minStudyData["workbench"]); + const msg = osparc.store.Services.getInaccessibleServicesMsg(inaccessibleServices, minStudyData["workbench"]); reject({ message: msg }); @@ -157,10 +152,7 @@ qx.Class.define("osparc.study.Utils", { .then(studyData => resolve(studyData["uuid"])) .catch(err => reject(err)); }) - .catch(err => { - osparc.FlashMessenger.getInstance().logAs(err.message, "ERROR"); - console.error(err); - }); + .catch(err => osparc.FlashMessenger.logError(err)); }); }, @@ -170,7 +162,7 @@ qx.Class.define("osparc.study.Utils", { pollTask: true }; const fetchPromise = osparc.data.Resources.fetch("studies", "postNewStudy", params, options); - const pollTasks = osparc.data.PollTasks.getInstance(); + const pollTasks = osparc.store.PollTasks.getInstance(); const interval = 1000; pollTasks.createPollingTask(fetchPromise, interval) .then(task => { @@ -189,80 +181,83 @@ qx.Class.define("osparc.study.Utils", { createStudyFromTemplate: function(templateData, loadingPage, contextProps = {}) { return new Promise((resolve, reject) => { - const inaccessibleServices = this.getInaccessibleServices(templateData["workbench"]); - if (inaccessibleServices.length) { - const msg = this.getInaccessibleServicesMsg(inaccessibleServices, templateData["workbench"]); - reject({ - message: msg + osparc.store.Services.getStudyServicesMetadata(templateData) + .finally(() => { + const inaccessibleServices = osparc.store.Services.getInaccessibleServices(templateData["workbench"]); + if (inaccessibleServices.length) { + const msg = osparc.store.Services.getInaccessibleServicesMsg(inaccessibleServices, templateData["workbench"]); + reject({ + message: msg + }); + return; + } + // context props, otherwise Study will be created in the root folder of my personal workspace + const minStudyData = Object.assign(osparc.data.model.Study.createMinStudyObject(), contextProps); + minStudyData["name"] = templateData["name"]; + minStudyData["description"] = templateData["description"]; + minStudyData["thumbnail"] = templateData["thumbnail"]; + const params = { + url: { + templateId: templateData["uuid"] + }, + data: minStudyData + }; + const options = { + pollTask: true + }; + const fetchPromise = osparc.data.Resources.fetch("studies", "postNewStudyFromTemplate", params, options); + const pollTasks = osparc.store.PollTasks.getInstance(); + const interval = 1000; + pollTasks.createPollingTask(fetchPromise, interval) + .then(task => { + const title = qx.locale.Manager.tr("CREATING ") + osparc.product.Utils.getStudyAlias({allUpperCase: true}) + " ..."; + const progressSequence = new osparc.widget.ProgressSequence(title).set({ + minHeight: 180 // four tasks + }); + progressSequence.addOverallProgressBar(); + loadingPage.clearMessages(); + loadingPage.addWidgetToMessages(progressSequence); + task.addListener("updateReceived", e => { + const updateData = e.getData(); + if ("task_progress" in updateData && loadingPage) { + const progress = updateData["task_progress"]; + const message = progress["message"]; + const percent = progress["percent"] ? 
parseFloat(progress["percent"].toFixed(3)) : progress["percent"]; + progressSequence.setOverallProgress(percent); + const existingTask = progressSequence.getTask(message); + if (existingTask) { + // update task + osparc.widget.ProgressSequence.updateTaskProgress(existingTask, { + value: percent, + progressLabel: parseFloat((percent*100).toFixed(2)) + "%" + }); + } else { + // new task + // all the previous steps to 100% + progressSequence.getTasks().forEach(tsk => osparc.widget.ProgressSequence.updateTaskProgress(tsk, { + value: 1, + progressLabel: "100%" + })); + // and move to the next new task + const subTask = progressSequence.addNewTask(message); + osparc.widget.ProgressSequence.updateTaskProgress(subTask, { + value: percent, + progressLabel: "0%" + }); + } + } + }, this); + task.addListener("resultReceived", e => { + const studyData = e.getData(); + resolve(studyData); + }, this); + task.addListener("pollingError", e => { + const err = e.getData(); + reject(err); + }, this); + }) + .catch(err => reject(err)); }); - return; - } - // context props, otherwise Study will be created in the root folder of my personal workspace - const minStudyData = Object.assign(osparc.data.model.Study.createMinStudyObject(), contextProps); - minStudyData["name"] = templateData["name"]; - minStudyData["description"] = templateData["description"]; - minStudyData["thumbnail"] = templateData["thumbnail"]; - const params = { - url: { - templateId: templateData["uuid"] - }, - data: minStudyData - }; - const options = { - pollTask: true - }; - const fetchPromise = osparc.data.Resources.fetch("studies", "postNewStudyFromTemplate", params, options); - const pollTasks = osparc.data.PollTasks.getInstance(); - const interval = 1000; - pollTasks.createPollingTask(fetchPromise, interval) - .then(task => { - const title = qx.locale.Manager.tr("CREATING ") + osparc.product.Utils.getStudyAlias({allUpperCase: true}) + " ..."; - const progressSequence = new osparc.widget.ProgressSequence(title).set({ - minHeight: 180 // four tasks - }); - progressSequence.addOverallProgressBar(); - loadingPage.clearMessages(); - loadingPage.addWidgetToMessages(progressSequence); - task.addListener("updateReceived", e => { - const updateData = e.getData(); - if ("task_progress" in updateData && loadingPage) { - const progress = updateData["task_progress"]; - const message = progress["message"]; - const percent = progress["percent"] ? 
parseFloat(progress["percent"].toFixed(3)) : progress["percent"]; - progressSequence.setOverallProgress(percent); - const existingTask = progressSequence.getTask(message); - if (existingTask) { - // update task - osparc.widget.ProgressSequence.updateTaskProgress(existingTask, { - value: percent, - progressLabel: parseFloat((percent*100).toFixed(2)) + "%" - }); - } else { - // new task - // all the previous steps to 100% - progressSequence.getTasks().forEach(tsk => osparc.widget.ProgressSequence.updateTaskProgress(tsk, { - value: 1, - progressLabel: "100%" - })); - // and move to the next new task - const subTask = progressSequence.addNewTask(message); - osparc.widget.ProgressSequence.updateTaskProgress(subTask, { - value: percent, - progressLabel: "0%" - }); - } - } - }, this); - task.addListener("resultReceived", e => { - const studyData = e.getData(); - resolve(studyData); - }, this); - task.addListener("pollingError", e => { - const err = e.getData(); - reject(err); - }, this); - }) - .catch(err => reject(err)); }); }, @@ -270,9 +265,16 @@ qx.Class.define("osparc.study.Utils", { return Boolean("debt" in studyData && studyData["debt"] < 0); }, + getUiMode: function(studyData) { + if ("ui" in studyData && "mode" in studyData["ui"]) { + return studyData["ui"]["mode"]; + } + return null; + }, + __getBlockedState: function(studyData) { - if (studyData["workbench"]) { - const unaccessibleServices = osparc.study.Utils.getInaccessibleServices(studyData["workbench"]) + if (studyData["services"]) { + const unaccessibleServices = osparc.study.Utils.getCantExecuteServices(studyData["services"]) if (unaccessibleServices.length) { return "UNKNOWN_SERVICES"; } @@ -308,7 +310,7 @@ qx.Class.define("osparc.study.Utils", { canShowStudyData: function(studyData) { const blocked = this.__getBlockedState(studyData); - return [false].includes(blocked); + return ["UNKNOWN_SERVICES", false].includes(blocked); }, canShowPreview: function(studyData) { @@ -335,5 +337,69 @@ qx.Class.define("osparc.study.Utils", { const blocked = this.__getBlockedState(studyData); return ["UNKNOWN_SERVICES", false].includes(blocked); }, + + getNonFrontendNodes: function(studyData) { + return Object.values(studyData["workbench"]).filter(nodeData => !osparc.data.model.Node.isFrontend(nodeData)); + }, + + guessIcon: async function(studyData) { + if (osparc.product.Utils.isProduct("tis") || osparc.product.Utils.isProduct("tiplite")) { + return this.__guessTIPIcon(studyData); + } + const icon = await this.__guessIcon(studyData); + return icon; + }, + + __guessIcon: function(studyData) { + const defaultIcon = osparc.dashboard.CardBase.PRODUCT_ICON; + return new Promise(resolve => { + // the was to guess the TI type is to check the boot mode of the ti-postpro in the pipeline + const wbServices = this.self().getNonFrontendNodes(studyData); + if (wbServices.length === 1) { + const wbService = wbServices[0]; + osparc.store.Services.getService(wbService.key, wbService.version) + .then(serviceMetadata => { + if (serviceMetadata["icon"]) { + resolve(serviceMetadata["icon"]); + } + resolve(defaultIcon); + }); + } else if (wbServices.length > 1) { + resolve("osparc/icons/diagram.png"); + } else { + resolve(defaultIcon); + } + }); + }, + + __guessTIPIcon: function(studyData) { + // the was to guess the TI type is to check the boot mode of the ti-postpro in the pipeline + const tiPostpro = Object.values(studyData["workbench"]).find(srv => srv.key.includes("ti-postpro")); + if (tiPostpro && tiPostpro["bootOptions"]) { + switch 
(tiPostpro["bootOptions"]["boot_mode"]) { + case "0": + // classic TI + return "osparc/icons/TI.png"; + case "1": + // multichannel + return "osparc/icons/MC.png"; + case "2": + // phase-modulation + return "osparc/icons/PM.png"; + case "3": + // personalized TI + return "osparc/icons/pTI.png"; + case "4": + // personalized multichannel + return "osparc/icons/pMC.png"; + case "5": + // personalized phase-modulation + return "osparc/icons/pPM.png"; + default: + return "osparc/icons/TI.png"; + } + } + return "osparc/icons/TI.png"; + }, } }); diff --git a/services/static-webserver/client/source/class/osparc/task/TaskUI.js b/services/static-webserver/client/source/class/osparc/task/TaskUI.js index 513bf34ec91..3e79c75137b 100644 --- a/services/static-webserver/client/source/class/osparc/task/TaskUI.js +++ b/services/static-webserver/client/source/class/osparc/task/TaskUI.js @@ -133,16 +133,6 @@ qx.Class.define("osparc.task.TaskUI", { }, this); }, - start: function() { - const tasks = osparc.task.Tasks.getInstance(); - tasks.addTask(this); - }, - - stop: function() { - const tasks = osparc.task.Tasks.getInstance(); - tasks.removeTask(this); - }, - setIcon: function(source) { this.getChildControl("icon").getContentElement().removeClass("rotate"); this.getChildControl("icon").setSource(source); diff --git a/services/static-webserver/client/source/class/osparc/task/TasksButton.js b/services/static-webserver/client/source/class/osparc/task/TasksButton.js index cbf113d4eb3..0d6f85e24d4 100644 --- a/services/static-webserver/client/source/class/osparc/task/TasksButton.js +++ b/services/static-webserver/client/source/class/osparc/task/TasksButton.js @@ -27,10 +27,11 @@ qx.Class.define("osparc.task.TasksButton", { width: 30, alignX: "center", cursor: "pointer", - visibility: "excluded" + visibility: "excluded", + toolTipText: this.tr("Tasks"), }); - const tasks = osparc.task.Tasks.getInstance(); + const tasks = osparc.task.TasksContainer.getInstance(); tasks.getTasks().addListener("change", e => this.__updateTasksButton(), this); this.addListener("tap", () => this.__showTasks(), this); }, @@ -40,8 +41,8 @@ qx.Class.define("osparc.task.TasksButton", { let control; switch (id) { case "icon": { - control = new qx.ui.basic.Image("@FontAwesome5Solid/cog/24"); - osparc.utils.Utils.addClass(control.getContentElement(), "rotate"); + control = new qx.ui.basic.Image("@FontAwesome5Solid/cog/22"); + osparc.utils.Utils.addClass(control.getContentElement(), "rotateSlow"); const logoContainer = new qx.ui.container.Composite(new qx.ui.layout.HBox().set({ alignY: "middle" @@ -74,7 +75,7 @@ qx.Class.define("osparc.task.TasksButton", { this._createChildControlImpl("icon"); const number = this.getChildControl("number"); - const tasks = osparc.task.Tasks.getInstance(); + const tasks = osparc.task.TasksContainer.getInstance(); const nTasks = tasks.getTasks().length; number.setValue(nTasks.toString()); nTasks ? 
this.show() : this.exclude(); @@ -83,7 +84,7 @@ qx.Class.define("osparc.task.TasksButton", { __showTasks: function() { const that = this; const tapListener = event => { - const tasks = osparc.task.Tasks.getInstance(); + const tasks = osparc.task.TasksContainer.getInstance(); const tasksContainer = tasks.getTasksContainer(); if (osparc.utils.Utils.isMouseOnElement(tasksContainer, event)) { return; @@ -103,14 +104,14 @@ qx.Class.define("osparc.task.TasksButton", { bounds.top = parseInt(rect.y); } } - const tasks = osparc.task.Tasks.getInstance(); + const tasks = osparc.task.TasksContainer.getInstance(); tasks.setTasksContainerPosition(bounds.left+bounds.width, osparc.navigation.NavigationBar.HEIGHT+3); tasks.getTasksContainer().show(); document.addEventListener("mousedown", tapListener); }, __hideTasks: function() { - const tasks = osparc.task.Tasks.getInstance(); + const tasks = osparc.task.TasksContainer.getInstance(); tasks.getTasksContainer().exclude(); } } diff --git a/services/static-webserver/client/source/class/osparc/task/Tasks.js b/services/static-webserver/client/source/class/osparc/task/TasksContainer.js similarity index 72% rename from services/static-webserver/client/source/class/osparc/task/Tasks.js rename to services/static-webserver/client/source/class/osparc/task/TasksContainer.js index 8494db9754b..6e22ee89c96 100644 --- a/services/static-webserver/client/source/class/osparc/task/Tasks.js +++ b/services/static-webserver/client/source/class/osparc/task/TasksContainer.js @@ -15,7 +15,7 @@ ************************************************************************ */ -qx.Class.define("osparc.task.Tasks", { +qx.Class.define("osparc.task.TasksContainer", { extend: qx.core.Object, type: "singleton", @@ -39,17 +39,21 @@ qx.Class.define("osparc.task.Tasks", { __tasks: null, __tasksContainer: null, - addTask: function(task) { - this.__tasks.push(task); - this.__tasksContainer.addAt(task, 0); + addTaskUI: function(taskUI) { + const alreadyExists = this.__tasks.filter(task => task.getTask().getTaskId() === taskUI.getTask().getTaskId()).length; + if (alreadyExists) { + return; + } + this.__tasks.push(taskUI); + this.__tasksContainer.addAt(taskUI, 0); }, - removeTask: function(task) { - if (this.__tasks.indexOf(task) > -1) { - this.__tasks.remove(task); + removeTaskUI: function(taskUI) { + if (this.__tasks.indexOf(taskUI) > -1) { + this.__tasks.remove(taskUI); } - if (this.__tasksContainer.indexOf(task) > -1) { - this.__tasksContainer.remove(task); + if (this.__tasksContainer.indexOf(taskUI) > -1) { + this.__tasksContainer.remove(taskUI); } }, diff --git a/services/static-webserver/client/source/class/osparc/tester/TesterCenter.js b/services/static-webserver/client/source/class/osparc/tester/TesterCenter.js index 023fafb4dc5..547bd4ae3d2 100644 --- a/services/static-webserver/client/source/class/osparc/tester/TesterCenter.js +++ b/services/static-webserver/client/source/class/osparc/tester/TesterCenter.js @@ -24,7 +24,7 @@ qx.Class.define("osparc.tester.TesterCenter", { const miniProfile = osparc.desktop.account.MyAccount.createMiniProfileView().set({ paddingRight: 10 }); - this.addWidgetOnTopOfTheTabs(miniProfile); + this.addWidgetToTabs(miniProfile); this.__addSocketMessagesPage(); this.__addConsoleErrorsPage(); diff --git a/services/static-webserver/client/source/class/osparc/theme/Appearance.js b/services/static-webserver/client/source/class/osparc/theme/Appearance.js index 8af7db982cb..a1bc3cd837b 100644 --- a/services/static-webserver/client/source/class/osparc/theme/Appearance.js 
+++ b/services/static-webserver/client/source/class/osparc/theme/Appearance.js @@ -22,7 +22,7 @@ qx.Theme.define("osparc.theme.Appearance", { "strong-ui": { style: () => { return { - textColor: "default-button-text", + textColor: "white", backgroundColor: "product-color", }; } @@ -719,6 +719,17 @@ qx.Theme.define("osparc.theme.Appearance", { } }, + "menu-wider": { + include: "menu", + style: () => { + return { + decorator: "border-simple", + font: "text-14", + padding: 4, + }; + } + }, + "menu-button": { alias: "atom", @@ -727,7 +738,7 @@ qx.Theme.define("osparc.theme.Appearance", { decorator: "rounded", cursor: states.disabled ? "not-allowed" : "pointer", backgroundColor: states.selected || states.hovered ? "pb-new" : undefined, - textColor: states.selected ? "default-button-text" : "text", + textColor: "text", padding: [4, 8] } } @@ -971,7 +982,7 @@ qx.Theme.define("osparc.theme.Appearance", { style: state => ({ decorator: state.hovered || state.focused ? "form-button-warning-hover" : "form-button-warning", backgroundColor: state.hovered || state.focused ? "default-button-hover-background" : "warning", - textColor: state.hovered || state.focused ? "default-button-text" : "black", + textColor: "black", }) }, @@ -980,7 +991,7 @@ qx.Theme.define("osparc.theme.Appearance", { style: state => ({ decorator: state.hovered || state.focused ? "form-button-danger-hover" : "form-button-danger", backgroundColor: state.hovered || state.focused ? "default-button-hover-background" : "error", - textColor: state.hovered || state.focused ? "default-button-text" : "default-button-text" // dark theme's text color + textColor: "black", }) }, @@ -997,7 +1008,7 @@ qx.Theme.define("osparc.theme.Appearance", { decorator: "tab-button", cursor: "pointer", padding: 5, - textColor: "default-button-text", + textColor: "white", backgroundColor: "default-button-background" }; if (states.hovered) { diff --git a/services/static-webserver/client/source/class/osparc/theme/ColorDark.js b/services/static-webserver/client/source/class/osparc/theme/ColorDark.js index 64a3784c276..47b42050a36 100644 --- a/services/static-webserver/client/source/class/osparc/theme/ColorDark.js +++ b/services/static-webserver/client/source/class/osparc/theme/ColorDark.js @@ -122,28 +122,6 @@ qx.Theme.define("osparc.theme.ColorDark", { "tooltip": "flash_message_bg", "tooltip-text": "text", - // table - "table-header": "background-main", - "table-header-foreground": "c09", - "table-header-border": "c07", - "table-focus-indicator": "background-main-5", - - // used in table code - "table-header-cell": "background-main", - "table-row-background-even": "background-main", - "table-row-background-odd": "background-main", - "table-row-background-focused": "background-main-1", - "table-row-background-focused-selected": "background-main-2", - "table-row-background-selected": "background-main-2", - - // foreground - "table-row-selected": "c12", - "table-row": "c09", - - // table grid color - "table-row-line": "background-main", - "table-column-line": "background-main", - // used in progressive code "progressive-table-header": "c08", "progressive-table-row-background-even": "background-main", diff --git a/services/static-webserver/client/source/class/osparc/theme/ColorLight.js b/services/static-webserver/client/source/class/osparc/theme/ColorLight.js index 6b798200e18..629e75ccef5 100644 --- a/services/static-webserver/client/source/class/osparc/theme/ColorLight.js +++ b/services/static-webserver/client/source/class/osparc/theme/ColorLight.js @@ -124,28 +124,6 
@@ qx.Theme.define("osparc.theme.ColorLight", { "tooltip-text": "text", - // table - "table-header": "background-main", - "table-header-foreground": "c09", - "table-header-border": "c07", - "table-focus-indicator": "background-main-5", - - // used in table code - "table-header-cell": "background-main", - "table-row-background-even": "background-main", - "table-row-background-odd": "background-main", - "table-row-background-focused": "background-main-1", - "table-row-background-focused-selected": "background-main-2", - "table-row-background-selected": "background-main-2", - - // foreground - "table-row-selected": "c12", - "table-row": "c09", - - // table grid color - "table-row-line": "background-main", - "table-column-line": "background-main", - // used in progressive code "progressive-table-header": "c08", "progressive-table-row-background-even": "background-main", diff --git a/services/static-webserver/client/source/class/osparc/theme/Decoration.js b/services/static-webserver/client/source/class/osparc/theme/Decoration.js index a1381421494..2a32cae2559 100644 --- a/services/static-webserver/client/source/class/osparc/theme/Decoration.js +++ b/services/static-webserver/client/source/class/osparc/theme/Decoration.js @@ -101,6 +101,10 @@ qx.Theme.define("osparc.theme.Decoration", { } }, + "form-input-focused-invalid": { + include: "form-input-invalid" + }, + "form-array-container": { style: { radius: 2, diff --git a/services/static-webserver/client/source/class/osparc/theme/mixin/Color.js b/services/static-webserver/client/source/class/osparc/theme/mixin/Color.js index 95883e8284f..aaa58363b21 100644 --- a/services/static-webserver/client/source/class/osparc/theme/mixin/Color.js +++ b/services/static-webserver/client/source/class/osparc/theme/mixin/Color.js @@ -29,7 +29,6 @@ qx.Theme.define("osparc.theme.mixin.Color", { "pb-locked": "rgba(113, 157, 181, 0.4)", // button - "default-button-text": "rgba(255, 255, 255, 1)", "default-button-text-outline": "contrasted-text-light", "default-button-text-action": "contrasted-text-dark", "default-button": "product-color", @@ -44,8 +43,6 @@ qx.Theme.define("osparc.theme.mixin.Color", { "default-button-disabled-background": "rgba(113, 157, 181, 0.25)", "default-button-focus-blur": "rgba(254, 233, 86, 1)", - "status_icon": "rgba(255, 255, 255, 1)", - // separator "border-separator": "product-color", @@ -54,6 +51,29 @@ qx.Theme.define("osparc.theme.mixin.Color", { "logger-warning-message": "warning-yellow", "logger-error-message": "failed-red", - "workbench-edge-selected": "busy-orange" + "workbench-edge-selected": "busy-orange", + + + // table + "table-header": "transparent", + "table-header-foreground": "text", // text color + "table-header-border": "text", // header underline + "table-header-cell": "transparent", + + // used in table code + "table-focus-indicator": "transparent", + "table-row-background-even": "transparent", + "table-row-background-odd": "transparent", + "table-row-background-focused": "transparent", + "table-row-background-focused-selected": "background-main-2", + "table-row-background-selected": "background-main-2", + + // foreground + "table-row-selected": "text", + "table-row": "text", + + // table grid color + "table-row-line": "transparent", + "table-column-line": "transparent", } }); diff --git a/services/static-webserver/client/source/class/osparc/tours/Manager.js b/services/static-webserver/client/source/class/osparc/tours/Manager.js index cf2618c3d4e..2d4445dcf7c 100644 --- 
a/services/static-webserver/client/source/class/osparc/tours/Manager.js +++ b/services/static-webserver/client/source/class/osparc/tours/Manager.js @@ -66,7 +66,7 @@ qx.Class.define("osparc.tours.Manager", { switch (id) { case "intro-text": control = new qx.ui.basic.Label().set({ - value: this.tr("This collection of Guided Tours will show you how to use the framework:"), + value: this.tr("This collection of Guided Tours will show you how to use the platform:"), rich: true, wrap: true, font: "text-14" diff --git a/services/static-webserver/client/source/class/osparc/ui/basic/DateAndBy.js b/services/static-webserver/client/source/class/osparc/ui/basic/DateAndBy.js index 320a5562f71..42ec2db9500 100644 --- a/services/static-webserver/client/source/class/osparc/ui/basic/DateAndBy.js +++ b/services/static-webserver/client/source/class/osparc/ui/basic/DateAndBy.js @@ -96,9 +96,12 @@ qx.Class.define("osparc.ui.basic.DateAndBy", { const atom = this.getChildControl("last-touching"); const myGroupId = osparc.auth.Data.getInstance().getGroupId(); if (groupId === myGroupId) { - atom.setLabel("by me"); + atom.set({ + label: "by me", + icon: null, + }) } else { - atom.setLabel("by"); + atom.setLabel("by "); osparc.dashboard.CardBase.addHintFromGids(atom, [groupId]); } } diff --git a/services/static-webserver/client/source/class/osparc/ui/basic/Logo.js b/services/static-webserver/client/source/class/osparc/ui/basic/Logo.js index e35d7659bda..c0d82042f21 100644 --- a/services/static-webserver/client/source/class/osparc/ui/basic/Logo.js +++ b/services/static-webserver/client/source/class/osparc/ui/basic/Logo.js @@ -32,6 +32,8 @@ qx.Class.define("osparc.ui.basic.Logo", { this.__resetSourcePath(); + osparc.utils.Utils.setAltToImage(this, "product-logo"); + const themeManager = qx.theme.manager.Meta.getInstance(); themeManager.addListener("changeTheme", () => this.__resetSourcePath(), this); diff --git a/services/static-webserver/client/source/class/osparc/ui/basic/NodeStatusUI.js b/services/static-webserver/client/source/class/osparc/ui/basic/NodeStatusUI.js index d89c747fbea..25d7917c230 100644 --- a/services/static-webserver/client/source/class/osparc/ui/basic/NodeStatusUI.js +++ b/services/static-webserver/client/source/class/osparc/ui/basic/NodeStatusUI.js @@ -145,7 +145,7 @@ qx.Class.define("osparc.ui.basic.NodeStatusUI", { this.getNode().getStatus().addListener("changeProgress", e => { const progress = e.getData(); if (progress > 0 && progress < 100) { - this.getChildControl("label").setValue(this.tr("Uploading")); + this.getChildControl("label").setValue(this.tr("Uploading...")); } }); } diff --git a/services/static-webserver/client/source/class/osparc/ui/basic/SVGImage.js b/services/static-webserver/client/source/class/osparc/ui/basic/SVGImage.js index 83ef2e94646..878413af5e9 100644 --- a/services/static-webserver/client/source/class/osparc/ui/basic/SVGImage.js +++ b/services/static-webserver/client/source/class/osparc/ui/basic/SVGImage.js @@ -127,6 +127,22 @@ qx.Class.define("osparc.ui.basic.SVGImage", { const brightnessValue = l3 => l3; const contrastValue = l4 => l4 > 50 ? 
50 : l4; return `invert(${invertValue(l)}%) sepia(${sepiaValue(s)}%) saturate(${saturateValue(s)}%) hue-rotate(${h}deg) brightness(${brightnessValue(l)}%) contrast(${contrastValue(l)}%)`; + }, + + setColorToImage: function(image, keywordOrRgb) { + if (keywordOrRgb === null) { + keywordOrRgb = "text"; + } + let filterValue = this.self().keywordToCSSFilter(keywordOrRgb); + if (filterValue === null) { + const hexColor = qx.theme.manager.Color.getInstance().resolve(keywordOrRgb); + const rgbColor = qx.util.ColorUtil.hexStringToRgb(hexColor); + filterValue = this.self().rgbToCSSFilter(rgbColor); + } + const myStyle = { + "filter": filterValue + }; + image.getContentElement().setStyles(myStyle); } }, @@ -160,19 +176,7 @@ qx.Class.define("osparc.ui.basic.SVGImage", { * @param keywordOrRgb {string} predefined keyword or rgb in the following format "0,255,0" */ __applyImageColor: function(keywordOrRgb) { - if (keywordOrRgb === null) { - keywordOrRgb = "text"; - } - let filterValue = this.self().keywordToCSSFilter(keywordOrRgb); - if (filterValue === null) { - const hexColor = qx.theme.manager.Color.getInstance().resolve(keywordOrRgb); - const rgbColor = qx.util.ColorUtil.hexStringToRgb(hexColor); - filterValue = this.self().rgbToCSSFilter(rgbColor); - } - const myStyle = { - "filter": filterValue - }; - this.getChildControl("image").getContentElement().setStyles(myStyle); - } + this.self().setColorToImage(this.getChildControl("image"), keywordOrRgb); + }, } }); diff --git a/services/static-webserver/client/source/class/osparc/ui/basic/Thumbnail.js b/services/static-webserver/client/source/class/osparc/ui/basic/Thumbnail.js index 53e9d77285c..ff7c6f5d98d 100644 --- a/services/static-webserver/client/source/class/osparc/ui/basic/Thumbnail.js +++ b/services/static-webserver/client/source/class/osparc/ui/basic/Thumbnail.js @@ -72,7 +72,7 @@ qx.Class.define("osparc.ui.basic.Thumbnail", { allowStretchX: true, allowStretchY: true, alignX: "center", - alignY: "middle" + alignY: "middle", }); this.addCenteredWidget(control); break; @@ -83,7 +83,11 @@ qx.Class.define("osparc.ui.basic.Thumbnail", { __applySource: function(val) { const image = this.getChildControl("image"); if (val) { - image.setSource(val); + if (osparc.utils.Utils.isValidHttpUrl(val)) { + osparc.utils.Utils.setUrlSourceToImage(image, val); + } else { + image.setSource(val); + } } }, @@ -96,10 +100,16 @@ qx.Class.define("osparc.ui.basic.Thumbnail", { if (srcWidth && srcHeight) { const aspectRatio = srcWidth/srcHeight; if (this.getBounds() && this.getBounds().width < image.getMaxWidth()) { - image.setMaxWidth(this.getBounds().width); + image.set({ + minWidth: parseInt(this.getBounds().width), + maxWidth: parseInt(this.getBounds().width), + }); } if (this.getBounds() && this.getBounds().height < image.getMaxHeight()) { - image.setMaxHeight(this.getBounds().height); + image.set({ + minHeight: parseInt(this.getBounds().height), + maxHeight: parseInt(this.getBounds().height), + }); } const maxWidth = image.getMaxWidth(); const maxHeight = image.getMaxHeight(); @@ -107,12 +117,18 @@ qx.Class.define("osparc.ui.basic.Thumbnail", { if (maxWidth && maxHeight) { const newMaxHeight = maxWidth/aspectRatio; if (newMaxHeight < maxHeight) { - image.setMaxHeight(parseInt(newMaxHeight)); + image.set({ + minHeight: parseInt(newMaxHeight), + maxHeight: parseInt(newMaxHeight), + }); return; } const newMaxWidth = maxHeight*aspectRatio; if (newMaxWidth < maxWidth) { - image.setMaxWidth(parseInt(newMaxWidth)); + image.set({ + minWidth: parseInt(newMaxWidth), + 
maxWidth: parseInt(newMaxWidth), + }); return; } return; @@ -120,13 +136,19 @@ qx.Class.define("osparc.ui.basic.Thumbnail", { if (maxWidth) { const newMaxHeight = maxWidth/aspectRatio; - image.setMaxHeight(parseInt(newMaxHeight)); + image.set({ + minHeight: parseInt(newMaxHeight), + maxHeight: parseInt(newMaxHeight), + }); return; } if (maxHeight) { const newMaxWidth = maxHeight*aspectRatio; - image.setMaxWidth(parseInt(newMaxWidth)); + image.set({ + minWidth: parseInt(newMaxWidth), + maxWidth: parseInt(newMaxWidth), + }); return; } } diff --git a/services/static-webserver/client/source/class/osparc/ui/form/renderer/SingleWithIcon.js b/services/static-webserver/client/source/class/osparc/ui/form/renderer/SingleWithIcon.js new file mode 100644 index 00000000000..a8252d6040c --- /dev/null +++ b/services/static-webserver/client/source/class/osparc/ui/form/renderer/SingleWithIcon.js @@ -0,0 +1,62 @@ +/* ************************************************************************ + + osparc - the simcore frontend + + https://osparc.io + + Copyright: + 2025 IT'IS Foundation, https://itis.swiss + + License: + MIT: https://opensource.org/licenses/MIT + + Authors: + * Odei Maiz (odeimaiz) + +************************************************************************ */ + +qx.Class.define("osparc.ui.form.renderer.SingleWithIcon", { + extend: qx.ui.form.renderer.Single, + + construct: function(form, icons) { + if (icons) { + this.__icons = icons; + } else { + this.__icons = {}; + } + + this.base(arguments, form); + }, + + members: { + __icons: null, + + setIcons: function(icons) { + this.__icons = icons; + + this._onFormChange(); + }, + + // overridden + addItems: function(items, names, title, itemOptions, headerOptions) { + this.base(arguments, items, names, title, itemOptions, headerOptions); + + // header + let row = title === null ? 
0 : 1; + + for (let i = 0; i < items.length; i++) { + if (i in this.__icons) { + const image = new qx.ui.basic.Image(this.__icons[i]).set({ + alignY: "middle", + }); + this._add(image, { + row, + column: 2, + }); + } + + row++; + } + }, + } +}); diff --git a/services/static-webserver/client/source/class/osparc/ui/list/CollaboratorListItem.js b/services/static-webserver/client/source/class/osparc/ui/list/CollaboratorListItem.js index c51147869fb..5b76a46045a 100644 --- a/services/static-webserver/client/source/class/osparc/ui/list/CollaboratorListItem.js +++ b/services/static-webserver/client/source/class/osparc/ui/list/CollaboratorListItem.js @@ -138,7 +138,7 @@ qx.Class.define("osparc.ui.list.CollaboratorListItem", { // highlight me const email = osparc.auth.Data.getInstance().getEmail(); - if (email === value) { + if (value && value.includes(email)) { this.addState("selected"); } }, diff --git a/services/static-webserver/client/source/class/osparc/ui/list/MemberListItem.js b/services/static-webserver/client/source/class/osparc/ui/list/MemberListItem.js index 1019b421875..db088d1f691 100644 --- a/services/static-webserver/client/source/class/osparc/ui/list/MemberListItem.js +++ b/services/static-webserver/client/source/class/osparc/ui/list/MemberListItem.js @@ -36,7 +36,7 @@ qx.Class.define("osparc.ui.list.MemberListItem", { // highlight me const email = osparc.auth.Data.getInstance().getEmail(); - if (email === value) { + if (value && value.includes(email)) { this.addState("selected"); } }, diff --git a/services/static-webserver/client/source/class/osparc/ui/markdown/MarkdownWindow.js b/services/static-webserver/client/source/class/osparc/ui/markdown/MarkdownWindow.js new file mode 100644 index 00000000000..d77a15ad4a0 --- /dev/null +++ b/services/static-webserver/client/source/class/osparc/ui/markdown/MarkdownWindow.js @@ -0,0 +1,39 @@ +/* + * oSPARC - The SIMCORE frontend - https://osparc.io + * Copyright: 2025 IT'IS Foundation - https://itis.swiss + * License: MIT - https://opensource.org/licenses/MIT + * Authors: Odei Maiz (odeimaiz) + */ + +qx.Class.define("osparc.ui.markdown.MarkdownWindow", { + extend: osparc.ui.window.Window, + + construct: function(markdownUrl) { + this.base(arguments); + + this.set({ + layout: new qx.ui.layout.VBox(), + resizable: true, + showMaximize: false, + showMinimize: false, + centerOnAppear: true, + clickAwayClose: true, + modal: true + }); + + const markdown = new osparc.ui.markdown.Markdown().set({ + padding: 15, + }); + const scrollContainer = new qx.ui.container.Scroll(); + scrollContainer.add(markdown); + this._add(scrollContainer, { + flex: 1, + }); + + if (markdownUrl) { + fetch(markdownUrl) + .then(res => res.text()) + .then(text => markdown.setValue(text)); + } + }, +}); diff --git a/services/static-webserver/client/source/class/osparc/ui/message/FlashMessage.js b/services/static-webserver/client/source/class/osparc/ui/message/FlashMessage.js index d111984ad1e..c7a8f811dfe 100644 --- a/services/static-webserver/client/source/class/osparc/ui/message/FlashMessage.js +++ b/services/static-webserver/client/source/class/osparc/ui/message/FlashMessage.js @@ -31,7 +31,7 @@ qx.Class.define("osparc.ui.message.FlashMessage", { */ construct: function(message, level, duration) { this.base(arguments); - this._setLayout(new qx.ui.layout.VBox(15)); + this._setLayout(new qx.ui.layout.VBox(10)); this.set({ padding: 18, @@ -48,14 +48,13 @@ qx.Class.define("osparc.ui.message.FlashMessage", { textColor: this.self().LOG_LEVEL_COLOR_MAP[level].color }); - if 
(message) { - this.setMessage(message); - } + this.setMessage(message); - // also support duration 0: the message won't be automatically removed - if (duration != null) { - this.setDuration(duration); + if ([null, undefined].includes(duration)) { + const wordCount = message.split(" ").length; + duration = Math.max(5500, wordCount*500); // An average reader takes 300ms to read a word } + this.setDuration(duration); this.getChildControl("closebutton"); }, @@ -68,13 +67,14 @@ qx.Class.define("osparc.ui.message.FlashMessage", { message: { check: "String", - nullable: true, - apply: "__applyMessage" + nullable: false, + apply: "__applyMessage", }, duration: { check: "Number", - nullable: true + init: null, + nullable: true, } }, @@ -142,10 +142,7 @@ qx.Class.define("osparc.ui.message.FlashMessage", { }, __applyMessage: function(value) { - const label = this.getChildControl("message"); - if (label) { - label.setValue(value); - } + this.getChildControl("message").setValue(value); }, addWidget: function(widget) { diff --git a/services/static-webserver/client/source/class/osparc/ui/message/Loading.js b/services/static-webserver/client/source/class/osparc/ui/message/Loading.js index bfd238cd9e2..e222ead4cbf 100644 --- a/services/static-webserver/client/source/class/osparc/ui/message/Loading.js +++ b/services/static-webserver/client/source/class/osparc/ui/message/Loading.js @@ -19,6 +19,7 @@ * The loading page * * ----------------------- + * | [] | * | | * | oSparc/service logo | * | spinner + header | @@ -31,19 +32,20 @@ qx.Class.define("osparc.ui.message.Loading", { extend: qx.ui.core.Widget, - /** - * Constructor for the Loading widget. - * - * @param {Boolean} showMaximizeButton - */ - construct: function(showMaximizeButton = false) { + construct: function() { this.base(arguments); - this._setLayout(new qx.ui.layout.HBox()); - this.set({ - alignX: "center" - }); - this.__buildLayout(showMaximizeButton); + const layout = new qx.ui.layout.Grid(20, 20); + layout.setRowFlex(this.self().GRID_POS.SPACER_TOP, 1); + layout.setRowFlex(this.self().GRID_POS.SPACER_BOTTOM, 1); + layout.setColumnFlex(0, 1); + layout.setColumnMaxWidth(1, 400); + layout.setColumnAlign(1, "center", "middle"); + layout.setColumnFlex(2, 1); + layout.setColumnAlign(2, "right", "middle"); + this._setLayout(layout); + + this.__buildLayout(); }, properties: { @@ -64,15 +66,21 @@ qx.Class.define("osparc.ui.message.Loading", { check: "Array", nullable: true, apply: "__applyMessages" + }, + + /** + * Show Restart-Maximize Toolbar + */ + showToolbar: { + check: "Boolean", + init: false, + event: "changeShowToolbar", } }, - // from osparc.widget.PersistentIframe events: { - /** Fired if the iframe is restored from a minimized or maximized state */ "restore" : "qx.event.type.Event", - /** Fired if the iframe is maximized */ - "maximize" : "qx.event.type.Event" + "maximize" : "qx.event.type.Event", }, statics: { @@ -82,42 +90,38 @@ qx.Class.define("osparc.ui.message.Loading", { STATUS_ICON_SIZE: 20, GRID_POS: { - LOGO: 1, - WAITING: 2, - MESSAGES: 3, - EXTRA_WIDGETS: 4 + TOOLBAR: 0, + SPACER_TOP: 1, + LOGO: 2, + WAITING: 3, + MESSAGES: 4, + EXTRA_WIDGETS: 5, + SPACER_BOTTOM: 6, } }, members: { - __mainLayout: null, __thumbnail: null, __header: null, __messagesContainer: null, __extraWidgets: null, __maxButton: null, - __buildLayout: function(showMaximizeButton) { - this.__createMainLayout(); - this.__createMaximizeButton(showMaximizeButton); - }, - - __createMainLayout: function() { - const layout = new qx.ui.layout.Grid(20, 20); - 
layout.setColumnFlex(0, 1); - const mainLayout = this.__mainLayout = new qx.ui.container.Composite(new qx.ui.layout.VBox(20).set({ - alignX: "center", - alignY: "middle" - })).set({ - width: 400, - padding: 0 - }); + __buildLayout: function() { this._add(new qx.ui.core.Widget(), { - flex: 1 + column: 0, + row: 0 }); - this._add(mainLayout); - this._add(new qx.ui.core.Widget(), { - flex: 1 + + const maxLayout = this.__createMaximizeToolbar(); + this._add(maxLayout, { + column: 2, + row: this.self().GRID_POS.TOOLBAR + }); + + this._add(new qx.ui.core.Spacer(), { + column: 1, + row: this.self().GRID_POS.SPACER_TOP }); const productLogoPath = osparc.product.Utils.getLogoPath(); @@ -137,8 +141,8 @@ qx.Class.define("osparc.ui.message.Loading", { height: logoHeight }); } - mainLayout.addAt(thumbnail, { - column: 0, + this._add(thumbnail, { + column: 1, row: this.self().GRID_POS.LOGO }); @@ -150,53 +154,76 @@ qx.Class.define("osparc.ui.message.Loading", { gap: 15, allowGrowX: false }); + const icon = waitingHeader.getChildControl("icon"); + osparc.service.StatusUI.updateCircleAnimation(icon); const label = waitingHeader.getChildControl("label"); label.set({ rich: true, - wrap: true + wrap: true, + alignX: "center", }); - const icon = waitingHeader.getChildControl("icon"); - osparc.service.StatusUI.updateCircleAnimation(icon); - mainLayout.addAt(waitingHeader, { - column: 0, + this._add(waitingHeader, { + column: 1, row: this.self().GRID_POS.WAITING }); const messages = this.__messagesContainer = new qx.ui.container.Composite(new qx.ui.layout.VBox(10).set({ alignX: "center" })); - mainLayout.addAt(messages, { - column: 0, + this._add(messages, { + column: 1, row: this.self().GRID_POS.MESSAGES }); const extraWidgets = this.__extraWidgets = new qx.ui.container.Composite(new qx.ui.layout.VBox(10).set({ alignX: "center" })); - mainLayout.addAt(extraWidgets, { - column: 0, + this._add(extraWidgets, { + column: 1, row: this.self().GRID_POS.EXTRA_WIDGETS }); + + this._add(new qx.ui.core.Spacer(), { + column: 1, + row: this.self().GRID_POS.SPACER_BOTTOM + }); + }, + + maximizeIFrame: function(maximize) { + if (maximize) { + this.fireEvent("maximize"); + this.addState("maximized"); + } else { + this.fireEvent("restore"); + this.removeState("maximized"); + } + const maxButton = this.__maxButton; + maxButton.set({ + label: osparc.widget.PersistentIframe.getZoomLabel(maximize), + icon: osparc.widget.PersistentIframe.getZoomIcon(maximize) + }); + osparc.utils.Utils.setIdToWidget(maxButton, osparc.widget.PersistentIframe.getMaximizeWidgetId(maximize)); + qx.event.message.Bus.getInstance().dispatchByName("maximizeIframe", this.hasState("maximized")); }, - __createMaximizeButton: function(showMaximizeButton) { + __createMaximizeToolbar: function() { const maximize = false; const maxButton = this.__maxButton = osparc.widget.PersistentIframe.createToolbarButton(maximize).set({ + maxHeight: 25, label: osparc.widget.PersistentIframe.getZoomLabel(maximize), icon: osparc.widget.PersistentIframe.getZoomIcon(maximize), - visibility: showMaximizeButton ? 
"visible" : "excluded" }); osparc.utils.Utils.setIdToWidget(maxButton, osparc.widget.PersistentIframe.getMaximizeWidgetId(maximize)); maxButton.addListener("execute", () => this.maximizeIFrame(!this.hasState("maximized")), this); - const maximizeLayout = new qx.ui.container.Composite(new qx.ui.layout.VBox()).set({ - maxWidth: 100 - }); - maximizeLayout.add(maxButton); - maximizeLayout.add(new qx.ui.core.Widget(), { - flex: 1 + const toolbarLayout = new qx.ui.container.Composite(new qx.ui.layout.HBox().set({ + alignX: "right", + })); + this.bind("showToolbar", toolbarLayout, "visibility", { + converter: showToolbar => showToolbar ? "visible" : "hidden" }); - this._add(maximizeLayout); + toolbarLayout.add(maxButton); + return toolbarLayout; }, __applyLogo: function(newLogo) { @@ -264,23 +291,5 @@ qx.Class.define("osparc.ui.message.Loading", { addExtraWidget: function(widget) { this.__extraWidgets.add(widget); }, - - // from osparc.widget.PersistentIframe - maximizeIFrame: function(maximize) { - if (maximize) { - this.fireEvent("maximize"); - this.addState("maximized"); - } else { - this.fireEvent("restore"); - this.removeState("maximized"); - } - const maxButton = this.__maxButton; - maxButton.set({ - label: osparc.widget.PersistentIframe.getZoomLabel(maximize), - icon: osparc.widget.PersistentIframe.getZoomIcon(maximize) - }); - osparc.utils.Utils.setIdToWidget(maxButton, osparc.widget.PersistentIframe.getMaximizeWidgetId(maximize)); - qx.event.message.Bus.getInstance().dispatchByName("maximizeIframe", this.hasState("maximized")); - } } }); diff --git a/services/static-webserver/client/source/class/osparc/ui/table/cellrenderer/ButtonRenderer.js b/services/static-webserver/client/source/class/osparc/ui/table/cellrenderer/ButtonRenderer.js new file mode 100644 index 00000000000..445840c570d --- /dev/null +++ b/services/static-webserver/client/source/class/osparc/ui/table/cellrenderer/ButtonRenderer.js @@ -0,0 +1,58 @@ +/* ************************************************************************ + + osparc - the simcore frontend + + https://osparc.io + + Copyright: + 2035 IT'IS Foundation, https://itis.swiss + + License: + MIT: https://opensource.org/licenses/MIT + + Authors: + * Odei Maiz (odeimaiz) + +************************************************************************ */ + +qx.Class.define("osparc.ui.table.cellrenderer.ButtonRenderer", { + extend: osparc.ui.table.cellrenderer.Html, + + construct: function(clickAction) { + this.base(arguments); + + this.setClickAction(clickAction); + }, + + properties: { + clickAction: { + check: "String", + nullable: false, + init: "clickAction", + }, + + buttonContent: { + check: "String", + nullable: false, + init: "", + } + }, + + members: { + // Override + _getContentHtml: function(cellInfo) { + const clickAction = this.getClickAction(); + const buttonContent = this.getButtonContent(); + + // Return the button with the image + return ` +
<div data-action="${clickAction}" data-row="${cellInfo.row}"> + ${buttonContent} + </div>
+ `; + }, + } }); diff --git a/services/static-webserver/client/source/class/osparc/ui/table/cellrenderer/Html.js b/services/static-webserver/client/source/class/osparc/ui/table/cellrenderer/Html.js index bc715a54051..11e1c5b976e 100644 --- a/services/static-webserver/client/source/class/osparc/ui/table/cellrenderer/Html.js +++ b/services/static-webserver/client/source/class/osparc/ui/table/cellrenderer/Html.js @@ -20,9 +20,7 @@ */ qx.Class.define("osparc.ui.table.cellrenderer.Html", { extend: qx.ui.table.cellrenderer.Html, - construct: function() { - this.base(arguments); - }, + members: { // Override _getCellStyle: function(cellInfo) { diff --git a/services/static-webserver/client/source/class/osparc/ui/table/cellrenderer/ImageButtonRenderer.js b/services/static-webserver/client/source/class/osparc/ui/table/cellrenderer/ImageButtonRenderer.js new file mode 100644 index 00000000000..8b9fd7896bd --- /dev/null +++ b/services/static-webserver/client/source/class/osparc/ui/table/cellrenderer/ImageButtonRenderer.js @@ -0,0 +1,44 @@ +/* ************************************************************************ + + osparc - the simcore frontend + + https://osparc.io + + Copyright: + 2025 IT'IS Foundation, https://itis.swiss + + License: + MIT: https://opensource.org/licenses/MIT + + Authors: + * Odei Maiz (odeimaiz) + +************************************************************************ */ + +qx.Class.define("osparc.ui.table.cellrenderer.ImageButtonRenderer", { + extend: osparc.ui.table.cellrenderer.ButtonRenderer, + + construct: function(clickAction, iconPath) { + this.base(arguments, clickAction); + + this.setIconPath(iconPath); + }, + + properties: { + iconPath: { + check: "String", + init: null, + nullable: false, + apply: "__applyIconPath", + }, + }, + + members: { + __applyIconPath: function(iconPath) { + const resMgr = qx.util.ResourceManager.getInstance(); + const iconUrl = resMgr.toUri(iconPath); // Resolves to the correct URL of the asset + + this.setButtonContent(`<img src="${iconUrl}" alt="icon">`); + }, + } }); diff --git a/services/static-webserver/client/source/class/osparc/ui/window/Dialog.js b/services/static-webserver/client/source/class/osparc/ui/window/Dialog.js index 1b2f7b8c6bd..0d1c61b20aa 100644 --- a/services/static-webserver/client/source/class/osparc/ui/window/Dialog.js +++ b/services/static-webserver/client/source/class/osparc/ui/window/Dialog.js @@ -100,6 +100,10 @@ qx.Class.define("osparc.ui.window.Dialog", { this.__extraWidgetsLayout.add(widget); }, + getExtraWidgetsLayout: function() { + return this.__extraWidgetsLayout; + }, + /** * Adds a button to the dialog. * @param {qx.ui.form.Button} button Button that will be added to the bottom bar of the dialog.
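Note on the two cell renderers introduced above: ButtonRenderer only emits an HTML string, so the rendered button is not a qooxdoo widget and taps must be caught by the table that owns the column. Below is a minimal usage sketch, assuming a qx.ui.table.Table instance named table, a column index of 3, a "deleteRow" action name, and an icon path; these names are illustrative and not part of the diff, and the data-action/data-row attributes are the ones carried by the div that _getContentHtml returns:

const deleteRenderer = new osparc.ui.table.cellrenderer.ImageButtonRenderer("deleteRow", "osparc/icons/trash.svg");
table.getTableColumnModel().setDataCellRenderer(3, deleteRenderer); // column 3 now renders a clickable icon

table.addListener("cellTap", e => {
  // the cell content is plain DOM, so resolve the tapped element
  // and check whether it carries the action this column was wired with
  const target = e.getOriginalTarget();
  const action = target && target.getAttribute ? target.getAttribute("data-action") : null;
  if (action === "deleteRow") {
    const row = parseInt(target.getAttribute("data-row"), 10);
    console.log("deleteRow requested for row", row); // replace with the real handler
  }
});

Keeping cells as static HTML avoids instantiating a widget per row; the price is the manual event delegation shown above.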
diff --git a/services/static-webserver/client/source/class/osparc/ui/window/TabbedView.js b/services/static-webserver/client/source/class/osparc/ui/window/TabbedView.js index e3c75167676..6706b152f92 100644 --- a/services/static-webserver/client/source/class/osparc/ui/window/TabbedView.js +++ b/services/static-webserver/client/source/class/osparc/ui/window/TabbedView.js @@ -79,7 +79,7 @@ qx.Class.define("osparc.ui.window.TabbedView", { return control || this.base(arguments, id); }, - addWidgetOnTopOfTheTabs: function(widget) { + addWidgetToTabs: function(widget) { this.getChildControl("tabs-view").getChildControl("bar").add(widget); }, diff --git a/services/static-webserver/client/source/class/osparc/ui/window/Window.js b/services/static-webserver/client/source/class/osparc/ui/window/Window.js index 9dadc428826..c8f8c304d30 100644 --- a/services/static-webserver/client/source/class/osparc/ui/window/Window.js +++ b/services/static-webserver/client/source/class/osparc/ui/window/Window.js @@ -75,6 +75,7 @@ qx.Class.define("osparc.ui.window.Window", { resizable: true, width: width, minHeight: minHeight, + maxHeight: Math.max(minHeight, document.documentElement.clientHeight), modal: true, clickAwayClose: true }); diff --git a/services/static-webserver/client/source/class/osparc/utils/DisabledPlugins.js b/services/static-webserver/client/source/class/osparc/utils/DisabledPlugins.js index dcad527fff9..ccd68623f94 100644 --- a/services/static-webserver/client/source/class/osparc/utils/DisabledPlugins.js +++ b/services/static-webserver/client/source/class/osparc/utils/DisabledPlugins.js @@ -28,6 +28,7 @@ qx.Class.define("osparc.utils.DisabledPlugins", { SCICRUNCH: "WEBSERVER_SCICRUNCH", VERSION_CONTROL: "WEBSERVER_VERSION_CONTROL", META_MODELING: "WEBSERVER_META_MODELING", + LICENSES: "WEBSERVER_LICENSES", isExportDisabled: function() { return this.__isPluginDisabled(this.EXPORT); @@ -47,6 +48,17 @@ qx.Class.define("osparc.utils.DisabledPlugins", { return this.__isPluginDisabled(this.META_MODELING); }, + isLicensesDisabled: function() { + return this.__isPluginDisabled(this.LICENSES); + }, + + isJobsEnabled: function() { + if (osparc.utils.Utils.isDevelopmentPlatform() && osparc.product.Utils.isProduct("s4lacad")) { + return true; + } + return false; + }, + __isPluginDisabled: function(key) { const statics = osparc.store.Store.getInstance().get("statics"); if (statics) { diff --git a/services/static-webserver/client/source/class/osparc/utils/Utils.js b/services/static-webserver/client/source/class/osparc/utils/Utils.js index 6f8bcec17f6..3fa7a943d85 100644 --- a/services/static-webserver/client/source/class/osparc/utils/Utils.js +++ b/services/static-webserver/client/source/class/osparc/utils/Utils.js @@ -91,6 +91,30 @@ qx.Class.define("osparc.utils.Utils", { FLOATING_Z_INDEX: 1000001 + 1, + checkImageExists: function(url) { + return new Promise(resolve => { + const img = new Image(); + img.onload = () => resolve(true); + img.onerror = () => resolve(false); + img.src = url; + }); + }, + + setUrlSourceToImage: function(image, imgSrc) { + let source = osparc.product.Utils.getThumbnailUrl(); + this.checkImageExists(imgSrc) + .then(exists => { + if (exists) { + source = imgSrc; + } + }) + .finally(() => image.setSource(source)); + }, + + addWhiteSpaces: function(integer) { + return new Intl.NumberFormat("fr-FR").format(integer); // french will add white spaces every 3 digits + }, + updateTabName: function(name) { document.title = name; }, @@ -107,6 +131,18 @@ qx.Class.define("osparc.utils.Utils", { 
return newName; }, + getIconFromResource: function(resourceMetadata) { + if (resourceMetadata) { + if (resourceMetadata["icon"]) { + return resourceMetadata["icon"]; + } + if (resourceMetadata["thumbnail"]) { + return resourceMetadata["thumbnail"]; + } + } + return osparc.dashboard.CardBase.PRODUCT_ICON; + }, + isEmail: function(value) { const reg = /^([A-Za-z0-9_\-.+])+@([A-Za-z0-9_\-.])+\.([A-Za-z]{2,})$/; return reg.test(value); @@ -252,22 +288,6 @@ qx.Class.define("osparc.utils.Utils", { }, 2*onTime); }, - prettifyMenu: function(menu) { - menu.set({ - font: "text-14", - padding: 4 - }); - menu.getChildren().forEach(menuItem => { - if (menuItem.classname !== "qx.ui.menu.Separator") { - menuItem.setPadding(4); - } - }); - - menu.getContentElement().setStyles({ - "border-radius": "4px" - }); - }, - hardRefresh: function() { // https://stackoverflow.com/questions/5721704/window-location-reload-with-clear-cache // No cigar. Tried: @@ -552,15 +572,24 @@ qx.Class.define("osparc.utils.Utils", { return daysBetween; }, - createReleaseNotesLink: function() { - const versionLink = new osparc.ui.basic.LinkLabel(); + getReleaseTag: function() { const rData = osparc.store.StaticInfo.getInstance().getReleaseData(); const platformVersion = osparc.utils.LibVersions.getPlatformVersion(); - let text = "osparc-simcore "; - text += (rData["tag"] && rData["tag"] !== "latest") ? rData["tag"] : platformVersion.version; + let text = (rData["tag"] && rData["tag"] !== "latest") ? rData["tag"] : platformVersion.version; + return text; + }, + + getReleaseLink: function() { + const rData = osparc.store.StaticInfo.getInstance().getReleaseData(); + return rData["url"] || osparc.utils.LibVersions.getVcsRefUrl(); + }, + + createReleaseNotesLink: function() { + let text = "osparc-simcore " + this.getReleaseTag(); const platformName = osparc.store.StaticInfo.getInstance().getPlatformName(); text += platformName.length ? ` (${platformName})` : ""; - const url = rData["url"] || osparc.utils.LibVersions.getVcsRefUrl(); + const url = this.self().getReleaseLink(); + const versionLink = new osparc.ui.basic.LinkLabel(); versionLink.set({ value: text, url @@ -571,14 +600,14 @@ qx.Class.define("osparc.utils.Utils", { expirationMessage: function(daysToExpiration) { let msg = ""; if (daysToExpiration === 0) { - msg = qx.locale.Manager.tr("This account will expire Today."); + msg = qx.locale.Manager.tr("Your account will expire today."); } else if (daysToExpiration === 1) { - msg = qx.locale.Manager.tr("This account will expire Tomorrow."); + msg = qx.locale.Manager.tr("Your account will expire tomorrow."); } else { - msg = qx.locale.Manager.tr("This account will expire in ") + daysToExpiration + qx.locale.Manager.tr(" days."); + msg = qx.locale.Manager.tr("Your account will expire in ") + daysToExpiration + qx.locale.Manager.tr(" days."); } msg += "
"; - msg += qx.locale.Manager.tr("Please contact us by email:"); + msg += qx.locale.Manager.tr("Please contact us via email:"); msg += "
"; const supportEmail = osparc.store.VendorInfo.getInstance().getSupportEmail(); msg += supportEmail; @@ -951,7 +980,7 @@ qx.Class.define("osparc.utils.Utils", { document.body.removeChild(textArea); if (copied) { - osparc.FlashMessenger.getInstance().logAs(qx.locale.Manager.tr("Copied to clipboard")); + osparc.FlashMessenger.logAs(qx.locale.Manager.tr("Copied to clipboard")); } return copied; @@ -1043,7 +1072,7 @@ qx.Class.define("osparc.utils.Utils", { }, setIdToWidget: (qWidget, id) => { - if (qWidget.getContentElement && qWidget.getContentElement()) { + if (qWidget.getContentElement && qWidget.getContentElement() && id) { qWidget.getContentElement().setAttribute("osparc-test-id", id); } }, @@ -1068,6 +1097,13 @@ qx.Class.define("osparc.utils.Utils", { } }, + setAltToImage: (qWidget, altText) => { + if (qWidget.getContentElement && qWidget.getContentElement()) { + qWidget.getContentElement().removeAttribute("alt"); + qWidget.getContentElement().setAttribute("alt", altText); + } + }, + // Function that creates a unique tabId even for duplicated tabs getClientSessionID: function() { const getUniqueSessionId = () => { @@ -1126,7 +1162,7 @@ qx.Class.define("osparc.utils.Utils", { }, isObject: function(v) { - return typeof v === "object" && v !== null; + return typeof v === "object" && v !== null && !Array.isArray(v); }, centerTabIcon: function(tabpage) { diff --git a/services/static-webserver/client/source/class/osparc/vipMarket/AnatomicalModelDetails.js b/services/static-webserver/client/source/class/osparc/vipMarket/AnatomicalModelDetails.js deleted file mode 100644 index 5e1f87c81ee..00000000000 --- a/services/static-webserver/client/source/class/osparc/vipMarket/AnatomicalModelDetails.js +++ /dev/null @@ -1,248 +0,0 @@ -/* ************************************************************************ - - osparc - the simcore frontend - - https://osparc.io - - Copyright: - 2024 IT'IS Foundation, https://itis.swiss - - License: - MIT: https://opensource.org/licenses/MIT - - Authors: - * Odei Maiz (odeimaiz) - -************************************************************************ */ - -qx.Class.define("osparc.vipMarket.AnatomicalModelDetails", { - extend: qx.ui.core.Widget, - - construct: function() { - this.base(arguments); - - const layout = new qx.ui.layout.VBox(15); - this._setLayout(layout); - - this.__poplulateLayout(); - }, - - events: { - "modelPurchaseRequested": "qx.event.type.Data", - "modelImportRequested": "qx.event.type.Data", - }, - - properties: { - openBy: { - check: "String", - init: null, - nullable: true, - event: "changeOpenBy", - }, - - anatomicalModelsData: { - check: "Object", - init: null, - nullable: true, - apply: "__poplulateLayout" - }, - }, - - members: { - __poplulateLayout: function() { - this._removeAll(); - - const anatomicalModelsData = this.getAnatomicalModelsData(); - if (anatomicalModelsData) { - const modelInfo = this.__createModelInfo(anatomicalModelsData); - const pricingUnits = this.__createPricingUnits(anatomicalModelsData); - const importButton = this.__createImportSection(anatomicalModelsData); - this._add(modelInfo); - this._add(pricingUnits); - this._add(importButton); - } else { - const selectModelLabel = new qx.ui.basic.Label().set({ - value: this.tr("Select a model for more details"), - font: "text-16", - alignX: "center", - alignY: "middle", - allowGrowX: true, - allowGrowY: true, - }); - this._add(selectModelLabel); - } - }, - - __createModelInfo: function(anatomicalModelsData) { - const cardGrid = new qx.ui.layout.Grid(16, 16); - const 
cardLayout = new qx.ui.container.Composite(cardGrid); - - const description = anatomicalModelsData["description"]; - description.split(" - ").forEach((desc, idx) => { - const titleLabel = new qx.ui.basic.Label().set({ - value: desc, - font: "text-16", - alignX: "center", - alignY: "middle", - allowGrowX: true, - allowGrowY: true, - }); - cardLayout.add(titleLabel, { - column: 0, - row: idx, - colSpan: 2, - }); - }); - - const thumbnail = new qx.ui.basic.Image().set({ - source: anatomicalModelsData["thumbnail"], - alignY: "middle", - scale: true, - allowGrowX: true, - allowGrowY: true, - allowShrinkX: true, - allowShrinkY: true, - maxWidth: 256, - maxHeight: 256, - }); - cardLayout.add(thumbnail, { - column: 0, - row: 2, - }); - - const features = anatomicalModelsData["features"]; - const featuresGrid = new qx.ui.layout.Grid(8, 8); - const featuresLayout = new qx.ui.container.Composite(featuresGrid); - let idx = 0; - [ - "Name", - "Version", - "Sex", - "Age", - "Weight", - "Height", - "Date", - "Ethnicity", - "Functionality", - ].forEach(key => { - if (key.toLowerCase() in features) { - const titleLabel = new qx.ui.basic.Label().set({ - value: key, - font: "text-14", - alignX: "right", - }); - featuresLayout.add(titleLabel, { - column: 0, - row: idx, - }); - - const nameLabel = new qx.ui.basic.Label().set({ - value: features[key.toLowerCase()], - font: "text-14", - alignX: "left", - }); - featuresLayout.add(nameLabel, { - column: 1, - row: idx, - }); - - idx++; - } - }); - - const doiTitle = new qx.ui.basic.Label().set({ - value: "DOI", - font: "text-14", - alignX: "right", - marginTop: 16, - }); - featuresLayout.add(doiTitle, { - column: 0, - row: idx, - }); - - const doiValue = new qx.ui.basic.Label().set({ - value: anatomicalModelsData["DOI"] ? anatomicalModelsData["DOI"] : "-", - font: "text-14", - alignX: "left", - marginTop: 16, - }); - featuresLayout.add(doiValue, { - column: 1, - row: idx, - }); - - cardLayout.add(featuresLayout, { - column: 1, - row: 2, - }); - - return cardLayout; - }, - - __createPricingUnits: function(anatomicalModelsData) { - const pricingUnitsLayout = new qx.ui.container.Composite(new qx.ui.layout.HBox(10).set({ - alignX: "center" - })); - - osparc.store.Pricing.getInstance().fetchPricingUnits(anatomicalModelsData["pricingPlanId"]) - .then(pricingUnits => { - pricingUnits.forEach(pricingUnit => { - pricingUnit.set({ - classification: "LICENSE" - }); - const pUnit = new osparc.study.PricingUnitLicense(pricingUnit).set({ - showRentButton: true, - }); - pUnit.addListener("rentPricingUnit", () => { - this.fireDataEvent("modelPurchaseRequested", { - modelId: anatomicalModelsData["modelId"], - licensedItemId: anatomicalModelsData["licensedItemId"], - pricingPlanId: anatomicalModelsData["pricingPlanId"], - pricingUnitId: pricingUnit.getPricingUnitId(), - }); - }, this); - pricingUnitsLayout.add(pUnit); - }); - }) - .catch(err => console.error(err)); - - return pricingUnitsLayout; - }, - - __createImportSection: function(anatomicalModelsData) { - const importSection = new qx.ui.container.Composite(new qx.ui.layout.VBox(5).set({ - alignX: "center" - })); - - anatomicalModelsData["purchases"].forEach(purchase => { - const seatsText = "seat" + (purchase["numberOfSeats"] > 1 ? 
"s" : ""); - const entry = new qx.ui.basic.Label().set({ - value: `${purchase["numberOfSeats"]} ${seatsText} available until ${osparc.utils.Utils.formatDate(purchase["expiresAt"])}`, - font: "text-14", - }); - importSection.add(entry); - }); - - const importButton = new qx.ui.form.Button().set({ - label: this.tr("Import"), - appearance: "strong-button", - center: true, - maxWidth: 200, - alignX: "center", - }); - this.bind("openBy", importButton, "visibility", { - converter: openBy => openBy ? "visible" : "excluded" - }); - importButton.addListener("execute", () => { - this.fireDataEvent("modelImportRequested", { - modelId: anatomicalModelsData["modelId"] - }); - }, this); - if (anatomicalModelsData["purchases"].length) { - importSection.add(importButton); - } - return importSection; - }, - } -}); diff --git a/services/static-webserver/client/source/class/osparc/vipMarket/LicensedItemDetails.js b/services/static-webserver/client/source/class/osparc/vipMarket/LicensedItemDetails.js new file mode 100644 index 00000000000..509924c3ae7 --- /dev/null +++ b/services/static-webserver/client/source/class/osparc/vipMarket/LicensedItemDetails.js @@ -0,0 +1,453 @@ +/* ************************************************************************ + + osparc - the simcore frontend + + https://osparc.io + + Copyright: + 2024 IT'IS Foundation, https://itis.swiss + + License: + MIT: https://opensource.org/licenses/MIT + + Authors: + * Odei Maiz (odeimaiz) + +************************************************************************ */ + +qx.Class.define("osparc.vipMarket.LicensedItemDetails", { + extend: qx.ui.core.Widget, + + construct: function() { + this.base(arguments); + + const layout = new qx.ui.layout.VBox(15); + this._setLayout(layout); + + this.set({ + allowGrowX: false, + }); + + this.__populateLayout(); + }, + + events: { + "modelPurchaseRequested": "qx.event.type.Data", + "modelImportRequested": "qx.event.type.Data", + }, + + properties: { + openBy: { + check: "String", + init: null, + nullable: true, + event: "changeOpenBy", + }, + + anatomicalModelsData: { + check: "Object", + init: null, + nullable: true, + apply: "__populateLayout" + }, + }, + + statics: { + createThumbnail: function(source, size) { + return new qx.ui.basic.Image().set({ + source: source, + alignY: "middle", + scale: true, + allowGrowX: true, + allowGrowY: true, + allowShrinkX: true, + allowShrinkY: true, + maxWidth: size, + maxHeight: size, + }); + }, + }, + + members: { + __modelsInfoStack: null, + + __populateLayout: function() { + this._removeAll(); + + const licensedItem = this.getAnatomicalModelsData(); + if (licensedItem && licensedItem.getLicensedResources().length) { + this.__addModelsInfo(); + this.__addSeatsSection(); + this.__addPricing(); + } else { + const selectModelLabel = new qx.ui.basic.Label().set({ + value: this.tr("Select a model for more details"), + font: "text-16", + alignX: "center", + alignY: "middle", + allowGrowX: true, + allowGrowY: true, + }); + this._add(selectModelLabel); + } + }, + + __addModelsInfo: function() { + const modelBundleLayout = new qx.ui.container.Composite(new qx.ui.layout.VBox(6)); + + const stack = this.__modelsInfoStack = new qx.ui.container.Stack(); + this._add(stack, { + flex: 1 + }); + modelBundleLayout.add(this.__modelsInfoStack); + + this.__populateModelsInfo(); + + const licensedItem = this.getAnatomicalModelsData(); + const licensedResources = licensedItem.getLicensedResources(); + if (licensedResources.length > 1) { + const modelSelectionLayout = new 
qx.ui.container.Composite(new qx.ui.layout.VBox(4)); + const titleLabel = new qx.ui.basic.Label(this.tr("This bundle contains:")); + modelSelectionLayout.add(titleLabel); + const modelsLayout = new qx.ui.container.Composite(new qx.ui.layout.VBox(4)); + modelSelectionLayout.add(modelsLayout); + + const modelSelected = idx => { + if (this.__modelsInfoStack.getSelectables().length > idx) { + this.__modelsInfoStack.setSelection([stack.getSelectables()[idx]]); + } + + const selectedBorderColor = qx.theme.manager.Color.getInstance().resolve("strong-main"); + const unselectedBorderColor = "transparent"; + modelsLayout.getChildren().forEach((thumbnailAndTitle, index) => { + const thumbnail = thumbnailAndTitle.getChildren()[0]; + osparc.utils.Utils.updateBorderColor(thumbnail, index === idx ? selectedBorderColor : unselectedBorderColor); + }); + } + + licensedResources.forEach((licensedResource, idx) => { + const modelLayout = new qx.ui.container.Composite(new qx.ui.layout.HBox(4)).set({ + allowGrowX: false, + }); + const miniThumbnail = this.self().createThumbnail(licensedResource.getThumbnail(), 32); + osparc.utils.Utils.addBorder(miniThumbnail); + modelLayout.add(miniThumbnail); + const title = new qx.ui.basic.Label().set({ + value: osparc.data.model.LicensedItemResource.longName(licensedResource), + alignY: "middle" + }); + modelLayout.add(title); + modelLayout.setCursor("pointer"); + modelLayout.addListener("tap", () => modelSelected(idx)); + modelsLayout.add(modelLayout); + }); + modelBundleLayout.add(modelSelectionLayout); + + modelSelected(0); + } + + this._add(modelBundleLayout); + }, + + __populateModelsInfo: function() { + this.__modelsInfoStack.removeAll(); + + const licensedItem = this.getAnatomicalModelsData(); + const licensedResources = licensedItem.getLicensedResources(); + licensedResources.forEach((licensedResource, index) => { + const modelInfoLayout = new qx.ui.container.Composite(new qx.ui.layout.VBox(4)); + + const topGrid = new qx.ui.layout.Grid(8, 6); + topGrid.setColumnFlex(0, 1); + const headerLayout = new qx.ui.container.Composite(topGrid); + if (licensedResource.getTitle()) { + const titleLabel = new qx.ui.basic.Label().set({ + value: licensedResource.getTitle(), + font: "text-16", + alignY: "middle", + allowGrowX: true, + allowGrowY: true, + }); + headerLayout.add(titleLabel, { + column: 0, + row: 0, + }); + } + if (licensedResource.getSubtitle()) { + const subtitleLabel = new qx.ui.basic.Label().set({ + value: licensedResource.getSubtitle(), + font: "text-16", + alignY: "middle", + allowGrowX: true, + allowGrowY: true, + }); + headerLayout.add(subtitleLabel, { + column: 0, + row: 1, + }); + } + if (licensedResource.getManufacturerLabel()) { + const manufacturerLink = new qx.ui.basic.Atom().set({ + label: licensedResource.getManufacturerLabel(), + icon: licensedResource.getManufacturerIcon(), + font: "text-16", + gap: 10, + iconPosition: "right", + cursor: "pointer", + }); + manufacturerLink.getChildControl("icon").set({ + maxWidth: 32, + maxHeight: 32, + scale: true, + decorator: "rounded", + }); + manufacturerLink.addListener("tap", () => window.open(licensedResource.getManufacturerLink())); + headerLayout.add(manufacturerLink, { + column: 1, + row: 0, + rowSpan: 2, + }); + } + modelInfoLayout.add(headerLayout); + + + const middleLayout = new qx.ui.container.Composite(new qx.ui.layout.HBox(16)); + const thumbnail = this.self().createThumbnail(licensedResource.getThumbnail(), 256); + middleLayout.add(thumbnail); + + const features = licensedResource.getFeatures(); + 
const featuresGrid = new qx.ui.layout.Grid(8, 8); + const featuresLayout = new qx.ui.container.Composite(featuresGrid); + let idx = 0; + const capitalizeField = [ + "Sex", + "Species", + "Ethnicity", + "Functionality", + ]; + [ + "Name", + "Version", + "Date", + "Species", + "Sex", + "Age", + "Weight", + "Height", + "Ethnicity", + "Functionality", + ].forEach(key => { + if (key.toLowerCase() in features) { + const titleLabel = new qx.ui.basic.Label().set({ + value: key, + font: "text-14", + alignX: "right", + }); + featuresLayout.add(titleLabel, { + column: 0, + row: idx, + }); + + const value = features[key.toLowerCase()]; + const featureValue = capitalizeField.includes(key) ? osparc.utils.Utils.capitalize(value) : value; + const nameLabel = new qx.ui.basic.Label().set({ + value: featureValue, + font: "text-14", + alignX: "left", + }); + featuresLayout.add(nameLabel, { + column: 1, + row: idx, + }); + + idx++; + } + }); + + if (licensedResource.getDoi()) { + const doiTitle = new qx.ui.basic.Label().set({ + value: "DOI", + font: "text-14", + alignX: "right", + marginTop: 10, + }); + featuresLayout.add(doiTitle, { + column: 0, + row: idx, + }); + + const doiToLink = doi => { + const doiLabel = new osparc.ui.basic.LinkLabel("-").set({ + font: "text-14", + alignX: "left", + marginTop: 10, + }); + if (doi) { + doiLabel.set({ + value: doi, + url: "https://doi.org/" + doi, + font: "link-label-14", + }); + } + return doiLabel; + }; + featuresLayout.add(doiToLink(licensedResource.getDoi()), { + column: 1, + row: idx, + }); + idx++; + } + + if (licensedResource.getTermsOfUseUrl()) { // remove the first one when this info goes down to the model + const tAndC = new qx.ui.basic.Label().set({ + font: "text-14", + value: this.tr("Terms and Conditions"), + rich: true, + anonymous: false, + cursor: "pointer", + }); + tAndC.addListener("tap", () => this.__openLicense(licensedResource.getTermsOfUseUrl())); + featuresLayout.add(tAndC, { + column: 1, + row: idx, + }); + idx++; + } + + middleLayout.add(featuresLayout); + + modelInfoLayout.add(middleLayout); + + const importSection = this.__createImportSection(licensedItem, index); + modelInfoLayout.add(importSection); + + this.__modelsInfoStack.add(modelInfoLayout); + }) + }, + + __openLicense: function(rawLink) { + if (rawLink.includes("github")) { + // make sure the raw version of the link is shown + rawLink += "?raw=true"; + } + const mdWindow = new osparc.ui.markdown.MarkdownWindow(rawLink).set({ + caption: this.tr("Terms and Conditions"), + width: 800, + height: 600, + }); + mdWindow.open(); + }, + + __createImportSection: function(anatomicalModelsData, selectedIdx) { + const importSection = new qx.ui.container.Composite(new qx.ui.layout.VBox().set({ + alignX: "center" + })); + + const importButton = new qx.ui.form.Button().set({ + label: this.tr("Import"), + appearance: "strong-button", + center: true, + maxWidth: 200, + alignX: "center", + marginTop: 10, + }); + this.bind("openBy", importButton, "visibility", { + converter: openBy => openBy ? 
"visible" : "excluded" + }); + importButton.addListener("execute", () => { + this.fireDataEvent("modelImportRequested", { + modelId: anatomicalModelsData.getLicensedResources()[selectedIdx].getModelId(), + categoryId: anatomicalModelsData.getCategoryId(), + }); + }, this); + + osparc.store.Pricing.getInstance().fetchPricingUnits(anatomicalModelsData.getPricingPlanId()) + .then(pricingUnits => { + if ( + anatomicalModelsData.getSeats().length || + (pricingUnits.length === 1 && pricingUnits[0].getCost() === 0) + ) { + importSection.add(importButton); + } + }); + + return importSection; + }, + + __addPricing: function() { + const layout = new qx.ui.container.Composite(new qx.ui.layout.VBox().set({ + alignX: "center" + })) + + const pricingLayout = new qx.ui.container.Composite(new qx.ui.layout.VBox()).set({ + allowGrowX: false, + decorator: "border", + }); + layout.add(pricingLayout) + + const pricingUnitsLayout = new qx.ui.container.Composite(new qx.ui.layout.HBox(10).set({ + alignX: "center" + })); + const licensedItem = this.getAnatomicalModelsData(); + osparc.store.Pricing.getInstance().fetchPricingUnits(licensedItem.getPricingPlanId()) + .then(pricingUnits => { + if (pricingUnits.length === 1 && pricingUnits[0].getCost() === 0) { + const availableForImporting = new qx.ui.basic.Label().set({ + font: "text-14", + value: this.tr("Available for Importing"), + padding: 10, + }); + pricingUnitsLayout.add(availableForImporting); + // hide the text if Import button is there + this.bind("openBy", pricingLayout, "visibility", { + converter: openBy => openBy ? "excluded" : "visible" + }); + } else { + pricingUnits.forEach(pricingUnit => { + pricingUnit.set({ + classification: "LICENSE" + }); + const pUnit = new osparc.study.PricingUnitLicense(pricingUnit).set({ + showRentButton: true, + }); + pUnit.addListener("rentPricingUnit", () => { + this.fireDataEvent("modelPurchaseRequested", { + licensedItemId: licensedItem.getLicensedItemId(), + pricingPlanId: licensedItem.getPricingPlanId(), + pricingUnitId: pricingUnit.getPricingUnitId(), + }); + }, this); + pricingUnitsLayout.add(pUnit); + }); + } + }) + .catch(err => console.error(err)); + this._add(pricingUnitsLayout); + pricingLayout.add(pricingUnitsLayout); + + this._add(layout); + }, + + __addSeatsSection: function() { + const licensedItem = this.getAnatomicalModelsData(); + if (licensedItem.getSeats().length === 0) { + return; + } + const seatsSection = new qx.ui.container.Composite(new qx.ui.layout.VBox(5).set({ + alignX: "left", + })); + + licensedItem.getSeats().forEach(purchase => { + const nSeats = purchase["numOfSeats"]; + const seatsText = "seat" + (nSeats > 1 ? 
"s" : ""); + const entry = new qx.ui.basic.Label().set({ + value: `${nSeats} ${seatsText} available until ${osparc.utils.Utils.formatDate(purchase["expireAt"])}`, + font: "text-14", + }); + seatsSection.add(entry); + }); + + this._add(seatsSection); + }, + } +}); diff --git a/services/static-webserver/client/source/class/osparc/vipMarket/AnatomicalModelListItem.js b/services/static-webserver/client/source/class/osparc/vipMarket/LicensedItemListItem.js similarity index 74% rename from services/static-webserver/client/source/class/osparc/vipMarket/AnatomicalModelListItem.js rename to services/static-webserver/client/source/class/osparc/vipMarket/LicensedItemListItem.js index ea3897841fa..9756cc5931a 100644 --- a/services/static-webserver/client/source/class/osparc/vipMarket/AnatomicalModelListItem.js +++ b/services/static-webserver/client/source/class/osparc/vipMarket/LicensedItemListItem.js @@ -15,7 +15,7 @@ ************************************************************************ */ -qx.Class.define("osparc.vipMarket.AnatomicalModelListItem", { +qx.Class.define("osparc.vipMarket.LicensedItemListItem", { extend: qx.ui.core.Widget, implement : [qx.ui.form.IModel, osparc.filter.IFilterable], include : [qx.ui.form.MModelProperty, osparc.filter.MFilterable], @@ -24,11 +24,12 @@ qx.Class.define("osparc.vipMarket.AnatomicalModelListItem", { this.base(arguments); const layout = new qx.ui.layout.Grid(5, 5); - layout.setColumnWidth(0, 64); layout.setRowFlex(0, 1); - layout.setColumnFlex(1, 1); + layout.setColumnFlex(1, 1); // flex display name + layout.setColumnWidth(0, 48); layout.setColumnAlign(0, "center", "middle"); layout.setColumnAlign(1, "left", "middle"); + layout.setColumnAlign(2, "center", "middle"); this._setLayout(layout); this.set({ @@ -53,11 +54,25 @@ qx.Class.define("osparc.vipMarket.AnatomicalModelListItem", { init: "selectable" }, - modelId: { - check: "Number", + key: { + check: "String", init: null, nullable: false, - event: "changeModelId", + event: "changeKey", + }, + + version: { + check: "String", + init: null, + nullable: false, + event: "changeVersion", + }, + + licensedItemId: { + check: "String", + init: null, + nullable: false, + event: "changeLicensedItemId", }, thumbnail: { @@ -68,12 +83,12 @@ qx.Class.define("osparc.vipMarket.AnatomicalModelListItem", { apply: "__applyThumbnail", }, - name: { + displayName: { check: "String", init: null, nullable: false, - event: "changeName", - apply: "__applyName", + event: "changeDisplayName", + apply: "__applyDisplayName", }, date: { @@ -83,13 +98,6 @@ qx.Class.define("osparc.vipMarket.AnatomicalModelListItem", { event: "changeDate", }, - licensedItemId: { - check: "String", - init: null, - nullable: false, - event: "changeLicensedItemId", - }, - pricingPlanId: { check: "Number", init: null, @@ -97,12 +105,12 @@ qx.Class.define("osparc.vipMarket.AnatomicalModelListItem", { event: "changePricingPlanId", }, - purchases: { + seats: { check: "Array", nullable: false, init: [], - event: "changePurchases", - apply: "__applyPurchases", + event: "changeSeats", + apply: "__applySeats", }, }, @@ -145,6 +153,16 @@ qx.Class.define("osparc.vipMarket.AnatomicalModelListItem", { column: 1 }); break; + case "n-seats": + control = new qx.ui.basic.Label().set({ + font: "text-14", + alignY: "middle", + }); + this._add(control, { + row: 0, + column: 2 + }); + break; } control.set({ anonymous: true, // pass the tap action over @@ -154,19 +172,29 @@ qx.Class.define("osparc.vipMarket.AnatomicalModelListItem", { }, __applyThumbnail: function(value) { - 
this.getChildControl("thumbnail").setSource(value); + if (value) { + this.getChildControl("thumbnail").setSource(value); + } }, - __applyName: function(value) { + __applyDisplayName: function(value) { this.getChildControl("name").setValue(value); + + this.set({ + toolTipText: value + }); }, - __applyPurchases: function(purchases) { - if (purchases.length) { - this.set({ - textColor: "default-button-text", - backgroundColor: "strong-main", - }) + __applySeats: function(seats) { + if (seats === null || seats.length === 0) { + return; + } + const nSeatsLabel = this.getChildControl("n-seats"); + const nSeats = osparc.store.LicensedItems.seatsToNSeats(seats); + if (nSeats) { + nSeatsLabel.setValue(`(${nSeats})`); + } else { + nSeatsLabel.resetValue(); } }, @@ -189,7 +217,7 @@ qx.Class.define("osparc.vipMarket.AnatomicalModelListItem", { _shouldApplyFilter: function(data) { if (data.text) { const checks = [ - this.getName(), + this.getDisplayName(), ]; if (checks.filter(check => check && check.toLowerCase().trim().includes(data.text)).length == 0) { return true; diff --git a/services/static-webserver/client/source/class/osparc/vipMarket/Market.js b/services/static-webserver/client/source/class/osparc/vipMarket/Market.js index 2d88ab85ad8..8e93f4ec75c 100644 --- a/services/static-webserver/client/source/class/osparc/vipMarket/Market.js +++ b/services/static-webserver/client/source/class/osparc/vipMarket/Market.js @@ -18,49 +18,27 @@ qx.Class.define("osparc.vipMarket.Market", { extend: osparc.ui.window.TabbedView, - construct: function(category) { + construct: function(openCategory) { this.base(arguments); const miniWallet = osparc.desktop.credits.BillingCenter.createMiniWalletView().set({ paddingRight: 10, minWidth: 150, }); - this.addWidgetOnTopOfTheTabs(miniWallet); - - osparc.store.LicensedItems.getInstance().getLicensedItems() - .then(() => { - [{ - category: "human", - label: "Humans", - icon: "@FontAwesome5Solid/users/20", - vipSubset: "HUMAN_BODY", - }, { - category: "human_region", - label: "Humans (Region)", - icon: "@FontAwesome5Solid/users/20", - vipSubset: "HUMAN_BODY_REGION", - }, { - category: "animal", - label: "Animals", - icon: "@FontAwesome5Solid/users/20", - vipSubset: "ANIMAL", - }, { - category: "phantom", - label: "Phantoms", - icon: "@FontAwesome5Solid/users/20", - vipSubset: "PHANTOM", - }].forEach(marketInfo => { - this.__buildViPMarketPage(marketInfo); - }); + this.addWidgetToTabs(miniWallet); - if (category) { - this.openCategory(category); - } - }); + const store = osparc.store.Store.getInstance(); + const contextWallet = store.getContextWallet(); + if (!contextWallet) { + return; + } + + this.__reqOpenCategory = openCategory; + this.__populateCategories(); }, events: { - "importMessageSent": "qx.event.type.Data", + "importMessageSent": "qx.event.type.Event", }, properties: { @@ -73,19 +51,134 @@ qx.Class.define("osparc.vipMarket.Market", { }, members: { - __buildViPMarketPage: function(marketInfo) { - const vipMarketView = new osparc.vipMarket.VipMarket(); + __reqOpenCategory: null, + __myModelsCategoryMarket: null, + __myModelsCategoryButton: null, + + __populateCategories: function() { + const store = osparc.store.Store.getInstance(); + const contextWallet = store.getContextWallet(); + + const walletId = contextWallet.getWalletId(); + const licensedItemsStore = osparc.store.LicensedItems.getInstance(); + Promise.all([ + licensedItemsStore.getLicensedItems(), + licensedItemsStore.getPurchasedLicensedItems(walletId), + ]) + .then(async values => { + const 
licensedItems = values[0]; + const purchasedItems = values[1]; + osparc.data.model.LicensedItem.addSeatsFromPurchases(licensedItems, purchasedItems); + const categories = []; + const availableCategory = { + categoryId: "availableModels", + label: this.tr("My Models"), + icon: "osparc/market/RentedModels.svg", + items: [], + }; + categories.push(availableCategory); + let openCategory = null; + Object.values(licensedItems).forEach(licensedItem => { + if (licensedItem.getSeats().length) { + availableCategory["items"].push(licensedItem); + if (!this.__reqOpenCategory) { + openCategory = availableCategory["categoryId"]; + } + } + if (licensedItem && licensedItem.getCategoryId()) { + const categoryId = licensedItem.getCategoryId(); + let category = categories.find(cat => cat["categoryId"] === categoryId); + if (!category) { + category = { + categoryId, + label: licensedItem.getCategoryDisplay() || "Category", + icon: licensedItem.getCategoryIcon(), + items: [], + }; + if (!openCategory) { + openCategory = categoryId; + } + categories.push(category); + } + category["items"].push(licensedItem); + } + }); + + await this.__addFreeItems(); + + categories.forEach(category => { + this.__buildViPMarketPage(category, category["items"]); + }); + + if (openCategory) { + this.__openCategory(openCategory); + } + }); + }, + + __addFreeItems: function() { + const licensedItemsStore = osparc.store.LicensedItems.getInstance(); + return licensedItemsStore.getLicensedItems() + .then(async licensedItems => { + this.__freeItems = []; + const licensedItemsArr = Object.values(licensedItems); + for (const licensedItem of licensedItemsArr) { + const pricingUnits = await osparc.store.Pricing.getInstance().fetchPricingUnits(licensedItem.getPricingPlanId()); + if (pricingUnits.length === 1 && pricingUnits[0].getCost() === 0) { + this.__freeItems.push(licensedItem); + } + } + if (!this.__reqOpenCategory && this.__freeItems.length) { + this.__openCategory("availableModels"); + } + this.__repopulateMyModelsCategory(); + }); + }, + + __buildViPMarketPage: function(marketTabInfo, licensedItems = []) { + const vipMarketView = new osparc.vipMarket.VipMarket(licensedItems); vipMarketView.set({ - vipSubset: marketInfo["vipSubset"], + category: marketTabInfo["categoryId"], }); this.bind("openBy", vipMarketView, "openBy"); + vipMarketView.addListener("modelPurchased", () => this.__repopulateMyModelsCategory()); vipMarketView.addListener("importMessageSent", () => this.fireEvent("importMessageSent")); - const page = this.addTab(marketInfo["label"], marketInfo["icon"], vipMarketView); - page.category = marketInfo["category"]; + const page = this.addTab(marketTabInfo["label"], marketTabInfo["icon"], vipMarketView); + page.category = marketTabInfo["categoryId"]; + if (page.category === "availableModels") { + this.__myModelsCategoryMarket = vipMarketView; + this.__myModelsCategoryButton = page.getChildControl("button"); + this.__myModelsCategoryButton.setVisibility(licensedItems.length ? 
"visible" : "excluded"); + } return page; }, - openCategory: function(category) { + __repopulateMyModelsCategory: function() { + const store = osparc.store.Store.getInstance(); + const contextWallet = store.getContextWallet(); + const walletId = contextWallet.getWalletId(); + const licensedItemsStore = osparc.store.LicensedItems.getInstance(); + Promise.all([ + licensedItemsStore.getLicensedItems(), + licensedItemsStore.getPurchasedLicensedItems(walletId), + ]) + .then(values => { + const licensedItems = values[0]; + const purchasedItems = values[1]; + osparc.data.model.LicensedItem.addSeatsFromPurchases(licensedItems, purchasedItems); + let items = []; + Object.values(licensedItems).forEach(licensedItem => { + if (licensedItem.getSeats().length) { + items.push(licensedItem); + } + }); + items = items.concat(this.__freeItems); + this.__myModelsCategoryButton.setVisibility(items.length ? "visible" : "excluded"); + this.__myModelsCategoryMarket.setLicensedItems(items); + }); + }, + + __openCategory: function(category) { const viewFound = this.getChildControl("tabs-view").getChildren().find(view => view.category === category); if (viewFound) { this._openPage(viewFound); diff --git a/services/static-webserver/client/source/class/osparc/vipMarket/MarketWindow.js b/services/static-webserver/client/source/class/osparc/vipMarket/MarketWindow.js index c238f5618b8..70f7d803a76 100644 --- a/services/static-webserver/client/source/class/osparc/vipMarket/MarketWindow.js +++ b/services/static-webserver/client/source/class/osparc/vipMarket/MarketWindow.js @@ -19,12 +19,12 @@ qx.Class.define("osparc.vipMarket.MarketWindow", { extend: osparc.ui.window.TabbedWindow, construct: function(nodeId, category) { - this.base(arguments, "store", this.tr("Market")); + this.base(arguments, "store", this.tr("The Shop")); osparc.utils.Utils.setIdToWidget(this, "storeWindow"); - const width = 1035; - const height = 700; + const width = Math.min(1200, window.innerWidth); // since we go over the supported minimum, take the min + const height = Math.min(700, window.innerHeight); // since we go over the supported minimum, take the min this.set({ width, height diff --git a/services/static-webserver/client/source/class/osparc/vipMarket/VipMarket.js b/services/static-webserver/client/source/class/osparc/vipMarket/VipMarket.js index 77c633b01c4..7cf61cfcbac 100644 --- a/services/static-webserver/client/source/class/osparc/vipMarket/VipMarket.js +++ b/services/static-webserver/client/source/class/osparc/vipMarket/VipMarket.js @@ -16,18 +16,33 @@ ************************************************************************ */ qx.Class.define("osparc.vipMarket.VipMarket", { - extend: qx.ui.core.Widget, - - construct: function() { - this.base(arguments); - - this._setLayout(new qx.ui.layout.HBox(10)); + extend: qx.ui.splitpane.Pane, + + construct: function(licensedItems) { + this.base(arguments, "horizontal"); + + this.setOffset(5); + this.getChildControl("splitter").set({ + width: 1, + backgroundColor: "text", + opacity: 0.3, + }); + this.getChildControl("slider").set({ + width: 2, + backgroundColor: "text", + opacity: 1, + }); this.__buildLayout(); + + if (licensedItems) { + this.setLicensedItems(licensedItems); + } }, events: { - "importMessageSent": "qx.event.type.Data" + "modelPurchased": "qx.event.type.Event", + "importMessageSent": "qx.event.type.Event", }, properties: { @@ -38,17 +53,16 @@ qx.Class.define("osparc.vipMarket.VipMarket", { event: "changeOpenBy", }, - vipSubset: { - check: ["HUMAN_BODY", "HUMAN_BODY_REGION", 
"ANIMAL", "PHANTOM"], + category: { + check: "String", init: null, nullable: true, - apply: "__fetchModels", }, }, members: { - __anatomicalModels: null, - __anatomicalModelsModel: null, + __anatomicalBundles: null, + __anatomicalBundlesModel: null, _createChildControlImpl: function(id) { let control; @@ -56,16 +70,16 @@ qx.Class.define("osparc.vipMarket.VipMarket", { case "left-side": control = new qx.ui.container.Composite(new qx.ui.layout.VBox(10)).set({ alignY: "middle", + paddingRight: 5, }); - this._add(control); + this.add(control, 0); // flex: 0 break; case "right-side": control = new qx.ui.container.Composite(new qx.ui.layout.VBox(10)).set({ alignY: "middle", + paddingLeft: 5, }); - this._add(control, { - flex: 1 - }); + this.add(control, 1); // flex: 1 break; case "toolbar-layout": control = new qx.ui.container.Composite(new qx.ui.layout.HBox(10)).set({ @@ -84,10 +98,12 @@ qx.Class.define("osparc.vipMarket.VipMarket", { control = new osparc.filter.TextFilter("text", "vipModels").set({ alignY: "middle", allowGrowY: false, - minWidth: 160, + allowGrowX: true, + marginRight: 5, }); control.getChildControl("textfield").set({ backgroundColor: "transparent", + allowGrowX: true, }); this.addListener("appear", () => control.getChildControl("textfield").focus()); this.getChildControl("toolbar-layout").add(control, { @@ -98,23 +114,25 @@ qx.Class.define("osparc.vipMarket.VipMarket", { control = new qx.ui.form.List().set({ decorator: "no-border", spacing: 5, - minWidth: 250, - maxWidth: 250, + width: 250, backgroundColor: "transparent", }); this.getChildControl("left-side").add(control, { flex: 1 }); break; - case "models-details": - control = new osparc.vipMarket.AnatomicalModelDetails().set({ + case "models-details": { + control = new osparc.vipMarket.LicensedItemDetails().set({ padding: 5, }); + const scrollView = new qx.ui.container.Scroll(); + scrollView.add(control); this.bind("openBy", control, "openBy"); - this.getChildControl("right-side").add(control, { + this.getChildControl("right-side").add(scrollView, { flex: 1 }); break; + } } return control || this.base(arguments, id); }, @@ -124,18 +142,19 @@ qx.Class.define("osparc.vipMarket.VipMarket", { this.getChildControl("filter-text"); const modelsUIList = this.getChildControl("models-list"); - const anatomicalModelsModel = this.__anatomicalModelsModel = new qx.data.Array(); - const membersCtrl = new qx.data.controller.List(anatomicalModelsModel, modelsUIList, "name"); + const anatomicalModelsModel = this.__anatomicalBundlesModel = new qx.data.Array(); + const membersCtrl = new qx.data.controller.List(anatomicalModelsModel, modelsUIList, "displayName"); membersCtrl.setDelegate({ - createItem: () => new osparc.vipMarket.AnatomicalModelListItem(), + createItem: () => new osparc.vipMarket.LicensedItemListItem(), bindItem: (ctrl, item, id) => { - ctrl.bindProperty("modelId", "modelId", null, item, id); + ctrl.bindProperty("key", "key", null, item, id); + ctrl.bindProperty("version", "version", null, item, id); ctrl.bindProperty("thumbnail", "thumbnail", null, item, id); - ctrl.bindProperty("name", "name", null, item, id); + ctrl.bindProperty("displayName", "displayName", null, item, id); ctrl.bindProperty("date", "date", null, item, id); ctrl.bindProperty("licensedItemId", "licensedItemId", null, item, id); ctrl.bindProperty("pricingPlanId", "pricingPlanId", null, item, id); - ctrl.bindProperty("purchases", "purchases", null, item, id); + ctrl.bindProperty("seats", "seats", null, item, id); }, configureItem: item => { 
item.subscribeToFilterGroup("vipModels"); @@ -147,17 +166,17 @@ qx.Class.define("osparc.vipMarket.VipMarket", { thumbnail: "@FontAwesome5Solid/spinner/32", name: this.tr("Loading"), }; - this.__anatomicalModelsModel.append(qx.data.marshal.Json.createModel(loadingModel)); + this.__anatomicalBundlesModel.append(qx.data.marshal.Json.createModel(loadingModel)); const anatomicModelDetails = this.getChildControl("models-details"); modelsUIList.addListener("changeSelection", e => { const selection = e.getData(); if (selection.length) { - const modelId = selection[0].getModelId(); - const modelFound = this.__anatomicalModels.find(anatomicalModel => anatomicalModel["modelId"] === modelId); - if (modelFound) { - anatomicModelDetails.setAnatomicalModelsData(modelFound); + const licensedItemId = selection[0].getLicensedItemId(); + const licensedItemBundle = this.__anatomicalBundles.find(anatomicalBundle => anatomicalBundle.getLicensedItemId() === licensedItemId); + if (licensedItemBundle) { + anatomicModelDetails.setAnatomicalModelsData(licensedItemBundle); return; } } @@ -165,152 +184,88 @@ qx.Class.define("osparc.vipMarket.VipMarket", { }, this); }, - __fetchModels: function(vipSubset) { - const licensedItemsStore = osparc.store.LicensedItems.getInstance(); - licensedItemsStore.getVipModels(vipSubset) - .then(allAnatomicalModels => { - const store = osparc.store.Store.getInstance(); - const contextWallet = store.getContextWallet(); - if (!contextWallet) { - return; - } - const walletId = contextWallet.getWalletId(); - Promise.all([ - licensedItemsStore.getLicensedItems(), - licensedItemsStore.getPurchasedLicensedItems(walletId), - ]) - .then(values => { - const licensedItems = values[0]; - const purchasesItems = values[1]; - - this.__anatomicalModels = []; - allAnatomicalModels.forEach(model => { - const modelId = model["modelId"]; - const licensedItem = licensedItems.find(licItem => licItem["name"] == modelId); - if (licensedItem) { - const anatomicalModel = osparc.utils.Utils.deepCloneObject(model); - anatomicalModel["date"] = new Date(anatomicalModel["date"]); - // attach license data - anatomicalModel["licensedItemId"] = licensedItem["licensedItemId"]; - anatomicalModel["pricingPlanId"] = licensedItem["pricingPlanId"]; - // attach leased data - anatomicalModel["purchases"] = []; // default - const purchasesItemsFound = purchasesItems.filter(purchasesItem => purchasesItem["licensedItemId"] === licensedItem["licensedItemId"]); - if (purchasesItemsFound.length) { - purchasesItemsFound.forEach(purchasesItemFound => { - anatomicalModel["purchases"].push({ - expiresAt: new Date(purchasesItemFound["expireAt"]), - numberOfSeats: purchasesItemFound["numOfSeats"], - }) - }); - } - this.__anatomicalModels.push(anatomicalModel); - } - }); - - this.__populateModels(); - - const anatomicModelDetails = this.getChildControl("models-details"); - anatomicModelDetails.addListener("modelPurchaseRequested", e => { - if (!contextWallet) { - return; - } - const { - modelId, - licensedItemId, - pricingPlanId, - pricingUnitId, - } = e.getData(); - let numberOfSeats = null; - const pricingUnit = osparc.store.Pricing.getInstance().getPricingUnit(pricingPlanId, pricingUnitId); - if (pricingUnit) { - const split = pricingUnit.getName().split(" "); - numberOfSeats = parseInt(split[0]); - } - licensedItemsStore.purchaseLicensedItem(licensedItemId, walletId, pricingPlanId, pricingUnitId, numberOfSeats) - .then(() => { - const expirationDate = osparc.study.PricingUnitLicense.getExpirationDate(); - const purchaseData = { - 
expiresAt: expirationDate, // get this info from the response - numberOfSeats, // get this info from the response - }; - - let msg = numberOfSeats; - msg += " seat" + (purchaseData["numberOfSeats"] > 1 ? "s" : ""); - msg += " rented until " + osparc.utils.Utils.formatDate(purchaseData["expiresAt"]); - osparc.FlashMessenger.getInstance().logAs(msg, "INFO"); - - const found = this.__anatomicalModels.find(model => model["modelId"] === modelId); - if (found) { - found["purchases"].push(purchaseData); - this.__populateModels(modelId); - anatomicModelDetails.setAnatomicalModelsData(found); - } - }) - .catch(err => { - const msg = err.message || this.tr("Cannot purchase model"); - osparc.FlashMessenger.getInstance().logAs(msg, "ERROR"); - }); - }, this); - - anatomicModelDetails.addListener("modelImportRequested", e => { - const { - modelId - } = e.getData(); - this.__sendImportModelMessage(modelId); - }, this); - }); - }) - .catch(err => console.error(err)); + setLicensedItems: function(licensedBundles) { + const store = osparc.store.Store.getInstance(); + const contextWallet = store.getContextWallet(); + if (!contextWallet) { + return; + } + + this.__anatomicalBundles = licensedBundles; + + this.__populateModels(); + + const anatomicModelDetails = this.getChildControl("models-details"); + if (!anatomicModelDetails.hasListener("modelPurchaseRequested")) { + anatomicModelDetails.addListener("modelPurchaseRequested", e => { + const { + licensedItemId, + pricingPlanId, + pricingUnitId, + } = e.getData(); + this.__modelPurchaseRequested(licensedItemId, pricingPlanId, pricingUnitId); + }, this); + } + if (!anatomicModelDetails.hasListener("modelImportRequested")) { + anatomicModelDetails.addListener("modelImportRequested", e => { + const { + modelId, + categoryId, + } = e.getData(); + this.__sendImportModelMessage(modelId, categoryId); + }, this); + } }, - __populateModels: function(selectModelId) { - const models = this.__anatomicalModels; + __populateModels: function(selectLicensedItemId) { + const models = this.__anatomicalBundles; - this.__anatomicalModelsModel.removeAll(); + this.__anatomicalBundlesModel.removeAll(); const sortModel = sortBy => { models.sort((a, b) => { // first criteria - if (b["purchases"].length !== a["purchases"].length) { - // leased first - return b["purchases"].length - a["purchases"].length; + const nASeats = osparc.store.LicensedItems.seatsToNSeats(a.getSeats()); + const nBSeats = osparc.store.LicensedItems.seatsToNSeats(b.getSeats()); + if (nBSeats !== nASeats) { + // nSeats first + return nBSeats - nASeats; } // second criteria if (sortBy) { if (sortBy["sort"] === "name") { if (sortBy["order"] === "down") { // A -> Z - return a["name"].localeCompare(b["name"]); + return a.getDisplayName().localeCompare(b.getDisplayName()); } - return b["name"].localeCompare(a["name"]); + return b.getDisplayName().localeCompare(a.getDisplayName()); } else if (sortBy["sort"] === "date") { if (sortBy["order"] === "down") { // Now -> Yesterday - return b["date"] - a["date"]; + return b.getDate() - a.getDate(); } - return a["date"] - b["date"]; + return a.getDate() - b.getDate(); } } // default criteria // A -> Z - return a["name"].localeCompare(b["name"]); + return a.getDisplayName().localeCompare(b.getDisplayName()); }); }; sortModel(); - models.forEach(model => this.__anatomicalModelsModel.append(qx.data.marshal.Json.createModel(model))); + models.forEach(model => this.__anatomicalBundlesModel.append(qx.data.marshal.Json.createModel(model))); 
this.getChildControl("sort-button").addListener("sortBy", e => { - this.__anatomicalModelsModel.removeAll(); + this.__anatomicalBundlesModel.removeAll(); const sortBy = e.getData(); sortModel(sortBy); - models.forEach(model => this.__anatomicalModelsModel.append(qx.data.marshal.Json.createModel(model))); + models.forEach(model => this.__anatomicalBundlesModel.append(qx.data.marshal.Json.createModel(model))); }, this); // select model after timeout, there is something that changes the selection to empty after populating the list setTimeout(() => { const modelsUIList = this.getChildControl("models-list"); - if (selectModelId) { - const entryFound = modelsUIList.getSelectables().find(entry => "getModelId" in entry && entry.getModelId() === selectModelId); + if (selectLicensedItemId) { + const entryFound = modelsUIList.getSelectables().find(entry => "getLicensedItemId" in entry && entry.getLicensedItemId() === selectLicensedItemId); modelsUIList.setSelection([entryFound]); } else if (modelsUIList.getSelectables().length) { // select first @@ -319,7 +274,45 @@ qx.Class.define("osparc.vipMarket.VipMarket", { }, 100); }, - __sendImportModelMessage: function(modelId) { + __modelPurchaseRequested: function(licensedItemId, pricingPlanId, pricingUnitId) { + const store = osparc.store.Store.getInstance(); + const contextWallet = store.getContextWallet(); + if (!contextWallet) { + return; + } + const walletId = contextWallet.getWalletId(); + let numOfSeats = null; + const pricingUnit = osparc.store.Pricing.getInstance().getPricingUnit(pricingPlanId, pricingUnitId); + if (pricingUnit) { + numOfSeats = parseInt(pricingUnit.getExtraInfo()["num_of_seats"]); + } + const licensedItemsStore = osparc.store.LicensedItems.getInstance(); + licensedItemsStore.purchaseLicensedItem(licensedItemId, walletId, pricingPlanId, pricingUnitId, numOfSeats) + .then(purchaseData => { + const nSeats = purchaseData["numOfSeats"]; + let msg = nSeats; + msg += " seat" + (nSeats > 1 ? 
"s" : ""); + msg += " rented until " + osparc.utils.Utils.formatDate(new Date(purchaseData["expireAt"])); + osparc.FlashMessenger.logAs(msg, "INFO"); + + const found = this.__anatomicalBundles.find(model => model.getLicensedItemId() === licensedItemId); + if (found) { + found.getSeats().push({ + licensedItemId: purchaseData["licensedItemId"], + licensedItemPurchaseId: purchaseData["licensedItemPurchaseId"], + numOfSeats: purchaseData["numOfSeats"], + expireAt: new Date(purchaseData["expireAt"]), + }); + this.__populateModels(licensedItemId); + const anatomicModelDetails = this.getChildControl("models-details"); + anatomicModelDetails.setAnatomicalModelsData(found); + } + this.fireEvent("modelPurchased"); + }) + .catch(err => osparc.FlashMessenger.logError(err, this.tr("Cannot purchase model"))); + }, + + __sendImportModelMessage: function(modelId, categoryId) { const store = osparc.store.Store.getInstance(); const currentStudy = store.getCurrentStudy(); const nodeId = this.getOpenBy(); @@ -328,6 +321,7 @@ qx.Class.define("osparc.vipMarket.VipMarket", { "type": "importModel", "message": { "modelId": modelId, + "categoryId": categoryId, }, }; if (currentStudy.sendMessageToIframe(nodeId, msg)) { diff --git a/services/static-webserver/client/source/class/osparc/widget/PersistentIframe.js b/services/static-webserver/client/source/class/osparc/widget/PersistentIframe.js index f5f48cf30d8..96ecc5c505d 100644 --- a/services/static-webserver/client/source/class/osparc/widget/PersistentIframe.js +++ b/services/static-webserver/client/source/class/osparc/widget/PersistentIframe.js @@ -68,6 +68,7 @@ qx.Class.define("osparc.widget.PersistentIframe", { showToolbar: { check: "Boolean", init: true, + event: "changeShowToolbar", apply: "__applyShowToolbar" } }, @@ -84,14 +85,14 @@ qx.Class.define("osparc.widget.PersistentIframe", { members: { __iframe: null, __syncScheduled: null, - __buttonContainer: null, + __buttonsContainer: null, __diskUsageIndicator: null, __reloadButton: null, __zoomButton: null, // override _createContentElement : function() { - let iframe = this.__iframe = new qx.ui.embed.Iframe(this.getSource()); + const iframe = this.__iframe = new qx.ui.embed.Iframe(this.getSource()); const persistentIframe = this; iframe.addListener("load", () => { const currentTheme = qx.theme.manager.Meta.getInstance().getTheme(); @@ -111,7 +112,7 @@ qx.Class.define("osparc.widget.PersistentIframe", { const host = window.location.host; iframeEl.setAttribute("allow", `clipboard-read; clipboard-write; from *.services.${host}`); - const buttonContainer = this.__buttonContainer = new qx.ui.container.Composite(new qx.ui.layout.HBox(10).set({ + const buttonsContainer = this.__buttonsContainer = new qx.ui.container.Composite(new qx.ui.layout.HBox(10).set({ alignX: "right", alignY: "middle" })); @@ -120,7 +121,7 @@ qx.Class.define("osparc.widget.PersistentIframe", { diskUsageIndicator.getChildControl("disk-indicator").set({ margin: 0 }); - buttonContainer.add(diskUsageIndicator); + buttonsContainer.add(diskUsageIndicator); const reloadButton = this.__reloadButton = this.self().createToolbarButton().set({ label: this.tr("Reload"), @@ -132,7 +133,7 @@ qx.Class.define("osparc.widget.PersistentIframe", { this.fireEvent("restart"); }, this); osparc.utils.Utils.setIdToWidget(reloadButton, "iFrameRestartBtn"); - buttonContainer.add(reloadButton); + buttonsContainer.add(reloadButton); const zoomButton = this.__zoomButton = this.self().createToolbarButton().set({ label: this.self().getZoomLabel(false), @@ -142,9 +143,9 @@ 
qx.Class.define("osparc.widget.PersistentIframe", { zoomButton.addListener("execute", e => { this.maximizeIFrame(!this.hasState("maximized")); }, this); - buttonContainer.add(zoomButton); + buttonsContainer.add(zoomButton); - appRoot.add(buttonContainer, { + appRoot.add(buttonsContainer, { top: this.self().HIDDEN_TOP }); standin.addListener("appear", e => { @@ -154,10 +155,11 @@ qx.Class.define("osparc.widget.PersistentIframe", { iframe.setLayoutProperties({ top: this.self().HIDDEN_TOP }); - buttonContainer.setLayoutProperties({ + buttonsContainer.setLayoutProperties({ top: this.self().HIDDEN_TOP }); }); + this.addListener("move", e => { // got to let the new layout render first or we don't see it this.__syncIframePos(); @@ -227,12 +229,12 @@ qx.Class.define("osparc.widget.PersistentIframe", { height: divSize.height - this.getToolbarHeight() }); - this.__buttonContainer.setLayoutProperties({ + this.__buttonsContainer.setLayoutProperties({ top: (divPos.top - iframeParentPos.top), right: (iframeParentPos.right - iframeParentPos.left - divPos.right) }); - this.__buttonContainer.setVisibility(this.isShowToolbar() ? "visible" : "excluded"); + this.__buttonsContainer.setVisibility(this.isShowToolbar() ? "visible" : "excluded"); }, 0); }, @@ -297,12 +299,12 @@ qx.Class.define("osparc.widget.PersistentIframe", { }, __handleIframeMessage: function(data, nodeId) { - if (data["type"] && data["message"]) { + if (data["type"]) { switch (data["type"]) { case "theme": { // switch theme driven by the iframe const message = data["message"]; - if (message.includes("osparc;theme=")) { + if (message && message.includes("osparc;theme=")) { const themeName = message.replace("osparc;theme=", ""); const validThemes = osparc.ui.switch.ThemeSwitcher.getValidThemes(); const themeFound = validThemes.find(theme => theme.basename === themeName); @@ -314,8 +316,16 @@ qx.Class.define("osparc.widget.PersistentIframe", { break; } case "openMarket": { - const category = data["message"] && data["message"]["category"]; - osparc.vipMarket.MarketWindow.openWindow(nodeId, category); + if (osparc.product.Utils.showS4LStore()) { + const category = data["message"] && data["message"]["category"]; + setTimeout(() => osparc.vipMarket.MarketWindow.openWindow(nodeId, category), 100); + } + break; + } + case "openWallets": { + if (osparc.desktop.credits.Utils.areWalletsEnabled()) { + setTimeout(() => osparc.desktop.credits.BillingCenterWindow.openWindow(), 100); + } break; } } diff --git a/services/static-webserver/client/source/class/osparc/widget/PreparingInputs.js b/services/static-webserver/client/source/class/osparc/widget/PreparingInputs.js index 6e050bfca38..511e6229782 100644 --- a/services/static-webserver/client/source/class/osparc/widget/PreparingInputs.js +++ b/services/static-webserver/client/source/class/osparc/widget/PreparingInputs.js @@ -25,7 +25,7 @@ qx.Class.define("osparc.widget.PreparingInputs", { this._setLayout(new qx.ui.layout.VBox(10)); - const text = this.tr("In order to move to this step, we need to prepare some inputs for you.
<br>Here you can check the logs of the progress:"); + const text = this.tr("To proceed, we need to prepare some inputs. You can check the progress logs here:"); const title = new qx.ui.basic.Label(text).set({ font: "text-14", rich: true diff --git a/services/static-webserver/client/source/class/osparc/widget/ProgressSequence.js b/services/static-webserver/client/source/class/osparc/widget/ProgressSequence.js index d9c3e257d81..09cadba98af 100644 --- a/services/static-webserver/client/source/class/osparc/widget/ProgressSequence.js +++ b/services/static-webserver/client/source/class/osparc/widget/ProgressSequence.js @@ -25,7 +25,8 @@ qx.Class.define("osparc.widget.ProgressSequence", { this.set({ backgroundColor: "window-popup-background", - paddingBottom: 8 + paddingBottom: 8, + minWidth: 400, }); this.__initLayout(title); diff --git a/services/static-webserver/client/source/class/osparc/widget/NodeDataManager.js b/services/static-webserver/client/source/class/osparc/widget/StudyDataManager.js similarity index 77% rename from services/static-webserver/client/source/class/osparc/widget/NodeDataManager.js rename to services/static-webserver/client/source/class/osparc/widget/StudyDataManager.js index 397477d1d94..092d04be44c 100644 --- a/services/static-webserver/client/source/class/osparc/widget/NodeDataManager.js +++ b/services/static-webserver/client/source/class/osparc/widget/StudyDataManager.js @@ -25,12 +25,12 @@ * Here is a little example of how to use the widget. * * <pre class='javascript'>
- *   let dataManager = new osparc.widget.NodeDataManager(null, nodeId);
+ *   let dataManager = new osparc.widget.StudyDataManager(null, nodeId);
  *   this.getRoot().add(dataManager);
  * 
*/ -qx.Class.define("osparc.widget.NodeDataManager", { +qx.Class.define("osparc.widget.StudyDataManager", { extend: qx.ui.core.Widget, /** @@ -42,22 +42,26 @@ qx.Class.define("osparc.widget.NodeDataManager", { this._setLayout(new qx.ui.layout.VBox(10)); - if (studyId) { - this.set({ - studyId - }); - } + this.setStudyId(studyId); if (nodeId) { - this.set({ - nodeId - }); + this.setNodeId(nodeId); } this.__buildLayout(); this.__reloadTree(); }, + statics: { + popUpInWindow: function(studyId, nodeId, title) { + const studyDataManager = new osparc.widget.StudyDataManager(studyId, nodeId); + if (!title) { + title = osparc.product.Utils.getStudyAlias({firstUpperCase: true}) + qx.locale.Manager.tr(" Files"); + } + return osparc.ui.window.Window.popUpInWindow(studyDataManager, title, osparc.dashboard.ResourceDetails.WIDTH, osparc.dashboard.ResourceDetails.HEIGHT); + }, + }, + properties: { studyId: { check: "String", @@ -91,9 +95,6 @@ qx.Class.define("osparc.widget.NodeDataManager", { const treeFolderView = this.getChildControl("tree-folder-view"); treeFolderView.getChildControl("folder-tree").setBackgroundColor("window-popup-background"); - const reloadButton = treeFolderView.getChildControl("reload-button"); - reloadButton.addListener("execute", () => this.__reloadTree(), this); - const selectedFileLayout = treeFolderView.getChildControl("folder-viewer").getChildControl("selected-file-layout"); selectedFileLayout.addListener("fileDeleted", e => this.__fileDeleted(e.getData()), this); }, @@ -103,11 +104,12 @@ qx.Class.define("osparc.widget.NodeDataManager", { const foldersTree = treeFolderView.getChildControl("folder-tree"); foldersTree.resetCache(); - if (this.getStudyId()) { - foldersTree.populateStudyTree(this.getStudyId()); - } if (this.getNodeId()) { - foldersTree.populateNodeTree(this.getNodeId()); + foldersTree.populateNodeTree(this.getStudyId(), this.getNodeId()); + treeFolderView.requestSize(this.getStudyId(), this.getNodeId()); + } else if (this.getStudyId()) { + foldersTree.populateStudyTree(this.getStudyId()); + treeFolderView.requestSize(this.getStudyId()); } const folderViewer = treeFolderView.getChildControl("folder-viewer"); @@ -134,13 +136,12 @@ qx.Class.define("osparc.widget.NodeDataManager", { treeFolderView.openPath(path); }; - if (this.getStudyId()) { - foldersTree.populateStudyTree(this.getStudyId()) + if (this.getNodeId()) { + foldersTree.populateNodeTree(this.getStudyId(), this.getNodeId()) .then(() => openSameFolder()) .catch(err => console.error(err)); - } - if (this.getNodeId()) { - foldersTree.populateNodeTree(this.getNodeId()) + } else if (this.getStudyId()) { + foldersTree.populateStudyTree(this.getStudyId()) .then(() => openSameFolder()) .catch(err => console.error(err)); } diff --git a/services/static-webserver/client/source/class/osparc/widget/logger/LoggerView.js b/services/static-webserver/client/source/class/osparc/widget/logger/LoggerView.js index c9a1795bf4a..1658268f563 100644 --- a/services/static-webserver/client/source/class/osparc/widget/logger/LoggerView.js +++ b/services/static-webserver/client/source/class/osparc/widget/logger/LoggerView.js @@ -126,6 +126,7 @@ qx.Class.define("osparc.widget.logger.LoggerView", { switch (id) { case "toolbar": control = new qx.ui.toolbar.ToolBar(); + control.setBackgroundColor("transparent"); this._add(control); break; case "pin-node": { @@ -143,7 +144,9 @@ qx.Class.define("osparc.widget.logger.LoggerView", { control = new qx.ui.form.TextField().set({ appearance: "toolbar-textfield", liveUpdate: true, - placeholder: 
this.tr("Filter") + placeholder: this.tr("Filter"), + marginLeft: 5, + marginRight: 5, }); osparc.utils.Utils.setIdToWidget(control, "logsFilterField"); toolbar.add(control, { @@ -221,7 +224,7 @@ qx.Class.define("osparc.widget.logger.LoggerView", { toolTipText: this.tr("Download logs"), appearance: "toolbar-button" }); - osparc.utils.Utils.setIdToWidget(control, "downloadLogsButton"); + osparc.utils.Utils.setIdToWidget(control, "__downloadLogsButton"); toolbar.add(control); break; } @@ -256,7 +259,7 @@ qx.Class.define("osparc.widget.logger.LoggerView", { toolbar.add(copySelectedToClipboardButton); const downloadButton = this.getChildControl("download-logs-button"); - downloadButton.addListener("execute", () => this.downloadLogs(), this); + downloadButton.addListener("execute", () => this.__downloadLogs(), this); toolbar.add(downloadButton); return toolbar; @@ -345,7 +348,7 @@ qx.Class.define("osparc.widget.logger.LoggerView", { } }, - downloadLogs: function() { + __downloadLogs: function() { const logs = this.__getLogsString(); const blob = new Blob([logs], {type: "text/plain"}); osparc.utils.Utils.downloadBlobContent(blob, "logs.log"); diff --git a/services/static-webserver/client/source/class/osparc/workbench/DiskUsageController.js b/services/static-webserver/client/source/class/osparc/workbench/DiskUsageController.js index c0a3a8e835d..0ae4c40e3ef 100644 --- a/services/static-webserver/client/source/class/osparc/workbench/DiskUsageController.js +++ b/services/static-webserver/client/source/class/osparc/workbench/DiskUsageController.js @@ -134,14 +134,14 @@ qx.Class.define("osparc.workbench.DiskUsageController", { case "CRITICAL": if (shouldDisplayMessage(prevDiskUsageState, warningLevel)) { message = qx.locale.Manager.tr(`Out of Disk Space on "Service Filesystem" for ${nodeName}
The volume Service Filesystem has only ${freeSpace} disk space remaining. You can free up disk space by removing unused files in your service. Alternatively, you can run your service with a pricing plan that supports your storage requirements.`); - osparc.FlashMessenger.getInstance().logAs(message, "ERROR"); + osparc.FlashMessenger.logError(message); this.__prevDiskUsageStateList[objIndex].state = warningLevel; } break; case "WARNING": if (shouldDisplayMessage(prevDiskUsageState, warningLevel)) { message = qx.locale.Manager.tr(`Low Disk Space on "Service Filesystem" for ${nodeName}
The volume Service Filesystem has only ${freeSpace} disk space remaining. You can free up disk space by removing unused files in your service. Alternatively, you can run your service with a pricing plan that supports your storage requirements.`); - osparc.FlashMessenger.getInstance().logAs(message, "WARNING"); + osparc.FlashMessenger.logAs(message, "WARNING"); this.__prevDiskUsageStateList[objIndex].state = warningLevel; } break; diff --git a/services/static-webserver/client/source/class/osparc/workbench/NodeUI.js b/services/static-webserver/client/source/class/osparc/workbench/NodeUI.js index cc1af9a52e3..d1535459f94 100644 --- a/services/static-webserver/client/source/class/osparc/workbench/NodeUI.js +++ b/services/static-webserver/client/source/class/osparc/workbench/NodeUI.js @@ -150,7 +150,7 @@ qx.Class.define("osparc.workbench.NodeUI", { const statusLabel = nodeStatus.getChildControl("label"); const requestOpenLogger = () => this.fireEvent("requestOpenLogger"); const evaluateLabel = () => { - const failed = statusLabel.getValue() === "Failed"; + const failed = statusLabel.getValue() === "Unsuccessful"; statusLabel.setCursor(failed ? "pointer" : "auto"); if (nodeStatus.hasListener("tap")) { nodeStatus.removeListener("tap", requestOpenLogger); diff --git a/services/static-webserver/client/source/class/osparc/workbench/ServiceCatalog.js b/services/static-webserver/client/source/class/osparc/workbench/ServiceCatalog.js index d14212aa20f..1679b23bcd6 100644 --- a/services/static-webserver/client/source/class/osparc/workbench/ServiceCatalog.js +++ b/services/static-webserver/client/source/class/osparc/workbench/ServiceCatalog.js @@ -225,35 +225,33 @@ qx.Class.define("osparc.workbench.ServiceCatalog", { }); osparc.service.Utils.sortObjectsBasedOn(filteredServices, this.__sortBy); - const filteredServicesObj = this.__filteredServicesObj = osparc.service.Utils.convertArrayToObject(filteredServices); - - const groupedServicesList = []; - for (const key in filteredServicesObj) { - const serviceMetadata = osparc.service.Utils.getLatest(key); - if (serviceMetadata) { - const service = new osparc.data.model.Service(serviceMetadata); - groupedServicesList.push(service); - } - } + this.__filteredServicesObj = osparc.service.Utils.convertArrayToObject(filteredServices); + + const servicesModels = []; + filteredServices.forEach(filteredService => { + const service = new osparc.data.model.Service(filteredService); + servicesModels.push(service); + }); - this.__serviceList.setModel(new qx.data.Array(groupedServicesList)); + this.__serviceList.setModel(new qx.data.Array(servicesModels)); }, __changedSelection: function(key) { if (this.__versionsBox) { - let selectBox = this.__versionsBox; + const selectBox = this.__versionsBox; selectBox.removeAll(); if (key in this.__filteredServicesObj) { const latest = new qx.ui.form.ListItem(this.self().LATEST); latest.version = this.self().LATEST; selectBox.add(latest); - const versions = osparc.service.Utils.getVersions(key); - versions.forEach(version => { - const listItem = osparc.service.Utils.versionToListItem(key, version); - selectBox.add(listItem); - }); - osparc.utils.Utils.growSelectBox(selectBox, 200); - selectBox.setSelection([latest]); + osparc.store.Services.populateVersionsSelectBox(key, selectBox) + .then(() => { + osparc.utils.Utils.growSelectBox(selectBox, 200); + const idx = selectBox.getSelectables().indexOf(latest); + if (idx > -1) { + selectBox.setSelection([latest]); + } + }); } } if (this.__addBtn) { diff --git 
a/services/static-webserver/client/source/class/osparc/workbench/WorkbenchUI.js b/services/static-webserver/client/source/class/osparc/workbench/WorkbenchUI.js index 19a123cf547..ec3f146fef3 100644 --- a/services/static-webserver/client/source/class/osparc/workbench/WorkbenchUI.js +++ b/services/static-webserver/client/source/class/osparc/workbench/WorkbenchUI.js @@ -202,7 +202,7 @@ qx.Class.define("osparc.workbench.WorkbenchUI", { }, __addStartHint: function() { - this.__startHint = new qx.ui.basic.Label(this.tr("Double click to start adding a node")).set({ + this.__startHint = new qx.ui.basic.Label(this.tr("Double-click to add a node")).set({ font: "workbench-start-hint", textColor: "workbench-start-hint", visibility: "excluded" @@ -299,7 +299,7 @@ qx.Class.define("osparc.workbench.WorkbenchUI", { return null; } if (this.getStudy().isPipelineRunning()) { - osparc.FlashMessenger.getInstance().logAs(osparc.data.model.Workbench.CANT_ADD_NODE, "ERROR"); + osparc.FlashMessenger.logError(osparc.data.model.Workbench.CANT_ADD_NODE); return null; } const srvCat = new osparc.workbench.ServiceCatalog(); @@ -1919,7 +1919,7 @@ qx.Class.define("osparc.workbench.WorkbenchUI", { }; if (type === "rect") { if ([null, undefined].includes(annotation)) { - osparc.FlashMessenger.getInstance().logAs(this.tr("Draw a rectangle first"), "WARNING"); + osparc.FlashMessenger.logAs(this.tr("Draw a rectangle first"), "WARNING"); return false; } serializeData.attributes = osparc.wrapper.Svg.getRectAttributes(annotation); @@ -2005,7 +2005,7 @@ qx.Class.define("osparc.workbench.WorkbenchUI", { x: e.offsetX, y: e.offsetY }; - const service = qx.data.marshal.Json.createModel(osparc.service.Utils.getFilePicker()); + const service = qx.data.marshal.Json.createModel(osparc.store.Services.getFilePicker()); const nodeUI = await this.__addNode(service, pos); if (nodeUI) { const filePicker = new osparc.file.FilePicker(nodeUI.getNode(), "workbench"); @@ -2013,10 +2013,10 @@ qx.Class.define("osparc.workbench.WorkbenchUI", { filePicker.addListener("fileUploaded", () => this.fireDataEvent("nodeSelected", nodeUI.getNodeId()), this); } } else { - osparc.FlashMessenger.getInstance().logAs(osparc.file.FileDrop.ONE_FILE_ONLY, "ERROR"); + osparc.FlashMessenger.logError(osparc.file.FileDrop.ONE_FILE_ONLY); } } else { - osparc.FlashMessenger.getInstance().logAs(this.tr("Folders are not accepted. You might want to upload a zip file."), "ERROR"); + osparc.FlashMessenger.logError(this.tr("Folders are not accepted. 
Please upload a zip file instead.")); } } }, @@ -2028,7 +2028,7 @@ qx.Class.define("osparc.workbench.WorkbenchUI", { const data = this.__isDraggingLink["dragData"]; this.__isDraggingLink = null; const pos = this.__pointerEventToWorkbenchPos(e, false); - const service = qx.data.marshal.Json.createModel(osparc.service.Utils.getFilePicker()); + const service = qx.data.marshal.Json.createModel(osparc.store.Services.getFilePicker()); const nodeUI = await this.__addNode(service, pos); if (nodeUI) { const node = nodeUI.getNode(); diff --git a/services/static-webserver/client/source/resource/common/common.css b/services/static-webserver/client/source/resource/common/common.css index 034546a9155..d032c109151 100644 --- a/services/static-webserver/client/source/resource/common/common.css +++ b/services/static-webserver/client/source/resource/common/common.css @@ -15,7 +15,11 @@ } .rotate { - animation: rotation 1.5s infinite linear; + animation: rotation 2s infinite linear; +} + +.rotateSlow { + animation: rotation 4s infinite linear; } .verticalText { diff --git a/services/static-webserver/client/source/resource/osparc/circle-info-text.svg b/services/static-webserver/client/source/resource/osparc/circle-info-text.svg new file mode 100644 index 00000000000..b753c59738b --- /dev/null +++ b/services/static-webserver/client/source/resource/osparc/circle-info-text.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/services/static-webserver/client/source/resource/osparc/circle-stop-text.svg b/services/static-webserver/client/source/resource/osparc/circle-stop-text.svg new file mode 100644 index 00000000000..1470fd0f295 --- /dev/null +++ b/services/static-webserver/client/source/resource/osparc/circle-stop-text.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/services/static-webserver/client/source/resource/osparc/icons/MC.png b/services/static-webserver/client/source/resource/osparc/icons/MC.png new file mode 100644 index 00000000000..e0b1b0e22dd Binary files /dev/null and b/services/static-webserver/client/source/resource/osparc/icons/MC.png differ diff --git a/services/static-webserver/client/source/resource/osparc/icons/PM.png b/services/static-webserver/client/source/resource/osparc/icons/PM.png new file mode 100644 index 00000000000..b37a969a9bb Binary files /dev/null and b/services/static-webserver/client/source/resource/osparc/icons/PM.png differ diff --git a/services/static-webserver/client/source/resource/osparc/icons/TI.png b/services/static-webserver/client/source/resource/osparc/icons/TI.png new file mode 100644 index 00000000000..81355a82873 Binary files /dev/null and b/services/static-webserver/client/source/resource/osparc/icons/TI.png differ diff --git a/services/static-webserver/client/source/resource/osparc/icons/diagram.png b/services/static-webserver/client/source/resource/osparc/icons/diagram.png new file mode 100644 index 00000000000..7ed509475bd Binary files /dev/null and b/services/static-webserver/client/source/resource/osparc/icons/diagram.png differ diff --git a/services/static-webserver/client/source/resource/osparc/icons/pMC.png b/services/static-webserver/client/source/resource/osparc/icons/pMC.png new file mode 100644 index 00000000000..0af588f4b51 Binary files /dev/null and b/services/static-webserver/client/source/resource/osparc/icons/pMC.png differ diff --git a/services/static-webserver/client/source/resource/osparc/icons/pPM.png b/services/static-webserver/client/source/resource/osparc/icons/pPM.png new file mode 100644 index 
00000000000..64eb011b33d Binary files /dev/null and b/services/static-webserver/client/source/resource/osparc/icons/pPM.png differ diff --git a/services/static-webserver/client/source/resource/osparc/icons/pTI.png b/services/static-webserver/client/source/resource/osparc/icons/pTI.png new file mode 100644 index 00000000000..911dad5a02b Binary files /dev/null and b/services/static-webserver/client/source/resource/osparc/icons/pTI.png differ diff --git a/services/static-webserver/client/source/resource/osparc/logs-text.svg b/services/static-webserver/client/source/resource/osparc/logs-text.svg new file mode 100644 index 00000000000..20dd82103ce --- /dev/null +++ b/services/static-webserver/client/source/resource/osparc/logs-text.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/services/static-webserver/client/source/resource/osparc/market/AnimalWholeBody.svg b/services/static-webserver/client/source/resource/osparc/market/AnimalWholeBody.svg new file mode 100644 index 00000000000..cbfbc598fc1 --- /dev/null +++ b/services/static-webserver/client/source/resource/osparc/market/AnimalWholeBody.svg @@ -0,0 +1,68 @@ +[SVG markup lost in extraction; surviving text: "ModelsDownload"] diff --git a/services/static-webserver/client/source/resource/osparc/market/ComputationalPhantom.svg b/services/static-webserver/client/source/resource/osparc/market/ComputationalPhantom.svg new file mode 100644 index 00000000000..806af72544a --- /dev/null +++ b/services/static-webserver/client/source/resource/osparc/market/ComputationalPhantom.svg @@ -0,0 +1,48 @@ +[SVG markup lost in extraction; surviving text: "PhantomsDownload"] diff --git a/services/static-webserver/client/source/resource/osparc/market/HumanBodyRegion.svg b/services/static-webserver/client/source/resource/osparc/market/HumanBodyRegion.svg new file mode 100644 index 00000000000..f3fd1e47848 --- /dev/null +++ b/services/static-webserver/client/source/resource/osparc/market/HumanBodyRegion.svg @@ -0,0 +1,60 @@ +[SVG markup lost in extraction; surviving text: "HumanPartsDownload"] diff --git a/services/static-webserver/client/source/resource/osparc/market/HumanWholeBody.svg b/services/static-webserver/client/source/resource/osparc/market/HumanWholeBody.svg new file mode 100644 index 00000000000..2321ac47670 --- /dev/null +++ b/services/static-webserver/client/source/resource/osparc/market/HumanWholeBody.svg @@ -0,0 +1,51 @@ +[SVG markup lost in extraction; surviving text: "ModelsDownload"] diff --git a/services/static-webserver/client/source/resource/osparc/market/RentedModels.svg b/services/static-webserver/client/source/resource/osparc/market/RentedModels.svg new file mode 100644 index 00000000000..12fa9d1d5c1 --- /dev/null +++ b/services/static-webserver/client/source/resource/osparc/market/RentedModels.svg @@ -0,0 +1,53 @@ +[SVG markup lost in extraction; surviving text: "ModelsDownload" (twice)] diff --git a/services/static-webserver/client/source/resource/osparc/mock_jobs.json b/services/static-webserver/client/source/resource/osparc/mock_jobs.json new file mode 100644 index 00000000000..3aa775971e7 --- /dev/null +++ b/services/static-webserver/client/source/resource/osparc/mock_jobs.json @@ -0,0 +1,85 @@ +{ + "jobs": [{ + "job_id": "0cde4607-07de-4bb5-b3e6-487f22387a70", + "solver": "isolve:2.4.12", + "status": "PUBLISHED", + "progress": 0, + "submitted_at": "2024-11-27 14:03:17.357523", + "started_at": null, + "instance": "p5.8xlarge" + }, { + "job_id": "5685a699-4927-479e-9b34-e5ab1616303a", + "solver": "fenics:1.0.4", + "status": "WAITING FOR RESOURCES", + "progress": 2, + "submitted_at": "2024-11-27 14:03:17.413844", 
"started_at": "2024-11-27 16:03:17.413844", + "instance": "p5.8xlarge" + }, { + "job_id": "a8ca6a02-8816-48a6-8c6b-b94b3e431c8c", + "solver": "isolve:2.4.12", + "status": "WAITING FOR CLUSTER", + "progress": 0, + "submitted_at": "2024-11-27 14:03:17.357523", + "started_at": null, + "instance": "p5.8xlarge" + }, { + "job_id": "a8ca6a02-8816-48a6-8c6b-b94b3e431c8d", + "solver": "sleeper:2.1.2", + "status": "STARTED", + "progress": 50, + "submitted_at": "2024-11-27 14:03:17.357523", + "started_at": "2025-03-19 14:03:17.357523", + "instance": "p5.8xlarge" + }], + "jobs_info": { + "a8ca6a02-8816-48a6-8c6b-b94b3e431c8d": { + "parent_project_id" : "522f544a-1e05-461e-82ed-e26806e4640d", + "parent_project": "My Project", + "job_id": "a8ca6a02-8816-48a6-8c6b-b94b3e431c8d", + "job_name": "EM", + "log_state": 3, + "solver_key": "simcore/services/comp/sleeper", + "solver_version": "2.1.2", + "version_display" : "The latest", + "input_file_path": "/home/user/blah_blah_a8ca6a02-8816-48a6-8c6b-b94b3e431c8d_Input.h5", + "internal_job_id": "19b883a4-8407-4b81-9c5b-abe5a7f42bc8" + }, + "5685a699-4927-479e-9b34-e5ab1616303a": { + "parent_project_id" : "522f544a-1e05-461e-82ed-e26806e4640d", + "parent_project": "My Project", + "job_id": "5685a699-4927-479e-9b34-e5ab1616303a", + "job_name": "EM", + "log_state": 3, + "solver_key": "simcore/services/comp/sleeper", + "solver_version": "2.1.2", + "version_display" : "The latest", + "input_file_path": "/home/user/blah_blah_5685a699-4927-479e-9b34-e5ab1616303a_Input.h5", + "internal_job_id": "19b883a4-8407-4b81-9c5b-abe5a7f42bc8" + }, + "a8ca6a02-8816-48a6-8c6b-b94b3e431c8c": { + "parent_project_id" : "522f544a-1e05-461e-82ed-e26806e4640d", + "parent_project": "My Project", + "job_id": "a8ca6a02-8816-48a6-8c6b-b94b3e431c8c", + "job_name": "EM", + "log_state": 3, + "solver_key": "simcore/services/comp/sleeper", + "solver_version": "2.1.2", + "version_display" : "The latest", + "input_file_path": "/home/user/blah_blah_a8ca6a02-8816-48a6-8c6b-b94b3e431c8c_Input.h5", + "internal_job_id": "19b883a4-8407-4b81-9c5b-abe5a7f42bc8" + }, + "0cde4607-07de-4bb5-b3e6-487f22387a70": { + "parent_project_id" : "522f544a-1e05-461e-82ed-e26806e4640d", + "parent_project": "My Project", + "job_id": "0cde4607-07de-4bb5-b3e6-487f22387a70", + "job_name": "EM", + "log_state": 3, + "solver_key": "simcore/services/comp/sleeper", + "solver_version": "2.1.2", + "version_display" : "The latest", + "input_file_path": "/home/user/blah_blah_0cde4607-07de-4bb5-b3e6-487f22387a70_Input.h5", + "internal_job_id": "19b883a4-8407-4b81-9c5b-abe5a7f42bc8" + } + } +} diff --git a/services/static-webserver/client/source/resource/osparc/new_studies.json b/services/static-webserver/client/source/resource/osparc/new_studies.json deleted file mode 100644 index ed29e1145d5..00000000000 --- a/services/static-webserver/client/source/resource/osparc/new_studies.json +++ /dev/null @@ -1,210 +0,0 @@ -{ - "tis": { - "categories": [{ - "id": "precomputed", - "title": "Precomputed" - }, { - "id": "personalized", - "title": "Personalized", - "description": "In the process, TIP will launch simulations on AWS.
The associated resource costs will be deduced from your Credits." - }], - "resources": [{ - "resourceType": "template", - "expectedTemplateLabel": "TI Planning Tool", - "title": "Classic TI", - "newStudyLabel": "Classic TI", - "category": "precomputed", - "idToWidget": "newTIPlanButton" - }, { - "resourceType": "template", - "expectedTemplateLabel": "mcTI Planning Tool", - "title": "Multichannel TI", - "newStudyLabel": "Multichannel TI", - "category": "precomputed", - "idToWidget": "newMTIPlanButton" - }, { - "resourceType": "template", - "expectedTemplateLabel": "pmTI Planning Tool", - "title": "Phase-modulation TI", - "newStudyLabel": "Phase-modulation TI", - "category": "precomputed", - "idToWidget": "newPMTIPlanButton" - }, { - "resourceType": "template", - "expectedTemplateLabel": "personalized TI Planning Tool", - "title": "Personalized Classic TI", - "newStudyLabel": "Personalized Classic TI", - "category": "personalized", - "idToWidget": "personalizationNewTIPlanButton" - }, { - "resourceType": "template", - "expectedTemplateLabel": "personalized mcTI Planning Tool", - "title": "Personalized Multichannel TI", - "newStudyLabel": "Personalized Multichannel TI", - "category": "personalized", - "idToWidget": "personalizationNewMTIPlanButton" - }, { - "resourceType": "template", - "expectedTemplateLabel": "personalized pmTI Planning Tool", - "title": "Personalized Phase-modulation TI", - "newStudyLabel": "Personalized Phase-modulation TI", - "category": "personalized", - "idToWidget": "personalizationNewPMTIPlanButton" - }] - }, - "tiplite": { - "categories": [{ - "id": "precomputed", - "title": "Precomputed" - }, { - "id": "personalized", - "title": "Personalized", - "description": "In the process, TIP will launch simulations on AWS.
The associated resource costs will be deduced from your Credits." - }], - "resources": [{ - "resourceType": "template", - "expectedTemplateLabel": "TI Planning Tool", - "title": "Classic TI", - "newStudyLabel": "Classic TI", - "category": "precomputed", - "idToWidget": "newTIPlanButton" - }, { - "showDisabled": true, - "reason": "Not available in ${replace_me_product_name}", - "resourceType": "template", - "expectedTemplateLabel": "mcTI Planning Tool", - "title": "Multichannel TI", - "newStudyLabel": "Multichannel TI", - "category": "precomputed", - "idToWidget": "newMTIPlanButton" - }, { - "showDisabled": true, - "reason": "Not available in ${replace_me_product_name}", - "resourceType": "template", - "expectedTemplateLabel": "pmTI Planning Tool", - "title": "Phase-modulation TI", - "newStudyLabel": "Phase-modulation TI", - "category": "precomputed", - "idToWidget": "newPMTIPlanButton" - }, { - "showDisabled": true, - "reason": "Not available in ${replace_me_product_name}", - "resourceType": "template", - "expectedTemplateLabel": "personalized TI Planning Tool", - "title": "Personalized Classic TI", - "newStudyLabel": "Personalized Classic TI", - "category": "personalized", - "idToWidget": "personalizationNewTIPlanButton" - }, { - "showDisabled": true, - "reason": "Not available in ${replace_me_product_name}", - "resourceType": "template", - "expectedTemplateLabel": "personalized mcTI Planning Tool", - "title": "Personalized Multichannel TI", - "newStudyLabel": "Personalized Multichannel TI", - "category": "personalized", - "idToWidget": "personalizationNewMTIPlanButton" - }, { - "showDisabled": true, - "reason": "Not available in ${replace_me_product_name}", - "resourceType": "template", - "expectedTemplateLabel": "personalized pmTI Planning Tool", - "title": "Personalized Phase-modulation TI", - "newStudyLabel": "Personalized Phase-modulation TI", - "category": "personalized", - "idToWidget": "personalizationNewPMTIPlanButton" - }] - }, - "s4l": { - "categories": [{ - "id": "apps", - "title": "Apps" - }, { - "id": "osparc", - "title": "oSPARC" - }], - "resources": [{ - "category": "apps", - "resourceType": "service", - "expectedKey": "simcore/services/dynamic/s4l-ui", - "title": "Sim4Life", - "newStudyLabel": "New S4L project", - "idToWidget": "startS4LButton" - }, { - "category": "apps", - "resourceType": "service", - "expectedKey": "simcore/services/dynamic/iseg-web", - "title": "iSEG", - "newStudyLabel": "New iSEG project" - }, { - "category": "apps", - "resourceType": "service", - "expectedKey": "simcore/services/dynamic/s4l-jupyter", - "title": "Jupyter Lab", - "icon": "https://upload.wikimedia.org/wikipedia/commons/3/38/Jupyter_logo.svg", - "newStudyLabel": "New S4L Jupyter Lab" - }, { - "category": "osparc", - "resourceType": "study", - "icon": "@FontAwesome5Solid/file/18", - "title": "Empty Pipeline", - "newStudyLabel": "New Project", - "idToWidget": "emptyStudyBtn" - }] - }, - "s4lacad": { - "categories": [{ - "id": "apps", - "title": "Apps" - }, { - "id": "osparc", - "title": "oSPARC" - }], - "resources": [{ - "category": "apps", - "resourceType": "service", - "expectedKey": "simcore/services/dynamic/s4l-ui", - "title": "Sim4Life", - "newStudyLabel": "New S4L project", - "idToWidget": "startS4LButton" - }, { - "category": "apps", - "resourceType": "service", - "expectedKey": "simcore/services/dynamic/iseg-web", - "title": "iSEG", - "newStudyLabel": "New iSEG project" - }, { - "category": "apps", - "resourceType": "service", - "expectedKey": 
"simcore/services/dynamic/s4l-jupyter", - "icon": "https://upload.wikimedia.org/wikipedia/commons/3/38/Jupyter_logo.svg", - "title": "Jupyter Lab", - "newStudyLabel": "New S4L Jupyter Lab" - }, { - "category": "osparc", - "resourceType": "study", - "icon": "@FontAwesome5Solid/file/18", - "title": "Empty Pipeline", - "newStudyLabel": "New Project", - "idToWidget": "emptyStudyBtn" - }] - }, - "s4llite": { - "resources": [{ - "resourceType": "service", - "expectedKey": "simcore/services/dynamic/s4l-ui-lite", - "title": "${replace_me_product_name}", - "newStudyLabel": "New Project", - "idToWidget": "startS4LButton" - }] - }, - "osparc": { - "resources": [{ - "resourceType": "study", - "icon": "@FontAwesome5Solid/file/18", - "title": "Empty Study", - "newStudyLabel": "New Study", - "idToWidget": "emptyStudyBtn" - }] - } -} diff --git a/services/static-webserver/client/source/resource/osparc/tours/osparc_tours.json b/services/static-webserver/client/source/resource/osparc/tours/osparc_tours.json new file mode 100644 index 00000000000..24ff1e63ef1 --- /dev/null +++ b/services/static-webserver/client/source/resource/osparc/tours/osparc_tours.json @@ -0,0 +1,108 @@ +{ + "studies": { + "id": "studies", + "name": "Studies", + "description": "All you need to know about Study handling", + "context": "osparc-test-id=newPlusBtn", + "steps": [{ + "beforeClick": { + "selector": "osparc-test-id=newPlusBtn", + "action": "open" + }, + "anchorEl": "osparc-test-id=newPlusMenu", + "title": "Create Studies", + "text": "Clicking on the (+) New button, allows you to create new Studies or new Folders to organize the studies", + "placement": "right" + }, { + "anchorEl": "osparc-test-id=searchBarFilter-textField-study", + "title": "Filter and Search", + "text": "This tool allows you to search Studies, Tutorials and Services.
You can search and filter by:
- Title, description, owner, id...
- Tags
- Shared with", + "placement": "bottom" + }, { + "beforeClick": { + "selector": "osparc-test-id=studyItemMenuButton", + "action": "open" + }, + "anchorEl": "osparc-test-id=studyItemMenuMenu", + "title": "More options button", + "text": "On the Study card, you can use the three dots button to access more information and operation on the Study.", + "placement": "left" + }, { + "anchorEl": "osparc-test-id=updateStudyBtn", + "title": "Update Services", + "text": "On the Study card, you can use the Update button to update the corresponding service to the latest version.", + "placement": "bottom" + }] + }, + "dashboard": { + "id": "dashboard", + "name": "Dashboard", + "description": "Introduction to the Dashboard tabs", + "context": "osparc-test-id=dashboardTabs", + "steps": [{ + "anchorEl": "osparc-test-id=dashboardTabs", + "title": "Dashboard Menu", + "text": "The menu tabs give you quick access to a set of core elements of the platform, namely Studies, Templates, Services and Data.", + "placement": "bottom" + }, { + "beforeClick": { + "selector": "osparc-test-id=studiesTabBtn" + }, + "anchorEl": "osparc-test-id=studiesTabBtn", + "text": "Any Study is accessible via the Dashboard. The Studies, which belong to or are shared with you, can be found here. You can also create Folders to help you organize the Studies", + "placement": "bottom" + }, { + "beforeClick": { + "selector": "osparc-test-id=templatesTabBtn" + }, + "anchorEl": "osparc-test-id=templatesTabBtn", + "text": "Clicking on a Template will create a copy of that Study, which will appear in your own Studies tab with the same name as the Template. Any changes you make to this copy will not affect the original Template.", + "placement": "bottom" + }, { + "beforeClick": { + "selector": "osparc-test-id=servicesTabBtn" + }, + "anchorEl": "osparc-test-id=servicesTabBtn", + "text": "Every Study in oSparc is composed of so-called Services.
These are building blocks for Studies and can provide data/files, visualize results (2D, 3D), implement code in Jupyter notebooks or perform computations to execute simulations within a Study.", + "placement": "bottom" + }, { + "beforeClick": { + "selector": "osparc-test-id=dataTabBtn" + }, + "anchorEl": "osparc-test-id=dataTabBtn", + "text": "All the Data of the Studies you have access to can be explored here.", + "placement": "bottom" + }] + }, + "navbar": { + "id": "navbar", + "name": "Navigation Bar", + "description": "Introduction to the Navigation Bar", + "context": "osparc-test-id=navigationBar", + "steps": [{ + "beforeClick": { + "selector": "osparc-test-id=notificationsButton", + "event": "tap" + }, + "anchorEl": "osparc-test-id=notificationsContainer", + "text": "By clicking on the Bell, you will see notifications about which Studies, Templates and Organizations have been shared with you.", + "placement": "bottom" + }, { + "beforeClick": { + "selector": "osparc-test-id=helpNavigationBtn", + "action": "open" + }, + "anchorEl": "osparc-test-id=helpNavigationMenu", + "text": "Under the question mark, you will find Manuals, Support and ways to give us Feedback. It also provides quick access to other Guided Tours.", + "placement": "left" + }, { + "beforeClick": { + "selector": "osparc-test-id=userMenuBtn", + "action": "open" + }, + "anchorEl": "osparc-test-id=userMenuMenu", + "text": "The User Menu gives you access to Your Account, Organizations and more.", + "placement": "left" + }] + } +} diff --git a/services/static-webserver/client/source/resource/osparc/tours/s4l_tours.json b/services/static-webserver/client/source/resource/osparc/tours/s4l_tours.json index 492544fa598..0e5f056a68c 100644 --- a/services/static-webserver/client/source/resource/osparc/tours/s4l_tours.json +++ b/services/static-webserver/client/source/resource/osparc/tours/s4l_tours.json @@ -1,4 +1,39 @@ { + "projects": { + "id": "projects", + "name": "Projects", + "description": "All you need to know about Project handling", + "context": "osparc-test-id=newPlusBtn", + "steps": [{ + "beforeClick": { + "selector": "osparc-test-id=newPlusBtn", + "action": "open" + }, + "anchorEl": "osparc-test-id=newPlusMenu", + "title": "Start Sim4Life and more", + "text": "Clicking on the (+) New button allows you to create new Sim4Life projects or new Folders to organize the projects", + "placement": "right" + }, { + "anchorEl": "osparc-test-id=searchBarFilter-textField-study", + "title": "Filter and Search", + "text": "This tool allows you to search Projects, Tutorials and Services.
You can search and filter by:
- Title, description, owner, id...
- Tags
- Shared with", + "placement": "bottom" + }, { + "beforeClick": { + "selector": "osparc-test-id=studyItemMenuButton", + "action": "open" + }, + "anchorEl": "osparc-test-id=studyItemMenuMenu", + "title": "More options button", + "text": "On the Project card, you can use the three dots button to access more information and operation on the Project.", + "placement": "left" + }, { + "anchorEl": "osparc-test-id=updateStudyBtn", + "title": "Update Services", + "text": "On the Project card, you can use the Update button to update the corresponding service to the latest version.", + "placement": "bottom" + }] + }, "dashboard": { "id": "dashboard", "name": "Dashboard", @@ -28,7 +63,7 @@ "selector": "osparc-test-id=servicesTabBtn" }, "anchorEl": "osparc-test-id=servicesTabBtn", - "text": "Every Project in Sim4Life is composed of at lease one so-called Service.
Services are building blocks for Projects and can provide data/files, visualize results (2D, 3D), implement code in Jupyter notebooks or perform computations to execute simulations within a Project.", + "text": "Every Project in Sim4Life is composed of at least one so-called Service.
Services are building blocks for Projects and can provide data/files, visualize results (2D, 3D), implement code in Jupyter notebooks or perform computations to execute simulations within a Project.", "placement": "bottom" }] }, @@ -62,39 +97,5 @@ "text": "The User Menu gives you access to Your Account, Billing Center, Preferences, Organizations and more.", "placement": "left" }] - }, - "projects": { - "id": "projects", - "name": "Projects", - "description": "All you need to know about Project handling", - "context": "osparc-test-id=studiesTabBtn", - "steps": [{ - "beforeClick": { - "selector": "osparc-test-id=studiesTabBtn" - }, - "anchorEl": "osparc-test-id=startS4LButton", - "title": "Start Sim4Life", - "text": "Clicking on this (+) Start Sim4Life button, allows you to create and open a new Sim4Life project", - "placement": "right" - }, { - "anchorEl": "osparc-test-id=searchBarFilter-textField-study", - "title": "Filter and Search", - "text": "This tool allows you to filter Projects, Tutorials and Services.
You can search and filter by:
- Title, description, owner, id...
- Tags
- Shared with", - "placement": "bottom" - }, { - "beforeClick": { - "selector": "osparc-test-id=studyItemMenuButton", - "action": "open" - }, - "anchorEl": "osparc-test-id=studyItemMenuMenu", - "title": "More options button", - "text": "On the Project card, you can use the three dots button to access more information and operation on the Project.", - "placement": "left" - }, { - "anchorEl": "osparc-test-id=updateStudyBtn", - "title": "Update Services", - "text": "On the Project card, you can use the Update button to update the corresponding service to the latest version.", - "placement": "bottom" - }] } } diff --git a/services/static-webserver/client/source/resource/osparc/tours/s4llite_tours.json b/services/static-webserver/client/source/resource/osparc/tours/s4llite_tours.json index e1e509a6f83..7dfbd752dbc 100644 --- a/services/static-webserver/client/source/resource/osparc/tours/s4llite_tours.json +++ b/services/static-webserver/client/source/resource/osparc/tours/s4llite_tours.json @@ -1,4 +1,39 @@ { + "projects": { + "id": "projects", + "name": "Projects", + "description": "All you need to know about Project handling", + "context": "osparc-test-id=newPlusBtn", + "steps": [{ + "beforeClick": { + "selector": "osparc-test-id=newPlusBtn", + "action": "open" + }, + "anchorEl": "osparc-test-id=newPlusMenu", + "title": "Start Sim4Life.lite", + "text": "Clicking on the (+) New button, allows you to create new Sim4Life.lite projects or new Folders to organize the projects", + "placement": "right" + }, { + "anchorEl": "osparc-test-id=searchBarFilter-textField-study", + "title": "Filter and Search", + "text": "This tool allows you to filter Projects and Tutorials.
You can search and filter by:
- Title, description, owner, id...
- Tags
- Shared with", + "placement": "bottom" + }, { + "beforeClick": { + "selector": "osparc-test-id=studyItemMenuButton", + "action": "open" + }, + "anchorEl": "osparc-test-id=studyItemMenuMenu", + "title": "More options button", + "text": "On the Project card, you can use the three dots button to access more information and operation on the Project.", + "placement": "left" + }, { + "anchorEl": "osparc-test-id=updateStudyBtn", + "title": "Update Services", + "text": "On the Project card, you can use the Update button to update the corresponding service to the latest version.", + "placement": "bottom" + }] + }, "dashboard": { "id": "dashboard", "name": "Dashboard", @@ -52,39 +87,5 @@ "text": "The User Menu gives you access to Your Account, Preferences, Organizations and more.", "placement": "left" }] - }, - "projects": { - "id": "projects", - "name": "Projects", - "description": "All you need to know about Project handling", - "context": "osparc-test-id=studiesTabBtn", - "steps": [{ - "beforeClick": { - "selector": "osparc-test-id=studiesTabBtn" - }, - "anchorEl": "osparc-test-id=startS4LButton", - "title": "Start Sim4Life.lite", - "text": "Clicking on this (+) Start Sim4Life.lite button, allows you to create and open a new Sim4Life.lite project", - "placement": "right" - }, { - "anchorEl": "osparc-test-id=searchBarFilter-textField-study", - "title": "Filter and Search", - "text": "This tool allows you to filter Projects and Tutorials.
You can search and filter by:
- Title, description, owner, id...
- Tags
- Shared with", - "placement": "bottom" - }, { - "beforeClick": { - "selector": "osparc-test-id=studyItemMenuButton", - "action": "open" - }, - "anchorEl": "osparc-test-id=studyItemMenuMenu", - "title": "More options button", - "text": "On the Project card, you can use the three dots button to access more information and operation on the Project.", - "placement": "left" - }, { - "anchorEl": "osparc-test-id=updateStudyBtn", - "title": "Update Services", - "text": "On the Project card, you can use the Update button to update the corresponding service to the latest version.", - "placement": "bottom" - }] } } diff --git a/services/static-webserver/client/source/resource/osparc/trash-text.svg b/services/static-webserver/client/source/resource/osparc/trash-text.svg new file mode 100644 index 00000000000..0cbfe135be5 --- /dev/null +++ b/services/static-webserver/client/source/resource/osparc/trash-text.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/services/static-webserver/client/source/resource/osparc/ui_config.json b/services/static-webserver/client/source/resource/osparc/ui_config.json new file mode 100644 index 00000000000..826f64cf9a9 --- /dev/null +++ b/services/static-webserver/client/source/resource/osparc/ui_config.json @@ -0,0 +1,166 @@ +{ + "tis": { + "newStudies": { + "categories": [{ + "id": "precomputed", + "title": "Precomputed" + }, { + "id": "personalized", + "title": "Personalized", + "description": "In the process, TIP will launch simulations on AWS.
The associated resource costs will be deducted from your Credits." + }], + "resources": [{ + "resourceType": "template", + "expectedTemplateLabel": "TI Planning Tool", + "title": "Classic TI", + "newStudyLabel": "Classic TI", + "category": "precomputed", + "idToWidget": "newTIPlanButton" + }, { + "resourceType": "template", + "expectedTemplateLabel": "mcTI Planning Tool", + "title": "Multichannel TI", + "newStudyLabel": "Multichannel TI", + "category": "precomputed" + }, { + "resourceType": "template", + "expectedTemplateLabel": "pmTI Planning Tool", + "title": "Phase-modulation TI", + "newStudyLabel": "Phase-modulation TI", + "category": "precomputed" + }, { + "resourceType": "template", + "expectedTemplateLabel": "personalized TI Planning Tool", + "title": "Personalized
Classic TI", + "newStudyLabel": "Personalized Classic TI", + "category": "personalized", + "idToWidget": "newPTIPlanButton" + }, { + "resourceType": "template", + "expectedTemplateLabel": "personalized mcTI Planning Tool", + "title": "Personalized
Multichannel TI", + "newStudyLabel": "Personalized Multichannel TI", + "category": "personalized" + }, { + "resourceType": "template", + "expectedTemplateLabel": "personalized pmTI Planning Tool", + "title": "Personalized
Phase-modulation TI", + "newStudyLabel": "Personalized Phase-modulation TI", + "category": "personalized" + }] + } + }, + "tiplite": { + "newStudies": { + "categories": [{ + "id": "precomputed", + "title": "Precomputed" + }, { + "id": "personalized", + "title": "Personalized", + "description": "In the process, TIP will launch simulations on AWS.
The associated resource costs will be deducted from your Credits." + }], + "resources": [{ + "resourceType": "template", + "expectedTemplateLabel": "TI Planning Tool", + "title": "Classic TI", + "newStudyLabel": "Classic TI", + "category": "precomputed", + "idToWidget": "newTIPlanButton" + }, { + "showDisabled": true, + "reason": "Not available in ${replace_me_product_name}", + "title": "Multichannel TI", + "category": "precomputed" + }, { + "showDisabled": true, + "reason": "Not available in ${replace_me_product_name}", + "title": "Phase-modulation TI", + "category": "precomputed" + }, { + "showDisabled": true, + "reason": "Not available in ${replace_me_product_name}", + "title": "Personalized
Classic TI", + "category": "personalized", + "idToWidget": "newPTIPlanButton" + }, { + "showDisabled": true, + "reason": "Not available in ${replace_me_product_name}", + "title": "Personalized
Multichannel TI", + "category": "personalized" + }, { + "showDisabled": true, + "reason": "Not available in ${replace_me_product_name}", + "title": "Personalized
Phase-modulation TI", + "category": "personalized" + }] + } + }, + "s4l": { + "plusButton": { + "resources": [{ + "resourceType": "service", + "expectedKey": "simcore/services/dynamic/s4l-ui", + "title": "Sim4Life", + "newStudyLabel": "New Sim4Life", + "idToWidget": "startS4LButton" + }, { + "resourceType": "service", + "expectedKey": "simcore/services/dynamic/s4l-jupyter", + "title": "Jupyter Lab", + "newStudyLabel": "New S4L Jupyter Lab" + }, { + "resourceType": "service", + "expectedKey": "simcore/services/dynamic/iseg-web", + "title": "Segmentation", + "newStudyLabel": "Segmentation" + }] + } + }, + "s4lacad": { + "plusButton": { + "resources": [{ + "resourceType": "service", + "expectedKey": "simcore/services/dynamic/s4l-ui", + "title": "Sim4Life", + "newStudyLabel": "New Sim4Life", + "idToWidget": "startS4LButton" + }, { + "resourceType": "service", + "expectedKey": "simcore/services/dynamic/s4l-jupyter", + "title": "Jupyter Lab", + "newStudyLabel": "New S4L Jupyter Lab" + }, { + "resourceType": "service", + "expectedKey": "simcore/services/dynamic/iseg-web", + "title": "Segmentation", + "newStudyLabel": "Segmentation" + }] + } + }, + "s4llite": { + "plusButton": { + "resources": [{ + "resourceType": "service", + "expectedKey": "simcore/services/dynamic/s4l-ui-lite", + "title": "${replace_me_product_name}", + "newStudyLabel": "New Project", + "idToWidget": "startS4LButton" + }] + } + }, + "osparc": { + "plusButton": { + "resources": [{ + "resourceType": "study", + "icon": "@FontAwesome5Solid/file/18", + "title": "Empty Study", + "newStudyLabel": "New Study", + "idToWidget": "emptyStudyBtn" + }, { + "resourceType": "service", + "myMostUsed": 2 + }] + } + } +} diff --git a/services/static-webserver/client/source/resource/schemas/product-ui.json b/services/static-webserver/client/source/resource/schemas/product-ui.json new file mode 100644 index 00000000000..a1454b8e6dd --- /dev/null +++ b/services/static-webserver/client/source/resource/schemas/product-ui.json @@ -0,0 +1,97 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "plusButton": { + "$ref": "#/definitions/buttonConfig" + }, + "newStudies": { + "$ref": "#/definitions/buttonConfig" + } + }, + "additionalProperties": false, + "definitions": { + "buttonConfig": { + "type": "object", + "properties": { + "categories": { + "type": "array", + "items": { + "type": "object", + "properties": { + "id": { "type": "string" }, + "title": { "type": "string" }, + "description": { "type": "string" } + }, + "required": ["id", "title"] + } + }, + "resources": { + "type": "array", + "items": { + "oneOf": [{ + "type": "object", + "properties": { + "resourceType": { "enum": ["study"] }, + "title": { "type": "string" }, + "icon": { "type": "string" }, + "newStudyLabel": { "type": "string" }, + "idToWidget": { "type": "string" } + }, + "required": ["resourceType", "title"] + }, { + "type": "object", + "properties": { + "resourceType": { "enum": ["template"] }, + "expectedTemplateLabel": { "type": "string" }, + "title": { "type": "string" }, + "icon": { "type": "string" }, + "newStudyLabel": { "type": "string" }, + "category": { "type": "string" }, + "idToWidget": { "type": "string" } + }, + "required": ["resourceType", "expectedTemplateLabel", "title"] + }, { + "type": "object", + "properties": { + "resourceType": { "enum": ["service"] }, + "expectedKey": { "type": "string" }, + "title": { "type": "string" }, + "icon": { "type": "string" }, + "newStudyLabel": { "type": "string" }, + "category": { 
"type": "string" }, + "idToWidget": { "type": "string" } + }, + "required": ["resourceType", "expectedKey", "title"] + }, { + "type": "object", + "properties": { + "resourceType": { "enum": ["service"] }, + "myMostUsed": { "type": "integer" }, + "category": { "type": "string" } + }, + "required": ["resourceType", "myMostUsed"] + }, { + "type": "object", + "properties": { + "showDisabled": { + "type": "boolean", + "enum": [true] + }, + "title": { "type": "string" }, + "icon": { "type": "string" }, + "reason": { "type": "string" }, + "newStudyLabel": { "type": "string" }, + "category": { "type": "string" }, + "idToWidget": { "type": "string" } + }, + "required": ["showDisabled", "title"] + }] + }, + "additionalProperties": false + } + }, + "additionalProperties": false + } + } +} diff --git a/services/static-webserver/client/source/translation/en.po b/services/static-webserver/client/source/translation/en.po new file mode 100644 index 00000000000..cdb1069d242 --- /dev/null +++ b/services/static-webserver/client/source/translation/en.po @@ -0,0 +1,4411 @@ +#: osparc/Application.js +msgid "You have been logged out" +msgstr "" + +#: osparc/Application.js +msgid "New Version Released" +msgstr "" + +#: osparc/Application.js +msgid "Privacy Policy" +msgstr "" + +#: osparc/Application.js +msgid "Privacy Policy and License Terms" +msgstr "" + +#: osparc/Application.js +msgid "You have been logged out" +msgstr "" + +#: osparc/desktop/credits/ResourceInTableViewer.js +msgid "Reload" +msgstr "" + +#: osparc/desktop/credits/UsageTable.js +msgid "Service" +msgstr "" + +#: osparc/info/ServiceLarge.js +msgid "Copy to clipboard" +msgstr "" + +#: osparc/utils/Utils.js +msgid "Today" +msgstr "" + +#: osparc/utils/Utils.js +msgid "Yesterday" +msgstr "" + +#: osparc/utils/Utils.js +msgid "Tomorrow" +msgstr "" + +#: osparc/utils/Utils.js +msgid "Your account will expire today." +msgstr "" + +#: osparc/utils/Utils.js +msgid "Your account will expire tomorrow." +msgstr "" + +#: osparc/utils/Utils.js +msgid "Your account will expire in " +msgstr "" + +#: osparc/utils/Utils.js +msgid " days." +msgstr "" + +#: osparc/utils/Utils.js +msgid "Please contact us via email:" +msgstr "" + +#: osparc/utils/Utils.js +msgid "To use all " +msgstr "" + +#: osparc/utils/Utils.js +msgid ", please send us an e-mail to create an account:" +msgstr "" + +#: osparc/utils/Utils.js +msgid "Copied to clipboard" +msgstr "" + +#: osparc/WindowSizeTracker.js +msgid "This app performs better for larger window size: " +msgstr "" + +#: osparc/WindowSizeTracker.js +msgid "Touchscreen devices are not supported yet." 
+msgstr "" + +#: osparc/auth/Manager.js +msgid "Unsuccessful Login" +msgstr "" + +#: osparc/ErrorPage.js +msgid "Support email" +msgstr "" + +#: osparc/navigation/UserMenu.js +msgid "Log in" +msgstr "" + +#: osparc/store/Support.js +msgid "Please send us an email to:" +msgstr "" + +#: osparc/product/Utils.js +msgid "projects" +msgstr "" + +#: osparc/product/Utils.js +msgid "project" +msgstr "" + +#: osparc/product/Utils.js +msgid "studies" +msgstr "" + +#: osparc/product/Utils.js +msgid "study" +msgstr "" + +#: osparc/product/Utils.js +msgid "tutorials" +msgstr "" + +#: osparc/product/Utils.js +msgid "tutorial" +msgstr "" + +#: osparc/product/Utils.js +msgid "templates" +msgstr "" + +#: osparc/product/Utils.js +msgid "template" +msgstr "" + +#: osparc/product/Utils.js +msgid "service" +msgstr "" + +#: osparc/product/Utils.js +msgid "services" +msgstr "" + +#: osparc/NewRelease.js +msgid "We are pleased to announce that some new features were deployed for you!" +msgstr "" + +#: osparc/NewRelease.js +msgid "What's new in " +msgstr "" + +#: osparc/CookiePolicy.js +msgid "This website applies cookies to personalize your experience and to make our site easier to navigate. By visiting the site, you agree to the " +msgstr "" + +#: osparc/CookiePolicy.js +msgid "By visiting the site, you agree to the " +msgstr "" + +#: osparc/CookiePolicy.js +msgid "It also uses third party software and libraries. By visiting the site, you agree to the " +msgstr "" + +#: osparc/CookiePolicy.js +msgid "Licensing." +msgstr "" + +#: osparc/metadata/QualityEditor.js +msgid "Accept" +msgstr "" + +#: osparc/data/Resources.js +msgid "You have been logged out. Your cookie might have expired." +msgstr "" + +#: osparc/FlashMessenger.js +msgid "Oops... something went wrong" +msgstr "" + +#: osparc/FlashMessenger.js +msgid "No message" +msgstr "" + +#: osparc/desktop/MainPage.js +msgid "Do you want to close " +msgstr "" + +#: osparc/desktop/MainPage.js +msgid "Make sure you saved your changes to:" +msgstr "" + +#: osparc/desktop/MainPage.js +msgid "- current smash file" +msgstr "" + +#: osparc/desktop/MainPage.js +msgid "- current notebooks (jupyterlab session will be terminated)" +msgstr "" + +#: osparc/desktop/MainPage.js +msgid "Close" +msgstr "" + +#: osparc/share/CollaboratorsStudy.js +msgid "Yes" +msgstr "" + +#: osparc/desktop/MainPage.js +msgid "Do you want to save and close " +msgstr "" + +#: osparc/desktop/MainPage.js +msgid "Save & Close" +msgstr "" + +#: osparc/desktop/MainPage.js +msgid "Platform logger" +msgstr "" + +#: osparc/desktop/MainPage.js +msgid "Started template creation and added to the background tasks" +msgstr "" + +#: osparc/dashboard/StudyBrowser.js +msgid "Something went wrong while duplicating the study
" +msgstr "" + +#: osparc/desktop/MainPage.js +msgid "Loading Snapshot" +msgstr "" + +#: osparc/desktop/MainPage.js +msgid "Closing previous snapshot..." +msgstr "" + +#: osparc/desktop/MainPage.js +msgid "No snapshot found" +msgstr "" + +#: osparc/desktop/MainPageHandler.js +msgid "Study not found" +msgstr "" + +#: osparc/desktop/MainPage.js +msgid "Loading Iteration" +msgstr "" + +#: osparc/desktop/MainPage.js +msgid "Closing..." +msgstr "" + +#: osparc/desktop/MainPage.js +msgid "Iteration not found" +msgstr "" + +#: osparc/MaintenanceTracker.js +msgid "The service is under maintenance. Please check back later" +msgstr "" + +#: osparc/CookieExpirationTracker.js +msgid "Your session has expired" +msgstr "" + +#: osparc/NewUITracker.js +msgid "A new version of the application is now available." +msgstr "" + +#: osparc/NewUITracker.js +msgid "Click the Reload button to get the latest features." +msgstr "" + +#: osparc/ui/switch/ThemeSwitcher.js +msgid "Dark theme" +msgstr "" + +#: osparc/ui/switch/ThemeSwitcher.js +msgid "Light theme" +msgstr "" + +#: osparc/metadata/QualityEditor.js +msgid "Ten Simple Rules" +msgstr "" + +#: osparc/dashboard/CardBase.js +msgid "TSR:" +msgstr "" + +#: osparc/share/NewCollaboratorsManager.js +msgid "Share" +msgstr "" + +#: osparc/dashboard/CardBase.js +msgid "App mode" +msgstr "" + +#: osparc/service/ServiceListItem.js +msgid "Hits: " +msgstr "" + +#: osparc/dashboard/CardBase.js +msgid "Unaccessible service(s):" +msgstr "" + +#: osparc/dashboard/CardBase.js +msgid "Service(s) retired, please update" +msgstr "" + +#: osparc/dashboard/CardBase.js +msgid "Service(s) deprecated, please update" +msgstr "" + +#: osparc/dashboard/CardBase.js +msgid "Update available" +msgstr "" + +#: osparc/navigation/PrevNextButtons.js +msgid "Running" +msgstr "" + +#: osparc/dashboard/CardBase.js +msgid "Ran successfully" +msgstr "" + +#: osparc/dashboard/CardBase.js +msgid "Run aborted" +msgstr "" + +#: osparc/dashboard/CardBase.js +msgid "Unsuccessful Run" +msgstr "" + +#: osparc/dashboard/CardBase.js +msgid "A user" +msgstr "" + +#: osparc/dashboard/CardBase.js +msgid " is closing it..." +msgstr "" + +#: osparc/dashboard/CardBase.js +msgid " is cloning it..." +msgstr "" + +#: osparc/dashboard/CardBase.js +msgid " is exporting it..." +msgstr "" + +#: osparc/dashboard/CardBase.js +msgid " is opening it..." +msgstr "" + +#: osparc/dashboard/CardBase.js +msgid " is using it." +msgstr "" + +#: osparc/dashboard/CardBase.js +msgid "Embargoed
Credits Required" +msgstr "" + +#: osparc/dashboard/StudyBrowser.js +msgid "Empty" +msgstr "" + +#: osparc/product/quickStart/s4llite/Slides.js +msgid "Quick Start" +msgstr "" + +#: osparc/tours/Manager.js +msgid "Guided Tours" +msgstr "" + +#: osparc/desktop/WorkbenchView.js +msgid "Information" +msgstr "" + +#: osparc/store/Support.js +msgid "To create an issue, you must have an account and be already logged-in." +msgstr "" + +#: osparc/store/Support.js +msgid "Continue" +msgstr "" + +#: osparc/store/Support.js +msgid "Log in in " +msgstr "" + +#: osparc/store/Support.js +msgid "Release Notes" +msgstr "" + +#: osparc/store/Support.js +msgid "Registration is currently only available with an invitation." +msgstr "" + +#: osparc/store/Support.js +msgid "Please request access to " +msgstr "" + +#: osparc/store/Data.js +msgid "Oops... more than 10.000 items to be listed here. Maybe it's time to make a folder :)." +msgstr "" + +#: osparc/store/Data.js +msgid "Unsuccessful file copy" +msgstr "" + +#: osparc/store/Data.js +msgid "Unsuccessful file deletion" +msgstr "" + +#: osparc/notification/RibbonNotification.js +msgid "Maintenance scheduled." +msgstr "" + +#: osparc/notification/RibbonNotification.js +msgid "Please save your work and logout." +msgstr "" + +#: osparc/notification/RibbonNotification.js +msgid "Oops, your window is a bit small!" +msgstr "" + +#: osparc/product/quickStart/Utils.js +msgid "Don't show again" +msgstr "" + +#: osparc/TooSmallDialog.js +msgid "Window size too small" +msgstr "" + +#: osparc/TooSmallDialog.js +msgid "The application can't perform in such a small window." +msgstr "" + +#: osparc/navigation/UserMenu.js +msgid "Exit" +msgstr "" + +#: osparc/navigation/UserMenu.js +msgid "Log out" +msgstr "" + +#: osparc/desktop/credits/Utils.js +msgid "You can't access this information" +msgstr "" + +#: osparc/desktop/credits/Utils.js +msgid "You can't access these operations" +msgstr "" + +#: osparc/share/Collaborators.js +msgid "Any logged-in user with access to the " +msgstr "" + +#: osparc/share/Collaborators.js +msgid " can open it" +msgstr "" + +#: osparc/share/Collaborators.js +msgid "Copy link" +msgstr "" + +#: osparc/share/Collaborators.js +msgid "Anyone on the internet with the link can open this " +msgstr "" + +#: osparc/share/Collaborators.js +msgid "Any logged-in user with the link can copy and open this " +msgstr "" + +#: osparc/dashboard/SearchBarFilter.js +msgid "Shared with" +msgstr "" + +#: osparc/share/Collaborators.js +msgid "Collaborator can't be removed:" +msgstr "" + +#: osparc/share/Collaborators.js +msgid " needs at least one owner." +msgstr "" + +#: osparc/share/Collaborators.js +msgid "You might want to delete it instead." +msgstr "" + +#: osparc/ui/list/MemberListItem.js +msgid "Leave" +msgstr "" + +#: osparc/auth/core/Utils.js +msgid "Passwords do not match" +msgstr "" + +#: osparc/auth/core/Utils.js +msgid "Invalid phone number. Please [+][country code][phone number]" +msgstr "" + +#: osparc/auth/core/Utils.js +msgid "Invalid email address.
Please register using your university email address" +msgstr "" + +#: osparc/pricing/UnitEditor.js +msgid "Cancel" +msgstr "" + +#: osparc/po/PreRegistration.js +msgid "Submit" +msgstr "" + +#: osparc/form/json/JsonSchemaForm.js +msgid "There was an issue generating the form or one or more schemas failed to validate. Check your JavaScript console for more details." +msgstr "" + +#: osparc/auth/LoginWithDecorators.js +msgid "Your account has been created.
You can now use your credentials to log in." +msgstr "" + +#: osparc/navigation/NavigationBar.js +msgid "This is TIP.lite, a light version of TIP.
Request access to TIP." +msgstr "" + +#: osparc/product/quickStart/tis/Dashboard.js +msgid "Dashboard" +msgstr "" + +#: osparc/navigation/NavigationBar.js +msgid "Read only" +msgstr "" + +#: osparc/ui/window/Confirmation.js +msgid "Confirmation" +msgstr "" + +#: osparc/snapshots/Loading.js +msgid "Loading " +msgstr "" + +#: osparc/desktop/MainPageHandler.js +msgid "is already opened by" +msgstr "" + +#: osparc/desktop/MainPageHandler.js +msgid "another user." +msgstr "" + +#: osparc/desktop/MainPageHandler.js +msgid "We encountered an issue with the" +msgstr "" + +#: osparc/desktop/MainPageHandler.js +msgid "Please contact support." +msgstr "" + +#: osparc/dashboard/Dashboard.js +msgid "SERVICES" +msgstr "" + +#: osparc/dashboard/Dashboard.js +msgid "DATA" +msgstr "" + +#: osparc/desktop/StudyEditor.js +msgid "You do not have write permissions.
Your changes will not be saved." +msgstr "" + +#: osparc/data/model/IframeHandler.js +msgid "Starting" +msgstr "" + +#: osparc/desktop/StudyEditor.js +msgid "Opening " +msgstr "" + +#: osparc/desktop/StudyEditor.js +msgid " is already open" +msgstr "" + +#: osparc/desktop/StudyEditor.js +msgid "Error opening study" +msgstr "" + +#: osparc/desktop/StudyEditor.js +msgid "The Study contains more than " +msgstr "" + +#: osparc/desktop/StudyEditor.js +msgid " Interactive services." +msgstr "" + +#: osparc/desktop/StudyEditor.js +msgid "Please start them manually." +msgstr "" + +#: osparc/desktop/SlideshowToolbar.js +msgid "Edit App Mode" +msgstr "" + +#: osparc/desktop/StudyEditor.js +msgid "The pipeline is up-to-date. Do you want to re-run it?" +msgstr "" + +#: osparc/widget/PreparingInputs.js +msgid "Re-run" +msgstr "" + +#: osparc/desktop/StartStopButtons.js +msgid "Run" +msgstr "" + +#: osparc/desktop/StudyEditor.js +msgid "Take Snapshot" +msgstr "" + +#: osparc/desktop/WorkbenchView.js +msgid "Checkpoints" +msgstr "" + +#: osparc/desktop/WorkbenchView.js +msgid "Iterations" +msgstr "" + +#: osparc/desktop/StudyEditor.js +msgid "Error saving the study" +msgstr "" + +#: osparc/data/PollTask.js +msgid "Unsuccessful polling status" +msgstr "" + +#: osparc/dashboard/StudyBrowserHeader.js +msgid "My Workspace" +msgstr "" + +#: osparc/study/Utils.js +msgid "CREATING " +msgstr "" + +#: osparc/service/Utils.js +msgid "Parameter" +msgstr "" + +#: osparc/service/Utils.js +msgid "File" +msgstr "" + +#: osparc/service/Utils.js +msgid "Iterator" +msgstr "" + +#: osparc/service/Utils.js +msgid "Computational" +msgstr "" + +#: osparc/desktop/WorkbenchView.js +msgid "Interactive" +msgstr "" + +#: osparc/service/Utils.js +msgid "Probe" +msgstr "" + +#: osparc/service/Utils.js +msgid "Service deprecated" +msgstr "" + +#: osparc/service/Utils.js +msgid "Please go back to the dashboard and Update the Service or download its data and upload it to an updated version" +msgstr "" + +#: osparc/service/Utils.js +msgid "Please instantiate an updated version" +msgstr "" + +#: osparc/service/Utils.js +msgid "Service retired" +msgstr "" + +#: osparc/service/Utils.js +msgid "Please download the Service data and upload it to an updated version" +msgstr "" + +#: osparc/service/Utils.js +msgid "Please Stop the Service and then Update it" +msgstr "" + +#: osparc/service/Utils.js +msgid "Please Update the Service" +msgstr "" + +#: osparc/service/Utils.js +msgid "It will be Retired: " +msgstr "" + +#: osparc/ui/basic/NodeStatusUI.js +msgid "Idle" +msgstr "" + +#: osparc/service/StatusUI.js +msgid "Waiting for resources" +msgstr "" + +#: osparc/service/StatusUI.js +msgid "Unsuccessful" +msgstr "" + +#: osparc/service/StatusUI.js +msgid "Ready" +msgstr "" + +#: osparc/service/StatusUI.js +msgid "Deprecated" +msgstr "" + +#: osparc/service/StatusUI.js +msgid "Retired" +msgstr "" + +#: osparc/service/StatusUI.js +msgid "Starting..." +msgstr "" + +#: osparc/service/StatusUI.js +msgid "Stopping..." +msgstr "" + +#: osparc/service/StatusUI.js +msgid "Pending..." +msgstr "" + +#: osparc/service/StatusUI.js +msgid "Pulling..." +msgstr "" + +#: osparc/service/StatusUI.js +msgid "Connecting..."
+msgstr "" + +#: osparc/task/Export.js +msgid "Exporting " +msgstr "" + +#: osparc/info/StudyUtils.js +msgid "Details" +msgstr "" + +#: osparc/dashboard/ResourceDetails.js +msgid "Credits required" +msgstr "" + +#: osparc/study/StudyOptions.js +msgid "Open" +msgstr "" + +#: osparc/dashboard/NewPlusButton.js +msgid "New" +msgstr "" + +#: osparc/dashboard/ResourceDetails.js +msgid "Some of your services are outdated. Please update to the latest version for better performance.\\\\\\\\n\\\\\\\\nDo you want to update now?" +msgstr "" + +#: osparc/workbench/ServiceCatalog.js +msgid "Version" +msgstr "" + +#: osparc/dashboard/ResourceDetails.js +msgid " Files..." +msgstr "" + +#: osparc/dashboard/ResourceDetails.js +msgid "Overview" +msgstr "" + +#: osparc/dashboard/ResourceDetails.js +msgid "Billing Settings" +msgstr "" + +#: osparc/node/TierSelectionView.js +msgid "Tiers" +msgstr "" + +#: osparc/dashboard/ResourceDetails.js +msgid "Pipeline View" +msgstr "" + +#: osparc/dashboard/ResourceDetails.js +msgid "Comments" +msgstr "" + +#: osparc/dashboard/ResourceDetails.js +msgid "Sharing" +msgstr "" + +#: osparc/info/MergedLarge.js +msgid "Classifiers" +msgstr "" + +#: osparc/info/MergedLarge.js +msgid "Quality" +msgstr "" + +#: osparc/desktop/credits/UsageTable.js +msgid "Tags" +msgstr "" + +#: osparc/dashboard/ResourceDetails.js +msgid "Services Updates" +msgstr "" + +#: osparc/node/BootOptionsView.js +msgid "Boot Options" +msgstr "" + +#: osparc/dashboard/ResourceDetails.js +msgid "Publish " +msgstr "" + +#: osparc/dashboard/ResourceBrowserBase.js +msgid "You need to be logged in to create a study" +msgstr "" + +#: osparc/dashboard/ResourceBrowserBase.js +msgid "Group" +msgstr "" + +#: osparc/dashboard/ResourceBrowserBase.js +msgid "None" +msgstr "" + +#: osparc/dashboard/ResourceBrowserBase.js +msgid "Grid view" +msgstr "" + +#: osparc/dashboard/ResourceBrowserBase.js +msgid "List view" +msgstr "" + +#: osparc/dashboard/StudyBrowser.js +msgid "Creating " +msgstr "" + +#: osparc/dashboard/WorkspaceButtonItem.js +msgid "Share..." +msgstr "" + +#: osparc/dashboard/ResourceBrowserBase.js +msgid "Tags..." 
+msgstr "" + +#: osparc/widget/StudyDataManager.js +msgid " Files" +msgstr "" + +#: osparc/tours/Manager.js +msgid "This collection of Guided Tours will show you how to use the platform:" +msgstr "" + +#: osparc/desktop/credits/BuyCreditsStepper.js +msgid "Payment " +msgstr "" + +#: osparc/share/CollaboratorsWorkspace.js +msgid "Workspace successfully shared" +msgstr "" + +#: osparc/share/CollaboratorsWorkspace.js +msgid "Something went wrong while sharing the workspace" +msgstr "" + +#: osparc/desktop/organizations/MembersList.js +msgid " successfully removed" +msgstr "" + +#: osparc/desktop/organizations/MembersList.js +msgid "Something went wrong while removing " +msgstr "" + +#: osparc/share/CollaboratorsStudy.js +msgid "Demote" +msgstr "" + +#: osparc/share/CollaboratorsTag.js +msgid "Tag successfully shared" +msgstr "" + +#: osparc/share/CollaboratorsTag.js +msgid "Something went wrong while sharing the tag" +msgstr "" + +#: osparc/data/Roles.js +msgid "Restricted Member" +msgstr "" + +#: osparc/data/Roles.js +msgid "Restricted member: no Read access" +msgstr "" + +#: osparc/data/Roles.js +msgid "- can access content shared within the Organization" +msgstr "" + +#: osparc/data/Roles.js +msgid "Member" +msgstr "" + +#: osparc/data/Roles.js +msgid "Member: Read access" +msgstr "" + +#: osparc/data/Roles.js +msgid "- can see other members" +msgstr "" + +#: osparc/data/Roles.js +msgid "- can share with other members" +msgstr "" + +#: osparc/data/Roles.js +msgid "Manager" +msgstr "" + +#: osparc/data/Roles.js +msgid "Manager: Read/Write access" +msgstr "" + +#: osparc/data/Roles.js +msgid "- can Add/Delete members" +msgstr "" + +#: osparc/data/Roles.js +msgid "- can Promote/Demote members" +msgstr "" + +#: osparc/data/Roles.js +msgid "- can Edit Organization details" +msgstr "" + +#: osparc/data/Roles.js +msgid "Administrator" +msgstr "" + +#: osparc/data/Roles.js +msgid "Admin: Read/Write/Delete access" +msgstr "" + +#: osparc/data/Roles.js +msgid "- can Delete the Organization" +msgstr "" + +#: osparc/desktop/credits/CheckoutsTable.js +msgid "User" +msgstr "" + +#: osparc/data/Roles.js +msgid "User: Read access" +msgstr "" + +#: osparc/data/Roles.js +msgid "- can open it" +msgstr "" + +#: osparc/info/StudyUtils.js +msgid "Editor" +msgstr "" + +#: osparc/data/Roles.js +msgid "Editor: Read/Write access" +msgstr "" + +#: osparc/data/Roles.js +msgid "- can make changes" +msgstr "" + +#: osparc/data/Roles.js +msgid "- can share it" +msgstr "" + +#: osparc/dashboard/SortedByMenuButton.js +msgid "Owner" +msgstr "" + +#: osparc/data/Roles.js +msgid "Owner: Read/Write/Delete access" +msgstr "" + +#: osparc/data/Roles.js +msgid "- can delete it" +msgstr "" + +#: osparc/data/Roles.js +msgid "- can use it" +msgstr "" + +#: osparc/data/Roles.js +msgid "- can use the credits" +msgstr "" + +#: osparc/data/Roles.js +msgid "Accountant" +msgstr "" + +#: osparc/data/Roles.js +msgid "Accountant: Read/Write access" +msgstr "" + +#: osparc/data/Roles.js +msgid "- can Edit Credit Account details" +msgstr "" + +#: osparc/data/Roles.js +msgid "Viewer" +msgstr "" + +#: osparc/data/Roles.js +msgid "Viewer: Read access" +msgstr "" + +#: osparc/data/Roles.js +msgid "- can inspect the content and open " +msgstr "" + +#: osparc/data/Roles.js +msgid "- can add " +msgstr "" + +#: osparc/data/Roles.js +msgid "- can add folders" +msgstr "" + +#: osparc/data/Roles.js +msgid "- can rename workspace" +msgstr "" + +#: osparc/data/Roles.js +msgid "Roles" +msgstr "" + +#: osparc/share/AddCollaborators.js +msgid "Share with..." 
+msgstr "" + +#: osparc/share/AddCollaborators.js +msgid "My Organizations..." +msgstr "" + +#: osparc/share/AddCollaborators.js +msgid "Publish for" +msgstr "" + +#: osparc/share/CollaboratorsStudy.js +msgid " successfully shared" +msgstr "" + +#: osparc/share/CollaboratorsStudy.js +msgid "Something went wrong while sharing the " +msgstr "" + +#: osparc/share/CollaboratorsStudy.js +msgid "Sharee permissions" +msgstr "" + +#: osparc/ui/form/FileInput.js +msgid "Select File..." +msgstr "" + +#: osparc/auth/ui/LoginView.js +msgid "Sign in" +msgstr "" + +#: osparc/auth/ui/LoginView.js +msgid "Create Account" +msgstr "" + +#: osparc/auth/ui/RequestAccount.js +msgid "Request Account" +msgstr "" + +#: osparc/auth/ui/LoginView.js +msgid "Forgot Password?" +msgstr "" + +#: osparc/auth/ui/LoginView.js +msgid "Disclaimer" +msgstr "" + +#: osparc/ui/basic/PoweredByOsparc.js +msgid "powered by" +msgstr "" + +#: osparc/auth/ui/LoginView.js +msgid "email or password don't look correct" +msgstr "" + +#: osparc/auth/ui/RegistrationView.js +msgid "Registration" +msgstr "" + +#: osparc/auth/ui/RegistrationView.js +msgid "Type your email" +msgstr "" + +#: osparc/auth/ui/RegistrationView.js +msgid "Type a password" +msgstr "" + +#: osparc/auth/ui/RegistrationView.js +msgid "Retype the password" +msgstr "" + +#: osparc/auth/ui/RegistrationView.js +msgid "Cannot register user" +msgstr "" + +#: osparc/desktop/account/ProfilePage.js +msgid "First Name" +msgstr "" + +#: osparc/desktop/account/ProfilePage.js +msgid "Last Name" +msgstr "" + +#: osparc/auth/ui/RequestAccount.js +msgid "University Email" +msgstr "" + +#: osparc/po/Users.js +msgid "Email" +msgstr "" + +#: osparc/auth/ui/RequestAccount.js +msgid "Phone Number" +msgstr "" + +#: osparc/auth/ui/RequestAccount.js +msgid "Company Name" +msgstr "" + +#: osparc/auth/ui/RequestAccount.js +msgid "University" +msgstr "" + +#: osparc/auth/ui/RequestAccount.js +msgid "Organization" +msgstr "" + +#: osparc/auth/ui/RequestAccount.js +msgid "Research Group/Organization" +msgstr "" + +#: osparc/auth/ui/RequestAccount.js +msgid "Address" +msgstr "" + +#: osparc/auth/ui/RequestAccount.js +msgid "City" +msgstr "" + +#: osparc/auth/ui/RequestAccount.js +msgid "Postal code" +msgstr "" + +#: osparc/auth/ui/RequestAccount.js +msgid "Country" +msgstr "" + +#: osparc/auth/ui/RequestAccount.js +msgid "Application" +msgstr "" + +#: osparc/pricing/PlanEditor.js +msgid "Description" +msgstr "" + +#: osparc/auth/ui/RequestAccount.js +msgid "How did you hear about us?" +msgstr "" + +#: osparc/auth/ui/RequestAccount.js +msgid "I acknowledge that data will be processed in accordance to " +msgstr "" + +#: osparc/auth/ui/RequestAccount.js +msgid "Request" +msgstr "" + +#: osparc/auth/ui/RequestAccount.js +msgid "The request is being processed, you will hear from us in the coming hours" +msgstr "" + +#: osparc/auth/ui/RequestAccount.js +msgid "Reload Captcha" +msgstr "" + +#: osparc/auth/ui/RequestAccount.js +msgid "Type the 6 digits:" +msgstr "" + +#: osparc/auth/ui/VerifyPhoneNumberView.js +msgid "Two-Factor Authentication (2FA)" +msgstr "" + +#: osparc/auth/ui/VerifyPhoneNumberView.js +msgid "A text message will be sent to your mobile phone for authentication each time you log in." 
+msgstr "" + +#: osparc/auth/ui/VerifyPhoneNumberView.js +msgid "Send SMS" +msgstr "" + +#: osparc/auth/ui/VerifyPhoneNumberView.js +msgid "Type the SMS code" +msgstr "" + +#: osparc/file/FileDownloadLink.js +msgid "Validate" +msgstr "" + +#: osparc/auth/ui/VerifyPhoneNumberView.js +msgid "Skip phone registration and send code via email" +msgstr "" + +#: osparc/auth/ui/Login2FAValidationCodeView.js +msgid "Invalid code" +msgstr "" + +#: osparc/auth/ui/ResetPassView.js +msgid "Reset Password" +msgstr "" + +#: osparc/auth/ui/ResetPassRequestView.js +msgid "Type your registration email" +msgstr "" + +#: osparc/auth/ui/ResetPassRequestView.js +msgid "Could not request password reset" +msgstr "" + +#: osparc/desktop/account/ProfilePage.js +msgid "Your new password" +msgstr "" + +#: osparc/desktop/account/ProfilePage.js +msgid "Retype your new password" +msgstr "" + +#: osparc/auth/ui/ResetPassView.js +msgid "Could not reset password" +msgstr "" + +#: osparc/auth/ui/Login2FAValidationCodeView.js +msgid "Type code" +msgstr "" + +#: osparc/auth/ui/Login2FAValidationCodeView.js +msgid "Didn't receive the code? Click to resend" +msgstr "" + +#: osparc/auth/ui/Login2FAValidationCodeView.js +msgid "Via SMS" +msgstr "" + +#: osparc/auth/ui/Login2FAValidationCodeView.js +msgid "Via email" +msgstr "" + +#: osparc/navigation/UserMenu.js +msgid "Access Full TIP" +msgstr "" + +#: osparc/ui/basic/PoweredByOsparc.js +msgid "powered by " +msgstr "" + +#: osparc/workbench/BaseNodeUI.js +msgid "Information..." +msgstr "" + +#: osparc/dashboard/StudyBrowser.js +msgid "Convert to Pipeline" +msgstr "" + +#: osparc/navigation/StudyTitleWOptions.js +msgid "Convert to Standalone" +msgstr "" + +#: osparc/dashboard/FolderButtonItem.js +msgid "Restore" +msgstr "" + +#: osparc/navigation/StudyTitleWOptions.js +msgid "Platform Logs..." +msgstr "" + +#: osparc/task/TasksButton.js +msgid "Tasks" +msgstr "" + +#: osparc/jobs/JobsBrowser.js +msgid "Jobs" +msgstr "" + +#: osparc/navigation/UserMenu.js +msgid "My Account" +msgstr "" + +#: osparc/desktop/credits/BillingCenter.js +msgid "Credit Accounts" +msgstr "" + +#: osparc/desktop/credits/BillingCenter.js +msgid "Payment Methods" +msgstr "" + +#: osparc/desktop/credits/BillingCenter.js +msgid "Payments" +msgstr "" + +#: osparc/desktop/account/MyAccount.js +msgid "Usage" +msgstr "" + +#: osparc/desktop/credits/BillingCenter.js +msgid "Purchases" +msgstr "" + +#: osparc/desktop/credits/BillingCenter.js +msgid "Checkouts" +msgstr "" + +#: osparc/desktop/account/ProfilePage.js +msgid "username" +msgstr "" + +#: osparc/desktop/account/ProfilePage.js +msgid "Expiration date:" +msgstr "" + +#: osparc/desktop/account/ProfilePage.js +msgid "Please contact us via email:
" +msgstr "" + +#: osparc/desktop/account/ProfilePage.js +msgid "Avoid dots or numbers in text" +msgstr "" + +#: osparc/desktop/account/ProfilePage.js +msgid "Update Profile" +msgstr "" + +#: osparc/desktop/account/ProfilePage.js +msgid "Profile updated" +msgstr "" + +#: osparc/desktop/account/ProfilePage.js +msgid "Unsuccessful profile update" +msgstr "" + +#: osparc/desktop/account/ProfilePage.js +msgid "Privacy" +msgstr "" + +#: osparc/desktop/account/ProfilePage.js +msgid "For Privacy reasons, you might want to hide some personal data." +msgstr "" + +#: osparc/desktop/account/ProfilePage.js +msgid "Update Privacy" +msgstr "" + +#: osparc/desktop/account/ProfilePage.js +msgid "Name is required" +msgstr "" + +#: osparc/desktop/account/ProfilePage.js +msgid "Set the Name first" +msgstr "" + +#: osparc/desktop/account/ProfilePage.js +msgid "Privacy updated" +msgstr "" + +#: osparc/desktop/account/ProfilePage.js +msgid "Unsuccessful privacy update" +msgstr "" + +#: osparc/desktop/account/ProfilePage.js +msgid "If all searchable fields are hidden, you will not be discoverable." +msgstr "" + +#: osparc/desktop/account/ProfilePage.js +msgid "Two-Factor Authentication" +msgstr "" + +#: osparc/desktop/account/ProfilePage.js +msgid "Set your preferred method to use for two-factor authentication when signing in:" +msgstr "" + +#: osparc/desktop/account/ProfilePage.js +msgid "You are about to disable the 2FA" +msgstr "" + +#: osparc/desktop/account/ProfilePage.js +msgid " The Two-Factor Authentication is one more measure to prevent hackers from accessing your account with an additional layer of security. When you sign in, 2FA helps make sure that your resources and personal information stays private, safe and secure. " +msgstr "" + +#: osparc/desktop/account/ProfilePage.js +msgid "Yes, disable" +msgstr "" + +#: osparc/desktop/account/ProfilePage.js +msgid "2FA Method" +msgstr "" + +#: osparc/desktop/account/ProfilePage.js +msgid "Password" +msgstr "" + +#: osparc/desktop/account/ProfilePage.js +msgid "Your current password" +msgstr "" + +#: osparc/desktop/account/ProfilePage.js +msgid "Unsuccessful password reset" +msgstr "" + +#: osparc/desktop/account/ProfilePage.js +msgid "Danger Zone" +msgstr "" + +#: osparc/desktop/account/DeleteAccount.js +msgid "Delete Account" +msgstr "" + +#: osparc/store/Services.js +msgid "Unable to fetch Services" +msgstr "" + +#: osparc/store/Services.js +msgid "Some services are inaccessible:
" +msgstr "" + +#: osparc/dashboard/StudyBrowser.js +msgid "Successfully deleted" +msgstr "" + +#: osparc/dashboard/StudyBrowser.js +msgid "Successfully restored" +msgstr "" + +#: osparc/dashboard/StudyBrowser.js +msgid "The permissions will be taken from the new workspace." +msgstr "" + +#: osparc/dashboard/MoveResourceTo.js +msgid "Move" +msgstr "" + +#: osparc/dashboard/FolderButtonItem.js +msgid "Move to..." +msgstr "" + +#: osparc/dashboard/StudyBrowser.js +msgid "Are you sure you want to delete the Folder and all its content?" +msgstr "" + +#: osparc/dashboard/WorkspaceButtonItem.js +msgid "It will be permanently deleted after " +msgstr "" + +#: osparc/editor/AnnotationEditor.js +msgid "Delete" +msgstr "" + +#: osparc/dashboard/StudyBrowser.js +msgid "Start with an empty study" +msgstr "" + +#: osparc/dashboard/StudyBrowser.js +msgid "New Plan" +msgstr "" + +#: osparc/dashboard/FolderButtonItem.js +msgid "Delete permanently" +msgstr "" + +#: osparc/vipMarket/LicensedItemDetails.js +msgid "Import" +msgstr "" + +#: osparc/dashboard/StudyBrowser.js +msgid "Import Study" +msgstr "" + +#: osparc/dashboard/StudyBrowser.js +msgid "Move to" +msgstr "" + +#: osparc/dashboard/StudyBrowser.js +msgid "Cancel Selection" +msgstr "" + +#: osparc/dashboard/StudyBrowser.js +msgid "Select " +msgstr "" + +#: osparc/dashboard/FolderButtonItem.js +msgid "Open location" +msgstr "" + +#: osparc/dashboard/FolderButtonItem.js +msgid "Rename..." +msgstr "" + +#: osparc/dashboard/StudyBrowser.js +msgid "Thumbnail..." +msgstr "" + +#: osparc/info/MergedLarge.js +msgid "Edit Thumbnail" +msgstr "" + +#: osparc/study/StudyOptions.js +msgid "Something went wrong while renaming" +msgstr "" + +#: osparc/dashboard/StudyBrowser.js +msgid "Something went wrong while updating the thumbnail" +msgstr "" + +#: osparc/dashboard/StudyBrowser.js +msgid " files..." +msgstr "" + +#: osparc/dashboard/StudyBrowser.js +msgid "Billing Settings..." +msgstr "" + +#: osparc/dashboard/StudyBrowser.js +msgid "Duplicate" +msgstr "" + +#: osparc/dashboard/StudyBrowser.js +msgid "Something went wrong while converting to pipeline" +msgstr "" + +#: osparc/dashboard/StudyBrowser.js +msgid "Export cMIS" +msgstr "" + +#: osparc/dashboard/StudyBrowser.js +msgid "Duplicate process started and added to the background tasks" +msgstr "" + +#: osparc/dashboard/StudyBrowser.js +msgid "Something went wrong while duplicating" +msgstr "" + +#: osparc/dashboard/StudyBrowser.js +msgid "Preparing files" +msgstr "" + +#: osparc/dashboard/StudyBrowser.js +msgid "Exporting process started and added to the background tasks" +msgstr "" + +#: osparc/dashboard/StudyBrowser.js +msgid "Download started" +msgstr "" + +#: osparc/dashboard/StudyBrowser.js +msgid "Something went wrong while exporting the study" +msgstr "" + +#: osparc/dashboard/StudyBrowser.js +msgid "Uploading file" +msgstr "" + +#: osparc/dashboard/StudyBrowser.js +msgid "Importing process started and added to the background tasks" +msgstr "" + +#: osparc/dashboard/StudyBrowser.js +msgid "Importing Study..." +msgstr "" + +#: osparc/dashboard/StudyBrowser.js +msgid "Processing study" +msgstr "" + +#: osparc/dashboard/StudyBrowser.js +msgid "Something went wrong while fetching the study" +msgstr "" + +#: osparc/dashboard/StudyBrowser.js +msgid "Something went wrong while importing the study" +msgstr "" + +#: osparc/dashboard/FolderButtonItem.js +msgid "Are you sure you want to delete" +msgstr "" + +#: osparc/dashboard/StudyBrowser.js +msgid "will be removed from your list. 
Collaborators will still have access." +msgstr "" + +#: osparc/form/json/JsonSchemaFormHeader.js +msgid "Remove" +msgstr "" + +#: osparc/dashboard/StudyBrowser.js +msgid "Duplicating " +msgstr "" + +#: osparc/dashboard/StudyBrowser.js +msgid "Duplication cancelled" +msgstr "" + +#: osparc/metadata/ServicesInStudyUpdate.js +msgid "Update all" +msgstr "" + +#: osparc/dashboard/TemplateBrowser.js +msgid "Are you sure you want to update all " +msgstr "" + +#: osparc/node/LifeCycleView.js +msgid "Update" +msgstr "" + +#: osparc/desktop/organizations/OrganizationsList.js +msgid "Are you sure you want to delete " +msgstr "" + +#: osparc/dashboard/TemplateBrowser.js +msgid "Study to Template cancelled" +msgstr "" + +#: osparc/dashboard/TemplateBrowser.js +msgid "Something went wrong while publishing the study
" +msgstr "" + +#: osparc/dashboard/TemplateBrowser.js +msgid "Publishing " +msgstr "" + +#: osparc/dashboard/ServiceBrowser.js +msgid "It seems you don't have access to this product." +msgstr "" + +#: osparc/dashboard/ServiceBrowser.js +msgid "Please contact us:" +msgstr "" + +#: osparc/dashboard/ServiceBrowser.js +msgid "Test with data" +msgstr "" + +#: osparc/dashboard/ServiceBrowser.js +msgid "Submit new service" +msgstr "" + +#: osparc/dashboard/ServiceBrowser.js +msgid "Submit a new service" +msgstr "" + +#: osparc/desktop/WorkbenchView.js +msgid "New Node" +msgstr "" + +#: osparc/desktop/WorkbenchView.js +msgid "Nodes" +msgstr "" + +#: osparc/desktop/WorkbenchView.js +msgid "Storage" +msgstr "" + +#: osparc/desktop/WorkbenchView.js +msgid "Study options" +msgstr "" + +#: osparc/desktop/WorkbenchView.js +msgid "Service options" +msgstr "" + +#: osparc/desktop/WorkbenchView.js +msgid "Workbench" +msgstr "" + +#: osparc/node/slideshow/NodeView.js +msgid "Logger" +msgstr "" + +#: osparc/desktop/SlideshowToolbar.js +msgid "App Mode" +msgstr "" + +#: osparc/desktop/WorkbenchView.js +msgid "Start App Mode" +msgstr "" + +#: osparc/pricing/PlanListItem.js +msgid "Edit" +msgstr "" + +#: osparc/admin/Announcements.js +msgid "Start" +msgstr "" + +#: osparc/desktop/WorkbenchView.js +msgid "Annotations" +msgstr "" + +#: osparc/editor/AnnotationNoteCreator.js +msgid "Note" +msgstr "" + +#: osparc/desktop/WorkbenchView.js +msgid "Rectangle" +msgstr "" + +#: osparc/desktop/WorkbenchView.js +msgid "Text" +msgstr "" + +#: osparc/desktop/WorkbenchView.js +msgid "Show" +msgstr "" + +#: osparc/pricing/UnitEditor.js +msgid "Create" +msgstr "" + +#: osparc/node/slideshow/BaseNodeView.js +msgid "Inputs" +msgstr "" + +#: osparc/desktop/WorkbenchView.js +msgid "Service data" +msgstr "" + +#: osparc/node/slideshow/BaseNodeView.js +msgid "Outputs" +msgstr "" + +#: osparc/desktop/WorkbenchView.js +msgid "Options" +msgstr "" + +#: osparc/desktop/WorkbenchView.js +msgid "Are you sure you want to delete the selected node?" +msgstr "" + +#: osparc/desktop/WorkbenchView.js +msgid "Delete Node" +msgstr "" + +#: osparc/desktop/WorkbenchView.js +msgid "Are you sure you want to delete the selected " +msgstr "" + +#: osparc/desktop/WorkbenchView.js +msgid "Delete Nodes" +msgstr "" + +#: osparc/node/slideshow/BaseNodeView.js +msgid "Settings" +msgstr "" + +#: osparc/desktop/SlideshowView.js +msgid "Are you sure you want to delete node?" 
+msgstr "" + +#: osparc/widget/logger/LoggerView.js +msgid "Show logs only from current node" +msgstr "" + +#: osparc/filter/TextFilter.js +msgid "Filter" +msgstr "" + +#: osparc/widget/logger/LoggerView.js +msgid "Min log-level" +msgstr "" + +#: osparc/widget/logger/LoggerView.js +msgid "Toggle auto-scroll" +msgstr "" + +#: osparc/widget/logger/LoggerView.js +msgid "Copy logs to clipboard" +msgstr "" + +#: osparc/widget/logger/LoggerView.js +msgid "Copy Selected log to clipboard" +msgstr "" + +#: osparc/widget/logger/LoggerView.js +msgid "Download logs" +msgstr "" + +#: osparc/form/renderer/PropForm.js +msgid "Unlink" +msgstr "" + +#: osparc/navigation/PrevNextButtons.js +msgid "Select File" +msgstr "" + +#: osparc/form/renderer/PropForm.js +msgid "Set new parameter" +msgstr "" + +#: osparc/form/renderer/PropForm.js +msgid "Set existing parameter" +msgstr "" + +#: osparc/form/renderer/PropForm.js +msgid "Required Input" +msgstr "" + +#: osparc/form/renderer/PropForm.js +msgid "Input" +msgstr "" + +#: osparc/widget/NodesSlidesTree.js +msgid "Use the eye icons to display/hide nodes in the App Mode." +msgstr "" + +#: osparc/widget/NodesSlidesTree.js +msgid "Use the up and down arrows to sort them." +msgstr "" + +#: osparc/widget/NodesSlidesTree.js +msgid "You can also display nodes by clicking on them on the Workbench or Nodes list." +msgstr "" + +#: osparc/widget/NodesSlidesTree.js +msgid "Disable" +msgstr "" + +#: osparc/pricing/UnitEditor.js +msgid "Save" +msgstr "" + +#: osparc/snapshots/EditSnapshotView.js +msgid "OK" +msgstr "" + +#: osparc/snapshots/IterationsView.js +msgid "Edit Tag" +msgstr "" + +#: osparc/snapshots/SnapshotsView.js +msgid "Edit Snapshot" +msgstr "" + +#: osparc/snapshots/IterationsView.js +msgid "iterations" +msgstr "" + +#: osparc/snapshots/IterationsView.js +msgid "Edit Iteration" +msgstr "" + +#: osparc/desktop/StudyEditorIdlingTracker.js +msgid "Are you still there?" +msgstr "" + +#: osparc/data/model/Workbench.js +msgid "Nodes can't be added while the pipeline is running" +msgstr "" + +#: osparc/data/model/Workbench.js +msgid "Nodes can't be deleted while the pipeline is running" +msgstr "" + +#: osparc/data/model/Workbench.js +msgid "You are not allowed to add nodes" +msgstr "" + +#: osparc/data/model/IframeHandler.js +msgid " is retired" +msgstr "" + +#: osparc/data/model/Workbench.js +msgid "Error creating " +msgstr "" + +#: osparc/data/model/Workbench.js +msgid "File couldn't be assigned" +msgstr "" + +#: osparc/data/model/Workbench.js +msgid "Parameter couldn't be assigned" +msgstr "" + +#: osparc/data/model/Workbench.js +msgid "Probe couldn't be assigned" +msgstr "" + +#: osparc/data/model/Node.js +msgid " ports auto connected" +msgstr "" + +#: osparc/data/model/Node.js +msgid "Do you really want Stop and Save the current state?" +msgstr "" + +#: osparc/desktop/StartStopButtons.js +msgid "Stop" +msgstr "" + +#: osparc/data/model/NodeProgressSequence.js +msgid "Waiting ..." +msgstr "" + +#: osparc/widget/ProgressSequence.js +msgid "CREATING ..." 
+msgstr "" + +#: osparc/share/CollaboratorsService.js +msgid "Service successfully shared" +msgstr "" + +#: osparc/share/CollaboratorsService.js +msgid "Something went wrong while sharing the service" +msgstr "" + +#: osparc/share/CollaboratorsService.js +msgid "Something went wrong while removing member" +msgstr "" + +#: osparc/share/CollaboratorsService.js +msgid "Operation not available" +msgstr "" + +#: osparc/file/FilePicker.js +msgid "Cancel upload" +msgstr "" + +#: osparc/file/FileLabelWithActions.js +msgid "Download" +msgstr "" + +#: osparc/node/UpdateResourceLimitsView.js +msgid "Reset" +msgstr "" + +#: osparc/file/FileDrop.js +msgid "Provide Link" +msgstr "" + +#: osparc/file/FilePicker.js +msgid "In order to Select a File you have three options:" +msgstr "" + +#: osparc/file/FilePicker.js +msgid "Drop file here" +msgstr "" + +#: osparc/file/FilePicker.js +msgid "Select New File" +msgstr "" + +#: osparc/file/FilePicker.js +msgid "Select Download Link" +msgstr "" + +#: osparc/file/FilePicker.js +msgid "Select File from other " +msgstr "" + +#: osparc/file/FileDownloadLink.js +msgid "Select" +msgstr "" + +#: osparc/task/TaskUI.js +msgid "Are you sure you want to cancel the task?" +msgstr "" + +#: osparc/task/TaskUI.js +msgid "Cancel Task" +msgstr "" + +#: osparc/task/TaskUI.js +msgid "Ignore" +msgstr "" + +#: osparc/dashboard/ResourceUpgradeHelper.js +msgid "Outdated services" +msgstr "" + +#: osparc/info/ServiceLarge.js +msgid "Copy Raw metadata" +msgstr "" + +#: osparc/info/ServiceLarge.js +msgid "Copy Service Id" +msgstr "" + +#: osparc/info/ServiceLarge.js +msgid "SERVICE ID" +msgstr "" + +#: osparc/info/ServiceLarge.js +msgid "KEY" +msgstr "" + +#: osparc/info/ServiceLarge.js +msgid "INTEGRATION VERSION" +msgstr "" + +#: osparc/info/ServiceLarge.js +msgid "VERSION" +msgstr "" + +#: osparc/info/ServiceLarge.js +msgid "RELEASE DATE" +msgstr "" + +#: osparc/info/ServiceLarge.js +msgid "CONTACT" +msgstr "" + +#: osparc/info/ServiceLarge.js +msgid "AUTHORS" +msgstr "" + +#: osparc/info/StudyMedium.js +msgid "ACCESS RIGHTS" +msgstr "" + +#: osparc/info/ServiceLarge.js +msgid "CLASSIFIERS" +msgstr "" + +#: osparc/info/StudyMedium.js +msgid "QUALITY" +msgstr "" + +#: osparc/info/ServiceLarge.js +msgid "Show Description only" +msgstr "" + +#: osparc/info/ServiceLarge.js +msgid "From all the metadata shown in this view,\\\\\\\\nonly the Description will be shown to Users." +msgstr "" + +#: osparc/info/ServiceLarge.js +msgid "Edit Icon" +msgstr "" + +#: osparc/info/ServiceLarge.js +msgid "Edit Name" +msgstr "" + +#: osparc/info/ServiceLarge.js +msgid "Edit Version Display" +msgstr "" + +#: osparc/info/MergedLarge.js +msgid "Edit Description" +msgstr "" + +#: osparc/info/MergedLarge.js +msgid "An issue occurred while updating the information." 
+msgstr "" + +#: osparc/info/StudyLarge.js +msgid "Title:" +msgstr "" + +#: osparc/info/StudyLarge.js +msgid "Thumbnail:" +msgstr "" + +#: osparc/info/StudyLarge.js +msgid "Description:" +msgstr "" + +#: osparc/info/StudyLarge.js +msgid "Author:" +msgstr "" + +#: osparc/info/StudyLarge.js +msgid "Access:" +msgstr "" + +#: osparc/info/StudyLarge.js +msgid "Created:" +msgstr "" + +#: osparc/info/StudyLarge.js +msgid "Modified:" +msgstr "" + +#: osparc/info/StudyLarge.js +msgid "Tags:" +msgstr "" + +#: osparc/info/StudyLarge.js +msgid "Quality:" +msgstr "" + +#: osparc/info/StudyLarge.js +msgid "Classifiers:" +msgstr "" + +#: osparc/info/StudyLarge.js +msgid "Location:" +msgstr "" + +#: osparc/info/MergedLarge.js +msgid "Edit Title" +msgstr "" + +#: osparc/study/StudyOptions.js +msgid "Credit Account" +msgstr "" + +#: osparc/desktop/wallets/WalletListItem.js +msgid "Buy Credits" +msgstr "" + +#: osparc/study/BillingSettings.js +msgid "Transfer from this Credit Account" +msgstr "" + +#: osparc/study/BillingSettings.js +msgid "Last charge:" +msgstr "" + +#: osparc/study/BillingSettings.js +msgid "credits" +msgstr "" + +#: osparc/study/BillingSettings.js +msgid "You don't have access to the last used Credit Account" +msgstr "" + +#: osparc/study/BillingSettings.js +msgid "Top up the Credit Account:
Purchase additional credits to restore a positive balance." +msgstr "" + +#: osparc/study/BillingSettings.js +msgid "Transfer credits from another Account:
Use this Credit Account to cover the negative balance." +msgstr "" + +#: osparc/study/BillingSettings.js +msgid "A credits transfer will be initiated to cover the negative balance:" +msgstr "" + +#: osparc/study/BillingSettings.js +msgid "Credits to transfer: " +msgstr "" + +#: osparc/study/BillingSettings.js +msgid "From: " +msgstr "" + +#: osparc/study/BillingSettings.js +msgid "To: " +msgstr "" + +#: osparc/study/BillingSettings.js +msgid "Transfer" +msgstr "" + +#: osparc/study/BillingSettings.js +msgid "Credit Account saved" +msgstr "" + +#: osparc/service/PricingUnitsList.js +msgid "No Tiers found" +msgstr "" + +#: osparc/info/CommentsList.js +msgid "0 Comments" +msgstr "" + +#: osparc/info/CommentsList.js +msgid "Load more comments..." +msgstr "" + +#: osparc/info/CommentsList.js +msgid "1 Comment" +msgstr "" + +#: osparc/info/CommentsList.js +msgid " Comments" +msgstr "" + +#: osparc/info/CommentAdd.js +msgid "Add comment" +msgstr "" + +#: osparc/editor/AnnotationNoteCreator.js +msgid "Add" +msgstr "" + +#: osparc/metadata/ClassifiersEditor.js +msgid "RRID:" +msgstr "" + +#: osparc/metadata/ClassifiersEditor.js +msgid "Add Classifier" +msgstr "" + +#: osparc/metadata/ClassifiersEditor.js +msgid "RRID classifier successfully added" +msgstr "" + +#: osparc/metadata/ClassifiersEditor.js +msgid "Classifiers successfully edited" +msgstr "" + +#: osparc/metadata/ClassifiersEditor.js +msgid "Something went wrong while editing classifiers" +msgstr "" + +#: osparc/metadata/ClassifiersViewer.js +msgid "No Classifiers assigned" +msgstr "" + +#: osparc/metadata/QualityEditor.js +msgid "Quality Assessment data not found" +msgstr "" + +#: osparc/metadata/QualityEditor.js +msgid "There was an issue validating the metadata." +msgstr "" + +#: osparc/desktop/credits/AutoRecharge.js +msgid "Enabled" +msgstr "" + +#: osparc/metadata/QualityEditor.js +msgid "Rules" +msgstr "" + +#: osparc/metadata/QualityEditor.js +msgid "Conformance Level" +msgstr "" + +#: osparc/metadata/QualityEditor.js +msgid "Target" +msgstr "" + +#: osparc/metadata/QualityEditor.js +msgid "Conformance Level Target" +msgstr "" + +#: osparc/metadata/QualityEditor.js +msgid "References" +msgstr "" + +#: osparc/metadata/QualityEditor.js +msgid "TSR SCORE" +msgstr "" + +#: osparc/metadata/QualityEditor.js +msgid "Not Applicable" +msgstr "" + +#: osparc/metadata/QualityEditor.js +msgid "Edit References" +msgstr "" + +#: osparc/metadata/QualityEditor.js +msgid "There was an issue while updating the Quality Assessment." +msgstr "" + +#: osparc/form/tag/TagManager.js +msgid "Apply Tags" +msgstr "" + +#: osparc/desktop/preferences/pages/TagsPage.js +msgid "New Tag" +msgstr "" + +#: osparc/metadata/ServicesInStudyUpdate.js +msgid "This service is deprecated. Please update." +msgstr "" + +#: osparc/metadata/ServicesInStudyUpdate.js +msgid "This service has been retired. Please update." +msgstr "" + +#: osparc/metadata/ServicesInStudyUpdate.js +msgid "Some services are inaccessible. Please contact the service owner:" +msgstr "" + +#: osparc/metadata/ServicesInStudyUpdate.js +msgid "Services marked in red are retired and can no longer be used." 
+msgstr "" + +#: osparc/metadata/ServicesInStudyUpdate.js +msgid "If the Update button is disabled, they might require manual intervention to be updated:" +msgstr "" + +#: osparc/metadata/ServicesInStudyUpdate.js +msgid "Open the study" +msgstr "" + +#: osparc/metadata/ServicesInStudyUpdate.js +msgid "Click on the retired service, download the data" +msgstr "" + +#: osparc/metadata/ServicesInStudyUpdate.js +msgid "Upload the data to a newer version" +msgstr "" + +#: osparc/metadata/ServicesInStudyUpdate.js +msgid "Services marked in yellow are deprecated, they will be retired soon." +msgstr "" + +#: osparc/metadata/ServicesInStudyUpdate.js +msgid "They can be updated by pressing the Update button." +msgstr "" + +#: osparc/metadata/ServicesInStudyUpdate.js +msgid "All services are up to date to their latest compatible version." +msgstr "" + +#: osparc/metadata/ServicesInStudyUpdate.js +msgid "Click Update to upgrade services to the latest compatible version." +msgstr "" + +#: osparc/metadata/ServicesInStudyUpdate.js +msgid "Some services are not up to date." +msgstr "" + +#: osparc/metadata/ServicesInStudyUpdate.js +msgid "Current" +msgstr "" + +#: osparc/metadata/ServicesInStudyUpdate.js +msgid "Compatible" +msgstr "" + +#: osparc/metadata/ServicesInStudyUpdate.js +msgid "Latest compatible version" +msgstr "" + +#: osparc/metadata/ServicesInStudyUpdate.js +msgid "Unknown" +msgstr "" + +#: osparc/metadata/ServicesInStudyUpdate.js +msgid "Up-to-date" +msgstr "" + +#: osparc/metadata/ServicesInStudyBootOpts.js +msgid "Here you can select in which mode the services will be started:" +msgstr "" + +#: osparc/metadata/ServicesInStudyBootOpts.js +msgid "Boot Mode" +msgstr "" + +#: osparc/metadata/ServicesInStudyBootOpts.js +msgid "Select boot type" +msgstr "" + +#: osparc/metadata/ServicesInStudy.js +msgid "Could not retrieve some service information" +msgstr "" + +#: osparc/info/StudyUtils.js +msgid "me" +msgstr "" + +#: osparc/info/ServiceUtils.js +msgid "Ten Simple Rules score" +msgstr "" + +#: osparc/info/StudyUtils.js +msgid "Autostart services" +msgstr "" + +#: osparc/info/StudyUtils.js +msgid "Disabling this will help opening and closing studies/projects faster" +msgstr "" + +#: osparc/info/StudyUtils.js +msgid "Add tags" +msgstr "" + +#: osparc/info/StudyUtils.js +msgid "Meta details" +msgstr "" + +#: osparc/info/StudyUtils.js +msgid "Share with Editors and Organizations" +msgstr "" + +#: osparc/info/ServiceUtils.js +msgid "Quality Assessment" +msgstr "" + +#: osparc/study/SaveAsTemplate.js +msgid "Publish with data" +msgstr "" + +#: osparc/study/SaveAsTemplate.js +msgid "Publish" +msgstr "" + +#: osparc/study/StudyOptions.js +msgid " Options" +msgstr "" + +#: osparc/study/StudyOptions.js +msgid "An issue occurred while selecting Credit Account" +msgstr "" + +#: osparc/admin/Announcements.js +msgid "Title" +msgstr "" + +#: osparc/study/StudyOptions.js +msgid "Advanced options" +msgstr "" + +#: osparc/service/ServiceListItem.js +msgid "Number of times you instantiated it" +msgstr "" + +#: osparc/dashboard/GridButtonItem.js +msgid "Viewer only" +msgstr "" + +#: osparc/dashboard/FolderButtonItem.js +msgid "Last modified" +msgstr "" + +#: osparc/dashboard/FolderButtonItem.js +msgid "Deleted" +msgstr "" + +#: osparc/dashboard/ListButtonItem.js +msgid "Click to filter by this Tag" +msgstr "" + +#: osparc/dashboard/ListButtonItem.js +msgid "More..." 
+msgstr "" + +#: osparc/dashboard/SearchBarFilter.js +msgid "All " +msgstr "" + +#: osparc/dashboard/SearchBarFilter.js +msgid "My " +msgstr "" + +#: osparc/dashboard/SearchBarFilter.js +msgid "Shared with Me" +msgstr "" + +#: osparc/dashboard/SearchBarFilter.js +msgid "Shared with Everyone" +msgstr "" + +#: osparc/dashboard/SearchBarFilter.js +msgid "search" +msgstr "" + +#: osparc/dashboard/SearchBarFilter.js +msgid "Service Type" +msgstr "" + +#: osparc/dashboard/StudyBrowserHeader.js +msgid "Recently Deleted" +msgstr "" + +#: osparc/pricing/PlanDetails.js +msgid "Services" +msgstr "" + +#: osparc/dashboard/ResourceFilter.js +msgid "Shared" +msgstr "" + +#: osparc/dashboard/ResourceFilter.js +msgid "All Tags..." +msgstr "" + +#: osparc/dashboard/ResourceFilter.js +msgid "Less Tags..." +msgstr "" + +#: osparc/dashboard/ResourceFilter.js +msgid "Edit Tags..." +msgstr "" + +#: osparc/study/NodePricingUnits.js +msgid "Cannot change Tier" +msgstr "" + +#: osparc/file/TreeFolderView.js +msgid "Calculating Size" +msgstr "" + +#: osparc/file/TreeFolderView.js +msgid "Total size: " +msgstr "" + +#: osparc/product/quickStart/s4l/Welcome.js +msgid "Welcome to Sim4Life" +msgstr "" + +#: osparc/product/quickStart/s4lacad/Welcome.js +msgid "Experience Most Advanced Simulations – All In The Cloud" +msgstr "" + +#: osparc/product/quickStart/s4llite/Welcome.js +msgid "Welcome onboard " +msgstr "" + +#: osparc/product/quickStart/s4lacad/Welcome.js +msgid " Sim4Life is a revolutionary simulation platform, combining computable human phantoms with the most powerful physics solvers and the most advanced tissue models, for directly analyzing biological real-world phenomena and complex technical devices in a validated biological and anatomical environment.

In order to facilitate the introduction to the platform, we have some Guided Tours that can be found under the User Menu.

For more specific technical information, please refer to the Manuals on the Navigation Bar. " +msgstr "" + +#: osparc/product/quickStart/s4lacad/Welcome.js +msgid "Welcome to Sim4Life Science" +msgstr "" + +#: osparc/tours/Step.js +msgid "Skip" +msgstr "" + +#: osparc/navigation/PrevNextButtons.js +msgid "Next" +msgstr "" + +#: osparc/tours/Step.js +msgid "To Tours" +msgstr "" + +#: osparc/tours/Step.js +msgid "Step: " +msgstr "" + +#: osparc/admin/Announcements.js +msgid "End" +msgstr "" + +#: osparc/desktop/credits/BuyCreditsForm.js +msgid "A one-off, non-recurring payment." +msgstr "" + +#: osparc/desktop/credits/AutoRecharge.js +msgid "Pay with" +msgstr "" + +#: osparc/desktop/credits/BuyCreditsForm.js +msgid "Enter card details in the next step..." +msgstr "" + +#: osparc/share/NewCollaboratorsManager.js +msgid "Share with" +msgstr "" + +#: osparc/share/NewCollaboratorsManager.js +msgid "Select users or organizations from the list below." +msgstr "" + +#: osparc/share/NewCollaboratorsManager.js +msgid "Select users from the list below." +msgstr "" + +#: osparc/share/NewCollaboratorsManager.js +msgid "
Search them if they aren't listed." +msgstr "" + +#: osparc/po/Users.js +msgid "Search" +msgstr "" + +#: osparc/navigation/UserMenu.js +msgid "Organizations" +msgstr "" + +#: osparc/desktop/organizations/OrganizationsWindow.js +msgid "Organization details" +msgstr "" + +#: osparc/share/ShareePermissions.js +msgid "The following users/groups will not be able to open the shared study, because they don't have access to some services. Please contact the service owner(s) to give permission." +msgstr "" + +#: osparc/utils/Validators.js +msgid "Color must be in hexadecimal form" +msgstr "" + +#: osparc/utils/Validators.js +msgid "Format is invalid" +msgstr "" + +#: osparc/widget/IntlTelInput.js +msgid "Invalid number" +msgstr "" + +#: osparc/widget/IntlTelInput.js +msgid "Invalid country code" +msgstr "" + +#: osparc/widget/IntlTelInput.js +msgid "Number too short" +msgstr "" + +#: osparc/widget/IntlTelInput.js +msgid "Number too long" +msgstr "" + +#: osparc/product/AboutProduct.js +msgid "About " +msgstr "" + +#: osparc/About.js +msgid " is an online-accessible, cloud-based, and collaborative computational modeling platform that was developed under the Common Fund’s Stimulating Peripheral Activity to Relieve Conditions (SPARC) program to ensure sustainable, reproducible, and FAIR (findable, accessible, interoperable, reusable) computational modeling in the field of bioelectronic medicine – from neural interfaces to peripheral nerve recruitment and the resulting effects on organ function.

For more information about SPARC and the services offered, visit the " +msgstr "" + +#: osparc/About.js +msgid "The platform is built upon a number of open-source resources - we can't do it all alone! Some of the technologies that we leverage include:" +msgstr "" + +#: osparc/About.js +msgid "Front-end" +msgstr "" + +#: osparc/About.js +msgid "Back-end" +msgstr "" + +#: osparc/info/MergedLarge.js +msgid "Raw metadata" +msgstr "" + +#: osparc/info/MergedLarge.js +msgid "Author" +msgstr "" + +#: osparc/info/MergedLarge.js +msgid "Creation Date" +msgstr "" + +#: osparc/info/MergedLarge.js +msgid "Last Modified" +msgstr "" + +#: osparc/info/MergedLarge.js +msgid "Access Rights" +msgstr "" + +#: osparc/info/MergedLarge.js +msgid "Service ID" +msgstr "" + +#: osparc/info/MergedLarge.js +msgid "Service Key" +msgstr "" + +#: osparc/info/MergedLarge.js +msgid "Service Integration Version" +msgstr "" + +#: osparc/info/MergedLarge.js +msgid "Service Version" +msgstr "" + +#: osparc/desktop/account/MyAccount.js +msgid "Profile" +msgstr "" + +#: osparc/desktop/account/MyAccount.js +msgid "Confirmations" +msgstr "" + +#: osparc/desktop/account/MyAccount.js +msgid "API Keys/Tokens" +msgstr "" + +#: osparc/desktop/account/MyAccount.js +msgid "Create/Edit Tags" +msgstr "" + +#: osparc/admin/AdminCenterWindow.js +msgid "Admin Center" +msgstr "" + +#: osparc/po/POCenterWindow.js +msgid "PO Center" +msgstr "" + +#: osparc/tester/TesterCenterWindow.js +msgid "Tester Center" +msgstr "" + +#: osparc/desktop/credits/BillingCenterWindow.js +msgid "Billing Center" +msgstr "" + +#: osparc/vipMarket/MarketWindow.js +msgid "The Shop" +msgstr "" + +#: osparc/navigation/UserMenu.js +msgid "About oSPARC" +msgstr "" + +#: osparc/product/AboutProduct.js +msgid "About Product" +msgstr "" + +#: osparc/navigation/UserMenu.js +msgid "License" +msgstr "" + +#: osparc/desktop/credits/CreditsIndicator.js +msgid " credits" +msgstr "" + +#: osparc/desktop/paymentMethods/PaymentMethods.js +msgid "Credit cards used for payments in your personal Credit Account" +msgstr "" + +#: osparc/desktop/credits/AutoRecharge.js +msgid "Add Payment Method" +msgstr "" + +#: osparc/desktop/paymentMethods/PaymentMethods.js +msgid "Fetching Payment Methods" +msgstr "" + +#: osparc/desktop/paymentMethods/PaymentMethods.js +msgid "The window was closed. Try again and follow the instructions inside the opened window." +msgstr "" + +#: osparc/desktop/paymentMethods/PaymentMethods.js +msgid "No Payment Methods found" +msgstr "" + +#: osparc/desktop/paymentMethods/PaymentMethods.js +msgid "Could not retrieve your saved payment methods. Please try again later." +msgstr "" + +#: osparc/desktop/credits/Transactions.js +msgid "Top-ups and refunds in US Dollars associated with your personal account show up here."
+msgstr "" + +#: osparc/desktop/account/DeleteAccount.js +msgid "Your email" +msgstr "" + +#: osparc/desktop/account/DeleteAccount.js +msgid "Your password" +msgstr "" + +#: osparc/workbench/DiskUsageIndicator.js +msgid "Disk usage" +msgstr "" + +#: osparc/workbench/DiskUsageIndicator.js +msgid "Data storage: " +msgstr "" + +#: osparc/workbench/DiskUsageIndicator.js +msgid "I/O storage: " +msgstr "" + +#: osparc/dashboard/WorkspaceButtonNew.js +msgid "New Workspace" +msgstr "" + +#: osparc/dashboard/FolderButtonNew.js +msgid "New folder" +msgstr "" + +#: osparc/dashboard/NewPlusMenu.js +msgid "New Folder" +msgstr "" + +#: osparc/dashboard/MoveResourceTo.js +msgid "Current location:" +msgstr "" + +#: osparc/dashboard/MoveResourceTo.js +msgid "- Workspace: " +msgstr "" + +#: osparc/dashboard/MoveResourceTo.js +msgid "- Folder: " +msgstr "" + +#: osparc/dashboard/StudyBrowserHeader.js +msgid "Delete all" +msgstr "" + +#: osparc/dashboard/StudyBrowserHeader.js +msgid "All items will be permanently deleted" +msgstr "" + +#: osparc/dashboard/StudyBrowserHeader.js +msgid "Shared Workspaces" +msgstr "" + +#: osparc/dashboard/StudyBrowserHeader.js +msgid "Search results" +msgstr "" + +#: osparc/dashboard/WorkspaceButtonItem.js +msgid "Edit..." +msgstr "" + +#: osparc/dashboard/WorkspaceButtonItem.js +msgid "Edit Workspace" +msgstr "" + +#: osparc/dashboard/WorkspaceButtonItem.js +msgid "Share Workspace" +msgstr "" + +#: osparc/vipMarket/SortModelsButtons.js +msgid "Sort" +msgstr "" + +#: osparc/pricing/PlanEditor.js +msgid "Name" +msgstr "" + +#: osparc/dashboard/SortedByMenuButton.js +msgid "Created" +msgstr "" + +#: osparc/dashboard/SortedByMenuButton.js +msgid "Modified" +msgstr "" + +#: osparc/study/Import.js +msgid "Max file size 10GB" +msgstr "" + +#: osparc/workbench/BaseNodeUI.js +msgid "Rename" +msgstr "" + +#: osparc/widget/Renamer.js +msgid "Type text" +msgstr "" + +#: osparc/editor/ThumbnailEditor.js +msgid "Error checking link" +msgstr "" + +#: osparc/editor/ThumbnailEditor.js +msgid "url" +msgstr "" + +#: osparc/editor/ThumbnailEditor.js +msgid "or pick one from the list below:" +msgstr "" + +#: osparc/task/Import.js +msgid "Importing Study" +msgstr "" + +#: osparc/task/Duplicate.js +msgid "Duplicating:" +msgstr "" + +#: osparc/task/ToTemplate.js +msgid "Publishing:" +msgstr "" + +#: osparc/service/SortServicesButtons.js +msgid "Hits" +msgstr "" + +#: osparc/service/SortServicesButtons.js +msgid "Name Asc" +msgstr "" + +#: osparc/service/SortServicesButtons.js +msgid "Name Desc" +msgstr "" + +#: osparc/widget/CollapsibleViewLight.js +msgid "Expand" +msgstr "" + +#: osparc/widget/CollapsibleViewLight.js +msgid "Collapse" +msgstr "" + +#: osparc/info/StudyMedium.js +msgid "Study Information" +msgstr "" + +#: osparc/info/StudyMedium.js +msgid "DESCRIPTION" +msgstr "" + +#: osparc/info/StudyMedium.js +msgid "More Info" +msgstr "" + +#: osparc/info/StudyMedium.js +msgid "AUTHOR" +msgstr "" + +#: osparc/info/StudyMedium.js +msgid "CREATED" +msgstr "" + +#: osparc/info/StudyMedium.js +msgid "MODIFIED" +msgstr "" + +#: osparc/info/StudyMedium.js +msgid "TAGS" +msgstr "" + +#: osparc/widget/NodeOptions.js +msgid "To proceed with the following actions, the service needs to be Stopped." 
+msgstr "" + +#: osparc/desktop/SlideshowToolbar.js +msgid "Save App Mode" +msgstr "" + +#: osparc/desktop/SlideshowToolbar.js +msgid "Stop App Mode" +msgstr "" + +#: osparc/navigation/PrevNextButtons.js +msgid "Previous" +msgstr "" + +#: osparc/node/slideshow/BaseNodeView.js +msgid "Service Information" +msgstr "" + +#: osparc/node/slideshow/BaseNodeView.js +msgid "Instructions" +msgstr "" + +#: osparc/node/slideshow/BaseNodeView.js +msgid "Preparing Inputs" +msgstr "" + +#: osparc/workbench/ServiceCatalog.js +msgid "Service catalog" +msgstr "" + +#: osparc/form/renderer/PropFormBase.js +msgid "Required input: without it, the service will not start/run." +msgstr "" + +#: osparc/widget/NodeSlideTreeItem.js +msgid "Edit Instructions" +msgstr "" + +#: osparc/jobs/JobsTable.js +msgid "Progress" +msgstr "" + +#: osparc/workbench/NodeUI.js +msgid "Convert to Iterator" +msgstr "" + +#: osparc/workbench/NodeUI.js +msgid "Convert to Parameter" +msgstr "" + +#: osparc/widget/NodeTreeItem.js +msgid "Remove Marker" +msgstr "" + +#: osparc/widget/NodeTreeItem.js +msgid "Add Marker" +msgstr "" + +#: osparc/data/model/IframeHandler.js +msgid "There was an issue starting" +msgstr "" + +#: osparc/widget/NodeOutputs.js +msgid "Connects a Probe to this output" +msgstr "" + +#: osparc/file/FileDownloadLink.js +msgid "An issue occurred while checking link" +msgstr "" + +#: osparc/file/FileDownloadLink.js +msgid "Type a Link" +msgstr "" + +#: osparc/file/FileDrop.js +msgid "Upload file" +msgstr "" + +#: osparc/file/FileDrop.js +msgid "Drop file from File Explorer" +msgstr "" + +#: osparc/file/FileDrop.js +msgid "Drop file from tree" +msgstr "" + +#: osparc/file/FileDrop.js +msgid "or" +msgstr "" + +#: osparc/file/FileDrop.js +msgid "Only one file can be uploaded at a time." +msgstr "" + +#: osparc/file/FileDrop.js +msgid "Please compress all files into a single zip file." +msgstr "" + +#: osparc/file/FileDrop.js +msgid "Drop me" +msgstr "" + +#: osparc/workbench/WorkbenchUI.js +msgid "Folders are not accepted. Please upload a zip file instead." +msgstr "" + +#: osparc/file/FolderViewer.js +msgid "Select folder" +msgstr "" + +#: osparc/file/FolderViewer.js +msgid "Multiselect" +msgstr "" + +#: osparc/editor/TextEditor.js +msgid "Write" +msgstr "" + +#: osparc/info/ServiceUtils.js +msgid "Execute" +msgstr "" + +#: osparc/info/ServiceUtils.js +msgid "Public" +msgstr "" + +#: osparc/info/ServiceUtils.js +msgid "Limit" +msgstr "" + +#: osparc/info/ServiceUtils.js +msgid "Runtime check:
The service can consume a maximum of 'limit' resources - if it attempts to use more resources than this limit, it will be stopped" +msgstr "" + +#: osparc/info/ServiceUtils.js +msgid "Resources" +msgstr "" + +#: osparc/node/UpdateResourceLimitsView.js +msgid "Resource Limits" +msgstr "" + +#: osparc/info/ServiceUtils.js +msgid "Share with Collaborators and Organizations" +msgstr "" + +#: osparc/editor/MarkdownEditor.js +msgid "Markdown supported" +msgstr "" + +#: osparc/editor/HtmlEditor.js +msgid "Preview" +msgstr "" + +#: osparc/form/tag/TagItem.js +msgid "Share Tag" +msgstr "" + +#: osparc/form/tag/TagItem.js +msgid "Color" +msgstr "" + +#: osparc/metadata/ServicesInStudy.js +msgid "Something went wrong while updating the service" +msgstr "" + +#: osparc/metadata/ServicesInStudy.js +msgid "The Study is empty" +msgstr "" + +#: osparc/metadata/ServicesInStudy.js +msgid "Could not retrieve service information" +msgstr "" + +#: osparc/share/PublishTemplate.js +msgid "Make the " +msgstr "" + +#: osparc/share/PublishTemplate.js +msgid " also accessible to:" +msgstr "" + +#: osparc/share/PublishTemplate.js +msgid "Publish for..." +msgstr "" + +#: osparc/dashboard/GroupedCardContainer.js +msgid "Show less" +msgstr "" + +#: osparc/dashboard/GroupedCardContainer.js +msgid "Show all" +msgstr "" + +#: osparc/dashboard/WorkspaceButtonItem.js +msgid "Are you sure you want to delete the Workspace and all its content?" +msgstr "" + +#: osparc/dashboard/WorkspaceButtonItem.js +msgid "All the content of the workspace will be deleted." +msgstr "" + +#: osparc/dashboard/WorkspaceButtonItem.js +msgid "Delete Workspace" +msgstr "" + +#: osparc/dashboard/FolderButtonItem.js +msgid "Edit Folder" +msgstr "" + +#: osparc/dashboard/FolderButtonItem.js +msgid "Delete Folder" +msgstr "" + +#: osparc/product/quickStart/s4llite/Welcome.js +msgid "Quick Start Guide" +msgstr "" + +#: osparc/product/quickStart/tis/Welcome.js +msgid " This quick tutorial gives a basic overview of how the TI Planning Tool works and how to navigate through the interface.
We will focus on two main aspects, how to:
- Use the platform
- Get started with a New Plan
" +msgstr "" + +#: osparc/product/quickStart/tis/Dashboard.js +msgid " The Dashboard is your private hub which contains all of your Plans as well as Plans that have been shared with you. From the Dashboard you are able to open your Plan or create a New Plan from scratch. " +msgstr "" + +#: osparc/product/quickStart/tis/Dashboard.js +msgid " 1) New Plan: by clicking on this card a new study will be created and open. The planning process will be presented in three successive steps that will be described more in detail in the following steps. " +msgstr "" + +#: osparc/product/quickStart/tis/Dashboard.js +msgid " 2) The other cards are TI Plans that were already created by you or shared with you. You can reopen them to do further analysis or by clicking three dots, on the top right corner, you can share, delete or check the details and metadata. " +msgstr "" + +#: osparc/product/quickStart/tis/ElectrodeSelector.js +msgid "Electrode Selector" +msgstr "" + +#: osparc/product/quickStart/tis/ElectrodeSelector.js +msgid " After pressing New Plan, three panels will be shown. " +msgstr "" + +#: osparc/product/quickStart/tis/ElectrodeSelector.js +msgid " In a first step, the relevant species, stimulation target, electrode shapes, electrode dimensions and potential electrode locations (currently required to narrow down the huge exposure configuration search space) are selected. " +msgstr "" + +#: osparc/product/quickStart/tis/ElectrodeSelector.js +msgid " After finishing the set up, the big button on the top right will turn blue and by clicking on it you will submit the configuration. " +msgstr "" + +#: osparc/product/quickStart/tis/ElectrodeSelector.js +msgid " Now the Arrow that says 'Next' can be pushed and the optimization will immediately start. " +msgstr "" + +#: osparc/product/quickStart/tis/PostPro.js +msgid "Post Processing" +msgstr "" + +#: osparc/product/quickStart/tis/PostPro.js +msgid " Based on extensive sweeping/optimization, a series of highly performing exposure parameters are proposed for the user to interactively explore, using predefined quantification metrics and visualizations. Identified conditions-of-interest can be documented and added to a report. " +msgstr "" + +#: osparc/product/quickStart/tis/PostPro.js +msgid " These metrics are reported in the Post Processing analysis environment for each electrode pair in the combination in a sorted tabular form that can be used to inspect the stimulation performances. By clicking on each pair, slice views of the maximum amplitude modulation (MAP) within the head are produced.
Pressing the `Load` button on the right, the selected configuration will be loaded. " +msgstr "" + +#: osparc/product/quickStart/tis/PostPro.js +msgid " Alternatively, slice views of the maximum interferential E-field can also be visualized and synchronous with the MAP slices to assess safety-related aspects (e.g., field intensity in proximity of the electrodes). These maps can be edited, thresholded, and saved offline for further inspection and analysis.
An isosurface of the TI stimulation distribution for the selected configuration can also be visualized within the head anatomy for inspection. " +msgstr "" + +#: osparc/product/quickStart/tis/PostPro.js +msgid " At the end of the optimization procedure, you can automatically generate a report.
It includes a summary of all the performance metrics calculated for each electrode pair combination, and a detailed performance report of the optimized electrode configuration. The report includes electrode placement, current intensities, performance metrics, TI and maximum high-frequency field distributions, cumulative dose histograms and all the graphs generated in the post-pro analysis tab. " +msgstr "" + +#: osparc/product/quickStart/tis/S4LPostPro.js +msgid "Not available in" +msgstr "" + +#: osparc/product/quickStart/tis/S4LPostPro.js +msgid "Sim4Life Post Processing" +msgstr "" + +#: osparc/product/quickStart/tis/S4LPostPro.js +msgid " Finally, and optionally, exposure conditions-of-interest can be visualized and analyzed freely, using the web-version of the Sim4Life (ZMT Zurich MedTech AG) computational life sciences platform. " +msgstr "" + +#: osparc/product/quickStart/tis/MoreInformation.js +msgid "For more information:" +msgstr "" + +#: osparc/product/quickStart/s4llite/Welcome.js +msgid " This quick user’s guide gives a short introduction to Sim4Life.lite. We will show:
- how to get started with a new project,
- how to get started from an existing tutorial project
- how to open Sim4Life desktop simulation projects in Sim4Life.lite,
- Sim4Life.lite features, limitations and user interface

For more specific technical information, please refer to the Dashboard Manual and the Sim4Life.lite Manual. " +msgstr "" + +#: osparc/product/quickStart/s4llite/Dashboard.js +msgid "Dashboard - Projects & Tutorials" +msgstr "" + +#: osparc/product/quickStart/s4llite/Dashboard.js +msgid " The Dashboard is the place where Projects and Tutorials can be accessed and organized. " +msgstr "" + +#: osparc/product/quickStart/s4llite/Dashboard.js +msgid " 1) Start Sim4Life.lite: Click the + Start Sim4Life.lite button to create a new project. This will start the user interface of Sim4Life.lite. " +msgstr "" + +#: osparc/product/quickStart/s4llite/Dashboard.js +msgid " 2) Other cards: Each card represents an existing project (own projects, or projects shared by other users) that can be accessed and managed. Click on the card to open the project. Click the “three dots” in the upper right corner of the card to perform operations such as rename, share, delete. " +msgstr "" + +#: osparc/product/quickStart/s4llite/Dashboard.js +msgid " 3) TUTORIALS: A set of pre-built read-only tutorial projects with results is available to all Sim4Life.lite users. When a tutorial is selected, a copy is automatically created and added to the user’s Projects tab. This new copy is editable and can be shared. " +msgstr "" + +#: osparc/product/quickStart/s4llite/Dashboard.js +msgid " 4) To open an existing desktop project in Sim4Life.lite: - Click the + Start Sim4Life.lite button to create a new project.
- Click the menu and select “File Browser…”.
- Click “Upload File” for the .smash project and select the file from your desktop. Repeat the same step, but this time select “Upload Folder” and then select the result folder from your desktop. Close the window
- Click the Menu again and click “Open” to select the file you just uploaded.
" +msgstr "" + +#: osparc/product/quickStart/s4llite/S4LLiteSpecs.js +msgid "Sim4Life.lite: Features and Limitations" +msgstr "" + +#: osparc/product/quickStart/s4llite/S4LLiteSpecs.js +msgid " Sim4Life.lite is a powerful web-based simulation platform that allows you to model and analyze real-world phenomena and to design complex technical devices in a validated environment. Sim4Life.lite has been created specifically for students to facilitate their understanding of computational modeling and simulations for various topics, ranging from wireless communication to medical applications. The access to Sim4Life.lite is available free of charge to students enrolled at registered universities. " +msgstr "" + +#: osparc/product/quickStart/s4llite/S4LLiteSpecs.js +msgid " Sim4Life.lite offers
- Framework (GUI, Modeling, Postprocessing)
- 3D modeling environment (based on the ACIS toolkit) and CAD translators
- Postprocessing and visualization of the simulation results (2D and 3D viewers, 2D planar slice, volume rendering, streamlines, surface fields on arbitrary 3D structures, radiation and far-field data)
- No restrictions on number of modeling objects
- Solvers & Tissue Models:
 - P-EM-FDTD: Electromagnetics Full-Wave Solver
 - P-EM-QS: Quasi-Static Electromagnetics Solver
 - P-Thermal: Thermodynamic Solver
 - P-Acoustics: Acoustics Solver
 - T-Neuro: Neuronal Tissue Models
- Computational anatomical model Yoon-sun, the first Korean model of the IT’IS Virtual Population
- Material database
- Python and Jupyter Notebook scripting " +msgstr "" + +#: osparc/product/quickStart/s4llite/S4LLiteSpecs.js +msgid " Limitations
The following limitations apply:
- Grid size of each simulation is limited to a maximum of 20 million grid cells
- High-Performance Computing is not supported:
 - GPU acceleration is not available
 - MPI multicore acceleration is not available
- 3rd-party tools are not available (e.g., MUSAIK, SYSSIM, IMAnalytics, etc…)
- Additional ViP models cannot be added
- 30 minutes idle time before logout
- Hardware resource limits
 - 3 CPUs
 - 3 GB of GPU RAM
 - 5 GB disk space
 - 16 GB RAM
" +msgstr "" + +#: osparc/product/quickStart/s4llite/S4LLiteUI.js +msgid "Sim4Life.lite" +msgstr "" + +#: osparc/product/quickStart/s4llite/S4LLiteUI.js +msgid " To check the Sim4Life.lite manual, please open a project and access the documentation via Help in the menu as shown below. Enjoy! " +msgstr "" + +#: osparc/desktop/credits/BuyCreditsInput.js +msgid "Credit Price" +msgstr "" + +#: osparc/desktop/credits/BuyCreditsInput.js +msgid "Credit Amount" +msgstr "" + +#: osparc/desktop/credits/BuyCreditsInput.js +msgid "Total" +msgstr "" + +#: osparc/filter/SearchingCollaborators.js +msgid "Searching..." +msgstr "" + +#: osparc/desktop/organizations/OrganizationsList.js +msgid " An organization is a group of users who can share " +msgstr "" + +#: osparc/desktop/organizations/OrganizationsList.js +msgid " and other resources.
Here you can see the list of organizations you belong to, create new organizations, or manage the membership by setting up the access rights of each member in the organization if you are a manager or administrator." +msgstr "" + +#: osparc/desktop/organizations/OrganizationsList.js +msgid "New Organization" +msgstr "" + +#: osparc/desktop/organizations/OrganizationDetails.js +msgid "Organization Details Editor" +msgstr "" + +#: osparc/desktop/organizations/OrganizationsList.js +msgid "Delete Organization" +msgstr "" + +#: osparc/desktop/organizations/OrganizationsList.js +msgid "Something went wrong while deleting " +msgstr "" + +#: osparc/pricing/PlanEditor.js +msgid " successfully created" +msgstr "" + +#: osparc/pricing/PlanEditor.js +msgid "Something went wrong while creating " +msgstr "" + +#: osparc/desktop/wallets/WalletDetails.js +msgid " successfully edited" +msgstr "" + +#: osparc/desktop/wallets/WalletDetails.js +msgid "Something went wrong while editing " +msgstr "" + +#: osparc/desktop/organizations/OrganizationDetails.js +msgid "Return to Organizations list" +msgstr "" + +#: osparc/desktop/wallets/WalletDetails.js +msgid "Members" +msgstr "" + +#: osparc/jobs/JobsTable.js +msgid "Job Id" +msgstr "" + +#: osparc/jobs/JobsTable.js +msgid "Solver" +msgstr "" + +#: osparc/desktop/credits/TransactionsTableModel.js +msgid "Status" +msgstr "" + +#: osparc/jobs/JobsTable.js +msgid "Submitted" +msgstr "" + +#: osparc/jobs/JobsTable.js +msgid "Started" +msgstr "" + +#: osparc/jobs/JobsTable.js +msgid "Info" +msgstr "" + +#: osparc/jobs/JobsTable.js +msgid "Instance" +msgstr "" + +#: osparc/notification/NotificationUI.js +msgid "You don't have access anymore" +msgstr "" + +#: osparc/notification/NotificationUI.js +msgid "Do you want to make it the default Credit Account?" 
+msgstr "" + +#: osparc/notification/NotificationUI.js +msgid "Default Credit Account" +msgstr "" + +#: osparc/desktop/preferences/pages/GeneralPage.js +msgid "Credits Indicator" +msgstr "" + +#: osparc/desktop/preferences/pages/GeneralPage.js +msgid "Show indicator" +msgstr "" + +#: osparc/desktop/preferences/pages/GeneralPage.js +msgid "Show warning when credits below" +msgstr "" + +#: osparc/desktop/preferences/pages/GeneralPage.js +msgid "Automatic Shutdown of Idle Instances" +msgstr "" + +#: osparc/desktop/preferences/pages/GeneralPage.js +msgid "Enter 0 to disable this function" +msgstr "" + +#: osparc/desktop/preferences/pages/GeneralPage.js +msgid "Idle time before closing (in minutes)" +msgstr "" + +#: osparc/desktop/preferences/pages/GeneralPage.js +msgid "Job Concurrency" +msgstr "" + +#: osparc/desktop/preferences/pages/GeneralPage.js +msgid "Maximum number of concurrent jobs" +msgstr "" + +#: osparc/desktop/preferences/pages/GeneralPage.js +msgid "Low Disk Space Threshold" +msgstr "" + +#: osparc/desktop/preferences/pages/GeneralPage.js +msgid "Set the warning Threshold for Low Disk Space availability" +msgstr "" + +#: osparc/desktop/preferences/pages/GeneralPage.js +msgid "Threshold (in GB)" +msgstr "" + +#: osparc/desktop/preferences/pages/GeneralPage.js +msgid "Help us improve Sim4Life user experience" +msgstr "" + +#: osparc/desktop/preferences/pages/GeneralPage.js +msgid "Share usage data" +msgstr "" + +#: osparc/desktop/preferences/pages/ConfirmationsPage.js +msgid "Ask for confirmation for the following actions:" +msgstr "" + +#: osparc/desktop/preferences/pages/ConfirmationsPage.js +msgid "Go back to the Dashboard" +msgstr "" + +#: osparc/desktop/preferences/pages/ConfirmationsPage.js +msgid "Delete a " +msgstr "" + +#: osparc/desktop/preferences/pages/ConfirmationsPage.js +msgid "Warning: deleting a " +msgstr "" + +#: osparc/desktop/preferences/pages/ConfirmationsPage.js +msgid " cannot be undone" +msgstr "" + +#: osparc/desktop/preferences/pages/ConfirmationsPage.js +msgid "Understood" +msgstr "" + +#: osparc/desktop/preferences/pages/ConfirmationsPage.js +msgid "Delete a Node" +msgstr "" + +#: osparc/desktop/preferences/pages/ConfirmationsPage.js +msgid "Warning: deleting a node cannot be undone" +msgstr "" + +#: osparc/desktop/preferences/pages/ConfirmationsPage.js +msgid "Stop Node" +msgstr "" + +#: osparc/desktop/preferences/pages/ConfirmationsPage.js +msgid "Snap Node to Grid" +msgstr "" + +#: osparc/desktop/preferences/pages/ConfirmationsPage.js +msgid "This is a list of experimental preferences" +msgstr "" + +#: osparc/desktop/preferences/pages/ConfirmationsPage.js +msgid "Connect ports automatically" +msgstr "" + +#: osparc/desktop/preferences/pages/TokensPage.js +msgid "API Keys" +msgstr "" + +#: osparc/desktop/preferences/pages/TokensPage.js +msgid "List API keys associated to your account." +msgstr "" + +#: osparc/desktop/preferences/pages/TokensPage.js +msgid "New API Key" +msgstr "" + +#: osparc/desktop/preferences/pages/TokensPage.js +msgid "Cannot create API Key" +msgstr "" + +#: osparc/desktop/preferences/pages/TokensPage.js +msgid "Do you want to delete the API key?" 
+msgstr "" + +#: osparc/desktop/preferences/pages/TokensPage.js +msgid "Delete API key" +msgstr "" + +#: osparc/desktop/preferences/pages/TokensPage.js +msgid "Cannot delete API Key" +msgstr "" + +#: osparc/desktop/preferences/pages/TokensPage.js +msgid "API Tokens for External Services" +msgstr "" + +#: osparc/desktop/preferences/pages/TokensPage.js +msgid "Provide the API tokens needed to access external services." +msgstr "" + +#: osparc/desktop/preferences/pages/TokensPage.js +msgid "Current Tokens" +msgstr "" + +#: osparc/desktop/preferences/pages/TokensPage.js +msgid "Supported services" +msgstr "" + +#: osparc/desktop/preferences/pages/TokensPage.js +msgid "Are you sure you want to delete this token?" +msgstr "" + +#: osparc/desktop/preferences/pages/TokensPage.js +msgid "Delete Token" +msgstr "" + +#: osparc/desktop/preferences/pages/TokensPage.js +msgid "Enter your token key" +msgstr "" + +#: osparc/desktop/preferences/pages/TokensPage.js +msgid "Key" +msgstr "" + +#: osparc/desktop/preferences/pages/TokensPage.js +msgid "Enter your token secret" +msgstr "" + +#: osparc/desktop/preferences/pages/TokensPage.js +msgid "Secret" +msgstr "" + +#: osparc/desktop/preferences/pages/TagsPage.js +msgid " Tags help you organize the " +msgstr "" + +#: osparc/desktop/preferences/pages/TagsPage.js +msgid " in the Dashboard by categorizing topics, making it easier to search and filter. Once the tags are created, they can be assigned to the " +msgstr "" + +#: osparc/desktop/preferences/pages/TagsPage.js +msgid " via 'More options...' on the " +msgstr "" + +#: osparc/desktop/preferences/pages/TagsPage.js +msgid " cards." +msgstr "" + +#: osparc/desktop/credits/CreditsPerService.js +msgid "No usage records found" +msgstr "" + +#: osparc/product/AboutProduct.js +msgid "Information is unavailable" +msgstr "" + +#: osparc/desktop/wallets/WalletsList.js +msgid "Personal" +msgstr "" + +#: osparc/desktop/wallets/WalletsList.js +msgid "No personal Credit Account found" +msgstr "" + +#: osparc/desktop/wallets/WalletsList.js +msgid "Shared with me" +msgstr "" + +#: osparc/desktop/wallets/WalletsList.js +msgid "No shared Credit Accounts found" +msgstr "" + +#: osparc/desktop/wallets/WalletDetails.js +msgid "Credit Account Details Editor" +msgstr "" + +#: osparc/desktop/wallets/WalletsList.js +msgid "Something went wrong while updating the Credit Account" +msgstr "" + +#: osparc/desktop/wallets/WalletsList.js +msgid "Currently in use" +msgstr "" + +#: osparc/desktop/wallets/WalletDetails.js +msgid "Return to Credit Accounts list" +msgstr "" + +#: osparc/desktop/paymentMethods/PaymentMethodListItem.js +msgid "Are you sure you want to delete this Payment Method?" 
+msgstr "" + +#: osparc/desktop/paymentMethods/PaymentMethodListItem.js +msgid "Delete Payment Method" +msgstr "" + +#: osparc/desktop/paymentMethods/PaymentMethodDetails.js +msgid "Payment Method details" +msgstr "" + +#: osparc/desktop/paymentMethods/PaymentMethodDetails.js +msgid "Card Holder name" +msgstr "" + +#: osparc/filter/NodeTypeFilter.js +msgid "Type" +msgstr "" + +#: osparc/desktop/paymentMethods/PaymentMethodDetails.js +msgid "Number" +msgstr "" + +#: osparc/desktop/paymentMethods/PaymentMethodDetails.js +msgid "Expiration date" +msgstr "" + +#: osparc/desktop/credits/ResourceInTableViewer.js +msgid "Export" +msgstr "" + +#: osparc/desktop/credits/UsageTable.js +msgid "Node" +msgstr "" + +#: osparc/desktop/credits/CheckoutsTable.js +msgid "Duration" +msgstr "" + +#: osparc/study/PricingUnitLicense.js +msgid "Credits" +msgstr "" + +#: osparc/desktop/credits/PurchasesTable.js +msgid "PurchaseId" +msgstr "" + +#: osparc/desktop/credits/CheckoutsTable.js +msgid "ItemId" +msgstr "" + +#: osparc/desktop/credits/CheckoutsTable.js +msgid "Seats" +msgstr "" + +#: osparc/desktop/credits/CheckoutsTable.js +msgid "CheckoutId" +msgstr "" + +#: osparc/vipMarket/Market.js +msgid "My Models" +msgstr "" + +#: osparc/editor/OrganizationEditor.js +msgid "Thumbnail" +msgstr "" + +#: osparc/workbench/WorkbenchUI.js +msgid "Double-click to add a node" +msgstr "" + +#: osparc/workbench/WorkbenchUI.js +msgid "INPUTS" +msgstr "" + +#: osparc/workbench/WorkbenchUI.js +msgid "OUTPUTS" +msgstr "" + +#: osparc/workbench/WorkbenchUI.js +msgid "Pick the position" +msgstr "" + +#: osparc/workbench/WorkbenchUI.js +msgid "Draw a rectangle" +msgstr "" + +#: osparc/workbench/WorkbenchUI.js +msgid "Drop here" +msgstr "" + +#: osparc/workbench/WorkbenchUI.js +msgid "Draw a rectangle first" +msgstr "" + +#: osparc/widget/NodeTreeItem.js +msgid "Full Screen" +msgstr "" + +#: osparc/node/LifeCycleView.js +msgid "Update Service" +msgstr "" + +#: osparc/node/UpdateResourceLimitsView.js +msgid "Limits have been successfully updated" +msgstr "" + +#: osparc/node/UpdateResourceLimitsView.js +msgid "Something went wrong while updating the limits" +msgstr "" + +#: osparc/navigation/BreadcrumbsSlideshow.js +msgid "Pipeline is empty" +msgstr "" + +#: osparc/navigation/BreadcrumbsSlideshow.js +msgid "No visible nodes. Enable some by adjusting the app mode." +msgstr "" + +#: osparc/data/model/NodeProgressSequence.js +msgid "Please wait, this process may take a few minutes ..." +msgstr "" + +#: osparc/data/model/NodeProgressSequence.js +msgid "LOADING ..." +msgstr "" + +#: osparc/data/model/NodeProgressSequence.js +msgid "Provisioning resources ..." +msgstr "" + +#: osparc/data/model/NodeProgressSequence.js +msgid "Setting up system software ..." +msgstr "" + +#: osparc/data/model/NodeProgressSequence.js +msgid "Retrieving your output data ..." +msgstr "" + +#: osparc/data/model/NodeProgressSequence.js +msgid "Retrieving your work ..." +msgstr "" + +#: osparc/data/model/NodeProgressSequence.js +msgid "Installing services ..." +msgstr "" + +#: osparc/data/model/NodeProgressSequence.js +msgid "Starting services ..." +msgstr "" + +#: osparc/data/model/NodeProgressSequence.js +msgid "Retrieving your input data ..." +msgstr "" + +#: osparc/ui/basic/NodeStatusUI.js +msgid "Select a file" +msgstr "" + +#: osparc/ui/basic/NodeStatusUI.js +msgid "Uploading..." +msgstr "" + +#: osparc/widget/PreparingInputs.js +msgid "To proceed, we need to prepare some inputs. 
You can check the progress logs here:" +msgstr "" + +#: osparc/widget/PreparingInputs.js +msgid "Run all" +msgstr "" + +#: osparc/widget/PreparingInputs.js +msgid "Logs" +msgstr "" + +#: osparc/ui/form/ContentSchemaHelper.js +msgid "Minimum items: " +msgstr "" + +#: osparc/ui/form/ContentSchemaHelper.js +msgid "Maximum items: " +msgstr "" + +#: osparc/ui/form/ContentSchemaHelper.js +msgid "Out of range" +msgstr "" + +#: osparc/ui/form/ContentSchemaHelper.js +msgid "Minimum value: " +msgstr "" + +#: osparc/ui/form/ContentSchemaHelper.js +msgid "Maximum value: " +msgstr "" + +#: osparc/file/FolderContent.js +msgid "Date Modified" +msgstr "" + +#: osparc/file/FolderContent.js +msgid "Size" +msgstr "" + +#: osparc/file/FolderContent.js +msgid "Id" +msgstr "" + +#: osparc/file/FileLabelWithActions.js +msgid "This action cannot be undone." +msgstr "" + +#: osparc/file/FileLabelWithActions.js +msgid "All contents within the folders will be deleted." +msgstr "" + +#: osparc/file/FileLabelWithActions.js +msgid "Do you want to proceed?" +msgstr "" + +#: osparc/file/FileLabelWithActions.js +msgid "Items successfully deleted" +msgstr "" + +#: osparc/file/FileLabelWithActions.js +msgid "Externally managed items cannot be deleted" +msgstr "" + +#: osparc/editor/HtmlEditor.js +msgid "Supports HTML" +msgstr "" + +#: osparc/study/PricingUnitTier.js +msgid "Credits/h" +msgstr "" + +#: osparc/study/PricingUnitTier.js +msgid "EC2" +msgstr "" + +#: osparc/ui/list/OrganizationListItem.js +msgid " members" +msgstr "" + +#: osparc/desktop/wallets/WalletListItem.js +msgid "Edit details..." +msgstr "" + +#: osparc/desktop/wallets/MembersList.js +msgid "Add Members..." +msgstr "" + +#: osparc/desktop/organizations/MembersList.js +msgid "You can add new members and assign roles." +msgstr "" + +#: osparc/desktop/organizations/MembersList.js +msgid "You cannot add new members to this Organization. Please contact an Administrator or Manager." +msgstr "" + +#: osparc/desktop/organizations/MembersList.js +msgid " successfully added" +msgstr "" + +#: osparc/desktop/organizations/MembersList.js +msgid "Something went wrong while adding the user" +msgstr "" + +#: osparc/desktop/organizations/MembersList.js +msgid "Something went wrong while promoting to " +msgstr "" + +#: osparc/desktop/organizations/MembersList.js +msgid "Something went wrong while demoting to " +msgstr "" + +#: osparc/desktop/organizations/MembersList.js +msgid "Are you sure you want to leave?" +msgstr "" + +#: osparc/desktop/organizations/MembersList.js +msgid "If you leave, the page will reload." +msgstr "" + +#: osparc/desktop/organizations/MembersList.js +msgid "Leave Organization" +msgstr "" + +#: osparc/desktop/organizations/TemplatesList.js +msgid "This is the list of " +msgstr "" + +#: osparc/desktop/organizations/TemplatesList.js +msgid " shared with this Organization" +msgstr "" + +#: osparc/desktop/organizations/ServicesList.js +msgid "This is the list of services shared with this Organization" +msgstr "" + +#: osparc/jobs/JobInfo.js +msgid "Job Info" +msgstr "" + +#: osparc/desktop/preferences/window/CreateAPIKey.js +msgid "Generate API Key" +msgstr "" + +#: osparc/desktop/preferences/window/CreateAPIKey.js +msgid "Key names must be unique." 
+msgstr "" + +#: osparc/desktop/preferences/window/CreateAPIKey.js +msgid "Key Name" +msgstr "" + +#: osparc/desktop/preferences/window/CreateAPIKey.js +msgid "Expiration Date" +msgstr "" + +#: osparc/desktop/preferences/window/CreateAPIKey.js +msgid "Select a future date" +msgstr "" + +#: osparc/desktop/preferences/window/CreateAPIKey.js +msgid "Confirm" +msgstr "" + +#: osparc/desktop/preferences/window/ShowAPIKey.js +msgid "API Key" +msgstr "" + +#: osparc/desktop/preferences/window/ShowAPIKey.js +msgid "For your security, store your access keys safely. You will not be able to access them again after closing this window." +msgstr "" + +#: osparc/desktop/preferences/window/ShowAPIKey.js +msgid "Key:" +msgstr "" + +#: osparc/desktop/preferences/window/ShowAPIKey.js +msgid "Secret:" +msgstr "" + +#: osparc/desktop/preferences/window/ShowAPIKey.js +msgid "Base url:" +msgstr "" + +#: osparc/desktop/preferences/window/ShowAPIKey.js +msgid "API Secret" +msgstr "" + +#: osparc/desktop/preferences/window/ShowAPIKey.js +msgid "Base URL" +msgstr "" + +#: osparc/admin/AdminCenter.js +msgid "Pricing Plans" +msgstr "" + +#: osparc/admin/AdminCenter.js +msgid "Maintenance" +msgstr "" + +#: osparc/admin/AdminCenter.js +msgid "Announcements" +msgstr "" + +#: osparc/po/POCenter.js +msgid "Users" +msgstr "" + +#: osparc/po/POCenter.js +msgid "Pre-Registration" +msgstr "" + +#: osparc/po/POCenter.js +msgid "Invitations" +msgstr "" + +#: osparc/po/POCenter.js +msgid "Product Info" +msgstr "" + +#: osparc/po/POCenter.js +msgid "Message Templates" +msgstr "" + +#: osparc/tester/TesterCenter.js +msgid "Socket Messages" +msgstr "" + +#: osparc/tester/TesterCenter.js +msgid "Console Errors" +msgstr "" + +#: osparc/tester/Statics.js +msgid "Statics" +msgstr "" + +#: osparc/desktop/wallets/WalletListItem.js +msgid "Something went wrong while updating the state" +msgstr "" + +#: osparc/desktop/credits/AutoRecharge.js +msgid "Auto-recharge" +msgstr "" + +#: osparc/desktop/wallets/WalletListItem.js +msgid "Auto-recharge: ON" +msgstr "" + +#: osparc/desktop/wallets/WalletListItem.js +msgid "Auto-recharge: OFF" +msgstr "" + +#: osparc/desktop/wallets/WalletListItem.js +msgid "ON" +msgstr "" + +#: osparc/desktop/wallets/WalletListItem.js +msgid "OFF" +msgstr "" + +#: osparc/desktop/wallets/WalletListItem.js +msgid "Credit Account enabled" +msgstr "" + +#: osparc/desktop/wallets/WalletListItem.js +msgid "Credit Account blocked" +msgstr "" + +#: osparc/desktop/wallets/WalletListItem.js +msgid "Currently being used" +msgstr "" + +#: osparc/desktop/wallets/WalletListItem.js +msgid "Switch to this Credit Account" +msgstr "" + +#: osparc/desktop/wallets/MembersList.js +msgid "Only Accountants of an Organization can share a wallet with other users." 
+msgstr "" + +#: osparc/vipMarket/SortModelsButtons.js +msgid "Date" +msgstr "" + +#: osparc/desktop/credits/TransactionsTableModel.js +msgid "Price USD" +msgstr "" + +#: osparc/pricing/UnitEditor.js +msgid "Comment" +msgstr "" + +#: osparc/desktop/credits/TransactionsTableModel.js +msgid "Invoice" +msgstr "" + +#: osparc/vipMarket/VipMarket.js +msgid "Loading" +msgstr "" + +#: osparc/vipMarket/VipMarket.js +msgid "Cannot purchase model" +msgstr "" + +#: osparc/desktop/StartStopButtons.js +msgid "Run All" +msgstr "" + +#: osparc/desktop/StartStopButtons.js +msgid "Run Selection" +msgstr "" + +#: osparc/desktop/ZoomButtons.js +msgid "Zoom In" +msgstr "" + +#: osparc/desktop/ZoomButtons.js +msgid "Zoom Out" +msgstr "" + +#: osparc/desktop/ZoomButtons.js +msgid "Reset Zoom" +msgstr "" + +#: osparc/editor/AnnotationNoteCreator.js +msgid "Add Note" +msgstr "" + +#: osparc/editor/AnnotationNoteCreator.js +msgid "Add a recipient to be notified. Please make sure the user has access to the " +msgstr "" + +#: osparc/editor/AnnotationNoteCreator.js +msgid "Select recipient" +msgstr "" + +#: osparc/desktop/wallets/MemberListItem.js +msgid "Promote to " +msgstr "" + +#: osparc/desktop/wallets/MemberListItem.js +msgid "Demote to " +msgstr "" + +#: osparc/desktop/wallets/MemberListItem.js +msgid "Remove " +msgstr "" + +#: osparc/admin/Maintenance.js +msgid "Start and End dates go in UTC time zone" +msgstr "" + +#: osparc/admin/Maintenance.js +msgid "No Maintenance scheduled" +msgstr "" + +#: osparc/admin/Announcements.js +msgid "Create announcement" +msgstr "" + +#: osparc/admin/Announcements.js +msgid "title" +msgstr "" + +#: osparc/admin/Announcements.js +msgid "description" +msgstr "" + +#: osparc/admin/Announcements.js +msgid "link" +msgstr "" + +#: osparc/admin/Announcements.js +msgid "Link" +msgstr "" + +#: osparc/admin/Announcements.js +msgid "Login" +msgstr "" + +#: osparc/admin/Announcements.js +msgid "Ribbon" +msgstr "" + +#: osparc/admin/Announcements.js +msgid "User Menu" +msgstr "" + +#: osparc/po/Invitations.js +msgid "Generate" +msgstr "" + +#: osparc/admin/Announcements.js +msgid "Copy announcement" +msgstr "" + +#: osparc/po/Users.js +msgid "user@email.address or user@*" +msgstr "" + +#: osparc/po/Users.js +msgid "Searching users..." +msgstr "" + +#: osparc/po/Users.js +msgid " user(s) found" +msgstr "" + +#: osparc/po/Users.js +msgid "Error searching users" +msgstr "" + +#: osparc/po/PreRegistration.js +msgid "Pre-Registration" +msgstr "" + +#: osparc/po/PreRegistration.js +msgid "Copy&Paste the Request Account Form in JSON format here ..." +msgstr "" + +#: osparc/po/PreRegistration.js +msgid "Request Form" +msgstr "" + +#: osparc/po/PreRegistration.js +msgid "Unsuccessful Pre-Registration. See details below" +msgstr "" + +#: osparc/po/PreRegistration.js +msgid "Searching Pre-Registered users..." +msgstr "" + +#: osparc/po/PreRegistration.js +msgid "Pre-Registered as:" +msgstr "" + +#: osparc/po/PreRegistration.js +msgid "No Pre-Registered user found" +msgstr "" + +#: osparc/po/Invitations.js +msgid "Create invitation" +msgstr "" + +#: osparc/po/Invitations.js +msgid "There is no invitation required in this product/deployment." 
+msgstr "" + +#: osparc/po/Invitations.js +msgid "new.user@email.address" +msgstr "" + +#: osparc/po/Invitations.js +msgid "User Email" +msgstr "" + +#: osparc/po/Invitations.js +msgid "Welcome Credits (USD)" +msgstr "" + +#: osparc/po/Invitations.js +msgid "With expiration" +msgstr "" + +#: osparc/po/Invitations.js +msgid "Trial Days" +msgstr "" + +#: osparc/po/Invitations.js +msgid "Remember that this is a one time use link" +msgstr "" + +#: osparc/po/Invitations.js +msgid "Copy invitation link" +msgstr "" + +#: osparc/po/Invitations.js +msgid "Data encrypted in the invitation" +msgstr "" + +#: osparc/po/MessageTemplates.js +msgid "Template updated" +msgstr "" + +#: osparc/tester/ConsoleErrors.js +msgid "Search in Message" +msgstr "" + +#: osparc/tester/WebSocketMessages.js +msgid "Channel" +msgstr "" + +#: osparc/tester/ConsoleErrors.js +msgid "Message" +msgstr "" + +#: osparc/tester/Statics.js +msgid "Local Storage" +msgstr "" + +#: osparc/desktop/credits/AutoRecharge.js +msgid "Keep your balance running smoothly by automatically setting your credits to be recharged when it runs low." +msgstr "" + +#: osparc/desktop/credits/AutoRecharge.js +msgid "Before the auto-recharge function can be activated you need to add your first payment method" +msgstr "" + +#: osparc/desktop/credits/AutoRecharge.js +msgid "Recharging amount (USD)" +msgstr "" + +#: osparc/desktop/credits/AutoRecharge.js +msgid "Monthly limit (USD)" +msgstr "" + +#: osparc/desktop/credits/AutoRecharge.js +msgid "Maximum amount in USD charged within a natural month." +msgstr "" + +#: osparc/desktop/credits/AutoRecharge.js +msgid "To disable spending limit, clear input field" +msgstr "" + +#: osparc/desktop/credits/AutoRecharge.js +msgid "Save and close" +msgstr "" + +#: osparc/desktop/credits/AutoRecharge.js +msgid "Changes on the Auto recharge were successfully saved" +msgstr "" + +#: osparc/vipMarket/LicensedItemDetails.js +msgid "Select a model for more details" +msgstr "" + +#: osparc/vipMarket/LicensedItemDetails.js +msgid "This bundle contains:" +msgstr "" + +#: osparc/vipMarket/LicensedItemDetails.js +msgid "Terms and Conditions" +msgstr "" + +#: osparc/vipMarket/LicensedItemDetails.js +msgid "Terms and Conditions" +msgstr "" + +#: osparc/vipMarket/LicensedItemDetails.js +msgid "Available for Importing" +msgstr "" + +#: osparc/form/ColorPicker.js +msgid "Pick a color" +msgstr "" + +#: osparc/pricing/Plans.js +msgid "New Pricing Plan" +msgstr "" + +#: osparc/pricing/Plans.js +msgid "Pricing Plan Creator" +msgstr "" + +#: osparc/pricing/Plans.js +msgid "Pricing Plan Editor" +msgstr "" + +#: osparc/pricing/PlanDetails.js +msgid "Return to Pricing Plans" +msgstr "" + +#: osparc/pricing/PlanDetails.js +msgid "Pricing Units" +msgstr "" + +#: osparc/study/PricingUnitLicense.js +msgid "Duration: 1 year" +msgstr "" + +#: osparc/study/PricingUnitLicense.js +msgid "Rent" +msgstr "" + +#: osparc/study/PricingUnitLicense.js +msgid "will be available until " +msgstr "" + +#: osparc/pricing/PlanEditor.js +msgid "Pricing Plan Key" +msgstr "" + +#: osparc/pricing/UnitEditor.js +msgid "Successfully updated" +msgstr "" + +#: osparc/pricing/UnitsList.js +msgid "New Pricing Unit" +msgstr "" + +#: osparc/pricing/UnitsList.js +msgid "Pricing Unit Creator" +msgstr "" + +#: osparc/pricing/UnitsList.js +msgid "Pricing Unit Editor" +msgstr "" + +#: osparc/pricing/ServicesList.js +msgid "Add Service" +msgstr "" + +#: osparc/pricing/UnitEditor.js +msgid "Unit Name" +msgstr "" + +#: osparc/pricing/UnitEditor.js +msgid "Cost per unit" +msgstr "" + 
+#: osparc/pricing/UnitEditor.js +msgid "Specific info" +msgstr "" + +#: osparc/pricing/UnitEditor.js +msgid "CPU" +msgstr "" + +#: osparc/pricing/UnitEditor.js +msgid "RAM" +msgstr "" + +#: osparc/pricing/UnitEditor.js +msgid "VRAM" +msgstr "" + +#: osparc/pricing/UnitEditor.js +msgid "More Extra Info" +msgstr "" + +#: osparc/pricing/UnitEditor.js +msgid "Number of Seats" +msgstr "" + +#: osparc/pricing/UnitEditor.js +msgid "Default" +msgstr "" + +#: osparc/pricing/UnitEditor.js +msgid "Successfully created" +msgstr "" diff --git a/services/static-webserver/client/source/translation/readme.txt b/services/static-webserver/client/source/translation/readme.txt index 08513ca8c3d..f7ff92f0c4d 100644 --- a/services/static-webserver/client/source/translation/readme.txt +++ b/services/static-webserver/client/source/translation/readme.txt @@ -1,4 +1,4 @@ This directory will contain translation (.po) files once you run the 'translation' job in your project. -NOTE: for the moment all translation files are in the .gitignore +Add more "locales" entries in compile.json to generate their .po files diff --git a/services/storage/.env-devel b/services/storage/.env-devel index b27bc54c999..96d465822d3 100644 --- a/services/storage/.env-devel +++ b/services/storage/.env-devel @@ -1,31 +1,20 @@ -# Environment used to configure storage services -# -# - To expose in cmd: export $(grep -v '^#' .env-devel | xargs -0) -# +CELERY_RESULT_EXPIRES=P7D -# environs in Dockerfile ---------------- -SC_BOOT_MODE=local-development +RABBIT_HOST=rabbit +RABBIT_PASSWORD=adminadmin +RABBIT_PORT=5672 +RABBIT_SECURE=false +RABBIT_USER=admin +REDIS_HOST=redis +REDIS_PORT=6379 +REDIS_PASSWORD=adminadmin +REDIS_SECURE=false +REDIS_USER=null -# environs in docker-compose.yml -------- -POSTGRES_DB=testdb -POSTGRES_ENDPOINT=localhost:5432 -POSTGRES_HOST=localhost -POSTGRES_PASSWORD=secret -POSTGRES_PORT=5432 -POSTGRES_USER=test - +STORAGE_ENDPOINT=storage:8080 +STORAGE_HOST=storage STORAGE_LOGLEVEL=INFO -STORAGE_MONITORING_ENABLED=False - -S3_ACCESS_KEY=12345678 -S3_BUCKET_NAME=simcore -# 172.17.0.1 is the docker0 interface, which redirect from inside a container onto the host network interface. 
-S3_ENDPOINT=http://172.17.0.1:9001 -S3_SECRET_KEY=12345678 -S3_REGION=us-east-1 - -BF_API_SECRET=none -BF_API_KEY=none - -TRACING_OBSERVABILITY_BACKEND_ENDPOINT=http://jaeger:9411 +STORAGE_PORT=8080 +STORAGE_PROFILING=1 +STORAGE_TRACING=null diff --git a/services/storage/Makefile b/services/storage/Makefile index 87234a46b62..ef350cae091 100644 --- a/services/storage/Makefile +++ b/services/storage/Makefile @@ -4,27 +4,15 @@ include ../../scripts/common.Makefile include ../../scripts/common-service.Makefile -APP_OPENAPI_RELPATH=src/$(APP_PACKAGE_NAME)/api/v0/openapi.yaml - -.PHONY: openapi-specs -openapi-specs: ## updates and validates openapi specifications - $(MAKE_C) $(REPO_BASE_DIR)/api/specs/$(APP_NAME) all - # validates OAS file: $(APP_OPENAPI_RELPATH) - $(call validate_openapi_specs,$(APP_OPENAPI_RELPATH)) - - -.PHONY: tests -tests: ## runs unit tests - # running unit tests - @pytest -vv --asyncio-mode=auto --failed-first --durations=10 --pdb $(CURDIR)/tests - - -# DEVELOPMENT ######## -.env: - cp .env-devel $@ - -run-devel: .env - # start app (within $<) in prod mode - export $(shell grep -v '^#' .env-devel | xargs -0); \ - $(APP_CLI_NAME) +.PHONY: openapi.json +openapi-specs: openapi.json +openapi.json: .env + # generating openapi specs file (need to have the environment set for this) + @set -o allexport; \ + source $<; \ + set +o allexport; \ + python3 -c "import json; from $(APP_PACKAGE_NAME).main import *; print( json.dumps(app.openapi(), indent=2) )" > $@ + + # validates OAS file: $@ + $(call validate_openapi_specs,$@) diff --git a/services/storage/VERSION b/services/storage/VERSION index 8f0916f768f..a918a2aa18d 100644 --- a/services/storage/VERSION +++ b/services/storage/VERSION @@ -1 +1 @@ -0.5.0 +0.6.0 diff --git a/services/storage/docker/boot.sh b/services/storage/docker/boot.sh index ea9bf8bd877..4cc5128b43a 100755 --- a/services/storage/docker/boot.sh +++ b/services/storage/docker/boot.sh @@ -6,11 +6,16 @@ IFS=$(printf '\n\t') INFO="INFO: [$(basename "$0")] " -# BOOTING application --------------------------------------------- echo "$INFO" "Booting in ${SC_BOOT_MODE} mode ..." echo "$INFO" "User :$(id "$(whoami)")" echo "$INFO" "Workdir : $(pwd)" +# +# DEVELOPMENT MODE +# +# - prints environ info +# - installs requirements in mounted volume +# if [ "${SC_BUILD_TARGET}" = "development" ]; then echo "$INFO" "Environment :" printenv | sed 's/=/: /' | sed 's/^/ /' | sort @@ -19,35 +24,62 @@ if [ "${SC_BUILD_TARGET}" = "development" ]; then command -v python | sed 's/^/ /' cd services/storage - uv pip --quiet --no-cache-dir sync requirements/dev.txt + uv pip --quiet sync requirements/dev.txt cd - echo "$INFO" "PIP :" uv pip list - - echo "$INFO" "Setting entrypoint to use watchmedo autorestart..." 
- entrypoint='watchmedo auto-restart --recursive --pattern="*.py;*/src/*" --ignore-patterns="*test*;pytest_simcore/*;setup.py;*ignore*" --ignore-directories --' - -elif [ "${SC_BUILD_TARGET}" = "production" ]; then - entrypoint="" fi if [ "${SC_BOOT_MODE}" = "debug" ]; then # NOTE: production does NOT pre-installs debugpy - uv pip install --no-cache-dir debugpy + uv pip install debugpy fi +# +# RUNNING application +# APP_LOG_LEVEL=${STORAGE_LOGLEVEL:-${LOG_LEVEL:-${LOGLEVEL:-INFO}}} SERVER_LOG_LEVEL=$(echo "${APP_LOG_LEVEL}" | tr '[:upper:]' '[:lower:]') - -# RUNNING application ---------------------------------------- -echo "$INFO" "Selected config ${SC_BUILD_TARGET}" echo "$INFO" "Log-level app/server: $APP_LOG_LEVEL/$SERVER_LOG_LEVEL" -if [ "${SC_BOOT_MODE}" = "debug" ]; then - # NOTE: needs debupgy installed - echo "$INFO" "Debugpy initializing in port ${STORAGE_REMOTE_DEBUGGING_PORT} with ${SC_BUILD_TARGET}" - eval "$entrypoint" python3 -m debugpy --listen 0.0.0.0:"${STORAGE_REMOTE_DEBUGGING_PORT}" -m \ - simcore_service_storage run +if [ "${STORAGE_WORKER_MODE}" = "true" ]; then + if [ "${SC_BOOT_MODE}" = "debug" ]; then + exec watchmedo auto-restart \ + --directory /devel/packages \ + --directory services/storage \ + --pattern "*.py" \ + --recursive \ + -- \ + celery \ + --app=simcore_service_storage.modules.celery.worker_main:app \ + worker --pool=threads \ + --loglevel="${SERVER_LOG_LEVEL}" \ + --concurrency="${CELERY_CONCURRENCY}" + else + exec celery \ + --app=simcore_service_storage.modules.celery.worker_main:app \ + worker --pool=threads \ + --loglevel="${SERVER_LOG_LEVEL}" \ + --concurrency="${CELERY_CONCURRENCY}" + fi else - exec simcore-service-storage run + if [ "${SC_BOOT_MODE}" = "debug" ]; then + reload_dir_packages=$(find /devel/packages -maxdepth 3 -type d -path "*/src/*" ! -path "*.*" -exec echo '--reload-dir {} \' \;) + + exec sh -c " + cd services/storage/src/simcore_service_storage && \ + python -m debugpy --listen 0.0.0.0:${STORAGE_REMOTE_DEBUGGING_PORT} -m uvicorn main:app \ + --host 0.0.0.0 \ + --port ${STORAGE_PORT} \ + --reload \ + $reload_dir_packages + --reload-dir . 
\ + --log-level \"${SERVER_LOG_LEVEL}\" + " + else + exec uvicorn simcore_service_storage.main:app \ + --host 0.0.0.0 \ + --port ${STORAGE_PORT} \ + --log-level "${SERVER_LOG_LEVEL}" + fi fi diff --git a/services/storage/docker/healthcheck.py b/services/storage/docker/healthcheck.py old mode 100644 new mode 100755 index b6711cd55eb..d938c860dab --- a/services/storage/docker/healthcheck.py +++ b/services/storage/docker/healthcheck.py @@ -20,18 +20,50 @@ import os +import socket +import subprocess import sys from urllib.request import urlopen +from simcore_service_storage.core.settings import ApplicationSettings + SUCCESS, UNHEALTHY = 0, 1 # Disabled if boots with debugger -ok = os.environ.get("SC_BOOT_MODE", "").lower() == "debug" +ok = os.getenv("SC_BOOT_MODE", "").lower() == "debug" # Queries host # pylint: disable=consider-using-with + +app_settings = ApplicationSettings.create_from_envs() + +def _is_celery_worker_healthy(): + assert app_settings.STORAGE_CELERY + broker_url = app_settings.STORAGE_CELERY.CELERY_RABBIT_BROKER.dsn + + try: + result = subprocess.run( + [ + "celery", + "--broker", + broker_url, + "inspect", + "ping", + "--destination", + "celery@" + socket.gethostname(), + ], + capture_output=True, + text=True, + check=True, + ) + return "pong" in result.stdout + except subprocess.CalledProcessError: + return False + + ok = ( ok + or (app_settings.STORAGE_WORKER_MODE and _is_celery_worker_healthy()) or urlopen( "{host}{baseurl}".format( host=sys.argv[1], baseurl=os.environ.get("SIMCORE_NODE_BASEPATH", "") diff --git a/services/storage/openapi.json b/services/storage/openapi.json new file mode 100644 index 00000000000..2c5676b08bc --- /dev/null +++ b/services/storage/openapi.json @@ -0,0 +1,2730 @@ +{ + "openapi": "3.1.0", + "info": { + "title": "simcore_service_storage", + "description": "Service that manages osparc storage backend", + "version": "0.6.0" + }, + "paths": { + "/v0/": { + "get": { + "tags": [ + "status" + ], + "summary": "Get Health", + "operationId": "get_health_v0__get", + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/Envelope_HealthCheck_" + } + } + } + } + } + } + }, + "/v0/status": { + "get": { + "tags": [ + "status" + ], + "summary": "Get Status", + "operationId": "get_status_v0_status_get", + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/Envelope_AppStatusCheck_" + } + } + } + } + } + } + }, + "/v0/locations": { + "get": { + "tags": [ + "locations" + ], + "summary": "List Storage Locations", + "operationId": "list_storage_locations_v0_locations_get", + "parameters": [ + { + "name": "user_id", + "in": "query", + "required": true, + "schema": { + "type": "integer", + "exclusiveMinimum": true, + "title": "User Id", + "minimum": 0 + } + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/Envelope_list_FileLocation__" + } + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + } + } + }, + "/v0/locations/{location_id}/datasets": { + "get": { + "tags": [ + "datasets" + ], + "summary": "List Datasets Metadata", + "operationId": "list_datasets_metadata_v0_locations__location_id__datasets_get", + "parameters": [ + { + "name": 
"location_id", + "in": "path", + "required": true, + "schema": { + "type": "integer", + "title": "Location Id" + } + }, + { + "name": "user_id", + "in": "query", + "required": true, + "schema": { + "type": "integer", + "exclusiveMinimum": true, + "title": "User Id", + "minimum": 0 + } + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/Envelope_list_DatasetMetaDataGet__" + } + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + } + } + }, + "/v0/locations/{location_id}/datasets/{dataset_id}/metadata": { + "get": { + "tags": [ + "datasets" + ], + "summary": "List Dataset Files Metadata", + "operationId": "list_dataset_files_metadata_v0_locations__location_id__datasets__dataset_id__metadata_get", + "parameters": [ + { + "name": "location_id", + "in": "path", + "required": true, + "schema": { + "type": "integer", + "title": "Location Id" + } + }, + { + "name": "dataset_id", + "in": "path", + "required": true, + "schema": { + "type": "string", + "title": "Dataset Id" + } + }, + { + "name": "user_id", + "in": "query", + "required": true, + "schema": { + "type": "integer", + "exclusiveMinimum": true, + "title": "User Id", + "minimum": 0 + } + }, + { + "name": "expand_dirs", + "in": "query", + "required": false, + "schema": { + "type": "boolean", + "default": true, + "title": "Expand Dirs" + } + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/Envelope_list_FileMetaDataGet__" + } + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + } + } + }, + "/v0/locations/{location_id}/files/metadata": { + "get": { + "tags": [ + "files" + ], + "summary": "List Files Metadata", + "operationId": "list_files_metadata_v0_locations__location_id__files_metadata_get", + "parameters": [ + { + "name": "location_id", + "in": "path", + "required": true, + "schema": { + "type": "integer", + "title": "Location Id" + } + }, + { + "name": "user_id", + "in": "query", + "required": true, + "schema": { + "type": "integer", + "exclusiveMinimum": true, + "title": "User Id", + "minimum": 0 + } + }, + { + "name": "project_id", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string", + "format": "uuid" + }, + { + "type": "null" + } + ], + "title": "Project Id" + } + }, + { + "name": "uuid_filter", + "in": "query", + "required": false, + "schema": { + "type": "string", + "default": "", + "title": "Uuid Filter" + } + }, + { + "name": "expand_dirs", + "in": "query", + "required": false, + "schema": { + "type": "boolean", + "default": true, + "title": "Expand Dirs" + } + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/Envelope_list_FileMetaDataGet__" + } + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + } + } + }, + "/v0/locations/{location_id}/files/{file_id}/metadata": { + "get": { + "tags": [ + "files" + ], + "summary": "Get File Metadata", + "operationId": 
"get_file_metadata_v0_locations__location_id__files__file_id__metadata_get", + "parameters": [ + { + "name": "location_id", + "in": "path", + "required": true, + "schema": { + "type": "integer", + "title": "Location Id" + } + }, + { + "name": "file_id", + "in": "path", + "required": true, + "schema": { + "anyOf": [ + { + "type": "string", + "pattern": "^(api|([0-9a-fA-F]{8}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{12}))\\/([0-9a-fA-F]{8}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{12})\\/(.+)$" + }, + { + "type": "string", + "pattern": "^N:package:[0-9a-fA-F]{8}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{12}$" + } + ], + "title": "File Id" + } + }, + { + "name": "user_id", + "in": "query", + "required": true, + "schema": { + "type": "integer", + "exclusiveMinimum": true, + "title": "User Id", + "minimum": 0 + } + }, + { + "name": "user-agent", + "in": "header", + "required": true, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "title": "User-Agent" + } + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "anyOf": [ + { + "$ref": "#/components/schemas/Envelope_FileMetaDataGet_" + }, + { + "$ref": "#/components/schemas/Envelope_FileMetaDataGetv010_" + }, + { + "$ref": "#/components/schemas/Envelope_dict_" + } + ], + "title": "Response Get File Metadata V0 Locations Location Id Files File Id Metadata Get" + } + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + } + } + }, + "/v0/locations/{location_id}/files/{file_id}": { + "get": { + "tags": [ + "files" + ], + "summary": "Download File", + "operationId": "download_file_v0_locations__location_id__files__file_id__get", + "parameters": [ + { + "name": "location_id", + "in": "path", + "required": true, + "schema": { + "type": "integer", + "title": "Location Id" + } + }, + { + "name": "file_id", + "in": "path", + "required": true, + "schema": { + "anyOf": [ + { + "type": "string", + "pattern": "^(api|([0-9a-fA-F]{8}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{12}))\\/([0-9a-fA-F]{8}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{12})\\/(.+)$" + }, + { + "type": "string", + "pattern": "^N:package:[0-9a-fA-F]{8}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{12}$" + } + ], + "title": "File Id" + } + }, + { + "name": "user_id", + "in": "query", + "required": true, + "schema": { + "type": "integer", + "exclusiveMinimum": true, + "title": "User Id", + "minimum": 0 + } + }, + { + "name": "link_type", + "in": "query", + "required": false, + "schema": { + "$ref": "#/components/schemas/LinkType", + "default": "PRESIGNED" + } + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/Envelope_FileDownloadResponse_" + } + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + } + }, + "put": { + "tags": [ + "files" + ], + "summary": "Upload File", + "description": "creates upload file links:\n\nThis function covers v1 and v2 versions of the handler.\nNote: calling this entrypoint on an already existing file will overwrite that file. 
That file will be deleted\nbefore the upload takes place.\n\nv1 rationale:\n - client calls this handler, which returns a single link (either direct S3 or presigned) to the S3 backend\n - client uploads the file\n - storage relies on lazy update to find if the file is finished uploaded (when client calls get_file_meta_data, or if the dsm_cleaner goes over it after the upload time is expired)\n\nv2 rationale:\n - client calls this handler, which returns a FileUploadSchema object containing 1 or more links (either S3/presigned links)\n - client uploads the file (by chunking it if there are more than 1 presigned link)\n - client calls complete_upload handle which will reconstruct the file on S3 backend\n - client waits for completion to finish and then the file is accessible on S3 backend\n\n\nUse-case v1: query.file_size is not defined, returns a PresignedLink model (backward compatibility)\nUse-case v1.1: if query.link_type=presigned or None, returns a presigned link (limited to a single 5GB file)\nUse-case v1.2: if query.link_type=s3, returns a s3 direct link (limited to a single 5TB file)\n\nUser-case v2: query.is_directory is True (query.file_size is forced to -1), returns an s3 path where to upload all the content of the directory\nUser-case v2: if query.file_size is defined, returns a FileUploadSchema model, expects client to call \"complete_upload\" when the file is finished uploading\nUse-case v2.1: if query.file_size == 0 and query.link_type=presigned or None, returns a single presigned link inside FileUploadSchema (limited to a single 5Gb file)\nUse-case v2.2: if query.file_size > 0 and query.link_type=presigned or None, returns 1 or more presigned links depending on the file size (limited to a single 5TB file)\nUse-case v2.3: if query.link_type=s3 and query.file_size>=0, returns a single s3 direct link (limited to a single 5TB file)", + "operationId": "upload_file_v0_locations__location_id__files__file_id__put", + "parameters": [ + { + "name": "location_id", + "in": "path", + "required": true, + "schema": { + "type": "integer", + "title": "Location Id" + } + }, + { + "name": "file_id", + "in": "path", + "required": true, + "schema": { + "anyOf": [ + { + "type": "string", + "pattern": "^(api|([0-9a-fA-F]{8}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{12}))\\/([0-9a-fA-F]{8}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{12})\\/(.+)$" + }, + { + "type": "string", + "pattern": "^N:package:[0-9a-fA-F]{8}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{12}$" + } + ], + "title": "File Id" + } + }, + { + "name": "user_id", + "in": "query", + "required": true, + "schema": { + "type": "integer", + "exclusiveMinimum": true, + "title": "User Id", + "minimum": 0 + } + }, + { + "name": "link_type", + "in": "query", + "required": false, + "schema": { + "$ref": "#/components/schemas/LinkType", + "default": "PRESIGNED" + } + }, + { + "name": "file_size", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string", + "pattern": "^\\s*(\\d*\\.?\\d+)\\s*(\\w+)?" 
+ }, + { + "type": "integer", + "minimum": 0 + }, + { + "type": "null" + } + ], + "title": "File Size" + } + }, + { + "name": "is_directory", + "in": "query", + "required": false, + "schema": { + "type": "boolean", + "default": false, + "title": "Is Directory" + } + }, + { + "name": "sha256_checksum", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string", + "pattern": "^[a-fA-F0-9]{64}$" + }, + { + "type": "null" + } + ], + "title": "Sha256 Checksum" + } + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "anyOf": [ + { + "$ref": "#/components/schemas/Envelope_FileUploadResponseV1_" + }, + { + "$ref": "#/components/schemas/Envelope_FileUploadSchema_" + } + ], + "title": "Response Upload File V0 Locations Location Id Files File Id Put" + } + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + } + }, + "delete": { + "tags": [ + "files" + ], + "summary": "Delete File", + "operationId": "delete_file_v0_locations__location_id__files__file_id__delete", + "parameters": [ + { + "name": "location_id", + "in": "path", + "required": true, + "schema": { + "type": "integer", + "title": "Location Id" + } + }, + { + "name": "file_id", + "in": "path", + "required": true, + "schema": { + "anyOf": [ + { + "type": "string", + "pattern": "^(api|([0-9a-fA-F]{8}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{12}))\\/([0-9a-fA-F]{8}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{12})\\/(.+)$" + }, + { + "type": "string", + "pattern": "^N:package:[0-9a-fA-F]{8}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{12}$" + } + ], + "title": "File Id" + } + }, + { + "name": "user_id", + "in": "query", + "required": true, + "schema": { + "type": "integer", + "exclusiveMinimum": true, + "title": "User Id", + "minimum": 0 + } + } + ], + "responses": { + "204": { + "description": "Successful Response" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + } + } + }, + "/v0/locations/{location_id}/files/{file_id}:abort": { + "post": { + "tags": [ + "files" + ], + "summary": "Abort Upload File", + "operationId": "abort_upload_file_v0_locations__location_id__files__file_id__abort_post", + "parameters": [ + { + "name": "location_id", + "in": "path", + "required": true, + "schema": { + "type": "integer", + "title": "Location Id" + } + }, + { + "name": "file_id", + "in": "path", + "required": true, + "schema": { + "anyOf": [ + { + "type": "string", + "pattern": "^(api|([0-9a-fA-F]{8}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{12}))\\/([0-9a-fA-F]{8}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{12})\\/(.+)$" + }, + { + "type": "string", + "pattern": "^N:package:[0-9a-fA-F]{8}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{12}$" + } + ], + "title": "File Id" + } + }, + { + "name": "user_id", + "in": "query", + "required": true, + "schema": { + "type": "integer", + "exclusiveMinimum": true, + "title": "User Id", + "minimum": 0 + } + } + ], + "responses": { + "204": { + "description": "Successful Response" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + } + } + 
}, + "/v0/locations/{location_id}/files/{file_id}:complete": { + "post": { + "tags": [ + "files" + ], + "summary": "Complete Upload File", + "operationId": "complete_upload_file_v0_locations__location_id__files__file_id__complete_post", + "parameters": [ + { + "name": "location_id", + "in": "path", + "required": true, + "schema": { + "type": "integer", + "title": "Location Id" + } + }, + { + "name": "file_id", + "in": "path", + "required": true, + "schema": { + "anyOf": [ + { + "type": "string", + "pattern": "^(api|([0-9a-fA-F]{8}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{12}))\\/([0-9a-fA-F]{8}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{12})\\/(.+)$" + }, + { + "type": "string", + "pattern": "^N:package:[0-9a-fA-F]{8}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{12}$" + } + ], + "title": "File Id" + } + }, + { + "name": "user_id", + "in": "query", + "required": true, + "schema": { + "type": "integer", + "exclusiveMinimum": true, + "title": "User Id", + "minimum": 0 + } + } + ], + "requestBody": { + "required": true, + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/FileUploadCompletionBody" + } + } + } + }, + "responses": { + "202": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/Envelope_FileUploadCompleteResponse_" + } + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + } + } + }, + "/v0/locations/{location_id}/files/{file_id}:complete/futures/{future_id}": { + "post": { + "tags": [ + "files" + ], + "summary": "Is Completed Upload File", + "operationId": "is_completed_upload_file_v0_locations__location_id__files__file_id__complete_futures__future_id__post", + "parameters": [ + { + "name": "location_id", + "in": "path", + "required": true, + "schema": { + "type": "integer", + "title": "Location Id" + } + }, + { + "name": "file_id", + "in": "path", + "required": true, + "schema": { + "anyOf": [ + { + "type": "string", + "pattern": "^(api|([0-9a-fA-F]{8}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{12}))\\/([0-9a-fA-F]{8}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{12})\\/(.+)$" + }, + { + "type": "string", + "pattern": "^N:package:[0-9a-fA-F]{8}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{12}$" + } + ], + "title": "File Id" + } + }, + { + "name": "future_id", + "in": "path", + "required": true, + "schema": { + "type": "string", + "title": "Future Id" + } + }, + { + "name": "user_id", + "in": "query", + "required": true, + "schema": { + "type": "integer", + "exclusiveMinimum": true, + "title": "User Id", + "minimum": 0 + } + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/Envelope_FileUploadCompleteFutureResponse_" + } + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + } + } + }, + "/v0/files/{file_id}:soft-copy": { + "post": { + "tags": [ + "files" + ], + "summary": "Copy As Soft Link", + "operationId": "copy_as_soft_link_v0_files__file_id__soft_copy_post", + "parameters": [ + { + "name": "file_id", + "in": "path", + "required": true, + "schema": { + "anyOf": [ + { + "type": "string", + "pattern": 
"^(api|([0-9a-fA-F]{8}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{12}))\\/([0-9a-fA-F]{8}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{12})\\/(.+)$" + }, + { + "type": "string", + "pattern": "^N:package:[0-9a-fA-F]{8}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{12}$" + } + ], + "title": "File Id" + } + }, + { + "name": "user_id", + "in": "query", + "required": true, + "schema": { + "type": "integer", + "exclusiveMinimum": true, + "title": "User Id", + "minimum": 0 + } + } + ], + "requestBody": { + "required": true, + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/SoftCopyBody" + } + } + } + }, + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/Envelope_FileMetaDataGet_" + } + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + } + } + }, + "/v0/locations/{location_id}/paths": { + "get": { + "tags": [ + "files" + ], + "summary": "List Paths", + "description": "Returns one level of files (paginated)", + "operationId": "list_paths_v0_locations__location_id__paths_get", + "parameters": [ + { + "name": "location_id", + "in": "path", + "required": true, + "schema": { + "type": "integer", + "title": "Location Id" + } + }, + { + "name": "user_id", + "in": "query", + "required": true, + "schema": { + "type": "integer", + "exclusiveMinimum": true, + "title": "User Id", + "minimum": 0 + } + }, + { + "name": "file_filter", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string", + "format": "path" + }, + { + "type": "null" + } + ], + "title": "File Filter" + } + }, + { + "name": "cursor", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "title": "Cursor" + } + }, + { + "name": "size", + "in": "query", + "required": false, + "schema": { + "type": "integer", + "maximum": 1000, + "minimum": 1, + "default": 50, + "title": "Size" + } + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/CursorPage___T_Customized_PathMetaDataGet_" + } + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + } + } + }, + "/v0/locations/{location_id}/paths/{path}:size": { + "post": { + "tags": [ + "files" + ], + "summary": "Compute Path Size", + "operationId": "compute_path_size_v0_locations__location_id__paths__path__size_post", + "parameters": [ + { + "name": "path", + "in": "path", + "required": true, + "schema": { + "type": "string", + "format": "path", + "title": "Path" + } + }, + { + "name": "location_id", + "in": "path", + "required": true, + "schema": { + "type": "integer", + "title": "Location Id" + } + }, + { + "name": "user_id", + "in": "query", + "required": true, + "schema": { + "type": "integer", + "exclusiveMinimum": true, + "title": "User Id", + "minimum": 0 + } + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/Envelope_PathTotalSizeCreate_" + } + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + 
"schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + } + } + }, + "/v0/simcore-s3:access": { + "post": { + "tags": [ + "simcore-s3" + ], + "summary": "Get Or Create Temporary S3 Access", + "operationId": "get_or_create_temporary_s3_access_v0_simcore_s3_access_post", + "parameters": [ + { + "name": "user_id", + "in": "query", + "required": true, + "schema": { + "type": "integer", + "exclusiveMinimum": true, + "title": "User Id", + "minimum": 0 + } + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/Envelope_S3Settings_" + } + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + } + } + }, + "/v0/simcore-s3/folders": { + "post": { + "tags": [ + "simcore-s3" + ], + "summary": "Copy Folders From Project", + "operationId": "copy_folders_from_project_v0_simcore_s3_folders_post", + "parameters": [ + { + "name": "user_id", + "in": "query", + "required": true, + "schema": { + "type": "integer", + "exclusiveMinimum": true, + "title": "User Id", + "minimum": 0 + } + } + ], + "requestBody": { + "required": true, + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/FoldersBody" + } + } + } + }, + "responses": { + "202": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/Envelope_TaskGet_" + } + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + } + } + }, + "/v0/simcore-s3/folders/{folder_id}": { + "delete": { + "tags": [ + "simcore-s3" + ], + "summary": "Delete Folders Of Project", + "operationId": "delete_folders_of_project_v0_simcore_s3_folders__folder_id__delete", + "parameters": [ + { + "name": "folder_id", + "in": "path", + "required": true, + "schema": { + "type": "string", + "title": "Folder Id" + } + }, + { + "name": "user_id", + "in": "query", + "required": true, + "schema": { + "type": "integer", + "exclusiveMinimum": true, + "title": "User Id", + "minimum": 0 + } + }, + { + "name": "node_id", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string", + "format": "uuid" + }, + { + "type": "null" + } + ], + "title": "Node Id" + } + } + ], + "responses": { + "204": { + "description": "Successful Response" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + } + } + }, + "/v0/simcore-s3/files/metadata:search": { + "post": { + "tags": [ + "simcore-s3" + ], + "summary": "Search Files", + "operationId": "search_files_v0_simcore_s3_files_metadata_search_post", + "parameters": [ + { + "name": "user_id", + "in": "query", + "required": true, + "schema": { + "type": "integer", + "exclusiveMinimum": true, + "title": "User Id", + "minimum": 0 + } + }, + { + "name": "startswith", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "title": "Startswith" + } + }, + { + "name": "sha256_checksum", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string", + "pattern": "^[a-fA-F0-9]{64}$" + }, + { + "type": "null" + } + ], + "title": "Sha256 Checksum" + } + 
}, + { + "name": "kind", + "in": "query", + "required": true, + "schema": { + "const": "owned", + "type": "string", + "title": "Kind" + } + }, + { + "name": "limit", + "in": "query", + "required": false, + "schema": { + "type": "integer", + "maximum": 50, + "minimum": 1, + "default": 20, + "title": "Limit" + } + }, + { + "name": "offset", + "in": "query", + "required": false, + "schema": { + "type": "integer", + "minimum": 0, + "default": 0, + "title": "Offset" + } + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/Envelope_list_FileMetaDataGet__" + } + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + } + } + } + }, + "components": { + "schemas": { + "AppStatusCheck": { + "properties": { + "app_name": { + "type": "string", + "title": "App Name", + "description": "Application name" + }, + "version": { + "type": "string", + "title": "Version", + "description": "Application's version" + }, + "services": { + "type": "object", + "title": "Services", + "description": "Other backend services this service connects to", + "default": {} + }, + "sessions": { + "anyOf": [ + { + "type": "object" + }, + { + "type": "null" + } + ], + "title": "Sessions", + "description": "Client sessions info. If there is a single session per app, it is denoted as main", + "default": {} + }, + "url": { + "anyOf": [ + { + "type": "string", + "minLength": 1, + "format": "uri" + }, + { + "type": "null" + } + ], + "title": "Url", + "description": "Link to current resource" + }, + "diagnostics_url": { + "anyOf": [ + { + "type": "string", + "minLength": 1, + "format": "uri" + }, + { + "type": "null" + } + ], + "title": "Diagnostics Url", + "description": "Link to diagnostics report sub-resource. 
This MIGHT take some time to compute" + } + }, + "type": "object", + "required": [ + "app_name", + "version" + ], + "title": "AppStatusCheck" + }, + "CursorPage___T_Customized_PathMetaDataGet_": { + "properties": { + "items": { + "items": { + "$ref": "#/components/schemas/PathMetaDataGet" + }, + "type": "array", + "title": "Items" + }, + "total": { + "anyOf": [ + { + "type": "integer" + }, + { + "type": "null" + } + ], + "title": "Total", + "description": "Total items" + }, + "current_page": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "title": "Current Page", + "description": "Cursor to refetch the current page" + }, + "current_page_backwards": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "title": "Current Page Backwards", + "description": "Cursor to refetch the current page starting from the last item" + }, + "previous_page": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "title": "Previous Page", + "description": "Cursor for the previous page" + }, + "next_page": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "title": "Next Page", + "description": "Cursor for the next page" + } + }, + "type": "object", + "required": [ + "items" + ], + "title": "CursorPage[~_T]Customized[PathMetaDataGet]" + }, + "DatasetMetaDataGet": { + "properties": { + "dataset_id": { + "anyOf": [ + { + "type": "string", + "format": "uuid" + }, + { + "type": "string", + "pattern": "^N:dataset:[0-9a-fA-F]{8}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{12}$" + } + ], + "title": "Dataset Id" + }, + "display_name": { + "type": "string", + "title": "Display Name" + } + }, + "additionalProperties": false, + "type": "object", + "required": [ + "dataset_id", + "display_name" + ], + "title": "DatasetMetaDataGet" + }, + "Envelope_AppStatusCheck_": { + "properties": { + "data": { + "anyOf": [ + { + "$ref": "#/components/schemas/AppStatusCheck" + }, + { + "type": "null" + } + ] + }, + "error": { + "anyOf": [ + {}, + { + "type": "null" + } + ], + "title": "Error" + } + }, + "type": "object", + "title": "Envelope[AppStatusCheck]" + }, + "Envelope_FileDownloadResponse_": { + "properties": { + "data": { + "anyOf": [ + { + "$ref": "#/components/schemas/FileDownloadResponse" + }, + { + "type": "null" + } + ] + }, + "error": { + "anyOf": [ + {}, + { + "type": "null" + } + ], + "title": "Error" + } + }, + "type": "object", + "title": "Envelope[FileDownloadResponse]" + }, + "Envelope_FileMetaDataGet_": { + "properties": { + "data": { + "anyOf": [ + { + "$ref": "#/components/schemas/FileMetaDataGet" + }, + { + "type": "null" + } + ] + }, + "error": { + "anyOf": [ + {}, + { + "type": "null" + } + ], + "title": "Error" + } + }, + "type": "object", + "title": "Envelope[FileMetaDataGet]" + }, + "Envelope_FileMetaDataGetv010_": { + "properties": { + "data": { + "anyOf": [ + { + "$ref": "#/components/schemas/FileMetaDataGetv010" + }, + { + "type": "null" + } + ] + }, + "error": { + "anyOf": [ + {}, + { + "type": "null" + } + ], + "title": "Error" + } + }, + "type": "object", + "title": "Envelope[FileMetaDataGetv010]" + }, + "Envelope_FileUploadCompleteFutureResponse_": { + "properties": { + "data": { + "anyOf": [ + { + "$ref": "#/components/schemas/FileUploadCompleteFutureResponse" + }, + { + "type": "null" + } + ] + }, + "error": { + "anyOf": [ + {}, + { + "type": "null" + } + ], + "title": "Error" + } + }, + "type": "object", + "title": "Envelope[FileUploadCompleteFutureResponse]" + }, + 
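# --------------------------------------------------------------------------
# Editor's sketch (not part of the diff): the two-step completion handshake
# behind the envelopes at this point. POST ...:complete answers 202 with a
# "state" link (FileUploadCompleteLinks, defined further down); the client
# then polls that link, i.e. the :complete/futures/{future_id} endpoint,
# until FileUploadCompleteState resolves to "ok" or "nok". `client`,
# `upload_path` and `user_id` are placeholders.
import time

import httpx

def complete_upload(
    client: httpx.Client, upload_path: str, user_id: int, parts: list[dict]
) -> str | None:
    # Step 1: submit the uploaded parts, i.e. {"number": ..., "e_tag": ...}
    # entries forming a FileUploadCompletionBody.
    r = client.post(
        f"{upload_path}:complete",
        params={"user_id": user_id},
        json={"parts": parts},
    )
    r.raise_for_status()
    state_url = r.json()["data"]["links"]["state"]

    # Step 2: poll the completion "future" until it resolves.
    while True:
        r = client.post(state_url, params={"user_id": user_id})
        r.raise_for_status()
        future = r.json()["data"]  # FileUploadCompleteFutureResponse
        if future["state"] == "ok":
            return future["e_tag"]  # entity tag of the finished upload
        if future["state"] == "nok":
            raise RuntimeError("upload completion failed")
        time.sleep(1.0)
# --------------------------------------------------------------------------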
"Envelope_FileUploadCompleteResponse_": { + "properties": { + "data": { + "anyOf": [ + { + "$ref": "#/components/schemas/FileUploadCompleteResponse" + }, + { + "type": "null" + } + ] + }, + "error": { + "anyOf": [ + {}, + { + "type": "null" + } + ], + "title": "Error" + } + }, + "type": "object", + "title": "Envelope[FileUploadCompleteResponse]" + }, + "Envelope_FileUploadResponseV1_": { + "properties": { + "data": { + "anyOf": [ + { + "$ref": "#/components/schemas/FileUploadResponseV1" + }, + { + "type": "null" + } + ] + }, + "error": { + "anyOf": [ + {}, + { + "type": "null" + } + ], + "title": "Error" + } + }, + "type": "object", + "title": "Envelope[FileUploadResponseV1]" + }, + "Envelope_FileUploadSchema_": { + "properties": { + "data": { + "anyOf": [ + { + "$ref": "#/components/schemas/FileUploadSchema" + }, + { + "type": "null" + } + ] + }, + "error": { + "anyOf": [ + {}, + { + "type": "null" + } + ], + "title": "Error" + } + }, + "type": "object", + "title": "Envelope[FileUploadSchema]" + }, + "Envelope_HealthCheck_": { + "properties": { + "data": { + "anyOf": [ + { + "$ref": "#/components/schemas/HealthCheck" + }, + { + "type": "null" + } + ] + }, + "error": { + "anyOf": [ + {}, + { + "type": "null" + } + ], + "title": "Error" + } + }, + "type": "object", + "title": "Envelope[HealthCheck]" + }, + "Envelope_PathTotalSizeCreate_": { + "properties": { + "data": { + "anyOf": [ + { + "$ref": "#/components/schemas/PathTotalSizeCreate" + }, + { + "type": "null" + } + ] + }, + "error": { + "anyOf": [ + {}, + { + "type": "null" + } + ], + "title": "Error" + } + }, + "type": "object", + "title": "Envelope[PathTotalSizeCreate]" + }, + "Envelope_S3Settings_": { + "properties": { + "data": { + "anyOf": [ + { + "$ref": "#/components/schemas/S3Settings" + }, + { + "type": "null" + } + ] + }, + "error": { + "anyOf": [ + {}, + { + "type": "null" + } + ], + "title": "Error" + } + }, + "type": "object", + "title": "Envelope[S3Settings]" + }, + "Envelope_TaskGet_": { + "properties": { + "data": { + "anyOf": [ + { + "$ref": "#/components/schemas/TaskGet" + }, + { + "type": "null" + } + ] + }, + "error": { + "anyOf": [ + {}, + { + "type": "null" + } + ], + "title": "Error" + } + }, + "type": "object", + "title": "Envelope[TaskGet]" + }, + "Envelope_dict_": { + "properties": { + "data": { + "anyOf": [ + { + "type": "object" + }, + { + "type": "null" + } + ], + "title": "Data" + }, + "error": { + "anyOf": [ + {}, + { + "type": "null" + } + ], + "title": "Error" + } + }, + "type": "object", + "title": "Envelope[dict]" + }, + "Envelope_list_DatasetMetaDataGet__": { + "properties": { + "data": { + "anyOf": [ + { + "items": { + "$ref": "#/components/schemas/DatasetMetaDataGet" + }, + "type": "array" + }, + { + "type": "null" + } + ], + "title": "Data" + }, + "error": { + "anyOf": [ + {}, + { + "type": "null" + } + ], + "title": "Error" + } + }, + "type": "object", + "title": "Envelope[list[DatasetMetaDataGet]]" + }, + "Envelope_list_FileLocation__": { + "properties": { + "data": { + "anyOf": [ + { + "items": { + "$ref": "#/components/schemas/FileLocation" + }, + "type": "array" + }, + { + "type": "null" + } + ], + "title": "Data" + }, + "error": { + "anyOf": [ + {}, + { + "type": "null" + } + ], + "title": "Error" + } + }, + "type": "object", + "title": "Envelope[list[FileLocation]]" + }, + "Envelope_list_FileMetaDataGet__": { + "properties": { + "data": { + "anyOf": [ + { + "items": { + "$ref": "#/components/schemas/FileMetaDataGet" + }, + "type": "array" + }, + { + "type": "null" + } + ], + "title": "Data" 
+ }, + "error": { + "anyOf": [ + {}, + { + "type": "null" + } + ], + "title": "Error" + } + }, + "type": "object", + "title": "Envelope[list[FileMetaDataGet]]" + }, + "FileDownloadResponse": { + "properties": { + "link": { + "type": "string", + "minLength": 1, + "format": "uri", + "title": "Link" + } + }, + "type": "object", + "required": [ + "link" + ], + "title": "FileDownloadResponse" + }, + "FileLocation": { + "properties": { + "name": { + "type": "string", + "title": "Name" + }, + "id": { + "type": "integer", + "title": "Id" + } + }, + "additionalProperties": false, + "type": "object", + "required": [ + "name", + "id" + ], + "title": "FileLocation" + }, + "FileMetaDataGet": { + "properties": { + "file_uuid": { + "type": "string", + "title": "File Uuid", + "description": "NOT a unique ID, like (api|uuid)/uuid/file_name or DATCORE folder structure" + }, + "location_id": { + "type": "integer", + "title": "Location Id", + "description": "Storage location" + }, + "project_name": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "title": "Project Name", + "description": "optional project name, used by frontend to display path" + }, + "node_name": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "title": "Node Name", + "description": "optional node name, used by frontend to display path" + }, + "file_name": { + "type": "string", + "title": "File Name", + "description": "Display name for a file" + }, + "file_id": { + "anyOf": [ + { + "type": "string", + "pattern": "^(api|([0-9a-fA-F]{8}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{12}))\\/([0-9a-fA-F]{8}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{12})\\/(.+)$" + }, + { + "type": "string", + "pattern": "^N:package:[0-9a-fA-F]{8}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{12}$" + } + ], + "title": "File Id", + "description": "THIS IS the unique ID for the file: either (api|project_id)/node_id/file_name.ext for S3 or N:package:UUID for datcore" + }, + "created_at": { + "type": "string", + "format": "date-time", + "title": "Created At" + }, + "last_modified": { + "type": "string", + "format": "date-time", + "title": "Last Modified" + }, + "file_size": { + "anyOf": [ + { + "type": "integer", + "const": -1 + }, + { + "type": "integer", + "minimum": 0 + } + ], + "title": "File Size", + "description": "File size in bytes (-1 means invalid)", + "default": -1 + }, + "entity_tag": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "title": "Entity Tag", + "description": "Entity tag (or ETag), represents a specific version of the file, None if invalid upload or datcore" + }, + "is_soft_link": { + "type": "boolean", + "title": "Is Soft Link", + "description": "If true, this file is a soft link, i.e. another entry with the same object_name", + "default": false + }, + "is_directory": { + "type": "boolean", + "title": "Is Directory", + "description": "If true, this is a directory", + "default": false + }, + "sha256_checksum": { + "anyOf": [ + { + "type": "string", + "pattern": "^[a-fA-F0-9]{64}$" + }, + { + "type": "null" + } + ], + "title": "Sha256 Checksum", + "description": "SHA256 message digest of the file content. Main purpose: cheap lookup."
+ } + }, + "type": "object", + "required": [ + "file_uuid", + "location_id", + "file_name", + "file_id", + "created_at", + "last_modified" + ], + "title": "FileMetaDataGet" + }, + "FileMetaDataGetv010": { + "properties": { + "file_uuid": { + "type": "string", + "title": "File Uuid" + }, + "location_id": { + "type": "integer", + "title": "Location Id" + }, + "location": { + "type": "string", + "title": "Location" + }, + "bucket_name": { + "type": "string", + "title": "Bucket Name" + }, + "object_name": { + "type": "string", + "title": "Object Name" + }, + "project_id": { + "anyOf": [ + { + "type": "string", + "format": "uuid" + }, + { + "type": "null" + } + ], + "title": "Project Id" + }, + "project_name": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "title": "Project Name" + }, + "node_id": { + "anyOf": [ + { + "type": "string", + "format": "uuid" + }, + { + "type": "null" + } + ], + "title": "Node Id" + }, + "node_name": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "title": "Node Name" + }, + "file_name": { + "type": "string", + "title": "File Name" + }, + "user_id": { + "anyOf": [ + { + "type": "integer", + "exclusiveMinimum": true, + "minimum": 0 + }, + { + "type": "null" + } + ], + "title": "User Id" + }, + "user_name": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "title": "User Name" + } + }, + "additionalProperties": false, + "type": "object", + "required": [ + "file_uuid", + "location_id", + "location", + "bucket_name", + "object_name", + "project_id", + "project_name", + "node_id", + "node_name", + "file_name", + "user_id", + "user_name" + ], + "title": "FileMetaDataGetv010" + }, + "FileUploadCompleteFutureResponse": { + "properties": { + "state": { + "$ref": "#/components/schemas/FileUploadCompleteState" + }, + "e_tag": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "title": "E Tag" + } + }, + "type": "object", + "required": [ + "state" + ], + "title": "FileUploadCompleteFutureResponse" + }, + "FileUploadCompleteLinks": { + "properties": { + "state": { + "type": "string", + "minLength": 1, + "format": "uri", + "title": "State" + } + }, + "type": "object", + "required": [ + "state" + ], + "title": "FileUploadCompleteLinks" + }, + "FileUploadCompleteResponse": { + "properties": { + "links": { + "$ref": "#/components/schemas/FileUploadCompleteLinks" + } + }, + "type": "object", + "required": [ + "links" + ], + "title": "FileUploadCompleteResponse" + }, + "FileUploadCompleteState": { + "type": "string", + "enum": [ + "ok", + "nok" + ], + "title": "FileUploadCompleteState" + }, + "FileUploadCompletionBody": { + "properties": { + "parts": { + "items": { + "$ref": "#/components/schemas/UploadedPart" + }, + "type": "array", + "title": "Parts" + } + }, + "type": "object", + "required": [ + "parts" + ], + "title": "FileUploadCompletionBody" + }, + "FileUploadLinks": { + "properties": { + "abort_upload": { + "type": "string", + "minLength": 1, + "format": "uri", + "title": "Abort Upload" + }, + "complete_upload": { + "type": "string", + "minLength": 1, + "format": "uri", + "title": "Complete Upload" + } + }, + "type": "object", + "required": [ + "abort_upload", + "complete_upload" + ], + "title": "FileUploadLinks" + }, + "FileUploadResponseV1": { + "properties": { + "link": { + "type": "string", + "minLength": 1, + "format": "uri", + "title": "Link" + } + }, + "type": "object", + "required": [ + "link" + ], + "title": "FileUploadResponseV1" + }, + "FileUploadSchema": { + 
"properties": { + "chunk_size": { + "type": "integer", + "minimum": 0, + "title": "Chunk Size" + }, + "urls": { + "items": { + "type": "string", + "minLength": 1, + "format": "uri" + }, + "type": "array", + "title": "Urls" + }, + "links": { + "$ref": "#/components/schemas/FileUploadLinks" + } + }, + "additionalProperties": false, + "type": "object", + "required": [ + "chunk_size", + "urls", + "links" + ], + "title": "FileUploadSchema" + }, + "FoldersBody": { + "properties": { + "source": { + "type": "object", + "title": "Source" + }, + "destination": { + "type": "object", + "title": "Destination" + }, + "nodes_map": { + "additionalProperties": { + "type": "string", + "format": "uuid" + }, + "propertyNames": { + "format": "uuid" + }, + "type": "object", + "title": "Nodes Map" + } + }, + "type": "object", + "title": "FoldersBody" + }, + "HTTPValidationError": { + "properties": { + "errors": { + "items": { + "$ref": "#/components/schemas/ValidationError" + }, + "type": "array", + "title": "Validation errors" + } + }, + "type": "object", + "title": "HTTPValidationError" + }, + "HealthCheck": { + "properties": { + "name": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "title": "Name" + }, + "status": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "title": "Status" + }, + "api_version": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "title": "Api Version" + }, + "version": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "title": "Version" + } + }, + "type": "object", + "required": [ + "name", + "status", + "api_version", + "version" + ], + "title": "HealthCheck" + }, + "LinkType": { + "type": "string", + "enum": [ + "PRESIGNED", + "S3" + ], + "title": "LinkType" + }, + "PathMetaDataGet": { + "properties": { + "path": { + "type": "string", + "format": "path", + "title": "Path", + "description": "the path to the current path" + }, + "display_path": { + "type": "string", + "format": "path", + "title": "Display Path", + "description": "the path to display with UUID replaced (URL Encoded by parts as names may contain '/')" + }, + "file_meta_data": { + "anyOf": [ + { + "$ref": "#/components/schemas/FileMetaDataGet" + }, + { + "type": "null" + } + ], + "description": "if filled, this is the file meta data of the s3 object" + } + }, + "additionalProperties": false, + "type": "object", + "required": [ + "path", + "display_path" + ], + "title": "PathMetaDataGet" + }, + "PathTotalSizeCreate": { + "properties": { + "path": { + "type": "string", + "format": "path", + "title": "Path" + }, + "size": { + "type": "integer", + "minimum": 0, + "title": "Size" + } + }, + "additionalProperties": false, + "type": "object", + "required": [ + "path", + "size" + ], + "title": "PathTotalSizeCreate" + }, + "S3Settings": { + "properties": { + "S3_ACCESS_KEY": { + "type": "string", + "maxLength": 50, + "minLength": 1, + "title": "S3 Access Key" + }, + "S3_BUCKET_NAME": { + "type": "string", + "maxLength": 50, + "minLength": 1, + "title": "S3 Bucket Name" + }, + "S3_ENDPOINT": { + "anyOf": [ + { + "type": "string", + "minLength": 1, + "format": "uri" + }, + { + "type": "null" + } + ], + "title": "S3 Endpoint", + "description": "do not define if using standard AWS" + }, + "S3_REGION": { + "type": "string", + "maxLength": 50, + "minLength": 1, + "title": "S3 Region" + }, + "S3_SECRET_KEY": { + "type": "string", + "maxLength": 50, + "minLength": 1, + "title": "S3 Secret Key" + } + }, + "additionalProperties": false, + 
"type": "object", + "required": [ + "S3_ACCESS_KEY", + "S3_BUCKET_NAME", + "S3_REGION", + "S3_SECRET_KEY" + ], + "title": "S3Settings" + }, + "SoftCopyBody": { + "properties": { + "link_id": { + "type": "string", + "pattern": "^(api|([0-9a-fA-F]{8}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{12}))\\/([0-9a-fA-F]{8}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{12})\\/(.+)$", + "title": "Link Id" + } + }, + "type": "object", + "required": [ + "link_id" + ], + "title": "SoftCopyBody" + }, + "TaskGet": { + "properties": { + "task_id": { + "type": "string", + "title": "Task Id" + }, + "task_name": { + "type": "string", + "title": "Task Name" + }, + "status_href": { + "type": "string", + "title": "Status Href" + }, + "result_href": { + "type": "string", + "title": "Result Href" + }, + "abort_href": { + "type": "string", + "title": "Abort Href" + } + }, + "type": "object", + "required": [ + "task_id", + "task_name", + "status_href", + "result_href", + "abort_href" + ], + "title": "TaskGet" + }, + "UploadedPart": { + "properties": { + "number": { + "type": "integer", + "exclusiveMinimum": true, + "title": "Number", + "minimum": 0 + }, + "e_tag": { + "type": "string", + "title": "E Tag" + } + }, + "type": "object", + "required": [ + "number", + "e_tag" + ], + "title": "UploadedPart" + }, + "ValidationError": { + "properties": { + "loc": { + "items": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "integer" + } + ] + }, + "type": "array", + "title": "Location" + }, + "msg": { + "type": "string", + "title": "Message" + }, + "type": { + "type": "string", + "title": "Error Type" + } + }, + "type": "object", + "required": [ + "loc", + "msg", + "type" + ], + "title": "ValidationError" + } + } + } +} diff --git a/services/storage/requirements/_base.in b/services/storage/requirements/_base.in index 5b1c3f13098..cf0ccfdba89 100644 --- a/services/storage/requirements/_base.in +++ b/services/storage/requirements/_base.in @@ -11,17 +11,22 @@ --requirement ../../../packages/postgres-database/requirements/_base.in --requirement ../../../packages/settings-library/requirements/_base.in -# service-library[aiohttp] --requirement ../../../packages/service-library/requirements/_base.in ---requirement ../../../packages/service-library/requirements/_aiohttp.in +--requirement ../../../packages/service-library/requirements/_fastapi.in aioboto3 # s3 storage aiofiles # i/o -aiohttp # server -aiohttp-swagger[performance] # server -aiopg[sa] # db +asgi_lifespan +asyncpg # database +celery[redis] +httpx opentelemetry-instrumentation-botocore packaging +fastapi[all] +fastapi-pagination +orjson +pydantic[dotenv] tenacity typer types-aiobotocore[s3] # s3 storage +watchdog diff --git a/services/storage/requirements/_base.txt b/services/storage/requirements/_base.txt index df1f0513a0e..99c101b84d6 100644 --- a/services/storage/requirements/_base.txt +++ b/services/storage/requirements/_base.txt @@ -1,14 +1,14 @@ -aio-pika==9.4.1 +aio-pika==9.5.4 # via # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in -aioboto3==13.1.0 +aioboto3==13.3.0 # via # -r requirements/../../../packages/aws-library/requirements/_base.in # -r requirements/_base.in -aiobotocore==2.13.1 +aiobotocore==2.16.0 # via aioboto3 -aiocache==0.12.2 +aiocache==0.12.3 # via # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in # 
-r requirements/../../../packages/aws-library/requirements/_base.in @@ -17,17 +17,19 @@ aiodebug==2.3.0 # via # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in -aiodocker==0.21.0 +aiodocker==0.24.0 # via # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in -aiofiles==23.2.1 +aiofiles==24.1.0 # via # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in # -r requirements/_base.in # aioboto3 -aiohttp==3.9.3 +aiohappyeyeballs==2.4.6 + # via aiohttp +aiohttp==3.11.12 # via # -c requirements/../../../packages/aws-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -55,31 +57,27 @@ aiohttp==3.9.3 # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt - # -r requirements/../../../packages/service-library/requirements/_aiohttp.in - # -r requirements/_base.in # aiobotocore # aiodocker - # aiohttp-swagger -aiohttp-swagger==1.0.16 - # via -r requirements/_base.in -aioitertools==0.11.0 +aioitertools==0.12.0 # via aiobotocore -aiopg==1.4.0 - # via - # -r requirements/../../../packages/service-library/requirements/_aiohttp.in - # -r requirements/_base.in -aiormq==6.8.0 +aiormq==6.8.1 # via aio-pika -aiosignal==1.3.1 +aiosignal==1.3.2 # via aiohttp -alembic==1.13.1 +alembic==1.14.1 # via -r requirements/../../../packages/postgres-database/requirements/_base.in +amqp==5.3.1 + # via kombu annotated-types==0.7.0 # via pydantic -anyio==4.3.0 +anyio==4.8.0 # via # fast-depends # faststream + # httpx + # starlette + # watchfiles arrow==1.3.0 # via # -r requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/_base.in @@ -89,28 +87,60 @@ arrow==1.3.0 # -r requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in -async-timeout==4.0.3 +asgi-lifespan==2.1.0 + # via -r requirements/_base.in +asgiref==3.8.1 + # via opentelemetry-instrumentation-asgi +asyncpg==0.30.0 # via - # aiopg - # asyncpg -asyncpg==0.29.0 - # via sqlalchemy -attrs==23.2.0 + # -r requirements/_base.in + # sqlalchemy +attrs==25.1.0 # via - # -r requirements/../../../packages/service-library/requirements/_aiohttp.in # aiohttp # jsonschema # referencing -boto3==1.34.131 - # via aiobotocore -botocore==1.34.131 +billiard==4.2.1 + # via celery +boto3==1.35.81 + # via + # -c requirements/../../../packages/aws-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt + # aiobotocore +botocore==1.35.81 # via # aiobotocore # boto3 # s3transfer -botocore-stubs==1.34.69 +botocore-stubs==1.36.17 # via types-aiobotocore -certifi==2024.2.2 +celery==5.4.0 + # via -r requirements/_base.in +certifi==2025.1.31 # via # -c requirements/../../../packages/aws-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -138,48 +168,120 @@ certifi==2024.2.2 # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt + # httpcore + # httpx # requests -charset-normalizer==3.3.2 +charset-normalizer==3.4.1 # via requests -click==8.1.7 - # via typer -deprecated==1.2.14 +click==8.1.8 + # via + # celery + # click-didyoumean + # click-plugins + # click-repl + # rich-toolkit + # typer + # uvicorn +click-didyoumean==0.3.1 + # via celery +click-plugins==1.1.1 + # via celery +click-repl==0.3.0 + # via celery +deprecated==1.2.18 # via # opentelemetry-api # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http # opentelemetry-semantic-conventions -dnspython==2.6.1 +dnspython==2.7.0 # via email-validator -email-validator==2.1.1 - # via pydantic +email-validator==2.2.0 + # via + # fastapi + # pydantic +exceptiongroup==1.2.2 + # via aio-pika fast-depends==2.4.12 # via faststream -faststream==0.5.31 +fastapi==0.115.8 + # via + # -r requirements/../../../packages/service-library/requirements/_fastapi.in + # -r requirements/_base.in + # fastapi-lifespan-manager +fastapi-cli==0.0.7 + # via fastapi +fastapi-lifespan-manager==0.1.4 + # via -r requirements/../../../packages/service-library/requirements/_fastapi.in +fastapi-pagination==0.12.34 + # via -r requirements/_base.in +faststream==0.5.34 # via # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in -frozenlist==1.4.1 +frozenlist==1.5.0 # via # aiohttp # aiosignal -googleapis-common-protos==1.65.0 +googleapis-common-protos==1.66.0 # via # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http -greenlet==3.0.3 +greenlet==3.1.1 # via sqlalchemy -grpcio==1.66.0 +grpcio==1.70.0 # via opentelemetry-exporter-otlp-proto-grpc -idna==3.6 +h11==0.14.0 + # via + # httpcore + # uvicorn +httpcore==1.0.7 + # via httpx +httptools==0.6.4 + # via uvicorn +httpx==0.28.1 + # via + # -c requirements/../../../packages/aws-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt + # -r requirements/../../../packages/service-library/requirements/_fastapi.in + # -r requirements/_base.in + # fastapi +idna==3.10 # via # anyio # email-validator + # httpx # requests # yarl -importlib-metadata==8.0.0 +importlib-metadata==8.5.0 # via opentelemetry-api -jinja2==3.1.3 +itsdangerous==2.2.0 + # via fastapi +jinja2==3.1.5 # via # -c requirements/../../../packages/aws-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -207,21 +309,22 @@ jinja2==3.1.3 # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt - # aiohttp-swagger + # fastapi jmespath==1.0.1 # via # boto3 # botocore -jsonschema==4.21.1 +jsonschema==4.23.0 # via # -r requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in - # -r requirements/../../../packages/service-library/requirements/_aiohttp.in -jsonschema-specifications==2023.7.1 +jsonschema-specifications==2024.10.1 # via jsonschema -mako==1.3.2 +kombu==5.4.2 + # via celery +mako==1.3.9 # via # -c requirements/../../../packages/aws-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -252,114 +355,111 @@ mako==1.3.2 # alembic markdown-it-py==3.0.0 # via rich -markupsafe==2.1.5 +markupsafe==3.0.2 # via # jinja2 # mako - # werkzeug mdurl==0.1.2 # via markdown-it-py -multidict==6.0.5 +multidict==6.1.0 # via # aiohttp # yarl -opentelemetry-api==1.26.0 +opentelemetry-api==1.30.0 # via # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http # opentelemetry-instrumentation - # opentelemetry-instrumentation-aiohttp-client - # opentelemetry-instrumentation-aiohttp-server - # opentelemetry-instrumentation-aiopg + # opentelemetry-instrumentation-asgi # opentelemetry-instrumentation-asyncpg # opentelemetry-instrumentation-botocore - # opentelemetry-instrumentation-dbapi + # opentelemetry-instrumentation-fastapi + # 
opentelemetry-instrumentation-httpx # opentelemetry-instrumentation-logging # opentelemetry-instrumentation-redis # opentelemetry-instrumentation-requests # opentelemetry-propagator-aws-xray # opentelemetry-sdk # opentelemetry-semantic-conventions -opentelemetry-exporter-otlp==1.26.0 +opentelemetry-exporter-otlp==1.30.0 # via # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-exporter-otlp-proto-common==1.26.0 +opentelemetry-exporter-otlp-proto-common==1.30.0 # via # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http -opentelemetry-exporter-otlp-proto-grpc==1.26.0 +opentelemetry-exporter-otlp-proto-grpc==1.30.0 # via opentelemetry-exporter-otlp -opentelemetry-exporter-otlp-proto-http==1.26.0 +opentelemetry-exporter-otlp-proto-http==1.30.0 # via opentelemetry-exporter-otlp -opentelemetry-instrumentation==0.47b0 +opentelemetry-instrumentation==0.51b0 # via - # opentelemetry-instrumentation-aiohttp-client - # opentelemetry-instrumentation-aiohttp-server - # opentelemetry-instrumentation-aiopg + # opentelemetry-instrumentation-asgi # opentelemetry-instrumentation-asyncpg # opentelemetry-instrumentation-botocore - # opentelemetry-instrumentation-dbapi + # opentelemetry-instrumentation-fastapi + # opentelemetry-instrumentation-httpx # opentelemetry-instrumentation-logging # opentelemetry-instrumentation-redis # opentelemetry-instrumentation-requests -opentelemetry-instrumentation-aiohttp-client==0.47b0 - # via -r requirements/../../../packages/service-library/requirements/_aiohttp.in -opentelemetry-instrumentation-aiohttp-server==0.47b0 - # via -r requirements/../../../packages/service-library/requirements/_aiohttp.in -opentelemetry-instrumentation-aiopg==0.47b0 - # via -r requirements/../../../packages/service-library/requirements/_aiohttp.in -opentelemetry-instrumentation-asyncpg==0.47b0 +opentelemetry-instrumentation-asgi==0.51b0 + # via opentelemetry-instrumentation-fastapi +opentelemetry-instrumentation-asyncpg==0.51b0 # via -r requirements/../../../packages/postgres-database/requirements/_base.in -opentelemetry-instrumentation-botocore==0.47b0 +opentelemetry-instrumentation-botocore==0.51b0 # via # -r requirements/../../../packages/aws-library/requirements/_base.in # -r requirements/_base.in -opentelemetry-instrumentation-dbapi==0.47b0 - # via opentelemetry-instrumentation-aiopg -opentelemetry-instrumentation-logging==0.47b0 +opentelemetry-instrumentation-fastapi==0.51b0 + # via -r requirements/../../../packages/service-library/requirements/_fastapi.in +opentelemetry-instrumentation-httpx==0.51b0 + # via -r requirements/../../../packages/service-library/requirements/_fastapi.in +opentelemetry-instrumentation-logging==0.51b0 # via # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-instrumentation-redis==0.47b0 +opentelemetry-instrumentation-redis==0.51b0 # via # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-instrumentation-requests==0.47b0 +opentelemetry-instrumentation-requests==0.51b0 # via # -r 
requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-propagator-aws-xray==1.0.1 +opentelemetry-propagator-aws-xray==1.0.2 # via opentelemetry-instrumentation-botocore -opentelemetry-proto==1.26.0 +opentelemetry-proto==1.30.0 # via # opentelemetry-exporter-otlp-proto-common # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http -opentelemetry-sdk==1.26.0 +opentelemetry-sdk==1.30.0 # via # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http -opentelemetry-semantic-conventions==0.47b0 +opentelemetry-semantic-conventions==0.51b0 # via - # opentelemetry-instrumentation-aiohttp-client - # opentelemetry-instrumentation-aiohttp-server + # opentelemetry-instrumentation + # opentelemetry-instrumentation-asgi # opentelemetry-instrumentation-asyncpg # opentelemetry-instrumentation-botocore - # opentelemetry-instrumentation-dbapi + # opentelemetry-instrumentation-fastapi + # opentelemetry-instrumentation-httpx # opentelemetry-instrumentation-redis # opentelemetry-instrumentation-requests # opentelemetry-sdk -opentelemetry-util-http==0.47b0 +opentelemetry-util-http==0.51b0 # via - # opentelemetry-instrumentation-aiohttp-client - # opentelemetry-instrumentation-aiohttp-server + # opentelemetry-instrumentation-asgi + # opentelemetry-instrumentation-fastapi + # opentelemetry-instrumentation-httpx # opentelemetry-instrumentation-requests -orjson==3.10.0 +orjson==3.10.15 # via # -c requirements/../../../packages/aws-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -404,25 +504,39 @@ orjson==3.10.0 # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in -packaging==24.0 - # via -r requirements/_base.in + # -r requirements/_base.in + # fastapi +packaging==24.2 + # via + # -r requirements/_base.in + # opentelemetry-instrumentation pamqp==3.3.0 # via aiormq -prometheus-client==0.20.0 - # via -r requirements/../../../packages/service-library/requirements/_aiohttp.in -protobuf==4.25.4 +prometheus-client==0.21.1 + # via + # -r requirements/../../../packages/service-library/requirements/_fastapi.in + # prometheus-fastapi-instrumentator +prometheus-fastapi-instrumentator==7.0.2 + # via -r requirements/../../../packages/service-library/requirements/_fastapi.in +prompt-toolkit==3.0.50 + # via click-repl +propcache==0.2.1 + # via + # aiohttp + # yarl +protobuf==5.29.3 # via # googleapis-common-protos # opentelemetry-proto -psutil==6.0.0 +psutil==6.1.1 # via # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in # -r 
requirements/../../../packages/service-library/requirements/_base.in -psycopg2-binary==2.9.9 - # via - # aiopg - # sqlalchemy -pydantic==2.10.2 +psycopg2-binary==2.9.10 + # via sqlalchemy +pycryptodome==3.21.0 + # via stream-zip +pydantic==2.10.6 # via # -c requirements/../../../packages/aws-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -475,12 +589,15 @@ pydantic==2.10.2 # -r requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/_base.in # fast-depends + # fastapi + # fastapi-pagination # pydantic-extra-types # pydantic-settings -pydantic-core==2.27.1 +pydantic-core==2.27.2 # via pydantic -pydantic-extra-types==2.9.0 +pydantic-extra-types==2.10.2 # via # -r requirements/../../../packages/aws-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in @@ -499,8 +616,35 @@ pydantic-extra-types==2.9.0 # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in -pydantic-settings==2.6.1 + # fastapi +pydantic-settings==2.7.0 # via + # -c requirements/../../../packages/aws-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt # -r requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in @@ -509,9 +653,10 @@ pydantic-settings==2.6.1 # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/settings-library/requirements/_base.in -pygments==2.17.2 + # fastapi +pygments==2.19.1 # via rich -pyinstrument==4.6.2 +pyinstrument==5.0.1 # via # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in # -r 
requirements/../../../packages/service-library/requirements/_base.in @@ -519,9 +664,14 @@ python-dateutil==2.9.0.post0 # via # arrow # botocore + # celery python-dotenv==1.0.1 - # via pydantic-settings -pyyaml==6.0.1 + # via + # pydantic-settings + # uvicorn +python-multipart==0.0.20 + # via fastapi +pyyaml==6.0.2 # via # -c requirements/../../../packages/aws-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -551,7 +701,8 @@ pyyaml==6.0.1 # -c requirements/../../../requirements/constraints.txt # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in - # aiohttp-swagger + # fastapi + # uvicorn redis==5.2.1 # via # -c requirements/../../../packages/aws-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -582,36 +733,66 @@ redis==5.2.1 # -c requirements/../../../requirements/constraints.txt # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in -referencing==0.29.3 + # celery +referencing==0.35.1 # via + # -c requirements/../../../packages/aws-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/aws-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt # jsonschema # jsonschema-specifications -requests==2.32.2 +requests==2.32.3 # via opentelemetry-exporter-otlp-proto-http -rich==13.7.1 +rich==13.9.4 # via # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/settings-library/requirements/_base.in + # rich-toolkit # typer -rpds-py==0.18.0 +rich-toolkit==0.13.2 + # via fastapi-cli +rpds-py==0.22.3 # via # jsonschema # referencing -s3transfer==0.10.1 +s3transfer==0.10.4 # via boto3 -setuptools==74.0.0 - # via opentelemetry-instrumentation -sh==2.0.6 +sh==2.2.1 # via -r requirements/../../../packages/aws-library/requirements/_base.in shellingham==1.5.4 # via typer -six==1.16.0 +six==1.17.0 # via python-dateutil sniffio==1.3.1 - # via anyio -sqlalchemy==1.4.52 + # via + # anyio + # asgi-lifespan +sqlalchemy==1.4.54 # via # -c requirements/../../../packages/aws-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -640,57 +821,99 @@ sqlalchemy==1.4.52 # -c 
requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt # -r requirements/../../../packages/postgres-database/requirements/_base.in - # aiopg # alembic -tenacity==8.5.0 +starlette==0.45.3 + # via + # -c requirements/../../../packages/aws-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/aws-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt + # fastapi + # prometheus-fastapi-instrumentator +stream-zip==0.0.83 + # via + # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/_base.in +tenacity==9.0.0 # via # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in # -r requirements/_base.in -toolz==0.12.1 +toolz==1.0.0 # via # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in -tqdm==4.66.2 +tqdm==4.67.1 # via # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in -typer==0.12.3 +typer==0.15.1 # via # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/_base.in -types-aiobotocore==2.12.1 + # fastapi-cli +types-aiobotocore==2.19.0 # via # -r requirements/../../../packages/aws-library/requirements/_base.in # -r requirements/_base.in -types-aiobotocore-ec2==2.12.3 +types-aiobotocore-ec2==2.19.0 # via types-aiobotocore -types-aiobotocore-s3==2.12.1 +types-aiobotocore-s3==2.19.0.post1 # via types-aiobotocore -types-aiobotocore-ssm==2.12.3 +types-aiobotocore-ssm==2.19.0 # via types-aiobotocore -types-awscrt==0.20.5 +types-awscrt==0.23.10 # via botocore-stubs -types-python-dateutil==2.9.0.20240316 +types-python-dateutil==2.9.0.20241206 # via arrow typing-extensions==4.12.2 # via # aiodebug - # aiodocker # alembic + # anyio + # fastapi + # fastapi-pagination # faststream # opentelemetry-sdk # pydantic # pydantic-core + # pydantic-extra-types + # rich-toolkit # typer # types-aiobotocore # types-aiobotocore-ec2 # types-aiobotocore-s3 # types-aiobotocore-ssm -ujson==5.9.0 +tzdata==2025.1 + # via + # celery + # kombu +ujson==5.10.0 # via # -c requirements/../../../packages/aws-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt 
# -c requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -718,8 +941,8 @@ ujson==5.9.0 # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt - # aiohttp-swagger -urllib3==2.2.3 + # fastapi +urllib3==2.3.0 # via # -c requirements/../../../packages/aws-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -749,19 +972,34 @@ urllib3==2.2.3 # -c requirements/../../../requirements/constraints.txt # botocore # requests -werkzeug==3.0.2 - # via -r requirements/../../../packages/service-library/requirements/_aiohttp.in -wrapt==1.16.0 +uvicorn==0.34.0 + # via + # -r requirements/../../../packages/service-library/requirements/_fastapi.in + # fastapi + # fastapi-cli +uvloop==0.21.0 + # via uvicorn +vine==5.1.0 + # via + # amqp + # celery + # kombu +watchdog==6.0.0 + # via -r requirements/_base.in +watchfiles==1.0.4 + # via uvicorn +wcwidth==0.2.13 + # via prompt-toolkit +websockets==14.2 + # via uvicorn +wrapt==1.17.2 # via # aiobotocore # deprecated # opentelemetry-instrumentation - # opentelemetry-instrumentation-aiohttp-client - # opentelemetry-instrumentation-aiohttp-server - # opentelemetry-instrumentation-aiopg - # opentelemetry-instrumentation-dbapi + # opentelemetry-instrumentation-httpx # opentelemetry-instrumentation-redis -yarl==1.9.4 +yarl==1.18.3 # via # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/postgres-database/requirements/_base.in @@ -769,5 +1007,5 @@ yarl==1.9.4 # aio-pika # aiohttp # aiormq -zipp==3.20.1 +zipp==3.21.0 # via importlib-metadata diff --git a/services/storage/requirements/_test.in b/services/storage/requirements/_test.in index 33b53a495bc..1af7ed5e188 100644 --- a/services/storage/requirements/_test.in +++ b/services/storage/requirements/_test.in @@ -6,24 +6,27 @@ --constraint _base.txt -aioresponses +asyncpg-stubs coverage docker faker fakeredis[lua] +flaky jsonref moto[server] pandas pytest -pytest-aiohttp pytest-asyncio +pytest-celery pytest-cov + pytest-icdiff pytest-instafail pytest-mock pytest-runner pytest-sugar python-dotenv +respx simcore-service-storage-sdk @ git+https://github.com/ITISFoundation/osparc-simcore.git@cfdf4f86d844ebb362f4f39e9c6571d561b72897#subdirectory=services/storage/client-sdk/python # to test backwards compatibility against deprecated client-sdk (used still in old versions of simcore-sdk) sqlalchemy[mypy] # adds Mypy / Pep-484 Support for ORM Mappings SEE https://docs.sqlalchemy.org/en/20/orm/extensions/mypy.html types-aiofiles diff --git a/services/storage/requirements/_test.txt b/services/storage/requirements/_test.txt index 26d91990499..c804dfd46d9 100644 --- a/services/storage/requirements/_test.txt +++ b/services/storage/requirements/_test.txt @@ -1,111 +1,174 @@ -aiohttp==3.9.3 +aiohappyeyeballs==2.4.6 + # via + # -c requirements/_base.txt + # aiohttp +aiohttp==3.11.12 # via # -c 
requirements/../../../requirements/constraints.txt # -c requirements/_base.txt - # aioresponses - # pytest-aiohttp # simcore-service-storage-sdk -aioresponses==0.7.8 - # via -r requirements/_test.in -aiosignal==1.3.1 +aiosignal==1.3.2 # via # -c requirements/_base.txt # aiohttp +amqp==5.3.1 + # via + # -c requirements/_base.txt + # kombu annotated-types==0.7.0 # via # -c requirements/_base.txt # pydantic antlr4-python3-runtime==4.13.2 # via moto -attrs==23.2.0 +anyio==4.8.0 + # via + # -c requirements/_base.txt + # httpx +asyncpg==0.30.0 + # via + # -c requirements/_base.txt + # asyncpg-stubs +asyncpg-stubs==0.30.0 + # via -r requirements/_test.in +attrs==25.1.0 # via # -c requirements/_base.txt # aiohttp # jsonschema # referencing -aws-sam-translator==1.94.0 +aws-sam-translator==1.95.0 # via cfn-lint aws-xray-sdk==2.14.0 # via moto +billiard==4.2.1 + # via + # -c requirements/_base.txt + # celery blinker==1.9.0 # via flask -boto3==1.34.131 +boto3==1.35.81 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt # aws-sam-translator # moto -botocore==1.34.131 +botocore==1.35.81 # via # -c requirements/_base.txt # aws-xray-sdk # boto3 # moto # s3transfer -certifi==2024.2.2 +celery==5.4.0 + # via + # -c requirements/_base.txt + # pytest-celery +certifi==2025.1.31 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt + # httpcore + # httpx # requests # simcore-service-storage-sdk cffi==1.17.1 # via cryptography -cfn-lint==1.22.7 +cfn-lint==1.27.0 # via moto -charset-normalizer==3.3.2 +charset-normalizer==3.4.1 # via # -c requirements/_base.txt # requests -click==8.1.7 +click==8.1.8 # via # -c requirements/_base.txt + # celery + # click-didyoumean + # click-plugins + # click-repl # flask -coverage==7.6.10 +click-didyoumean==0.3.1 + # via + # -c requirements/_base.txt + # celery +click-plugins==1.1.1 + # via + # -c requirements/_base.txt + # celery +click-repl==0.3.0 + # via + # -c requirements/_base.txt + # celery +coverage==7.6.12 # via # -r requirements/_test.in # pytest-cov -cryptography==44.0.0 +cryptography==44.0.2 # via # -c requirements/../../../requirements/constraints.txt # joserfc # moto +debugpy==1.8.12 + # via pytest-celery docker==7.1.0 # via # -r requirements/_test.in # moto -faker==35.0.0 + # pytest-celery + # pytest-docker-tools +faker==36.1.1 + # via -r requirements/_test.in +fakeredis==2.27.0 # via -r requirements/_test.in -fakeredis==2.26.2 +flaky==3.8.1 # via -r requirements/_test.in -flask==3.0.3 +flask==3.1.0 # via # flask-cors # moto -flask-cors==5.0.0 +flask-cors==5.0.1 # via moto -frozenlist==1.4.1 +frozenlist==1.5.0 # via # -c requirements/_base.txt # aiohttp # aiosignal graphql-core==3.2.6 # via moto -greenlet==3.0.3 +greenlet==3.1.1 # via # -c requirements/_base.txt # sqlalchemy +h11==0.14.0 + # via + # -c requirements/_base.txt + # httpcore +httpcore==1.0.7 + # via + # -c requirements/_base.txt + # httpx +httpx==0.28.1 + # via + # -c requirements/../../../requirements/constraints.txt + # -c requirements/_base.txt + # respx icdiff==2.0.7 # via pytest-icdiff -idna==3.6 +idna==3.10 # via # -c requirements/_base.txt + # anyio + # httpx # requests # yarl iniconfig==2.0.0 # via pytest itsdangerous==2.2.0 - # via flask -jinja2==3.1.3 + # via + # -c requirements/_base.txt + # flask +jinja2==3.1.5 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt @@ -116,9 +179,7 @@ jmespath==1.0.1 # -c requirements/_base.txt # boto3 # botocore -joserfc==1.0.2 - # via moto 
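Note on the test stack: moto stays on as the AWS stub (bumped to 5.1.1 below; in moto 5.x the old per-service decorators such as mock_s3 are folded into a single mock_aws). A minimal sketch of the pattern these pins enable follows; it is illustrative only and not part of this diff, and the bucket/key names are invented:

import boto3
from moto import mock_aws  # moto>=5: one decorator replaces mock_s3 & friends


@mock_aws
def test_s3_roundtrip_against_moto():
    # every boto3 call below hits moto's in-memory backend, never real AWS
    s3 = boto3.client("s3", region_name="us-east-1")
    s3.create_bucket(Bucket="hypothetical-simcore-bucket")
    s3.put_object(Bucket="hypothetical-simcore-bucket", Key="data/file.txt", Body=b"hello")
    body = s3.get_object(Bucket="hypothetical-simcore-bucket", Key="data/file.txt")["Body"].read()
    assert body == b"hello"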
-jsondiff==2.2.1 +joserfc==1.0.4 # via moto jsonpatch==1.33 # via cfn-lint @@ -128,7 +189,7 @@ jsonpointer==3.0.0 # via jsonpatch jsonref==1.1.0 # via -r requirements/_test.in -jsonschema==4.21.1 +jsonschema==4.23.0 # via # -c requirements/_base.txt # aws-sam-translator @@ -136,47 +197,48 @@ jsonschema==4.21.1 # openapi-spec-validator jsonschema-path==0.3.4 # via openapi-spec-validator -jsonschema-specifications==2023.7.1 +jsonschema-specifications==2024.10.1 # via # -c requirements/_base.txt # jsonschema # openapi-schema-validator +kombu==5.4.2 + # via + # -c requirements/_base.txt + # celery lazy-object-proxy==1.10.0 # via openapi-spec-validator lupa==2.4 # via fakeredis -markupsafe==2.1.5 +markupsafe==3.0.2 # via # -c requirements/_base.txt # jinja2 # werkzeug -moto==5.0.20 - # via - # -c requirements/../../../requirements/constraints.txt - # -r requirements/_test.in +moto==5.1.1 + # via -r requirements/_test.in mpmath==1.3.0 # via sympy -multidict==6.0.5 +multidict==6.1.0 # via # -c requirements/_base.txt # aiohttp # yarl -mypy==1.14.1 +mypy==1.15.0 # via sqlalchemy mypy-extensions==1.0.0 # via mypy networkx==3.4.2 # via cfn-lint -numpy==2.2.2 +numpy==2.2.3 # via pandas openapi-schema-validator==0.6.3 # via openapi-spec-validator openapi-spec-validator==0.7.1 # via moto -packaging==24.0 +packaging==24.2 # via # -c requirements/_base.txt - # aioresponses # pytest # pytest-sugar pandas==2.2.3 @@ -189,40 +251,54 @@ ply==3.11 # via jsonpath-ng pprintpp==0.4.0 # via pytest-icdiff -py-partiql-parser==0.5.6 +prompt-toolkit==3.0.50 + # via + # -c requirements/_base.txt + # click-repl +propcache==0.2.1 + # via + # -c requirements/_base.txt + # aiohttp + # yarl +psutil==6.1.1 + # via + # -c requirements/_base.txt + # pytest-celery +py-partiql-parser==0.6.1 # via moto pycparser==2.22 # via cffi -pydantic==2.10.2 +pydantic==2.10.6 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt # aws-sam-translator -pydantic-core==2.27.1 +pydantic-core==2.27.2 # via # -c requirements/_base.txt # pydantic pyparsing==3.2.1 # via moto -pytest==8.3.4 +pytest==8.3.5 # via # -r requirements/_test.in - # pytest-aiohttp # pytest-asyncio # pytest-cov + # pytest-docker-tools # pytest-icdiff # pytest-instafail # pytest-mock # pytest-sugar -pytest-aiohttp==1.0.5 - # via -r requirements/_test.in pytest-asyncio==0.23.8 # via # -c requirements/../../../requirements/constraints.txt # -r requirements/_test.in - # pytest-aiohttp +pytest-celery==1.1.3 + # via -r requirements/_test.in pytest-cov==6.0.0 # via -r requirements/_test.in +pytest-docker-tools==3.1.3 + # via pytest-celery pytest-icdiff==0.9 # via -r requirements/_test.in pytest-instafail==0.5.0 @@ -237,7 +313,7 @@ python-dateutil==2.9.0.post0 # via # -c requirements/_base.txt # botocore - # faker + # celery # moto # pandas # simcore-service-storage-sdk @@ -245,14 +321,13 @@ python-dotenv==1.0.1 # via # -c requirements/_base.txt # -r requirements/_test.in -pytz==2024.2 +pytz==2025.1 # via pandas -pyyaml==6.0.1 +pyyaml==6.0.2 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt # cfn-lint - # jsondiff # jsonschema-path # moto # responses @@ -261,7 +336,7 @@ redis==5.2.1 # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt # fakeredis -referencing==0.29.3 +referencing==0.35.1 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt @@ -270,7 +345,7 @@ referencing==0.29.3 # jsonschema-specifications regex==2024.11.6 # via cfn-lint 
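Note on HTTP mocking: with the service moving off aiohttp, _test.in drops aioresponses and adds respx, its httpx-side counterpart. A minimal, hypothetical usage sketch (URL and payload invented, not from this diff; with pytest-asyncio's asyncio_mode=auto, as set in setup.cfg below, no explicit async marker is needed):

import httpx
import respx


@respx.mock
async def test_mocked_storage_health():
    # register a fake route; respx intercepts the matching outgoing httpx request
    route = respx.get("http://storage:8080/v0/").mock(
        return_value=httpx.Response(200, json={"data": {"name": "simcore_service_storage"}})
    )
    async with httpx.AsyncClient() as client:
        response = await client.get("http://storage:8080/v0/")
    assert route.called
    assert response.status_code == 200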
-requests==2.32.2 +requests==2.32.3 # via # -c requirements/_base.txt # docker @@ -279,32 +354,38 @@ requests==2.32.2 # responses responses==0.25.6 # via moto +respx==0.22.0 + # via -r requirements/_test.in rfc3339-validator==0.1.4 # via openapi-schema-validator -rpds-py==0.18.0 +rpds-py==0.22.3 # via # -c requirements/_base.txt # jsonschema # referencing -s3transfer==0.10.1 +s3transfer==0.10.4 # via # -c requirements/_base.txt # boto3 -setuptools==74.0.0 +setuptools==75.8.2 # via - # -c requirements/_base.txt # moto + # pytest-celery simcore-service-storage-sdk @ git+https://github.com/ITISFoundation/osparc-simcore.git@cfdf4f86d844ebb362f4f39e9c6571d561b72897#subdirectory=services/storage/client-sdk/python # via -r requirements/_test.in -six==1.16.0 +six==1.17.0 # via # -c requirements/_base.txt # python-dateutil # rfc3339-validator # simcore-service-storage-sdk +sniffio==1.3.1 + # via + # -c requirements/_base.txt + # anyio sortedcontainers==2.4.0 # via fakeredis -sqlalchemy==1.4.52 +sqlalchemy==1.4.54 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt @@ -313,6 +394,10 @@ sqlalchemy2-stubs==0.0.2a38 # via sqlalchemy sympy==1.13.3 # via cfn-lint +tenacity==9.0.0 + # via + # -c requirements/_base.txt + # pytest-celery termcolor==2.5.0 # via pytest-sugar types-aiofiles==24.1.0.20241221 @@ -320,16 +405,22 @@ types-aiofiles==24.1.0.20241221 typing-extensions==4.12.2 # via # -c requirements/_base.txt + # anyio + # asyncpg-stubs # aws-sam-translator # cfn-lint - # faker # mypy # pydantic # pydantic-core # sqlalchemy2-stubs tzdata==2025.1 - # via pandas -urllib3==2.2.3 + # via + # -c requirements/_base.txt + # celery + # faker + # kombu + # pandas +urllib3==2.3.0 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt @@ -338,18 +429,28 @@ urllib3==2.2.3 # requests # responses # simcore-service-storage-sdk -werkzeug==3.0.2 +vine==5.1.0 # via # -c requirements/_base.txt + # amqp + # celery + # kombu +wcwidth==0.2.13 + # via + # -c requirements/_base.txt + # prompt-toolkit +werkzeug==3.1.3 + # via # flask + # flask-cors # moto -wrapt==1.16.0 +wrapt==1.17.2 # via # -c requirements/_base.txt # aws-xray-sdk xmltodict==0.14.2 # via moto -yarl==1.9.4 +yarl==1.18.3 # via # -c requirements/_base.txt # aiohttp diff --git a/services/storage/requirements/_tools.txt b/services/storage/requirements/_tools.txt index 7bc674ecdf6..d57a2d475d5 100644 --- a/services/storage/requirements/_tools.txt +++ b/services/storage/requirements/_tools.txt @@ -1,6 +1,6 @@ astroid==3.3.8 # via pylint -black==24.10.0 +black==25.1.0 # via -r requirements/../../../requirements/devenv.txt build==1.2.2.post1 # via pip-tools @@ -8,7 +8,7 @@ bump2version==1.0.1 # via -r requirements/../../../requirements/devenv.txt cfgv==3.4.0 # via pre-commit -click==8.1.7 +click==8.1.8 # via # -c requirements/_base.txt # -c requirements/_test.txt @@ -20,15 +20,15 @@ distlib==0.3.9 # via virtualenv filelock==3.17.0 # via virtualenv -identify==2.6.6 +identify==2.6.8 # via pre-commit -isort==5.13.2 +isort==6.0.1 # via # -r requirements/../../../requirements/devenv.txt # pylint mccabe==0.7.0 # via pylint -mypy==1.14.1 +mypy==1.15.0 # via # -c requirements/_test.txt # -r requirements/../../../requirements/devenv.txt @@ -39,7 +39,7 @@ mypy-extensions==1.0.0 # mypy nodeenv==1.9.1 # via pre-commit -packaging==24.0 +packaging==24.2 # via # -c requirements/_base.txt # -c requirements/_test.txt @@ -47,7 +47,7 @@ packaging==24.0 # build pathspec==0.12.1 # via black -pip==25.0 +pip==25.0.1 
# via pip-tools pip-tools==7.4.1 # via -r requirements/../../../requirements/devenv.txt @@ -58,24 +58,23 @@ platformdirs==4.3.6 # virtualenv pre-commit==4.1.0 # via -r requirements/../../../requirements/devenv.txt -pylint==3.3.3 +pylint==3.3.4 # via -r requirements/../../../requirements/devenv.txt pyproject-hooks==1.2.0 # via # build # pip-tools -pyyaml==6.0.1 +pyyaml==6.0.2 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt # -c requirements/_test.txt # pre-commit # watchdog -ruff==0.9.3 +ruff==0.9.9 # via -r requirements/../../../requirements/devenv.txt -setuptools==74.0.0 +setuptools==75.8.2 # via - # -c requirements/_base.txt # -c requirements/_test.txt # pip-tools tomlkit==0.13.2 @@ -85,9 +84,11 @@ typing-extensions==4.12.2 # -c requirements/_base.txt # -c requirements/_test.txt # mypy -virtualenv==20.29.1 +virtualenv==20.29.2 # via pre-commit watchdog==6.0.0 - # via -r requirements/_tools.in + # via + # -c requirements/_base.txt + # -r requirements/_tools.in wheel==0.45.1 # via pip-tools diff --git a/services/storage/requirements/ci.txt b/services/storage/requirements/ci.txt index 26d5d78bff9..31b66afbe90 100644 --- a/services/storage/requirements/ci.txt +++ b/services/storage/requirements/ci.txt @@ -17,7 +17,7 @@ simcore-common-library @ ../../packages/common-library/ simcore-models-library @ ../../packages/models-library/ simcore-postgres-database @ ../../packages/postgres-database/ pytest-simcore @ ../../packages/pytest-simcore/ -simcore-service-library[aiohttp] @ ../../packages/service-library +simcore-service-library[fastapi] @ ../../packages/service-library simcore-settings-library @ ../../packages/settings-library/ # installs current package diff --git a/services/storage/requirements/dev.txt b/services/storage/requirements/dev.txt index 97aefedee51..253cec8dbcb 100644 --- a/services/storage/requirements/dev.txt +++ b/services/storage/requirements/dev.txt @@ -17,7 +17,7 @@ --editable ../../packages/models-library --editable ../../packages/postgres-database/ --editable ../../packages/pytest-simcore/ ---editable ../../packages/service-library[aiohttp] +--editable ../../packages/service-library[fastapi] --editable ../../packages/settings-library/ # installs current package diff --git a/services/storage/requirements/prod.txt b/services/storage/requirements/prod.txt index 5e0703f83a7..dc4a2da4805 100644 --- a/services/storage/requirements/prod.txt +++ b/services/storage/requirements/prod.txt @@ -14,7 +14,7 @@ simcore-aws-library @ ../../packages/aws-library/ simcore-common-library @ ../../packages/common-library/ simcore-models-library @ ../../packages/models-library/ simcore-postgres-database @ ../../packages/postgres-database/ -simcore-service-library[aiohttp] @ ../../packages/service-library +simcore-service-library[fastapi] @ ../../packages/service-library simcore-settings-library @ ../../packages/settings-library/ # installs current package diff --git a/services/storage/setup.cfg b/services/storage/setup.cfg index c703b2e8101..a185ddfb0a4 100644 --- a/services/storage/setup.cfg +++ b/services/storage/setup.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 0.5.0 +current_version = 0.6.0 commit = True message = services/storage api version: {current_version} → {new_version} tag = False @@ -7,8 +7,6 @@ commit_args = --no-verify [bumpversion:file:VERSION] -[bumpversion:file:./src/simcore_service_storage/api/v0/openapi.yaml] - [tool:pytest] asyncio_mode = auto markers = diff --git a/services/storage/setup.py b/services/storage/setup.py index 
2a0ca0d9c41..0ee0153333d 100644 --- a/services/storage/setup.py +++ b/services/storage/setup.py @@ -25,7 +25,7 @@ def read_reqs(reqs_path: Path) -> set[str]: | { "simcore-models-library", "simcore-postgres-database", - "simcore-service-library[aiohttp]>=1.2.0", + "simcore-service-library[fastapi]>=1.2.0", "simcore-settings-library", } ) diff --git a/services/storage/src/simcore_service_storage/_meta.py b/services/storage/src/simcore_service_storage/_meta.py index 8a0a28b89ea..7f5013dd0c3 100644 --- a/services/storage/src/simcore_service_storage/_meta.py +++ b/services/storage/src/simcore_service_storage/_meta.py @@ -27,3 +27,18 @@ """.format( f"v{__version__}" ) + +APP_WORKER_STARTED_BANNER_MSG = r""" + + ____ _ __ __ _ + / ___|| |_ ___ _ __ __ _ __ _ ___ \ \ / /__ _ __| | _____ _ __ + \___ \| __/ _ \| '__/ _` |/ _` |/ _ \____\ \ /\ / / _ \| '__| |/ / _ \ '__| + ___) | || (_) | | | (_| | (_| | __/_____\ V V / (_) | | | < __/ | + |____/ \__\___/|_| \__,_|\__, |\___| \_/\_/ \___/|_| |_|\_\___|_| + |___/ {} + +""".format( + f"v{__version__}" +) + +APP_FINISHED_BANNER_MSG = info.get_finished_banner() diff --git a/services/storage/src/simcore_service_storage/api/READ_ONLY_FOLDER b/services/storage/src/simcore_service_storage/api/READ_ONLY_FOLDER deleted file mode 100644 index 28208879513..00000000000 --- a/services/storage/src/simcore_service_storage/api/READ_ONLY_FOLDER +++ /dev/null @@ -1 +0,0 @@ -To modify the OpenAPI Specs, please do it in api/specs/storage and then run `make openapi-specs` in the current dir diff --git a/services/web/server/src/simcore_service_webserver/projects/_common/__init__.py b/services/storage/src/simcore_service_storage/api/__init__.py similarity index 100% rename from services/web/server/src/simcore_service_webserver/projects/_common/__init__.py rename to services/storage/src/simcore_service_storage/api/__init__.py diff --git a/services/web/server/src/simcore_service_webserver/version_control/__init__.py b/services/storage/src/simcore_service_storage/api/_worker_tasks/__init__.py similarity index 100% rename from services/web/server/src/simcore_service_webserver/version_control/__init__.py rename to services/storage/src/simcore_service_storage/api/_worker_tasks/__init__.py diff --git a/services/storage/src/simcore_service_storage/api/_worker_tasks/_paths.py b/services/storage/src/simcore_service_storage/api/_worker_tasks/_paths.py new file mode 100644 index 00000000000..fae0bdc770c --- /dev/null +++ b/services/storage/src/simcore_service_storage/api/_worker_tasks/_paths.py @@ -0,0 +1,27 @@ +import logging +from pathlib import Path + +from celery import Task # type: ignore[import-untyped] +from models_library.projects_nodes_io import LocationID +from models_library.users import UserID +from pydantic import ByteSize +from servicelib.logging_utils import log_context + +from ...dsm import get_dsm_provider +from ...modules.celery.models import TaskId +from ...modules.celery.utils import get_fastapi_app + +_logger = logging.getLogger(__name__) + + +async def compute_path_size( + task: Task, task_id: TaskId, user_id: UserID, location_id: LocationID, path: Path +) -> ByteSize: + assert task_id # nosec + with log_context( + _logger, + logging.INFO, + msg=f"computing path size {user_id=}, {location_id=}, {path=}", + ): + dsm = get_dsm_provider(get_fastapi_app(task.app)).get(location_id) + return await dsm.compute_path_size(user_id, path=Path(path)) diff --git a/services/storage/src/simcore_service_storage/api/_worker_tasks/tasks.py 
b/services/storage/src/simcore_service_storage/api/_worker_tasks/tasks.py new file mode 100644 index 00000000000..557013de976 --- /dev/null +++ b/services/storage/src/simcore_service_storage/api/_worker_tasks/tasks.py @@ -0,0 +1,22 @@ +import logging + +from celery import Celery # type: ignore[import-untyped] +from servicelib.logging_utils import log_context + +from ...modules.celery._celery_types import register_celery_types +from ...modules.celery._task import define_task +from ...modules.celery.tasks import export_data +from ._paths import compute_path_size + +_logger = logging.getLogger(__name__) + + +def setup_worker_tasks(app: Celery) -> None: + register_celery_types() + with log_context( + _logger, + logging.INFO, + msg="Storage setup Worker Tasks", + ): + define_task(app, export_data) + define_task(app, compute_path_size) diff --git a/services/storage/src/simcore_service_storage/api/rest/__init__.py b/services/storage/src/simcore_service_storage/api/rest/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/services/storage/src/simcore_service_storage/api/rest/_datasets.py b/services/storage/src/simcore_service_storage/api/rest/_datasets.py new file mode 100644 index 00000000000..76e6e185068 --- /dev/null +++ b/services/storage/src/simcore_service_storage/api/rest/_datasets.py @@ -0,0 +1,58 @@ +import logging +from typing import Annotated + +from fastapi import APIRouter, Depends, Request +from models_library.api_schemas_storage.storage_schemas import ( + DatasetMetaDataGet, + FileMetaDataGet, +) +from models_library.generics import Envelope +from models_library.projects_nodes_io import LocationID + +from ...dsm import get_dsm_provider +from ...models import FilesMetadataDatasetQueryParams, StorageQueryParamsBase + +_logger = logging.getLogger(__name__) + +router = APIRouter( + tags=[ + "datasets", + ], +) + + +@router.get( + "/locations/{location_id}/datasets", + response_model=Envelope[list[DatasetMetaDataGet]], +) +async def list_datasets_metadata( + query_params: Annotated[StorageQueryParamsBase, Depends()], + location_id: LocationID, + request: Request, +) -> Envelope[list[DatasetMetaDataGet]]: + dsm = get_dsm_provider(request.app).get(location_id) + data = await dsm.list_datasets(query_params.user_id) + return Envelope[list[DatasetMetaDataGet]]( + data=[DatasetMetaDataGet(**d.model_dump()) for d in data] + ) + + +@router.get( + "/locations/{location_id}/datasets/{dataset_id}/metadata", + response_model=Envelope[list[FileMetaDataGet]], +) +async def list_dataset_files_metadata( + query_params: Annotated[FilesMetadataDatasetQueryParams, Depends()], + location_id: LocationID, + dataset_id: str, + request: Request, +) -> Envelope[list[FileMetaDataGet]]: + dsm = get_dsm_provider(request.app).get(location_id) + data = await dsm.list_files_in_dataset( + user_id=query_params.user_id, + dataset_id=dataset_id, + expand_dirs=query_params.expand_dirs, + ) + return Envelope[list[FileMetaDataGet]]( + data=[FileMetaDataGet(**d.model_dump()) for d in data] + ) diff --git a/services/storage/src/simcore_service_storage/api/rest/_files.py b/services/storage/src/simcore_service_storage/api/rest/_files.py new file mode 100644 index 00000000000..c0b6a4f4a7c --- /dev/null +++ b/services/storage/src/simcore_service_storage/api/rest/_files.py @@ -0,0 +1,386 @@ +import asyncio +import logging +from typing import Annotated, cast +from urllib.parse import quote + +from fastapi import APIRouter, Depends, Header, HTTPException, Request +from 
models_library.api_schemas_storage.storage_schemas import ( + FileMetaDataGet, + FileMetaDataGetv010, + FileUploadCompleteFutureResponse, + FileUploadCompleteLinks, + FileUploadCompleteResponse, + FileUploadCompleteState, + FileUploadCompletionBody, + FileUploadLinks, + FileUploadSchema, + SoftCopyBody, +) +from models_library.generics import Envelope +from models_library.projects_nodes_io import LocationID, StorageFileID +from pydantic import AnyUrl, ByteSize, TypeAdapter +from servicelib.aiohttp import status +from yarl import URL + +from ...dsm import get_dsm_provider +from ...exceptions.errors import FileMetaDataNotFoundError +from ...models import ( + FileDownloadQueryParams, + FileDownloadResponse, + FileMetaData, + FileMetadataListQueryParams, + FileUploadQueryParams, + FileUploadResponseV1, + StorageQueryParamsBase, + UploadLinks, +) +from ...modules.long_running_tasks import get_completed_upload_tasks +from ...simcore_s3_dsm import SimcoreS3DataManager +from ...utils.utils import create_upload_completion_task_name + +_logger = logging.getLogger(__name__) + +router = APIRouter( + tags=[ + "files", + ], +) + + +@router.get( + "/locations/{location_id}/files/metadata", + response_model=Envelope[list[FileMetaDataGet]], +) +async def list_files_metadata( + query_params: Annotated[FileMetadataListQueryParams, Depends()], + location_id: LocationID, + request: Request, +): + dsm = get_dsm_provider(request.app).get(location_id) + data: list[FileMetaData] = await dsm.list_files( + user_id=query_params.user_id, + expand_dirs=query_params.expand_dirs, + uuid_filter=query_params.uuid_filter + or f"{query_params.project_id or ''}", # NOTE: https://github.com/ITISFoundation/osparc-issues/issues/1593 + project_id=query_params.project_id, + ) + return Envelope[list[FileMetaDataGet]]( + data=[FileMetaDataGet(**d.model_dump()) for d in data] + ) + + +@router.get( + "/locations/{location_id}/files/{file_id:path}/metadata", + response_model=Envelope[FileMetaDataGet] + | Envelope[FileMetaDataGetv010] + | Envelope[dict], +) +async def get_file_metadata( + query_params: Annotated[StorageQueryParamsBase, Depends()], + location_id: LocationID, + file_id: StorageFileID, + user_agent: Annotated[str | None, Header()], + request: Request, +): + # NOTE: Used by legacy dynamic services -> MUST BE BACKWARDS COMPATIBLE + dsm = get_dsm_provider(request.app).get(location_id) + try: + data = await dsm.get_file( + user_id=query_params.user_id, + file_id=file_id, + ) + except FileMetaDataNotFoundError: + # NOTE: LEGACY compatibility + # This is what happens Larry... 
data must be an empty {} or else some old dynamic services will FAIL (sic) + # Cannot remove until we retire all legacy services + # https://github.com/ITISFoundation/osparc-simcore/issues/5676 + # https://github.com/ITISFoundation/osparc-simcore/blob/cfdf4f86d844ebb362f4f39e9c6571d561b72897/services/storage/client-sdk/python/simcore_service_storage_sdk/models/file_meta_data_enveloped.py#L34 + return Envelope[dict]( + data={}, + error="No result found", # NOTE: LEGACY compatibility + ) + + if user_agent == "OpenAPI-Generator/0.1.0/python": + # NOTE: LEGACY compatibility with API v0.1.0 + # SEE models used in sdk in: + # https://github.com/ITISFoundation/osparc-simcore/blob/cfdf4f86d844ebb362f4f39e9c6571d561b72897/services/storage/client-sdk/python/simcore_service_storage_sdk/models/file_meta_data_enveloped.py#L34 + # https://github.com/ITISFoundation/osparc-simcore/blob/cfdf4f86d844ebb362f4f39e9c6571d561b72897/services/storage/client-sdk/python/simcore_service_storage_sdk/models/file_meta_data_type.py#L34 + return Envelope[FileMetaDataGetv010]( + data=FileMetaDataGetv010( + file_uuid=data.file_uuid, + location_id=data.location_id, + location=data.location, + bucket_name=data.bucket_name, + object_name=data.object_name, + project_id=data.project_id, + project_name=data.project_name, + node_id=data.node_id, + node_name=data.node_name, + file_name=data.file_name, + user_id=data.user_id, + user_name=None, + ) + ) + + return Envelope[FileMetaDataGet](data=FileMetaDataGet(**data.model_dump())) + + +@router.get( + "/locations/{location_id}/files/{file_id:path}", + response_model=Envelope[FileDownloadResponse], +) +async def download_file( + location_id: LocationID, + file_id: StorageFileID, + query_params: Annotated[FileDownloadQueryParams, Depends()], + request: Request, +) -> Envelope[FileDownloadResponse]: + # NOTE: Used by legacy dynamic services -> MUST BE BACKWARDS COMPATIBLE + dsm = get_dsm_provider(request.app).get(location_id) + link = await dsm.create_file_download_link( + query_params.user_id, file_id, query_params.link_type + ) + return Envelope[FileDownloadResponse](data=FileDownloadResponse(link=link)) + + +@router.put( + "/locations/{location_id}/files/{file_id:path}", + response_model=Envelope[FileUploadResponseV1] | Envelope[FileUploadSchema], +) +async def upload_file( + location_id: LocationID, + file_id: StorageFileID, + query_params: Annotated[FileUploadQueryParams, Depends()], + request: Request, +): + """creates upload file links: + + This function covers v1 and v2 versions of the handler. + Note: calling this entrypoint on an already existing file will overwrite that file. That file will be deleted + before the upload takes place.
+ + v1 rationale: + - client calls this handler, which returns a single link (either direct S3 or presigned) to the S3 backend + - client uploads the file + - storage relies on lazy update to find if the file is finished uploading (when client calls get_file_meta_data, or if the dsm_cleaner goes over it after the upload time is expired) + + v2 rationale: + - client calls this handler, which returns a FileUploadSchema object containing 1 or more links (either S3 or presigned links) + - client uploads the file (by chunking it if there are more than 1 presigned link) + - client calls the complete_upload handler which will reconstruct the file on the S3 backend + - client waits for completion to finish and then the file is accessible on the S3 backend + + + Use-case v1: query.file_size is not defined, returns a PresignedLink model (backward compatibility) + Use-case v1.1: if query.link_type=presigned or None, returns a presigned link (limited to a single 5GB file) + Use-case v1.2: if query.link_type=s3, returns an s3 direct link (limited to a single 5TB file) + + Use-case v2: query.is_directory is True (query.file_size is forced to -1), returns an s3 path where to upload all the content of the directory + Use-case v2: if query.file_size is defined, returns a FileUploadSchema model, expects client to call "complete_upload" when the file is finished uploading + Use-case v2.1: if query.file_size == 0 and query.link_type=presigned or None, returns a single presigned link inside FileUploadSchema (limited to a single 5GB file) + Use-case v2.2: if query.file_size > 0 and query.link_type=presigned or None, returns 1 or more presigned links depending on the file size (limited to a single 5TB file) + Use-case v2.3: if query.link_type=s3 and query.file_size>=0, returns a single s3 direct link (limited to a single 5TB file) + """ + # NOTE: Used by legacy dynamic services with single presigned link -> MUST BE BACKWARDS COMPATIBLE + dsm = get_dsm_provider(request.app).get(location_id) + links: UploadLinks = await dsm.create_file_upload_links( + user_id=query_params.user_id, + file_id=file_id, + link_type=query_params.link_type, + file_size_bytes=query_params.file_size or ByteSize(0), + is_directory=query_params.is_directory, + sha256_checksum=query_params.sha256_checksum, + ) + if query_params.is_v1_upload: + # return v1 response + assert len(links.urls) == 1 # nosec + return Envelope[FileUploadResponseV1]( + data=FileUploadResponseV1(link=links.urls[0]) + ) + + # v2 response + + abort_url = ( + URL(f"{request.url}") + .with_path( + quote( + request.app.url_path_for( + "abort_upload_file", + location_id=f"{location_id}", + file_id=file_id, + ), + safe=":/", + ), + encoded=True, + ) + .with_query(user_id=query_params.user_id) + ) + + complete_url = ( + URL(f"{request.url}") + .with_path( + quote( + request.app.url_path_for( + "complete_upload_file", + location_id=f"{location_id}", + file_id=file_id, + ), + safe=":/", + ), + encoded=True, + ) + .with_query(user_id=query_params.user_id) + ) + + v2_response = FileUploadSchema( + chunk_size=links.chunk_size, + urls=links.urls, + links=FileUploadLinks( + abort_upload=TypeAdapter(AnyUrl).validate_python(f"{abort_url}"), + complete_upload=TypeAdapter(AnyUrl).validate_python(f"{complete_url}"), + ), + ) + return Envelope[FileUploadSchema](data=v2_response) + + +@router.post( + "/locations/{location_id}/files/{file_id:path}:abort", + status_code=status.HTTP_204_NO_CONTENT, +) +async def abort_upload_file( + location_id: LocationID, + file_id: StorageFileID, + query_params: 
Annotated[StorageQueryParamsBase, Depends()], + request: Request, +): + dsm = get_dsm_provider(request.app).get(location_id) + await dsm.abort_file_upload(query_params.user_id, file_id) + + +@router.post( + "/locations/{location_id}/files/{file_id:path}:complete", + response_model=Envelope[FileUploadCompleteResponse], + status_code=status.HTTP_202_ACCEPTED, +) +async def complete_upload_file( + query_params: Annotated[StorageQueryParamsBase, Depends()], + location_id: LocationID, + file_id: StorageFileID, + body: FileUploadCompletionBody, + request: Request, +): + dsm = get_dsm_provider(request.app).get(location_id) + # NOTE: completing a multipart upload on AWS can take up to several minutes + # so it runs as a background task and we return a 202 - Accepted, the client will have to check later + # for completeness + task = asyncio.create_task( + dsm.complete_file_upload(file_id, query_params.user_id, body.parts), + name=create_upload_completion_task_name(query_params.user_id, file_id), + ) + get_completed_upload_tasks(request.app)[task.get_name()] = task + + route = ( + URL(f"{request.url}") + .with_path( + quote( + request.app.url_path_for( + "is_completed_upload_file", + location_id=f"{location_id}", + file_id=file_id, + future_id=task.get_name(), + ), + safe=":/", + ), + encoded=True, + ) + .with_query(user_id=query_params.user_id) + ) + complete_task_state_url = f"{route}" + + response = FileUploadCompleteResponse( + links=FileUploadCompleteLinks( + state=TypeAdapter(AnyUrl).validate_python(complete_task_state_url) + ) + ) + return Envelope[FileUploadCompleteResponse](data=response) + + +@router.post( + "/locations/{location_id}/files/{file_id:path}:complete/futures/{future_id}", + response_model=Envelope[FileUploadCompleteFutureResponse], +) +async def is_completed_upload_file( + query_params: Annotated[StorageQueryParamsBase, Depends()], + location_id: LocationID, + file_id: StorageFileID, + future_id: str, + request: Request, +): + # NOTE: completing a multipart upload on AWS can take up to several minutes + # this endpoint reports the state of the background task started by complete_upload_file + # so the client can poll it until the upload is complete + task_name = create_upload_completion_task_name(query_params.user_id, file_id) + assert task_name == future_id # nosec # NOTE: fastapi auto-decodes path parameters + # first check if the task is in the app + if task := get_completed_upload_tasks(request.app).get(task_name): + if task.done(): + new_fmd: FileMetaData = task.result() + get_completed_upload_tasks(request.app).pop(task_name) + response = FileUploadCompleteFutureResponse( + state=FileUploadCompleteState.OK, e_tag=new_fmd.entity_tag + ) + else: + # the task is still running + response = FileUploadCompleteFutureResponse( + state=FileUploadCompleteState.NOK + ) + return Envelope[FileUploadCompleteFutureResponse](data=response) + # there is no task, either wrong call or storage was restarted + # we try to get the file to see if it exists in S3 + dsm = get_dsm_provider(request.app).get(location_id) + if fmd := await dsm.get_file( + user_id=query_params.user_id, + file_id=file_id, + ): + return Envelope[FileUploadCompleteFutureResponse]( + data=FileUploadCompleteFutureResponse( + state=FileUploadCompleteState.OK, e_tag=fmd.entity_tag + ) + ) + raise HTTPException( + status.HTTP_404_NOT_FOUND, + detail="Not found. Upload could not be completed. 
Please try again and contact support if it fails again.", + ) + + +@router.delete( + "/locations/{location_id}/files/{file_id:path}", + status_code=status.HTTP_204_NO_CONTENT, +) +async def delete_file( + query_params: Annotated[StorageQueryParamsBase, Depends()], + location_id: LocationID, + file_id: StorageFileID, + request: Request, +): + dsm = get_dsm_provider(request.app).get(location_id) + await dsm.delete_file(query_params.user_id, file_id) + + +@router.post( + "/files/{file_id:path}:soft-copy", response_model=Envelope[FileMetaDataGet] +) +async def copy_as_soft_link( + query_params: Annotated[StorageQueryParamsBase, Depends()], + file_id: StorageFileID, + body: SoftCopyBody, + request: Request, +): + dsm = cast( + SimcoreS3DataManager, + get_dsm_provider(request.app).get(SimcoreS3DataManager.get_location_id()), + ) + file_link = await dsm.create_soft_link(query_params.user_id, file_id, body.link_id) + + return Envelope[FileMetaDataGet](data=FileMetaDataGet(**file_link.model_dump())) diff --git a/services/storage/src/simcore_service_storage/api/rest/_health.py b/services/storage/src/simcore_service_storage/api/rest/_health.py new file mode 100644 index 00000000000..7272066ee75 --- /dev/null +++ b/services/storage/src/simcore_service_storage/api/rest/_health.py @@ -0,0 +1,89 @@ +""" + +- Checks connectivity with other services in the backend + +""" + +import logging + +from aws_library.s3 import S3AccessError +from fastapi import APIRouter, Request +from models_library.api_schemas_storage.storage_schemas import HealthCheck, S3BucketName +from models_library.app_diagnostics import AppStatusCheck +from models_library.generics import Envelope +from pydantic import TypeAdapter +from servicelib.db_asyncpg_utils import check_postgres_liveness +from servicelib.fastapi.db_asyncpg_engine import get_engine +from simcore_postgres_database.utils_aiosqlalchemy import get_pg_engine_stateinfo + +from ..._meta import API_VERSION, PROJECT_NAME, VERSION +from ...core.settings import get_application_settings +from ...modules.s3 import get_s3_client + +_logger = logging.getLogger(__name__) + +router = APIRouter( + tags=[ + "status", + ], +) + + +@router.get("/", include_in_schema=True, response_model=Envelope[HealthCheck]) +async def get_health( + request: Request, +) -> Envelope[HealthCheck]: + assert request # nosec + return Envelope[HealthCheck]( + data=HealthCheck( + name=PROJECT_NAME, + version=f"{VERSION}", + api_version=API_VERSION, + status=None, + ) + ) + + +@router.get("/status", response_model=Envelope[AppStatusCheck]) +async def get_status(request: Request) -> Envelope[AppStatusCheck]: + # NOTE: all calls here must NOT raise + assert request.app # nosec + app_settings = get_application_settings(request.app) + s3_state = "disabled" + if app_settings.STORAGE_S3: + try: + s3_state = ( + "connected" + if await get_s3_client(request.app).bucket_exists( + bucket=TypeAdapter(S3BucketName).validate_python( + app_settings.STORAGE_S3.S3_BUCKET_NAME + ) + ) + else "no access to S3 bucket" + ) + except S3AccessError: + s3_state = "failed" + + postgres_state = "disabled" + + if app_settings.STORAGE_POSTGRES: + postgres_state = ( + "connected" + if await check_postgres_liveness(get_engine(request.app)) + else "failed" + ) + + status = AppStatusCheck.model_validate( + { + "app_name": PROJECT_NAME, + "version": f"{VERSION}", + "services": { + "postgres": { + "healthy": postgres_state, + "pool": await get_pg_engine_stateinfo(get_engine(request.app)), + }, + "s3": {"healthy": s3_state}, + }, + } + ) + return 
Envelope[AppStatusCheck](data=status) diff --git a/services/storage/src/simcore_service_storage/api/rest/_locations.py b/services/storage/src/simcore_service_storage/api/rest/_locations.py new file mode 100644 index 00000000000..ec33f8e31c7 --- /dev/null +++ b/services/storage/src/simcore_service_storage/api/rest/_locations.py @@ -0,0 +1,33 @@ +import logging +from typing import Annotated + +from fastapi import APIRouter, Depends, Request, status +from models_library.api_schemas_storage.storage_schemas import FileLocation +from models_library.generics import Envelope + +from ...dsm import get_dsm_provider +from ...models import StorageQueryParamsBase + +_logger = logging.getLogger(__name__) + +router = APIRouter( + tags=["locations"], +) + + +@router.get( + "/locations", + status_code=status.HTTP_200_OK, + response_model=Envelope[list[FileLocation]], +) +async def list_storage_locations( + query_params: Annotated[StorageQueryParamsBase, Depends()], request: Request +): + dsm_provider = get_dsm_provider(request.app) + location_ids = dsm_provider.locations() + locs: list[FileLocation] = [] + for loc_id in location_ids: + dsm = dsm_provider.get(loc_id) + if await dsm.authorized(query_params.user_id): + locs.append(FileLocation(name=dsm.location_name, id=dsm.location_id)) + return Envelope[list[FileLocation]](data=locs) diff --git a/services/storage/src/simcore_service_storage/api/rest/_paths.py b/services/storage/src/simcore_service_storage/api/rest/_paths.py new file mode 100644 index 00000000000..bdb5a171f0c --- /dev/null +++ b/services/storage/src/simcore_service_storage/api/rest/_paths.py @@ -0,0 +1,68 @@ +import logging +from pathlib import Path +from typing import Annotated + +from fastapi import APIRouter, Depends +from fastapi_pagination import create_page +from models_library.api_schemas_storage.storage_schemas import ( + PathMetaDataGet, + PathTotalSizeCreate, +) +from models_library.generics import Envelope +from models_library.users import UserID +from servicelib.fastapi.rest_pagination import ( + CustomizedPathsCursorPage, + CustomizedPathsCursorPageParams, +) + +from ...dsm_factory import BaseDataManager +from .dependencies.dsm_prodiver import get_data_manager + +_logger = logging.getLogger(__name__) + +router = APIRouter( + tags=[ + "files", + ], +) + + +@router.get( + "/locations/{location_id}/paths", + response_model=CustomizedPathsCursorPage[PathMetaDataGet], +) +async def list_paths( + page_params: Annotated[CustomizedPathsCursorPageParams, Depends()], + dsm: Annotated[BaseDataManager, Depends(get_data_manager)], + user_id: UserID, + file_filter: Path | None = None, +): + """Returns one level of files (paginated)""" + items, next_cursor, total_number = await dsm.list_paths( + user_id=user_id, + file_filter=file_filter, + limit=page_params.size, + cursor=page_params.to_raw_params().cursor, + ) + return create_page( + [_.to_api_model() for _ in items], + total=total_number, + params=page_params, + next_=next_cursor, + ) + + +@router.post( + "/locations/{location_id}/paths/{path:path}:size", + response_model=Envelope[PathTotalSizeCreate], +) +async def compute_path_size( + dsm: Annotated[BaseDataManager, Depends(get_data_manager)], + user_id: UserID, + path: Path, +): + return Envelope[PathTotalSizeCreate]( + data=PathTotalSizeCreate( + path=path, size=await dsm.compute_path_size(user_id, path=path) + ) + ) diff --git a/services/storage/src/simcore_service_storage/api/rest/_simcore_s3.py b/services/storage/src/simcore_service_storage/api/rest/_simcore_s3.py new file mode 
100644 index 00000000000..29b199e6feb --- /dev/null +++ b/services/storage/src/simcore_service_storage/api/rest/_simcore_s3.py @@ -0,0 +1,163 @@ +import asyncio +import logging +from typing import Annotated, Any, cast + +from fastapi import APIRouter, Depends, FastAPI, Request +from models_library.api_schemas_long_running_tasks.base import TaskProgress +from models_library.api_schemas_long_running_tasks.tasks import TaskGet +from models_library.api_schemas_storage.storage_schemas import ( + FileMetaDataGet, + FoldersBody, +) +from models_library.generics import Envelope +from models_library.projects import ProjectID +from servicelib.aiohttp import status +from servicelib.fastapi.long_running_tasks._dependencies import get_tasks_manager +from servicelib.logging_utils import log_context +from servicelib.long_running_tasks._task import start_task +from settings_library.s3 import S3Settings +from yarl import URL + +from ...dsm import get_dsm_provider +from ...models import ( + DeleteFolderQueryParams, + FileMetaData, + SearchFilesQueryParams, + StorageQueryParamsBase, +) +from ...modules import sts +from ...simcore_s3_dsm import SimcoreS3DataManager + +_logger = logging.getLogger(__name__) + +router = APIRouter( + tags=[ + "simcore-s3", + ], +) + + +@router.post("/simcore-s3:access", response_model=Envelope[S3Settings]) +async def get_or_create_temporary_s3_access( + query_params: Annotated[StorageQueryParamsBase, Depends()], + request: Request, +): + # NOTE: the name of the method is not accurate, these are not temporary at all + # it returns the credentials of the s3 backend! + s3_settings: S3Settings = await sts.get_or_create_temporary_token_for_user( + request.app, query_params.user_id + ) + return Envelope[S3Settings](data=s3_settings) + + +async def _copy_folders_from_project( + progress: TaskProgress, + app: FastAPI, + query_params: StorageQueryParamsBase, + body: FoldersBody, +) -> Envelope[dict[str, Any]]: + dsm = cast( + SimcoreS3DataManager, + get_dsm_provider(app).get(SimcoreS3DataManager.get_location_id()), + ) + with log_context( + _logger, + logging.INFO, + msg=f"copying {body.source['uuid']} -> {body.destination['uuid']}", + ): + await dsm.deep_copy_project_simcore_s3( + query_params.user_id, + body.source, + body.destination, + body.nodes_map, + task_progress=progress, + ) + + return Envelope[dict[str, Any]](data=body.destination) + + +@router.post( + "/simcore-s3/folders", + response_model=Envelope[TaskGet], + status_code=status.HTTP_202_ACCEPTED, +) +async def copy_folders_from_project( + query_params: Annotated[StorageQueryParamsBase, Depends()], + body: FoldersBody, + request: Request, +): + task_id = None + try: + task_id = start_task( + get_tasks_manager(request), + _copy_folders_from_project, + app=request.app, + query_params=query_params, + body=body, + ) + relative_url = URL(f"{request.url}").relative() + + return Envelope[TaskGet]( + data=TaskGet( + task_id=task_id, + task_name=f"{request.method} {relative_url}", + status_href=f"{request.url_for('get_task_status', task_id=task_id)}", + result_href=f"{request.url_for('get_task_result', task_id=task_id)}", + abort_href=f"{request.url_for('cancel_and_delete_task', task_id=task_id)}", + ) + ) + except asyncio.CancelledError: + if task_id: + await get_tasks_manager(request).cancel_task( + task_id, with_task_context=None + ) + raise + + +@router.delete( + "/simcore-s3/folders/{folder_id}", + status_code=status.HTTP_204_NO_CONTENT, +) +async def delete_folders_of_project( + query_params: Annotated[DeleteFolderQueryParams, 
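# NOTE: DeleteFolderQueryParams carries the user_id and an optional node_id used in the dsm call below +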
Depends()], + folder_id: str, + request: Request, +): + dsm = cast( + SimcoreS3DataManager, + get_dsm_provider(request.app).get(SimcoreS3DataManager.get_location_id()), + ) + await dsm.delete_project_simcore_s3( + query_params.user_id, + ProjectID(folder_id), + query_params.node_id, + ) + + +@router.post( + "/simcore-s3/files/metadata:search", + response_model=Envelope[list[FileMetaDataGet]], +) +async def search_files( + query_params: Annotated[SearchFilesQueryParams, Depends()], request: Request +): + dsm = cast( + SimcoreS3DataManager, + get_dsm_provider(request.app).get(SimcoreS3DataManager.get_location_id()), + ) + + data: list[FileMetaData] = await dsm.search_owned_files( + user_id=query_params.user_id, + file_id_prefix=query_params.startswith, + sha256_checksum=query_params.sha256_checksum, + limit=query_params.limit, + offset=query_params.offset, + ) + _logger.debug( + "Found %d files starting with '%s'", + len(data), + f"{query_params.startswith=}, {query_params.sha256_checksum=}", + ) + return Envelope[list[FileMetaDataGet]]( + data=[FileMetaDataGet(**d.model_dump()) for d in data] + ) diff --git a/services/storage/src/simcore_service_storage/api/rest/dependencies/__init__.py b/services/storage/src/simcore_service_storage/api/rest/dependencies/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/services/storage/src/simcore_service_storage/api/rest/dependencies/application.py b/services/storage/src/simcore_service_storage/api/rest/dependencies/application.py new file mode 100644 index 00000000000..706818ca793 --- /dev/null +++ b/services/storage/src/simcore_service_storage/api/rest/dependencies/application.py @@ -0,0 +1,18 @@ +# mypy: disable-error-code=truthy-function +from fastapi import Request +from servicelib.fastapi.dependencies import get_app, get_reverse_url_mapper + +from ....core.settings import ApplicationSettings, get_application_settings + + +def get_settings(request: Request) -> ApplicationSettings: + return get_application_settings(request.app) + + +assert get_reverse_url_mapper # nosec +assert get_app # nosec + +__all__: tuple[str, ...] = ( + "get_app", + "get_reverse_url_mapper", +) diff --git a/services/storage/src/simcore_service_storage/api/rest/dependencies/dsm_prodiver.py b/services/storage/src/simcore_service_storage/api/rest/dependencies/dsm_prodiver.py new file mode 100644 index 00000000000..a0f16979e25 --- /dev/null +++ b/services/storage/src/simcore_service_storage/api/rest/dependencies/dsm_prodiver.py @@ -0,0 +1,23 @@ +from typing import Annotated + +from fastapi import Depends, FastAPI +from models_library.projects_nodes_io import LocationID +from servicelib.fastapi.dependencies import get_app + +from ....dsm import get_dsm_provider +from ....dsm_factory import BaseDataManager, DataManagerProvider + + +def get_data_manager_provider( + app: Annotated[FastAPI, Depends(get_app)], +) -> DataManagerProvider: + return get_dsm_provider(app) + + +async def get_data_manager( + location_id: LocationID, + data_manager_provider: Annotated[ + DataManagerProvider, Depends(get_data_manager_provider) + ], +) -> BaseDataManager: + return data_manager_provider.get(location_id) diff --git a/services/storage/src/simcore_service_storage/api/rest/routes.py b/services/storage/src/simcore_service_storage/api/rest/routes.py new file mode 100644 index 00000000000..bb77c1d3f77 --- /dev/null +++ b/services/storage/src/simcore_service_storage/api/rest/routes.py @@ -0,0 +1,20 @@ +from fastapi import APIRouter, FastAPI + +from . 
import _datasets, _files, _health, _locations, _paths, _simcore_s3 + +v0_router = APIRouter() + +health_router = _health.router +v0_router.include_router(_health.router) +v0_router.include_router(_locations.router) +v0_router.include_router(_datasets.router) +v0_router.include_router(_files.router) +v0_router.include_router(_paths.router) +v0_router.include_router(_simcore_s3.router) + + +def setup_rest_api_routes(app: FastAPI, vtag: str): + # healthcheck at / and at /v0/ + app.include_router(health_router, prefix=f"/{vtag}") + # api under /v* + app.include_router(v0_router, prefix=f"/{vtag}") diff --git a/services/storage/src/simcore_service_storage/api/rpc/__init__.py b/services/storage/src/simcore_service_storage/api/rpc/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/services/storage/src/simcore_service_storage/api/rpc/_async_jobs.py b/services/storage/src/simcore_service_storage/api/rpc/_async_jobs.py new file mode 100644 index 00000000000..6c928b18a3a --- /dev/null +++ b/services/storage/src/simcore_service_storage/api/rpc/_async_jobs.py @@ -0,0 +1,147 @@ +# pylint: disable=unused-argument + +import logging + +from celery.exceptions import CeleryError # type: ignore[import-untyped] +from fastapi import FastAPI +from models_library.api_schemas_rpc_async_jobs.async_jobs import ( + AsyncJobGet, + AsyncJobId, + AsyncJobNameData, + AsyncJobResult, + AsyncJobStatus, +) +from models_library.api_schemas_rpc_async_jobs.exceptions import ( + JobAbortedError, + JobError, + JobMissingError, + JobNotDoneError, + JobSchedulerError, +) +from servicelib.logging_utils import log_catch +from servicelib.rabbitmq import RPCRouter + +from ...modules.celery import get_celery_client +from ...modules.celery.client import CeleryTaskQueueClient +from ...modules.celery.models import TaskError, TaskState + +_logger = logging.getLogger(__name__) +router = RPCRouter() + + +async def _assert_job_exists( + *, + job_id: AsyncJobId, + job_id_data: AsyncJobNameData, + celery_client: CeleryTaskQueueClient, +) -> None: + """Raises JobMissingError if job doesn't exist""" + job_ids = await celery_client.get_task_uuids( + task_context=job_id_data.model_dump(), + ) + if job_id not in job_ids: + raise JobMissingError(job_id=f"{job_id}") + + +@router.expose(reraise_if_error_type=(JobSchedulerError, JobMissingError)) +async def cancel(app: FastAPI, job_id: AsyncJobId, job_id_data: AsyncJobNameData): + assert app # nosec + assert job_id_data # nosec + try: + await _assert_job_exists( + job_id=job_id, job_id_data=job_id_data, celery_client=get_celery_client(app) + ) + await get_celery_client(app).abort_task( + task_context=job_id_data.model_dump(), + task_uuid=job_id, + ) + except CeleryError as exc: + raise JobSchedulerError(exc=f"{exc}") from exc + + +@router.expose(reraise_if_error_type=(JobSchedulerError, JobMissingError)) +async def status( + app: FastAPI, job_id: AsyncJobId, job_id_data: AsyncJobNameData +) -> AsyncJobStatus: + assert app # nosec + assert job_id_data # nosec + + try: + await _assert_job_exists( + job_id=job_id, job_id_data=job_id_data, celery_client=get_celery_client(app) + ) + task_status = await get_celery_client(app).get_task_status( + task_context=job_id_data.model_dump(), + task_uuid=job_id, + ) + except CeleryError as exc: + raise JobSchedulerError(exc=f"{exc}") from exc + + return AsyncJobStatus( + job_id=job_id, + progress=task_status.progress_report, + done=task_status.is_done, + ) + + +@router.expose( + reraise_if_error_type=( + JobError, + JobNotDoneError, + 
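# NOTE: exception types listed here are expected to propagate to the RPC caller instead of surfacing as generic RPC failures +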
JobAbortedError, + JobSchedulerError, + JobMissingError, + ) +) +async def result( + app: FastAPI, job_id: AsyncJobId, job_id_data: AsyncJobNameData +) -> AsyncJobResult: + assert app # nosec + assert job_id # nosec + assert job_id_data # nosec + + try: + await _assert_job_exists( + job_id=job_id, job_id_data=job_id_data, celery_client=get_celery_client(app) + ) + _status = await get_celery_client(app).get_task_status( + task_context=job_id_data.model_dump(), + task_uuid=job_id, + ) + if not _status.is_done: + raise JobNotDoneError(job_id=job_id) + _result = await get_celery_client(app).get_task_result( + task_context=job_id_data.model_dump(), + task_uuid=job_id, + ) + except CeleryError as exc: + raise JobSchedulerError(exc=f"{exc}") from exc + + if _status.task_state == TaskState.ABORTED: + raise JobAbortedError(job_id=job_id) + if _status.task_state == TaskState.ERROR: + exc_type = "" + exc_msg = "" + with log_catch(logger=_logger, reraise=False): + task_error = TaskError.model_validate(_result) + exc_type = task_error.exc_type + exc_msg = task_error.exc_msg + raise JobError(job_id=job_id, exc_type=exc_type, exc_msg=exc_msg) + + return AsyncJobResult(result=_result) + + +@router.expose(reraise_if_error_type=(JobSchedulerError,)) +async def list_jobs( + app: FastAPI, filter_: str, job_id_data: AsyncJobNameData +) -> list[AsyncJobGet]: + assert app # nosec + + try: + task_uuids = await get_celery_client(app).get_task_uuids( + task_context=job_id_data.model_dump(), + ) + except CeleryError as exc: + raise JobSchedulerError(exc=f"{exc}") from exc + + return [AsyncJobGet(job_id=task_uuid) for task_uuid in task_uuids] diff --git a/services/storage/src/simcore_service_storage/api/rpc/_data_export.py b/services/storage/src/simcore_service_storage/api/rpc/_data_export.py new file mode 100644 index 00000000000..7fe6612e5e3 --- /dev/null +++ b/services/storage/src/simcore_service_storage/api/rpc/_data_export.py @@ -0,0 +1,65 @@ +from celery.exceptions import CeleryError # type: ignore[import-untyped] +from fastapi import FastAPI +from models_library.api_schemas_rpc_async_jobs.async_jobs import ( + AsyncJobGet, + AsyncJobNameData, +) +from models_library.api_schemas_rpc_async_jobs.exceptions import JobSchedulerError +from models_library.api_schemas_storage.data_export_async_jobs import ( + AccessRightError, + DataExportTaskStartInput, + InvalidFileIdentifierError, +) +from servicelib.rabbitmq import RPCRouter + +from ...datcore_dsm import DatCoreDataManager +from ...dsm import get_dsm_provider +from ...exceptions.errors import FileAccessRightError +from ...modules.celery import get_celery_client +from ...modules.datcore_adapter.datcore_adapter_exceptions import DatcoreAdapterError +from ...simcore_s3_dsm import SimcoreS3DataManager + +router = RPCRouter() + + +@router.expose( + reraise_if_error_type=( + InvalidFileIdentifierError, + AccessRightError, + JobSchedulerError, + ) +) +async def start_data_export( + app: FastAPI, + data_export_start: DataExportTaskStartInput, + job_id_data: AsyncJobNameData, +) -> AsyncJobGet: + assert app # nosec + + dsm = get_dsm_provider(app).get(data_export_start.location_id) + + try: + for _id in data_export_start.file_and_folder_ids: + if isinstance(dsm, DatCoreDataManager): + _ = await dsm.get_file(user_id=job_id_data.user_id, file_id=_id) + elif isinstance(dsm, SimcoreS3DataManager): + await dsm.can_read_file(user_id=job_id_data.user_id, file_id=_id) + + except (FileAccessRightError, DatcoreAdapterError) as err: + raise AccessRightError( + 
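# NOTE: _id is the loop variable from the checks above, i.e. the file that failed the access check +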
user_id=job_id_data.user_id, + file_id=_id, + location_id=data_export_start.location_id, + ) from err + + try: + task_uuid = await get_celery_client(app).send_task( + "export_data", + task_context=job_id_data.model_dump(), + files=data_export_start.file_and_folder_ids, # ANE: adapt here your signature + ) + except CeleryError as exc: + raise JobSchedulerError(exc=f"{exc}") from exc + return AsyncJobGet( + job_id=task_uuid, + ) diff --git a/services/storage/src/simcore_service_storage/api/rpc/_paths.py b/services/storage/src/simcore_service_storage/api/rpc/_paths.py new file mode 100644 index 00000000000..34ea3deeedb --- /dev/null +++ b/services/storage/src/simcore_service_storage/api/rpc/_paths.py @@ -0,0 +1,35 @@ +from pathlib import Path + +from fastapi import FastAPI +from models_library.api_schemas_rpc_async_jobs.async_jobs import ( + AsyncJobGet, + AsyncJobNameData, +) +from models_library.projects_nodes_io import LocationID +from servicelib.rabbitmq import RPCRouter + +from ...modules.celery import get_celery_client +from .._worker_tasks._paths import compute_path_size as remote_compute_path_size + +router = RPCRouter() + + +@router.expose(reraise_if_error_type=None) +async def compute_path_size( + app: FastAPI, + job_id_data: AsyncJobNameData, + # user_id: UserID, + location_id: LocationID, + path: Path, +) -> AsyncJobGet: + assert app # nosec + + task_uuid = await get_celery_client(app).send_task( + remote_compute_path_size.__name__, + task_context=job_id_data.model_dump(), + user_id=job_id_data.user_id, + location_id=location_id, + path=path, + ) + + return AsyncJobGet(job_id=task_uuid) diff --git a/services/storage/src/simcore_service_storage/api/rpc/routes.py b/services/storage/src/simcore_service_storage/api/rpc/routes.py new file mode 100644 index 00000000000..799a2b4e839 --- /dev/null +++ b/services/storage/src/simcore_service_storage/api/rpc/routes.py @@ -0,0 +1,32 @@ +import logging + +from fastapi import FastAPI +from models_library.api_schemas_storage import STORAGE_RPC_NAMESPACE +from servicelib.logging_utils import log_context +from servicelib.rabbitmq import RPCRouter + +from ...modules.rabbitmq import get_rabbitmq_rpc_server +from . 
import _async_jobs, _data_export, _paths + +_logger = logging.getLogger(__name__) + + +ROUTERS: list[RPCRouter] = [ + _async_jobs.router, + _data_export.router, + _paths.router, +] + + +def setup_rpc_api_routes(app: FastAPI) -> None: + async def startup() -> None: + with log_context( + _logger, + logging.INFO, + msg="Storage startup RPC API Routes", + ): + rpc_server = get_rabbitmq_rpc_server(app) + for router in ROUTERS: + await rpc_server.register_router(router, STORAGE_RPC_NAMESPACE, app) + + app.add_event_handler("startup", startup) diff --git a/services/storage/src/simcore_service_storage/api/v0/openapi.yaml b/services/storage/src/simcore_service_storage/api/v0/openapi.yaml deleted file mode 100644 index 22f27d960ac..00000000000 --- a/services/storage/src/simcore_service_storage/api/v0/openapi.yaml +++ /dev/null @@ -1,1596 +0,0 @@ -openapi: 3.1.0 -info: - title: simcore-service-storage API - description: API definition for simcore-service-storage service - contact: - name: IT'IS Foundation - email: support@simcore.io - license: - name: MIT - url: https://github.com/ITISFoundation/osparc-simcore/blob/master/LICENSE - version: 0.5.0 -servers: -- url: / - description: 'Default server: requests directed to serving url' -- url: http://{host}:{port}/ - description: 'Development server: can configure any base url' - variables: - host: - default: 127.0.0.1 - port: - default: '8000' -paths: - /v0/locations/{location_id}/datasets: - get: - tags: - - datasets - summary: Get datasets metadata - description: returns all the top level datasets a user has access to - operationId: get_datasets_metadata - parameters: - - name: location_id - in: path - required: true - schema: - type: integer - title: Location Id - - name: user_id - in: query - required: true - schema: - type: integer - exclusiveMinimum: true - title: User Id - minimum: 0 - responses: - '200': - description: Successful Response - content: - application/json: - schema: - $ref: '#/components/schemas/Envelope_list_DatasetMetaData__' - /v0/locations/{location_id}/datasets/{dataset_id}/metadata: - get: - tags: - - datasets - summary: Get Files Metadata - description: returns all the file meta data inside dataset with dataset_id - operationId: get_files_metadata_dataset - parameters: - - name: location_id - in: path - required: true - schema: - type: integer - title: Location Id - - name: dataset_id - in: path - required: true - schema: - type: string - title: Dataset Id - - name: user_id - in: query - required: true - schema: - type: integer - exclusiveMinimum: true - title: User Id - minimum: 0 - - name: expand_dirs - in: query - required: false - schema: - type: boolean - description: Automatic directory expansion. This will be replaced by pagination - the future - default: true - title: Expand Dirs - description: Automatic directory expansion. 
This will be replaced by pagination - the future - responses: - '200': - description: Successful Response - content: - application/json: - schema: - $ref: '#/components/schemas/Envelope_list_FileMetaDataGet__' - /v0/locations: - get: - tags: - - locations - summary: Get available storage locations - description: Returns the list of available storage locations - operationId: get_storage_locations - parameters: - - name: user_id - in: query - required: true - schema: - type: integer - exclusiveMinimum: true - title: User Id - minimum: 0 - responses: - '200': - description: Successful Response - content: - application/json: - schema: - type: array - items: - $ref: '#/components/schemas/DatasetMetaData' - title: Response Get Storage Locations - /v0/locations/{location_id}:sync: - post: - tags: - - locations - summary: Manually triggers the synchronisation of the file meta data table in - the database - description: Returns an object containing added, changed and removed paths - operationId: synchronise_meta_data_table - parameters: - - name: location_id - in: path - required: true - schema: - type: integer - title: Location Id - - name: dry_run - in: query - required: false - schema: - type: boolean - default: false - title: Dry Run - - name: fire_and_forget - in: query - required: false - schema: - type: boolean - default: false - title: Fire And Forget - responses: - '200': - description: Successful Response - content: - application/json: - schema: - $ref: '#/components/schemas/Envelope_TableSynchronisation_' - /v0/locations/{location_id}/files/metadata: - get: - tags: - - files - summary: Get datasets metadata - description: returns all the file meta data a user has access to (uuid_filter - may be used) - operationId: get_files_metadata - parameters: - - name: location_id - in: path - required: true - schema: - type: integer - title: Location Id - - name: uuid_filter - in: query - required: false - schema: - type: string - default: '' - title: Uuid Filter - - name: expand_dirs - in: query - required: false - schema: - type: boolean - description: Automatic directory expansion. This will be replaced by pagination - the future - default: true - title: Expand Dirs - description: Automatic directory expansion. 
This will be replaced by pagination - the future - responses: - '200': - description: Successful Response - content: - application/json: - schema: - $ref: '#/components/schemas/Envelope_list_DatasetMetaData__' - /v0/locations/{location_id}/files/{file_id}/metadata: - get: - tags: - - files - summary: Get File Metadata - description: returns the file meta data of file_id if user_id has the rights - to - operationId: get_file_metadata - parameters: - - name: location_id - in: path - required: true - schema: - type: integer - title: Location Id - - name: file_id - in: path - required: true - schema: - anyOf: - - type: string - pattern: ^(api|([0-9a-fA-F]{8}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{12}))\/([0-9a-fA-F]{8}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{12})\/(.+)$ - - type: string - pattern: ^N:package:[0-9a-fA-F]{8}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{12}$ - title: File Id - - name: user_id - in: query - required: true - schema: - type: integer - exclusiveMinimum: true - title: User Id - minimum: 0 - responses: - '200': - description: Successful Response - content: - application/json: - schema: - anyOf: - - $ref: '#/components/schemas/FileMetaData' - - $ref: '#/components/schemas/Envelope_FileMetaDataGet_' - title: Response Get File Metadata - /v0/locations/{location_id}/files/{file_id}: - get: - tags: - - files - summary: Returns download link for requested file - description: creates a download file link if user has the rights to - operationId: download_file - parameters: - - name: location_id - in: path - required: true - schema: - type: integer - title: Location Id - - name: file_id - in: path - required: true - schema: - anyOf: - - type: string - pattern: ^(api|([0-9a-fA-F]{8}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{12}))\/([0-9a-fA-F]{8}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{12})\/(.+)$ - - type: string - pattern: ^N:package:[0-9a-fA-F]{8}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{12}$ - title: File Id - - name: user_id - in: query - required: true - schema: - type: integer - exclusiveMinimum: true - title: User Id - minimum: 0 - - name: link_type - in: query - required: false - schema: - $ref: '#/components/schemas/LinkType' - default: PRESIGNED - responses: - '200': - description: Successful Response - content: - application/json: - schema: - $ref: '#/components/schemas/Envelope_PresignedLink_' - put: - tags: - - files - summary: Returns upload link - description: creates one or more upload file links if user has the rights to, - expects the client to complete/abort upload - operationId: upload_file - parameters: - - name: location_id - in: path - required: true - schema: - type: integer - title: Location Id - - name: file_id - in: path - required: true - schema: - anyOf: - - type: string - pattern: ^(api|([0-9a-fA-F]{8}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{12}))\/([0-9a-fA-F]{8}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{12})\/(.+)$ - - type: string - pattern: ^N:package:[0-9a-fA-F]{8}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{12}$ - title: File Id - - name: file_size - in: query - required: true - schema: - anyOf: - - type: string - pattern: ^\s*(\d*\.?\d+)\s*(\w+)? 
- - type: integer - minimum: 0 - - type: 'null' - title: File Size - - name: link_type - in: query - required: false - schema: - $ref: '#/components/schemas/LinkType' - default: PRESIGNED - - name: is_directory - in: query - required: false - schema: - type: boolean - default: false - title: Is Directory - responses: - '200': - description: Successful Response - content: - application/json: - schema: - anyOf: - - $ref: '#/components/schemas/Envelope_FileUploadSchema_' - - $ref: '#/components/schemas/Envelope_Url_' - title: Response Upload File - delete: - tags: - - files - summary: Deletes File - description: deletes file if user has the rights to - operationId: delete_file - parameters: - - name: location_id - in: path - required: true - schema: - type: integer - title: Location Id - - name: file_id - in: path - required: true - schema: - anyOf: - - type: string - pattern: ^(api|([0-9a-fA-F]{8}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{12}))\/([0-9a-fA-F]{8}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{12})\/(.+)$ - - type: string - pattern: ^N:package:[0-9a-fA-F]{8}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{12}$ - title: File Id - - name: user_id - in: query - required: true - schema: - type: integer - exclusiveMinimum: true - title: User Id - minimum: 0 - responses: - '204': - description: Successful Response - /v0/locations/{location_id}/files/{file_id}:abort: - post: - tags: - - files - summary: Abort Upload File - description: 'aborts an upload if user has the rights to, and reverts - - to the latest version if available, else will delete the file' - operationId: abort_upload_file - parameters: - - name: location_id - in: path - required: true - schema: - type: integer - title: Location Id - - name: file_id - in: path - required: true - schema: - anyOf: - - type: string - pattern: ^(api|([0-9a-fA-F]{8}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{12}))\/([0-9a-fA-F]{8}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{12})\/(.+)$ - - type: string - pattern: ^N:package:[0-9a-fA-F]{8}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{12}$ - title: File Id - - name: user_id - in: query - required: true - schema: - type: integer - exclusiveMinimum: true - title: User Id - minimum: 0 - responses: - '204': - description: Successful Response - /v0/locations/{location_id}/files/{file_id}:complete: - post: - tags: - - files - summary: Complete Upload File - description: completes an upload if the user has the rights to - operationId: complete_upload_file - parameters: - - name: location_id - in: path - required: true - schema: - type: integer - title: Location Id - - name: file_id - in: path - required: true - schema: - anyOf: - - type: string - pattern: ^(api|([0-9a-fA-F]{8}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{12}))\/([0-9a-fA-F]{8}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{12})\/(.+)$ - - type: string - pattern: ^N:package:[0-9a-fA-F]{8}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{12}$ - title: File Id - - name: user_id - in: query - required: true - schema: - type: integer - exclusiveMinimum: true - title: User Id - minimum: 0 - requestBody: - required: true - content: - application/json: - schema: - $ref: '#/components/schemas/Envelope_FileUploadCompletionBody_' - responses: - '202': - description: Successful Response - content: - application/json: - schema: - $ref: '#/components/schemas/Envelope_FileUploadCompleteResponse_' - 
/v0/locations/{location_id}/files/{file_id}:complete/futures/{future_id}: - post: - tags: - - files - summary: Check for upload completion - description: Returns state of upload completion - operationId: is_completed_upload_file - parameters: - - name: location_id - in: path - required: true - schema: - type: integer - title: Location Id - - name: file_id - in: path - required: true - schema: - anyOf: - - type: string - pattern: ^(api|([0-9a-fA-F]{8}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{12}))\/([0-9a-fA-F]{8}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{12})\/(.+)$ - - type: string - pattern: ^N:package:[0-9a-fA-F]{8}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{12}$ - title: File Id - - name: future_id - in: path - required: true - schema: - type: string - title: Future Id - - name: user_id - in: query - required: true - schema: - type: integer - exclusiveMinimum: true - title: User Id - minimum: 0 - responses: - '200': - description: Successful Response - content: - application/json: - schema: - $ref: '#/components/schemas/Envelope_FileUploadCompleteFutureResponse_' - /v0/: - get: - tags: - - health - summary: health check endpoint - description: Current service health - operationId: health_check - responses: - '200': - description: Successful Response - content: - application/json: - schema: - $ref: '#/components/schemas/Envelope_HealthCheck_' - /v0/status: - get: - tags: - - health - summary: returns the status of the services inside - description: returns the status of all the external dependencies - operationId: get_status - responses: - '200': - description: Successful Response - content: - application/json: - schema: - $ref: '#/components/schemas/Envelope_AppStatusCheck_' - /v0/files/{file_id}:soft-copy: - post: - tags: - - files - summary: copy file as soft link - description: creates and returns a soft link - operationId: copy_as_soft_link - parameters: - - name: file_id - in: path - required: true - schema: - anyOf: - - type: string - pattern: ^(api|([0-9a-fA-F]{8}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{12}))\/([0-9a-fA-F]{8}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{12})\/(.+)$ - - type: string - pattern: ^N:package:[0-9a-fA-F]{8}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{12}$ - title: File Id - - name: user_id - in: query - required: true - schema: - type: integer - exclusiveMinimum: true - title: User Id - minimum: 0 - requestBody: - required: true - content: - application/json: - schema: - $ref: '#/components/schemas/SoftCopyBody' - responses: - '200': - description: Successful Response - content: - application/json: - schema: - $ref: '#/components/schemas/FileMetaDataGet' - /v0/simcore-s3:access: - post: - tags: - - simcore-s3 - summary: gets or creates the a temporary access - description: returns a set of S3 credentials - operationId: get_or_create_temporary_s3_access - parameters: - - name: user_id - in: query - required: true - schema: - type: integer - exclusiveMinimum: true - title: User Id - minimum: 0 - responses: - '200': - description: Successful Response - content: - application/json: - schema: - $ref: '#/components/schemas/Envelope_S3Settings_' - /v0/simcore-s3/folders: - post: - tags: - - simcore-s3 - summary: copies folders from project - description: copies folders from project - operationId: copy_folders_from_project - parameters: - - name: user_id - in: query - required: true - schema: - type: integer - exclusiveMinimum: true - title: User Id 
- minimum: 0 - requestBody: - required: true - content: - application/json: - schema: - $ref: '#/components/schemas/FoldersBody' - responses: - '200': - description: Successful Response - content: - application/json: - schema: - $ref: '#/components/schemas/Envelope_TaskGet_' - /v0/simcore-s3/folders/{folder_id}: - delete: - tags: - - simcore-s3 - summary: delete folders from project - description: removes folders from a project - operationId: delete_folders_of_project - parameters: - - name: folder_id - in: path - required: true - schema: - type: string - title: Folder Id - - name: user_id - in: query - required: true - schema: - type: integer - exclusiveMinimum: true - title: User Id - minimum: 0 - - name: node_id - in: query - required: false - schema: - anyOf: - - type: string - format: uuid - - type: 'null' - title: Node Id - responses: - '204': - description: Successful Response - /v0/simcore-s3/files/metadata:search: - post: - tags: - - simcore-s3 - summary: search for owned files - description: search for files starting with `startswith` and/or matching a sha256_checksum - in the file_meta_data table - operationId: search_files - parameters: - - name: user_id - in: query - required: true - schema: - type: integer - exclusiveMinimum: true - title: User Id - minimum: 0 - - name: startswith - in: query - required: false - schema: - anyOf: - - type: string - - type: 'null' - title: Startswith - - name: sha256_checksum - in: query - required: false - schema: - anyOf: - - type: string - pattern: ^[a-fA-F0-9]{64}$ - - type: 'null' - title: Sha256 Checksum - - name: kind - in: query - required: true - schema: - enum: - - owned - const: owned - type: string - title: Kind - - name: limit - in: query - required: false - schema: - type: integer - maximum: 50 - minimum: 1 - default: 20 - title: Limit - - name: offset - in: query - required: false - schema: - type: integer - minimum: 0 - default: 0 - title: Offset - responses: - '200': - description: Successful Response - content: - application/json: - schema: - $ref: '#/components/schemas/Envelope_FileMetaDataGet_' - /v0/futures: - get: - tags: - - tasks - summary: list current long running tasks - description: list current long running tasks - operationId: list_tasks - responses: - '200': - description: Successful Response - content: - application/json: - schema: - $ref: '#/components/schemas/Envelope_TaskGet_' - /v0/futures/{task_id}: - get: - tags: - - tasks - summary: gets the status of the task - description: gets the status of the task - operationId: get_task_status - parameters: - - name: task_id - in: path - required: true - schema: - type: string - title: Task Id - responses: - '200': - description: Successful Response - content: - application/json: - schema: - $ref: '#/components/schemas/Envelope_TaskStatus_' - delete: - tags: - - tasks - summary: cancels and removes the task - description: cancels and removes the task - operationId: cancel_and_delete_task - parameters: - - name: task_id - in: path - required: true - schema: - type: string - title: Task Id - responses: - '204': - description: Successful Response - /v0/futures/{task_id}/result: - get: - tags: - - tasks - summary: get result of the task - description: get result of the task - operationId: get_task_result - parameters: - - name: task_id - in: path - required: true - schema: - type: string - title: Task Id - responses: - '200': - description: Successful Response - content: - application/json: - schema: - title: Response Get Task Result -components: - schemas: - 
AppStatusCheck: - properties: - app_name: - type: string - title: App Name - description: Application name - version: - type: string - title: Version - description: Application's version - services: - type: object - title: Services - description: Other backend services connected from this service - default: {} - sessions: - anyOf: - - type: object - - type: 'null' - title: Sessions - description: Client sessions info. If single session per app, then is denoted - as main - default: {} - url: - anyOf: - - type: string - minLength: 1 - format: uri - - type: 'null' - title: Url - description: Link to current resource - diagnostics_url: - anyOf: - - type: string - minLength: 1 - format: uri - - type: 'null' - title: Diagnostics Url - description: Link to diagnostics report sub-resource. This MIGHT take some - time to compute - type: object - required: - - app_name - - version - title: AppStatusCheck - DatasetMetaData: - properties: - dataset_id: - anyOf: - - type: string - format: uuid - - type: string - pattern: ^N:dataset:[0-9a-fA-F]{8}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{12}$ - title: Dataset Id - display_name: - type: string - title: Display Name - additionalProperties: false - type: object - required: - - dataset_id - - display_name - title: DatasetMetaData - Envelope_AppStatusCheck_: - properties: - data: - anyOf: - - $ref: '#/components/schemas/AppStatusCheck' - - type: 'null' - error: - anyOf: - - {} - - type: 'null' - title: Error - type: object - title: Envelope[AppStatusCheck] - Envelope_FileMetaDataGet_: - properties: - data: - anyOf: - - $ref: '#/components/schemas/FileMetaDataGet' - - type: 'null' - error: - anyOf: - - {} - - type: 'null' - title: Error - type: object - title: Envelope[FileMetaDataGet] - Envelope_FileUploadCompleteFutureResponse_: - properties: - data: - anyOf: - - $ref: '#/components/schemas/FileUploadCompleteFutureResponse' - - type: 'null' - error: - anyOf: - - {} - - type: 'null' - title: Error - type: object - title: Envelope[FileUploadCompleteFutureResponse] - Envelope_FileUploadCompleteResponse_: - properties: - data: - anyOf: - - $ref: '#/components/schemas/FileUploadCompleteResponse' - - type: 'null' - error: - anyOf: - - {} - - type: 'null' - title: Error - type: object - title: Envelope[FileUploadCompleteResponse] - Envelope_FileUploadCompletionBody_: - properties: - data: - anyOf: - - $ref: '#/components/schemas/FileUploadCompletionBody' - - type: 'null' - error: - anyOf: - - {} - - type: 'null' - title: Error - type: object - title: Envelope[FileUploadCompletionBody] - Envelope_FileUploadSchema_: - properties: - data: - anyOf: - - $ref: '#/components/schemas/FileUploadSchema' - - type: 'null' - error: - anyOf: - - {} - - type: 'null' - title: Error - type: object - title: Envelope[FileUploadSchema] - Envelope_HealthCheck_: - properties: - data: - anyOf: - - $ref: '#/components/schemas/HealthCheck' - - type: 'null' - error: - anyOf: - - {} - - type: 'null' - title: Error - type: object - title: Envelope[HealthCheck] - Envelope_PresignedLink_: - properties: - data: - anyOf: - - $ref: '#/components/schemas/PresignedLink' - - type: 'null' - error: - anyOf: - - {} - - type: 'null' - title: Error - type: object - title: Envelope[PresignedLink] - Envelope_S3Settings_: - properties: - data: - anyOf: - - $ref: '#/components/schemas/S3Settings' - - type: 'null' - error: - anyOf: - - {} - - type: 'null' - title: Error - type: object - title: Envelope[S3Settings] - Envelope_TableSynchronisation_: - properties: - data: - anyOf: - - $ref: 
'#/components/schemas/TableSynchronisation' - - type: 'null' - error: - anyOf: - - {} - - type: 'null' - title: Error - type: object - title: Envelope[TableSynchronisation] - Envelope_TaskGet_: - properties: - data: - anyOf: - - $ref: '#/components/schemas/TaskGet' - - type: 'null' - error: - anyOf: - - {} - - type: 'null' - title: Error - type: object - title: Envelope[TaskGet] - Envelope_TaskStatus_: - properties: - data: - anyOf: - - $ref: '#/components/schemas/TaskStatus' - - type: 'null' - error: - anyOf: - - {} - - type: 'null' - title: Error - type: object - title: Envelope[TaskStatus] - Envelope_Url_: - properties: - data: - anyOf: - - type: string - minLength: 1 - format: uri - - type: 'null' - title: Data - error: - anyOf: - - {} - - type: 'null' - title: Error - type: object - title: Envelope[Url] - Envelope_list_DatasetMetaData__: - properties: - data: - anyOf: - - items: - $ref: '#/components/schemas/DatasetMetaData' - type: array - - type: 'null' - title: Data - error: - anyOf: - - {} - - type: 'null' - title: Error - type: object - title: Envelope[list[DatasetMetaData]] - Envelope_list_FileMetaDataGet__: - properties: - data: - anyOf: - - items: - $ref: '#/components/schemas/FileMetaDataGet' - type: array - - type: 'null' - title: Data - error: - anyOf: - - {} - - type: 'null' - title: Error - type: object - title: Envelope[list[FileMetaDataGet]] - FileMetaData: - properties: - file_uuid: - type: string - title: File Uuid - description: NOT a unique ID, like (api|uuid)/uuid/file_name or DATCORE - folder structure - location_id: - type: integer - title: Location Id - description: Storage location - project_name: - anyOf: - - type: string - - type: 'null' - title: Project Name - description: optional project name, used by frontend to display path - node_name: - anyOf: - - type: string - - type: 'null' - title: Node Name - description: optional node name, used by frontend to display path - file_name: - type: string - title: File Name - description: Display name for a file - file_id: - anyOf: - - type: string - pattern: ^(api|([0-9a-fA-F]{8}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{12}))\/([0-9a-fA-F]{8}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{12})\/(.+)$ - - type: string - pattern: ^N:package:[0-9a-fA-F]{8}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{12}$ - title: File Id - description: THIS IS the unique ID for the file. either (api|project_id)/node_id/file_name.ext - for S3 and N:package:UUID for datcore - created_at: - type: string - format: date-time - title: Created At - last_modified: - type: string - format: date-time - title: Last Modified - file_size: - anyOf: - - type: integer - enum: - - -1 - const: -1 - - type: integer - minimum: 0 - title: File Size - description: File size in bytes (-1 means invalid) - default: -1 - entity_tag: - anyOf: - - type: string - - type: 'null' - title: Entity Tag - description: Entity tag (or ETag), represents a specific version of the - file, None if invalid upload or datcore - is_soft_link: - type: boolean - title: Is Soft Link - description: If true, this file is a soft link.i.e. 
is another entry with - the same object_name - default: false - is_directory: - type: boolean - title: Is Directory - description: if True this is a directory - default: false - sha256_checksum: - anyOf: - - type: string - pattern: ^[a-fA-F0-9]{64}$ - - type: 'null' - title: Sha256 Checksum - upload_id: - anyOf: - - type: string - - type: 'null' - title: Upload Id - upload_expires_at: - anyOf: - - type: string - format: date-time - - type: 'null' - title: Upload Expires At - location: - type: string - title: Location - bucket_name: - type: string - title: Bucket Name - object_name: - type: string - title: Object Name - project_id: - anyOf: - - type: string - format: uuid - - type: 'null' - title: Project Id - node_id: - anyOf: - - type: string - format: uuid - - type: 'null' - title: Node Id - user_id: - anyOf: - - type: integer - exclusiveMinimum: true - minimum: 0 - - type: 'null' - title: User Id - type: object - required: - - file_uuid - - location_id - - file_name - - file_id - - created_at - - last_modified - - sha256_checksum - - location - - bucket_name - - object_name - - project_id - - node_id - - user_id - title: FileMetaData - FileMetaDataGet: - properties: - file_uuid: - type: string - title: File Uuid - description: NOT a unique ID, like (api|uuid)/uuid/file_name or DATCORE - folder structure - location_id: - type: integer - title: Location Id - description: Storage location - project_name: - anyOf: - - type: string - - type: 'null' - title: Project Name - description: optional project name, used by frontend to display path - node_name: - anyOf: - - type: string - - type: 'null' - title: Node Name - description: optional node name, used by frontend to display path - file_name: - type: string - title: File Name - description: Display name for a file - file_id: - anyOf: - - type: string - pattern: ^(api|([0-9a-fA-F]{8}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{12}))\/([0-9a-fA-F]{8}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{12})\/(.+)$ - - type: string - pattern: ^N:package:[0-9a-fA-F]{8}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{12}$ - title: File Id - description: THIS IS the unique ID for the file. either (api|project_id)/node_id/file_name.ext - for S3 and N:package:UUID for datcore - created_at: - type: string - format: date-time - title: Created At - last_modified: - type: string - format: date-time - title: Last Modified - file_size: - anyOf: - - type: integer - enum: - - -1 - const: -1 - - type: integer - minimum: 0 - title: File Size - description: File size in bytes (-1 means invalid) - default: -1 - entity_tag: - anyOf: - - type: string - - type: 'null' - title: Entity Tag - description: Entity tag (or ETag), represents a specific version of the - file, None if invalid upload or datcore - is_soft_link: - type: boolean - title: Is Soft Link - description: If true, this file is a soft link.i.e. is another entry with - the same object_name - default: false - is_directory: - type: boolean - title: Is Directory - description: if True this is a directory - default: false - sha256_checksum: - anyOf: - - type: string - pattern: ^[a-fA-F0-9]{64}$ - - type: 'null' - title: Sha256 Checksum - description: 'SHA256 message digest of the file content. Main purpose: cheap - lookup.' 
- type: object - required: - - file_uuid - - location_id - - file_name - - file_id - - created_at - - last_modified - title: FileMetaDataGet - FileUploadCompleteFutureResponse: - properties: - state: - $ref: '#/components/schemas/FileUploadCompleteState' - e_tag: - anyOf: - - type: string - - type: 'null' - title: E Tag - type: object - required: - - state - title: FileUploadCompleteFutureResponse - FileUploadCompleteLinks: - properties: - state: - type: string - minLength: 1 - format: uri - title: State - type: object - required: - - state - title: FileUploadCompleteLinks - FileUploadCompleteResponse: - properties: - links: - $ref: '#/components/schemas/FileUploadCompleteLinks' - type: object - required: - - links - title: FileUploadCompleteResponse - FileUploadCompleteState: - type: string - enum: - - ok - - nok - title: FileUploadCompleteState - FileUploadCompletionBody: - properties: - parts: - items: - $ref: '#/components/schemas/UploadedPart' - type: array - title: Parts - type: object - required: - - parts - title: FileUploadCompletionBody - FileUploadLinks: - properties: - abort_upload: - type: string - minLength: 1 - format: uri - title: Abort Upload - complete_upload: - type: string - minLength: 1 - format: uri - title: Complete Upload - type: object - required: - - abort_upload - - complete_upload - title: FileUploadLinks - FileUploadSchema: - properties: - chunk_size: - type: integer - minimum: 0 - title: Chunk Size - urls: - items: - type: string - minLength: 1 - format: uri - type: array - title: Urls - links: - $ref: '#/components/schemas/FileUploadLinks' - type: object - required: - - chunk_size - - urls - - links - title: FileUploadSchema - FoldersBody: - properties: - source: - type: object - title: Source - destination: - type: object - title: Destination - nodes_map: - additionalProperties: - type: string - format: uuid - type: object - title: Nodes Map - type: object - title: FoldersBody - HealthCheck: - properties: - name: - anyOf: - - type: string - - type: 'null' - title: Name - status: - anyOf: - - type: string - - type: 'null' - title: Status - api_version: - anyOf: - - type: string - - type: 'null' - title: Api Version - version: - anyOf: - - type: string - - type: 'null' - title: Version - type: object - required: - - name - - status - - api_version - - version - title: HealthCheck - LinkType: - type: string - enum: - - PRESIGNED - - S3 - title: LinkType - PresignedLink: - properties: - link: - type: string - minLength: 1 - format: uri - title: Link - type: object - required: - - link - title: PresignedLink - S3Settings: - properties: - S3_ACCESS_KEY: - type: string - maxLength: 50 - minLength: 1 - title: S3 Access Key - S3_BUCKET_NAME: - type: string - maxLength: 50 - minLength: 1 - title: S3 Bucket Name - S3_ENDPOINT: - anyOf: - - type: string - minLength: 1 - format: uri - - type: 'null' - title: S3 Endpoint - description: do not define if using standard AWS - S3_REGION: - type: string - maxLength: 50 - minLength: 1 - title: S3 Region - S3_SECRET_KEY: - type: string - maxLength: 50 - minLength: 1 - title: S3 Secret Key - additionalProperties: false - type: object - required: - - S3_ACCESS_KEY - - S3_BUCKET_NAME - - S3_REGION - - S3_SECRET_KEY - title: S3Settings - SoftCopyBody: - properties: - link_id: - type: string - pattern: ^(api|([0-9a-fA-F]{8}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{12}))\/([0-9a-fA-F]{8}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{12})\/(.+)$ - title: Link Id - type: object - required: - - link_id 
- title: SoftCopyBody - TableSynchronisation: - properties: - dry_run: - anyOf: - - type: boolean - - type: 'null' - title: Dry Run - fire_and_forget: - anyOf: - - type: boolean - - type: 'null' - title: Fire And Forget - removed: - items: - type: string - type: array - title: Removed - type: object - required: - - removed - title: TableSynchronisation - TaskGet: - properties: - task_id: - type: string - title: Task Id - task_name: - type: string - title: Task Name - status_href: - type: string - title: Status Href - result_href: - type: string - title: Result Href - abort_href: - type: string - title: Abort Href - type: object - required: - - task_id - - task_name - - status_href - - result_href - - abort_href - title: TaskGet - TaskProgress: - properties: - task_id: - anyOf: - - type: string - - type: 'null' - title: Task Id - message: - type: string - title: Message - default: '' - percent: - type: number - maximum: 1.0 - minimum: 0.0 - title: Percent - default: 0.0 - type: object - title: TaskProgress - description: 'Helps the user to keep track of the progress. Progress is expected - to be - - defined as a float bound between 0.0 and 1.0' - TaskStatus: - properties: - task_progress: - $ref: '#/components/schemas/TaskProgress' - done: - type: boolean - title: Done - started: - type: string - format: date-time - title: Started - type: object - required: - - task_progress - - done - - started - title: TaskStatus - UploadedPart: - properties: - number: - type: integer - exclusiveMinimum: true - title: Number - minimum: 0 - e_tag: - type: string - title: E Tag - type: object - required: - - number - - e_tag - title: UploadedPart -tags: -- name: datasets -- name: files -- name: health -- name: locations -- name: tasks -- name: simcore-s3 diff --git a/services/storage/src/simcore_service_storage/application.py b/services/storage/src/simcore_service_storage/application.py deleted file mode 100644 index 16aa8f837eb..00000000000 --- a/services/storage/src/simcore_service_storage/application.py +++ /dev/null @@ -1,108 +0,0 @@ -""" Main's application module for simcore_service_storage service - - Functions to create, setup and run an aiohttp application provided a settingsuration object -""" - -import logging -from typing import Final - -from aiohttp import web -from servicelib.aiohttp.application import create_safe_application -from servicelib.aiohttp.application_keys import APP_CONFIG_KEY -from servicelib.aiohttp.dev_error_logger import setup_dev_error_logger -from servicelib.aiohttp.monitoring import setup_monitoring -from servicelib.aiohttp.profiler_middleware import profiling_middleware -from servicelib.aiohttp.tracing import setup_tracing -from settings_library.tracing import TracingSettings - -from ._meta import APP_NAME, APP_STARTED_BANNER_MSG, VERSION -from .db import setup_db -from .dsm import setup_dsm -from .dsm_cleaner import setup_dsm_cleaner -from .long_running_tasks import setup_rest_api_long_running_tasks -from .redis import setup_redis -from .rest import setup_rest -from .s3 import setup_s3 -from .settings import Settings -from .utils_handlers import dsm_exception_handler - -_ACCESS_LOG_FORMAT: Final[ - str -] = '%a %t "%r" %s %b [%Dus] "%{Referer}i" "%{User-Agent}i"' - -_LOG_LEVEL_STEP = logging.CRITICAL - logging.ERROR -_NOISY_LOGGERS = ( - "aiobotocore", - "aio_pika", - "aiormq", - "botocore", - "sqlalchemy", -) -_logger = logging.getLogger(__name__) - - -def create(settings: Settings) -> web.Application: - _logger.debug( - "Initializing app with settings:\n%s", - 
settings.model_dump_json(indent=2), - ) - - app = create_safe_application(None) - app[APP_CONFIG_KEY] = settings - # Tracing - tracing_settings: TracingSettings | None = app[APP_CONFIG_KEY].STORAGE_TRACING - if tracing_settings: - setup_tracing( - app, - tracing_settings=tracing_settings, - service_name=APP_NAME, - ) - - setup_db(app) - setup_s3(app) - - setup_rest_api_long_running_tasks(app) - setup_rest(app) - - setup_dsm(app) - if settings.STORAGE_CLEANER_INTERVAL_S: - setup_redis(app) - setup_dsm_cleaner(app) - - app.middlewares.append(dsm_exception_handler) - - if settings.STORAGE_PROFILING: - app.middlewares.append(profiling_middleware) - - if settings.LOG_LEVEL == "DEBUG": - setup_dev_error_logger(app) - - if settings.STORAGE_MONITORING_ENABLED: - setup_monitoring(app, APP_NAME, version=f"{VERSION}") - - # keep mostly quiet noisy loggers - quiet_level: int = max( - min(logging.root.level + _LOG_LEVEL_STEP, logging.CRITICAL), logging.WARNING - ) - for name in _NOISY_LOGGERS: - logging.getLogger(name).setLevel(quiet_level) - - return app - - -def run(settings: Settings, app: web.Application | None = None): - _logger.debug("Serving application ") - if not app: - app = create(settings) - - async def welcome_banner(_app: web.Application): - print(APP_STARTED_BANNER_MSG, flush=True) # noqa: T201 - - app.on_startup.append(welcome_banner) - - web.run_app( - app, - host=settings.STORAGE_HOST, - port=settings.STORAGE_PORT, - access_log_format=_ACCESS_LOG_FORMAT, - ) diff --git a/services/storage/src/simcore_service_storage/cli.py b/services/storage/src/simcore_service_storage/cli.py index bf37855e6c8..bcf4086f4aa 100644 --- a/services/storage/src/simcore_service_storage/cli.py +++ b/services/storage/src/simcore_service_storage/cli.py @@ -1,44 +1,97 @@ import logging +import os import typer -from servicelib.logging_utils import config_all_loggers -from settings_library.utils_cli import create_settings_command +from settings_library.postgres import PostgresSettings +from settings_library.s3 import S3Settings +from settings_library.utils_cli import ( + create_settings_command, + create_version_callback, + print_as_envfile, +) -from . import application -from .settings import Settings +from ._meta import PROJECT_NAME, __version__ +from .core.settings import ApplicationSettings LOG_LEVEL_STEP = logging.CRITICAL - logging.ERROR -log = logging.getLogger(__name__) +_logger = logging.getLogger(__name__) -main = typer.Typer(name="simcore-service-storage service") +# NOTE: 'main' variable is referred in the setup's entrypoint! +main = typer.Typer(name=PROJECT_NAME) -main.command()(create_settings_command(settings_cls=Settings, logger=log)) +main.command()( + create_settings_command(settings_cls=ApplicationSettings, logger=_logger) +) +main.callback()(create_version_callback(__version__)) @main.command() def run(): """Runs application""" - typer.secho("Resolving settings ...", nl=False) - settings_obj = Settings.create_from_envs() - typer.secho("DONE", fg=typer.colors.GREEN) - - logging.basicConfig(level=settings_obj.log_level) - logging.root.setLevel(settings_obj.log_level) - config_all_loggers( - log_format_local_dev_enabled=settings_obj.STORAGE_LOG_FORMAT_LOCAL_DEV_ENABLED, - logger_filter_mapping=settings_obj.STORAGE_LOG_FILTER_MAPPING, - tracing_settings=settings_obj.STORAGE_TRACING, + typer.secho("Sorry, this entrypoint is intentionally disabled. 
Use instead") + typer.secho( + f"$ uvicorn {PROJECT_NAME}.main:the_app", + fg=typer.colors.BLUE, ) - # keep mostly quiet noisy loggers - quiet_level: int = max( - min(logging.root.level + LOG_LEVEL_STEP, logging.CRITICAL), logging.WARNING + +@main.command() +def echo_dotenv(ctx: typer.Context, *, minimal: bool = True) -> None: + """Generates and displays a valid environment variables file (also known as dot-envfile) + + Usage: + $ simcore-service echo-dotenv > .env + $ cat .env + $ set -o allexport; source .env; set +o allexport + """ + assert ctx # nosec + + # NOTE: we normally DO NOT USE `os.environ` to capture env vars but this is a special case + # The idea here is to have a command that can generate a **valid** `.env` file that can be used + # to initialized the app. For that reason we fill required fields of the `ApplicationSettings` with + # "fake" but valid values (e.g. generating a password or adding tags as `replace-with-api-key). + # Nonetheless, if the caller of this CLI has already some **valid** env vars in the environment we want to use them ... + # and that is why we use `os.environ`. + + settings = ApplicationSettings.create_from_envs( + STORAGE_POSTGRES=os.environ.get( + "STORAGE_POSTGRES", + PostgresSettings.create_from_envs( + POSTGRES_HOST=os.environ.get( + "POSTGRES_HOST", "replace-with-postgres-host" + ), + POSTGRES_USER=os.environ.get( + "POSTGRES_USER", "replace-with-postgres-user" + ), + POSTGRES_DB=os.environ.get("POSTGRES_DB", "replace-with-postgres-db"), + POSTGRES_PASSWORD=os.environ.get( + "POSTGRES_PASSWORD", "replace-with-postgres-password" + ), + ), + ), + STORAGE_S3=os.environ.get( # nosec + "STORAGE_S3", + S3Settings.create_from_envs( + S3_BUCKET_NAME=os.environ.get("S3_BUCKET", "replace-with-s3-bucket"), + S3_ACCESS_KEY=os.environ.get( + "S3_ACCESS_KEY", "replace-with-s3-access-key" + ), + S3_SECRET_KEY=os.environ.get( + "S3_SECRET_KEY", "replace-with-s3-secret-key" + ), + S3_ENDPOINT=os.environ.get( + "S3_ENDPOINT", "https://s3.replace-with-s3-endpoint" + ), + S3_REGION=os.environ.get("S3_REGION", "replace-with-s3-region"), + ), + ), ) - logging.getLogger("engineio").setLevel(quiet_level) - logging.getLogger("openapi_spec_validator").setLevel(quiet_level) - logging.getLogger("sqlalchemy").setLevel(quiet_level) - logging.getLogger("sqlalchemy.engine").setLevel(quiet_level) - typer.secho("Starting app ... 
") - application.run(settings_obj) + print_as_envfile( + settings, + compact=False, + verbose=True, + show_secrets=True, + exclude_unset=minimal, + ) diff --git a/services/storage/src/simcore_service_storage/constants.py b/services/storage/src/simcore_service_storage/constants.py index 498a7c0eebb..fecbfb54e87 100644 --- a/services/storage/src/simcore_service_storage/constants.py +++ b/services/storage/src/simcore_service_storage/constants.py @@ -1,16 +1,14 @@ from typing import Final from aws_library.s3 import PRESIGNED_LINK_MAX_SIZE, S3_MAX_FILE_SIZE -from models_library.api_schemas_storage import LinkType +from models_library.api_schemas_storage.storage_schemas import LinkType from pydantic import ByteSize -from servicelib.aiohttp import application_keys RETRY_WAIT_SECS = 2 MAX_CHUNK_SIZE = 1024 MINUTE = 60 - -APP_CONFIG_KEY = application_keys.APP_CONFIG_KEY # app-storage-key for config object +UPLOAD_TASKS_KEY = f"{__name__}.upload_tasks" # DSM locations SIMCORE_S3_ID = 0 @@ -39,12 +37,7 @@ MAX_CONCURRENT_REST_CALLS: Final[int] = 10 # DATABASE ---------------------------- -APP_AIOPG_ENGINE_KEY = f"{__name__}.aiopg_engine" MAX_CONCURRENT_DB_TASKS: Final[int] = 2 -# DATA STORAGE MANAGER ---------------------------------- -APP_DSM_KEY = f"{__name__}.DSM" -APP_S3_KEY = f"{__name__}.S3_CLIENT" - EXPAND_DIR_MAX_ITEM_COUNT: Final[int] = 1000 diff --git a/services/storage/src/simcore_service_storage/core/__init__.py b/services/storage/src/simcore_service_storage/core/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/services/storage/src/simcore_service_storage/core/application.py b/services/storage/src/simcore_service_storage/core/application.py new file mode 100644 index 00000000000..b70c2cef302 --- /dev/null +++ b/services/storage/src/simcore_service_storage/core/application.py @@ -0,0 +1,131 @@ +"""Main's application module for simcore_service_storage service + +Functions to create, setup and run an aiohttp application provided a settingsuration object +""" + +import logging + +from common_library.basic_types import BootModeEnum +from fastapi import FastAPI +from fastapi.middleware.gzip import GZipMiddleware +from fastapi_pagination import add_pagination +from servicelib.fastapi import timing_middleware +from servicelib.fastapi.cancellation_middleware import RequestCancellationMiddleware +from servicelib.fastapi.client_session import setup_client_session +from servicelib.fastapi.openapi import override_fastapi_openapi_method +from servicelib.fastapi.profiler import ProfilerMiddleware +from servicelib.fastapi.prometheus_instrumentation import ( + setup_prometheus_instrumentation, +) +from servicelib.fastapi.tracing import initialize_tracing +from starlette.middleware.base import BaseHTTPMiddleware + +from .._meta import ( + API_VERSION, + API_VTAG, + APP_FINISHED_BANNER_MSG, + APP_NAME, + APP_STARTED_BANNER_MSG, + APP_WORKER_STARTED_BANNER_MSG, +) +from ..api.rest.routes import setup_rest_api_routes +from ..api.rpc.routes import setup_rpc_api_routes +from ..dsm import setup_dsm +from ..dsm_cleaner import setup_dsm_cleaner +from ..exceptions.handlers import set_exception_handlers +from ..modules.celery import setup_celery_client +from ..modules.db import setup_db +from ..modules.long_running_tasks import setup_rest_api_long_running_tasks_for_uploads +from ..modules.rabbitmq import setup as setup_rabbitmq +from ..modules.redis import setup as setup_redis +from ..modules.s3 import setup_s3 +from .settings import ApplicationSettings + +_LOG_LEVEL_STEP = 
logging.CRITICAL - logging.ERROR +_NOISY_LOGGERS = ( + "aio_pika", + "aiobotocore", + "aiormq", + "botocore", + "httpcore", + "urllib3", + "werkzeug", +) +_logger = logging.getLogger(__name__) + + +def create_app(settings: ApplicationSettings) -> FastAPI: # noqa: C901 + # keep mostly quiet noisy loggers + quiet_level: int = max( + min(logging.root.level + _LOG_LEVEL_STEP, logging.CRITICAL), logging.WARNING + ) + for name in _NOISY_LOGGERS: + logging.getLogger(name).setLevel(quiet_level) + + _logger.info("app settings: %s", settings.model_dump_json(indent=1)) + + app = FastAPI( + debug=settings.SC_BOOT_MODE + in [BootModeEnum.DEBUG, BootModeEnum.DEVELOPMENT, BootModeEnum.LOCAL], + title=APP_NAME, + description="Service that manages osparc storage backend", + version=API_VERSION, + openapi_url=f"/api/{API_VTAG}/openapi.json", + docs_url="/dev/doc", + redoc_url=None, # default disabled + ) + override_fastapi_openapi_method(app) + add_pagination(app) + + # STATE + app.state.settings = settings + + setup_db(app) + setup_s3(app) + setup_client_session(app) + + if not settings.STORAGE_WORKER_MODE: + setup_rabbitmq(app) + setup_rpc_api_routes(app) + setup_celery_client(app) + setup_rest_api_long_running_tasks_for_uploads(app) + setup_rest_api_routes(app, API_VTAG) + set_exception_handlers(app) + + setup_redis(app) + + setup_dsm(app) + if settings.STORAGE_CLEANER_INTERVAL_S and not settings.STORAGE_WORKER_MODE: + setup_dsm_cleaner(app) + + if settings.STORAGE_PROFILING: + app.add_middleware(ProfilerMiddleware) + + if settings.SC_BOOT_MODE != BootModeEnum.PRODUCTION: + # middleware to time requests (ONLY for development) + app.add_middleware( + BaseHTTPMiddleware, dispatch=timing_middleware.add_process_time_header + ) + + app.add_middleware(GZipMiddleware) + + app.add_middleware(RequestCancellationMiddleware) + + if settings.STORAGE_TRACING: + initialize_tracing(app, settings.STORAGE_TRACING, APP_NAME) + if settings.STORAGE_MONITORING_ENABLED: + setup_prometheus_instrumentation(app) + + async def _on_startup() -> None: + if settings.STORAGE_WORKER_MODE: + print(APP_WORKER_STARTED_BANNER_MSG, flush=True) # noqa: T201 + else: + print(APP_STARTED_BANNER_MSG, flush=True) # noqa: T201 + + async def _on_shutdown() -> None: + print(APP_FINISHED_BANNER_MSG, flush=True) # noqa: T201 + + app.add_event_handler("startup", _on_startup) + app.add_event_handler("shutdown", _on_shutdown) + + return app diff --git a/services/storage/src/simcore_service_storage/resources.py b/services/storage/src/simcore_service_storage/core/resources.py similarity index 100% rename from services/storage/src/simcore_service_storage/resources.py rename to services/storage/src/simcore_service_storage/core/resources.py diff --git a/services/storage/src/simcore_service_storage/core/settings.py b/services/storage/src/simcore_service_storage/core/settings.py new file mode 100644 index 00000000000..4d246a89eeb --- /dev/null +++ b/services/storage/src/simcore_service_storage/core/settings.py @@ -0,0 +1,138 @@ +from typing import Annotated, Self + +from fastapi import FastAPI +from pydantic import AliasChoices, Field, PositiveInt, field_validator, model_validator +from servicelib.logging_utils_filtering import LoggerName, MessageSubstring +from settings_library.application import BaseApplicationSettings +from settings_library.basic_types import LogLevel, PortInt +from settings_library.celery import CelerySettings +from settings_library.postgres import PostgresSettings +from settings_library.rabbit import RabbitSettings +from 
settings_library.redis import RedisSettings
+from settings_library.s3 import S3Settings
+from settings_library.tracing import TracingSettings
+from settings_library.utils_logging import MixinLoggingSettings
+
+from ..modules.datcore_adapter.datcore_adapter_settings import DatcoreAdapterSettings
+
+
+class ApplicationSettings(BaseApplicationSettings, MixinLoggingSettings):
+    STORAGE_HOST: str = "0.0.0.0"  # nosec
+    STORAGE_PORT: PortInt = 8080
+
+    LOG_LEVEL: Annotated[
+        LogLevel,
+        Field(
+            validation_alias=AliasChoices("STORAGE_LOGLEVEL", "LOG_LEVEL", "LOGLEVEL"),
+        ),
+    ] = LogLevel.INFO
+
+    STORAGE_MONITORING_ENABLED: bool = False
+    STORAGE_PROFILING: bool = False
+
+    STORAGE_POSTGRES: Annotated[
+        PostgresSettings | None,
+        Field(json_schema_extra={"auto_default_from_env": True}),
+    ]
+
+    STORAGE_REDIS: Annotated[
+        RedisSettings | None, Field(json_schema_extra={"auto_default_from_env": True})
+    ]
+
+    STORAGE_S3: Annotated[
+        S3Settings | None, Field(json_schema_extra={"auto_default_from_env": True})
+    ]
+
+    STORAGE_CELERY: Annotated[
+        CelerySettings | None, Field(json_schema_extra={"auto_default_from_env": True})
+    ]
+
+    STORAGE_TRACING: Annotated[
+        TracingSettings | None, Field(json_schema_extra={"auto_default_from_env": True})
+    ]
+
+    DATCORE_ADAPTER: Annotated[
+        DatcoreAdapterSettings, Field(json_schema_extra={"auto_default_from_env": True})
+    ]
+
+    STORAGE_SYNC_METADATA_TIMEOUT: Annotated[
+        PositiveInt, Field(180, description="Timeout (seconds) for the metadata sync task")
+    ]
+
+    STORAGE_DEFAULT_PRESIGNED_LINK_EXPIRATION_SECONDS: Annotated[
+        int,
+        Field(
+            3600, description="Default expiration time in seconds for presigned links"
+        ),
+    ]
+
+    STORAGE_CLEANER_INTERVAL_S: Annotated[
+        int | None,
+        Field(
+            30,
+            description="Interval in seconds at which the task cleaning pending uploads runs. Setting it to NULL disables the cleaner.",
+        ),
+    ]
+
+    STORAGE_RABBITMQ: Annotated[
+        RabbitSettings | None,
+        Field(
+            json_schema_extra={"auto_default_from_env": True},
+        ),
+    ]
+
+    STORAGE_S3_CLIENT_MAX_TRANSFER_CONCURRENCY: Annotated[
+        int,
+        Field(
+            4,
+            description="Maximum number of threads used by the underlying S3 client to transfer data to the S3 backend",
+        ),
+    ]
+
+    STORAGE_LOG_FORMAT_LOCAL_DEV_ENABLED: Annotated[
+        bool,
+        Field(
+            default=False,
+            validation_alias=AliasChoices(
+                "STORAGE_LOG_FORMAT_LOCAL_DEV_ENABLED",
+                "LOG_FORMAT_LOCAL_DEV_ENABLED",
+            ),
+            description="Enables the local development log format. WARNING: make sure it is disabled if you want structured logs!",
+        ),
+    ]
+
+    STORAGE_LOG_FILTER_MAPPING: Annotated[
+        dict[LoggerName, list[MessageSubstring]],
+        Field(
+            default_factory=dict,
+            validation_alias=AliasChoices(
+                "STORAGE_LOG_FILTER_MAPPING", "LOG_FILTER_MAPPING"
+            ),
+            description="Dictionary that maps specific loggers (such as 'uvicorn.access' or 'gunicorn.access') to a list of log message patterns that should be filtered out.",
+        ),
+    ]
+
+    STORAGE_WORKER_MODE: Annotated[
+        bool, Field(description="If True, run as a worker")
+    ] = False
+
+    @field_validator("LOG_LEVEL", mode="before")
+    @classmethod
+    def _validate_loglevel(cls, value: str) -> str:
+        log_level: str = cls.validate_log_level(value)
+        return log_level
+
+    @model_validator(mode="after")
+    def _ensure_settings_consistency(self) -> Self:
+        if self.STORAGE_CLEANER_INTERVAL_S is not None and not self.STORAGE_REDIS:
+            msg = (
+                "STORAGE_CLEANER_INTERVAL_S cannot be set without STORAGE_REDIS! "
+                "Please correct the settings."
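Aside on `_ensure_settings_consistency` above: it is a pydantic-v2 `model_validator(mode="after")`, which runs once all fields are parsed and can therefore reject combinations that are individually valid but mutually inconsistent. A minimal self-contained sketch of the same pattern, with hypothetical field names standing in for the real settings groups:

```python
from typing import Self

from pydantic import BaseModel, ValidationError, model_validator


class CleanerSettingsSketch(BaseModel):
    cleaner_interval_s: int | None = 30
    redis_dsn: str | None = None  # stand-in for the real RedisSettings group

    @model_validator(mode="after")
    def _ensure_consistency(self) -> Self:
        if self.cleaner_interval_s is not None and self.redis_dsn is None:
            msg = "cleaner_interval_s cannot be set without redis_dsn"
            raise ValueError(msg)
        return self


try:
    CleanerSettingsSketch()  # cleaner enabled by default, but no redis configured
except ValidationError as err:
    print(err)  # pydantic wraps the ValueError raised inside the validator

CleanerSettingsSketch(cleaner_interval_s=None)  # ok: cleaner explicitly disabled
CleanerSettingsSketch(redis_dsn="redis://localhost:6379")  # ok: both consistent
```

The `ValueError` raised inside the validator surfaces to callers as a regular `ValidationError`, so misconfiguration is caught at settings-construction time rather than at first use.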
+ ) + raise ValueError(msg) + return self + + +def get_application_settings(app: FastAPI) -> ApplicationSettings: + assert isinstance(app.state.settings, ApplicationSettings) # nosec + return app.state.settings diff --git a/services/storage/src/simcore_service_storage/datcore_adapter/datcore_adapter.py b/services/storage/src/simcore_service_storage/datcore_adapter/datcore_adapter.py deleted file mode 100644 index 8fb9a162c52..00000000000 --- a/services/storage/src/simcore_service_storage/datcore_adapter/datcore_adapter.py +++ /dev/null @@ -1,253 +0,0 @@ -import logging -from collections.abc import Callable -from math import ceil -from typing import Any, TypeVar, cast - -import aiohttp -from aiohttp import web -from aiohttp.client import ClientSession -from models_library.api_schemas_storage import DatCoreDatasetName -from models_library.users import UserID -from pydantic import AnyUrl, TypeAdapter -from servicelib.aiohttp.application_keys import APP_CONFIG_KEY -from servicelib.aiohttp.client_session import get_client_session -from servicelib.utils import logged_gather - -from ..constants import DATCORE_ID, DATCORE_STR, MAX_CONCURRENT_REST_CALLS -from ..models import DatasetMetaData, FileMetaData -from .datcore_adapter_exceptions import ( - DatcoreAdapterClientError, - DatcoreAdapterError, - DatcoreAdapterTimeoutError, -) - -log = logging.getLogger(__file__) - - -class _DatcoreAdapterResponseError(DatcoreAdapterError): - """Basic exception for response errors""" - - def __init__(self, status: int, reason: str) -> None: - self.status = status - self.reason = reason - super().__init__( - msg=f"forwarded call failed with status {status}, reason {reason}" - ) - - -async def _request( - app: web.Application, - api_key: str, - api_secret: str, - method: str, - path: str, - *, - json: dict[str, Any] | None = None, - params: dict[str, Any] | None = None, - **request_kwargs, -) -> dict[str, Any] | list[dict[str, Any]]: - datcore_adapter_settings = app[APP_CONFIG_KEY].DATCORE_ADAPTER - url = datcore_adapter_settings.endpoint + path - session: ClientSession = get_client_session(app) - - try: - if request_kwargs is None: - request_kwargs = {} - async with session.request( - method, - url, - raise_for_status=True, - headers={ - "x-datcore-api-key": api_key, - "x-datcore-api-secret": api_secret, - }, - json=json, - params=params, - **request_kwargs, - ) as response: - response_data = await response.json() - assert isinstance(response_data, dict | list) # nosec - return response_data - - except aiohttp.ClientResponseError as exc: - raise _DatcoreAdapterResponseError(status=exc.status, reason=f"{exc}") from exc - - except TimeoutError as exc: - msg = f"datcore-adapter server timed-out: {exc}" - raise DatcoreAdapterTimeoutError(msg) from exc - - except aiohttp.ClientError as exc: - msg = f"unexpected client error: {exc}" - raise DatcoreAdapterClientError(msg) from exc - - -_T = TypeVar("_T") - - -async def _retrieve_all_pages( - app: web.Application, - api_key: str, - api_secret: str, - method: str, - path: str, - return_type_creator: Callable[..., _T], -) -> list[_T]: - page = 1 - objs = [] - while ( - response := cast( - dict[str, Any], - await _request( - app, api_key, api_secret, method, path, params={"page": page} - ), - ) - ) and response.get("items"): - log.debug( - "called %s [%d/%d], received %d objects", - path, - page, - ceil(response.get("total", -1) / response.get("size", 1)), - len(response.get("items", [])), - ) - - objs += [return_type_creator(d) for d in response.get("items", [])] - page += 
1 - return objs - - -async def check_service_health(app: web.Application) -> bool: - datcore_adapter_settings = app[APP_CONFIG_KEY].DATCORE_ADAPTER - url = datcore_adapter_settings.endpoint + "/ready" - session: ClientSession = get_client_session(app) - try: - await session.get(url, raise_for_status=True) - except (TimeoutError, aiohttp.ClientError): - return False - return True - - -async def check_user_can_connect( - app: web.Application, api_key: str, api_secret: str -) -> bool: - if not api_key or not api_secret: - # no need to ask, datcore is an authenticated service - return False - - try: - await _request(app, api_key, api_secret, "GET", "/user/profile") - return True - except DatcoreAdapterError: - return False - - -async def list_all_datasets_files_metadatas( - app: web.Application, user_id: UserID, api_key: str, api_secret: str -) -> list[FileMetaData]: - all_datasets: list[DatasetMetaData] = await list_datasets(app, api_key, api_secret) - results = await logged_gather( - *( - list_all_files_metadatas_in_dataset( - app, - user_id, - api_key, - api_secret, - cast(DatCoreDatasetName, d.dataset_id), - ) - for d in all_datasets - ), - log=log, - max_concurrency=MAX_CONCURRENT_REST_CALLS, - ) - all_files_of_all_datasets: list[FileMetaData] = [] - for data in results: - all_files_of_all_datasets += data - return all_files_of_all_datasets - - -_LIST_ALL_DATASETS_TIMEOUT_S = 60 - - -async def list_all_files_metadatas_in_dataset( - app: web.Application, - user_id: UserID, - api_key: str, - api_secret: str, - dataset_id: DatCoreDatasetName, -) -> list[FileMetaData]: - all_files: list[dict[str, Any]] = cast( - list[dict[str, Any]], - await _request( - app, - api_key, - api_secret, - "GET", - f"/datasets/{dataset_id}/files_legacy", - timeout=aiohttp.ClientTimeout(total=_LIST_ALL_DATASETS_TIMEOUT_S), - ), - ) - return [ - FileMetaData.model_construct( - file_uuid=d["path"], - location_id=DATCORE_ID, - location=DATCORE_STR, - bucket_name=d["dataset_id"], - object_name=d["path"], - file_name=d["name"], - file_id=d["package_id"], - file_size=d["size"], - created_at=d["created_at"], - last_modified=d["last_modified_at"], - project_id=None, - node_id=None, - user_id=user_id, - is_soft_link=False, - ) - for d in all_files - ] - - -async def list_datasets( - app: web.Application, api_key: str, api_secret: str -) -> list[DatasetMetaData]: - all_datasets: list[DatasetMetaData] = await _retrieve_all_pages( - app, - api_key, - api_secret, - "GET", - "/datasets", - lambda d: DatasetMetaData(dataset_id=d["id"], display_name=d["display_name"]), - ) - - return all_datasets - - -async def get_file_download_presigned_link( - app: web.Application, api_key: str, api_secret: str, file_id: str -) -> AnyUrl: - file_download_data = cast( - dict[str, Any], - await _request(app, api_key, api_secret, "GET", f"/files/{file_id}"), - ) - url: AnyUrl = TypeAdapter(AnyUrl).validate_python(file_download_data["link"]) - return url - - -async def get_package_files( - app: web.Application, api_key: str, api_secret: str, package_id: str -) -> list[dict[str, Any]]: - return cast( - list[dict[str, Any]], - await _request( - app, - api_key, - api_secret, - "GET", - f"/packages/{package_id}/files", - ), - ) - - -async def delete_file( - app: web.Application, api_key: str, api_secret: str, file_id: str -) -> None: - await _request(app, api_key, api_secret, "DELETE", f"/files/{file_id}") diff --git a/services/storage/src/simcore_service_storage/datcore_dsm.py b/services/storage/src/simcore_service_storage/datcore_dsm.py index 
3ced68fa3f4..fef1aa992d6 100644 --- a/services/storage/src/simcore_service_storage/datcore_dsm.py +++ b/services/storage/src/simcore_service_storage/datcore_dsm.py @@ -1,8 +1,14 @@ +import contextlib from dataclasses import dataclass +from pathlib import Path -from aiohttp import web -from models_library.api_schemas_storage import ( +import arrow +from fastapi import FastAPI +from models_library.api_schemas_storage.storage_schemas import ( + UNDEFINED_SIZE_TYPE, + DatCoreCollectionName, DatCoreDatasetName, + DatCorePackageName, LinkType, UploadedPart, ) @@ -10,22 +16,54 @@ from models_library.projects import ProjectID from models_library.projects_nodes_io import LocationID, LocationName, StorageFileID from models_library.users import UserID -from pydantic import AnyUrl, ByteSize +from pydantic import AnyUrl, ByteSize, NonNegativeInt, TypeAdapter, ValidationError from .constants import DATCORE_ID, DATCORE_STR -from .datcore_adapter import datcore_adapter -from .datcore_adapter.datcore_adapter_exceptions import DatcoreAdapterMultipleFilesError -from .db_tokens import get_api_token_and_secret from .dsm_factory import BaseDataManager -from .models import DatasetMetaData, FileMetaData, UploadLinks +from .exceptions.errors import DatCoreCredentialsMissingError +from .models import ( + DatasetMetaData, + FileMetaData, + GenericCursor, + PathMetaData, + TotalNumber, + UploadLinks, +) +from .modules.datcore_adapter import datcore_adapter +from .modules.datcore_adapter.datcore_adapter_exceptions import ( + DatcoreAdapterMultipleFilesError, +) +from .modules.db import get_db_engine +from .modules.db.tokens import TokenRepository + + +def _check_api_credentials( + api_token: str | None, api_secret: str | None +) -> tuple[str, str]: + if not api_token or not api_secret: + raise DatCoreCredentialsMissingError + assert api_token is not None + assert api_secret is not None + return api_token, api_secret + + +def _is_collection(file_filter: Path) -> bool: + with contextlib.suppress(ValidationError): + TypeAdapter(DatCoreCollectionName).validate_python(file_filter.parts[1]) + return True + return False @dataclass class DatCoreDataManager(BaseDataManager): - app: web.Application + app: FastAPI - async def _get_datcore_tokens(self, user_id: UserID): - return await get_api_token_and_secret(self.app, user_id) + async def _get_datcore_tokens( + self, user_id: UserID + ) -> tuple[str | None, str | None]: + return await TokenRepository.instance( + get_db_engine(self.app) + ).get_api_token_and_secret(user_id=user_id) @classmethod def get_location_id(cls) -> LocationID: @@ -45,16 +83,171 @@ async def authorized(self, user_id: UserID) -> bool: async def list_datasets(self, user_id: UserID) -> list[DatasetMetaData]: api_token, api_secret = await self._get_datcore_tokens(user_id) - return await datcore_adapter.list_datasets(self.app, api_token, api_secret) + api_token, api_secret = _check_api_credentials(api_token, api_secret) + return await datcore_adapter.list_all_datasets(self.app, api_token, api_secret) async def list_files_in_dataset( self, user_id: UserID, dataset_id: str, *, expand_dirs: bool ) -> list[FileMetaData]: api_token, api_secret = await self._get_datcore_tokens(user_id) + api_token, api_secret = _check_api_credentials(api_token, api_secret) return await datcore_adapter.list_all_files_metadatas_in_dataset( - self.app, user_id, api_token, api_secret, DatCoreDatasetName(dataset_id) + self.app, user_id, api_token, api_secret, dataset_id ) + async def list_paths( + self, + user_id: UserID, + *, + 
file_filter: Path | None, + cursor: GenericCursor | None, + limit: NonNegativeInt, + ) -> tuple[list[PathMetaData], GenericCursor | None, TotalNumber | None]: + """returns a page of the file meta data a user has access to""" + api_token, api_secret = await self._get_datcore_tokens(user_id) + api_token, api_secret = _check_api_credentials(api_token, api_secret) + if not file_filter: + datasets, next_cursor, total = await datcore_adapter.list_datasets( + self.app, + api_key=api_token, + api_secret=api_secret, + cursor=cursor, + limit=limit, + ) + return ( + [ + PathMetaData( + path=Path(f"{dataset.dataset_id}"), + display_path=Path(f"{dataset.display_name}"), + location_id=self.location_id, + location=self.location_name, + bucket_name="fake", + project_id=None, + node_id=None, + user_id=user_id, + created_at=arrow.utcnow().datetime, + last_modified=arrow.utcnow().datetime, + file_meta_data=None, + ) + for dataset in datasets + ], + next_cursor, + total, + ) + assert len(file_filter.parts) + + if len(file_filter.parts) == 1: + # this is looking into a dataset + return await datcore_adapter.list_top_level_objects_in_dataset( + self.app, + user_id=user_id, + api_key=api_token, + api_secret=api_secret, + dataset_id=TypeAdapter(DatCoreDatasetName).validate_python( + file_filter.parts[0] + ), + cursor=cursor, + limit=limit, + ) + assert len(file_filter.parts) == 2 + + if _is_collection(file_filter): + # this is a collection + return await datcore_adapter.list_top_level_objects_in_collection( + self.app, + user_id=user_id, + api_key=api_token, + api_secret=api_secret, + dataset_id=TypeAdapter(DatCoreDatasetName).validate_python( + file_filter.parts[0] + ), + collection_id=TypeAdapter(DatCoreCollectionName).validate_python( + file_filter.parts[1] + ), + cursor=cursor, + limit=limit, + ) + assert TypeAdapter(DatCorePackageName).validate_python( + file_filter.parts[1] + ) # nosec + + # only other option is a file or maybe a partial?? 
that would be bad + return ( + [ + await datcore_adapter.get_package_file_as_path( + self.app, + user_id=user_id, + api_key=api_token, + api_secret=api_secret, + dataset_id=TypeAdapter(DatCoreDatasetName).validate_python( + file_filter.parts[0] + ), + package_id=TypeAdapter(DatCorePackageName).validate_python( + file_filter.parts[1] + ), + ) + ], + None, + 1, + ) + + async def compute_path_size(self, user_id: UserID, *, path: Path) -> ByteSize: + """returns the total size of an arbitrary path""" + api_token, api_secret = await self._get_datcore_tokens(user_id) + api_token, api_secret = _check_api_credentials(api_token, api_secret) + + # if this is a dataset we might have the size directly + with contextlib.suppress(ValidationError): + dataset_id = TypeAdapter(DatCoreDatasetName).validate_python(f"{path}") + _, dataset_size = await datcore_adapter.get_dataset( + self.app, + api_key=api_token, + api_secret=api_secret, + dataset_id=dataset_id, + ) + if dataset_size is not None: + return dataset_size + + # generic computation (slow and unoptimized - could be improved if necessary by using datcore data better) + try: + accumulated_size = ByteSize(0) + paths_to_process = [path] + + while paths_to_process: + current_path = paths_to_process.pop() + paths, cursor, _ = await self.list_paths( + user_id, file_filter=current_path, cursor=None, limit=50 + ) + + while paths: + for p in paths: + if p.file_meta_data is not None: + # this is a file + assert ( + p.file_meta_data.file_size is not UNDEFINED_SIZE_TYPE + ) # nosec + assert isinstance( + p.file_meta_data.file_size, ByteSize + ) # nosec + accumulated_size = ByteSize( + accumulated_size + p.file_meta_data.file_size + ) + continue + paths_to_process.append(p.path) + + if cursor: + paths, cursor, _ = await self.list_paths( + user_id, file_filter=current_path, cursor=cursor, limit=50 + ) + else: + break + + return accumulated_size + + except ValidationError: + # invalid path + return ByteSize(0) + async def list_files( self, user_id: UserID, @@ -64,34 +257,37 @@ async def list_files( project_id: ProjectID | None, ) -> list[FileMetaData]: api_token, api_secret = await self._get_datcore_tokens(user_id) + api_token, api_secret = _check_api_credentials(api_token, api_secret) return await datcore_adapter.list_all_datasets_files_metadatas( self.app, user_id, api_token, api_secret ) async def get_file(self, user_id: UserID, file_id: StorageFileID) -> FileMetaData: api_token, api_secret = await self._get_datcore_tokens(user_id) + api_token, api_secret = _check_api_credentials(api_token, api_secret) package_files = await datcore_adapter.get_package_files( - self.app, api_token, api_secret, file_id + self.app, api_key=api_token, api_secret=api_secret, package_id=file_id ) if not len(package_files) == 1: raise DatcoreAdapterMultipleFilesError( msg=f"{len(package_files)} files in package, this breaks the current assumption" ) - resp_data = package_files[0]["content"] + + file = package_files[0] return FileMetaData( file_uuid=file_id, location_id=DATCORE_ID, location=DATCORE_STR, - bucket_name=resp_data["s3bucket"], + bucket_name=file.s3_bucket, object_name=file_id, - file_name=resp_data["filename"], + file_name=file.filename, file_id=file_id, - file_size=resp_data["size"], - created_at=resp_data["createdAt"], - last_modified=resp_data["updatedAt"], + file_size=file.size, + created_at=file.created_at, + last_modified=file.updated_at, project_id=None, node_id=None, user_id=user_id, @@ -126,14 +322,16 @@ async def create_file_download_link( self, user_id: UserID, 
file_id: StorageFileID, link_type: LinkType ) -> AnyUrl: api_token, api_secret = await self._get_datcore_tokens(user_id) + api_token, api_secret = _check_api_credentials(api_token, api_secret) return await datcore_adapter.get_file_download_presigned_link( self.app, api_token, api_secret, file_id ) async def delete_file(self, user_id: UserID, file_id: StorageFileID) -> None: api_token, api_secret = await self._get_datcore_tokens(user_id) + api_token, api_secret = _check_api_credentials(api_token, api_secret) await datcore_adapter.delete_file(self.app, api_token, api_secret, file_id) -def create_datcore_data_manager(app: web.Application) -> DatCoreDataManager: +def create_datcore_data_manager(app: FastAPI) -> DatCoreDataManager: return DatCoreDataManager(app) diff --git a/services/storage/src/simcore_service_storage/db.py b/services/storage/src/simcore_service_storage/db.py deleted file mode 100644 index 2dbb7dc8704..00000000000 --- a/services/storage/src/simcore_service_storage/db.py +++ /dev/null @@ -1,77 +0,0 @@ -import logging -from typing import Any - -from aiohttp import web -from aiopg.sa.engine import Engine -from servicelib.aiohttp.aiopg_utils import is_pg_responsive -from servicelib.common_aiopg_utils import DataSourceName, create_pg_engine -from servicelib.retry_policies import PostgresRetryPolicyUponInitialization -from settings_library.postgres import PostgresSettings -from simcore_postgres_database.utils_aiopg import ( - get_pg_engine_stateinfo, - raise_if_migration_not_ready, -) -from tenacity import retry - -from .constants import APP_AIOPG_ENGINE_KEY, APP_CONFIG_KEY - -_logger = logging.getLogger(__name__) - - -@retry(**PostgresRetryPolicyUponInitialization(_logger).kwargs) -async def _ensure_pg_ready(dsn: DataSourceName, min_size: int, max_size: int) -> None: - _logger.info("Checking pg is ready %s", dsn) - - async with create_pg_engine(dsn, minsize=min_size, maxsize=max_size) as engine: - await raise_if_migration_not_ready(engine) - - -async def postgres_cleanup_ctx(app: web.Application): - pg_cfg: PostgresSettings = app[APP_CONFIG_KEY].STORAGE_POSTGRES - dsn = DataSourceName( - application_name=f"{__name__}_{id(app)}", - database=pg_cfg.POSTGRES_DB, - user=pg_cfg.POSTGRES_USER, - password=pg_cfg.POSTGRES_PASSWORD.get_secret_value(), - host=pg_cfg.POSTGRES_HOST, - port=pg_cfg.POSTGRES_PORT, - ) - - await _ensure_pg_ready( - dsn, min_size=pg_cfg.POSTGRES_MINSIZE, max_size=pg_cfg.POSTGRES_MAXSIZE - ) - _logger.info("Creating pg engine for %s", dsn) - async with create_pg_engine( - dsn, minsize=pg_cfg.POSTGRES_MINSIZE, maxsize=pg_cfg.POSTGRES_MAXSIZE - ) as engine: - - assert engine # nosec - app[APP_AIOPG_ENGINE_KEY] = engine - - _logger.info("Created pg engine for %s", dsn) - yield # ---------- - _logger.info("Deleting pg engine for %s", dsn) - _logger.info("Deleted pg engine for %s", dsn) - - -async def is_service_responsive(app: web.Application) -> bool: - """Returns true if the app can connect to db service""" - return await is_pg_responsive(engine=app[APP_AIOPG_ENGINE_KEY]) - - -def get_engine_state(app: web.Application) -> dict[str, Any]: - engine: Engine | None = app.get(APP_AIOPG_ENGINE_KEY) - if engine: - engine_info: dict[str, Any] = get_pg_engine_stateinfo(engine) - return engine_info - return {} - - -def setup_db(app: web.Application): - app[APP_AIOPG_ENGINE_KEY] = None - - # app is created at this point but not yet started - _logger.debug("Setting up %s [service: %s] ...", __name__, "postgres") - - # async connection to db - 
app.cleanup_ctx.append(postgres_cleanup_ctx) diff --git a/services/storage/src/simcore_service_storage/db_file_meta_data.py b/services/storage/src/simcore_service_storage/db_file_meta_data.py deleted file mode 100644 index 593a48f72b2..00000000000 --- a/services/storage/src/simcore_service_storage/db_file_meta_data.py +++ /dev/null @@ -1,207 +0,0 @@ -import datetime -from collections.abc import AsyncGenerator - -import sqlalchemy as sa -from aiopg.sa.connection import SAConnection -from models_library.basic_types import SHA256Str -from models_library.projects import ProjectID -from models_library.projects_nodes_io import NodeID, SimcoreS3FileID -from models_library.users import UserID -from models_library.utils.fastapi_encoders import jsonable_encoder -from simcore_postgres_database.storage_models import file_meta_data -from sqlalchemy import and_, literal_column -from sqlalchemy.dialects.postgresql import insert as pg_insert - -from .exceptions import FileMetaDataNotFoundError -from .models import FileMetaData, FileMetaDataAtDB, UserOrProjectFilter - - -async def exists(conn: SAConnection, file_id: SimcoreS3FileID) -> bool: - return bool( - await conn.scalar( - sa.select(sa.func.count()) - .select_from(file_meta_data) - .where(file_meta_data.c.file_id == file_id) - ) - == 1 - ) - - -async def upsert( - conn: SAConnection, fmd: FileMetaData | FileMetaDataAtDB -) -> FileMetaDataAtDB: - # NOTE: upsert file_meta_data, if the file already exists, we update the whole row - # so we get the correct time stamps - fmd_db = ( - FileMetaDataAtDB.model_validate(fmd) if isinstance(fmd, FileMetaData) else fmd - ) - insert_statement = pg_insert(file_meta_data).values(**jsonable_encoder(fmd_db)) - on_update_statement = insert_statement.on_conflict_do_update( - index_elements=[file_meta_data.c.file_id], set_=jsonable_encoder(fmd_db) - ).returning(literal_column("*")) - result = await conn.execute(on_update_statement) - row = await result.first() - assert row # nosec - return FileMetaDataAtDB.model_validate(row) - - -async def insert(conn: SAConnection, fmd: FileMetaData) -> FileMetaDataAtDB: - fmd_db = FileMetaDataAtDB.model_validate(fmd) - result = await conn.execute( - file_meta_data.insert() - .values(jsonable_encoder(fmd_db)) - .returning(literal_column("*")) - ) - row = await result.first() - assert row # nosec - return FileMetaDataAtDB.model_validate(row) - - -async def get(conn: SAConnection, file_id: SimcoreS3FileID) -> FileMetaDataAtDB: - result = await conn.execute( - query=sa.select(file_meta_data).where(file_meta_data.c.file_id == file_id) - ) - if row := await result.first(): - return FileMetaDataAtDB.model_validate(row) - raise FileMetaDataNotFoundError(file_id=file_id) - - -def _list_filter_with_partial_file_id_stmt( - *, - user_or_project_filter: UserOrProjectFilter, - file_id_prefix: str | None, - partial_file_id: str | None, - sha256_checksum: SHA256Str | None, - is_directory: bool | None, - limit: int | None = None, - offset: int | None = None, -): - conditions: list = [] - - # Checks access rights (project can be owned or shared) - user_id = user_or_project_filter.user_id - if user_id is not None: - project_ids = user_or_project_filter.project_ids - conditions.append( - sa.or_( - file_meta_data.c.user_id == f"{user_id}", - ( - file_meta_data.c.project_id.in_(f"{_}" for _ in project_ids) - if project_ids - else False - ), - ) - ) - - # Optional filters - if file_id_prefix: - conditions.append(file_meta_data.c.file_id.startswith(file_id_prefix)) - if partial_file_id: - 
conditions.append(file_meta_data.c.file_id.ilike(f"%{partial_file_id}%")) - if is_directory is not None: - conditions.append(file_meta_data.c.is_directory.is_(is_directory)) - if sha256_checksum: - conditions.append(file_meta_data.c.sha256_checksum == sha256_checksum) - - return ( - sa.select(file_meta_data) - .where(sa.and_(*conditions)) - .order_by(file_meta_data.c.created_at.asc()) # sorted as oldest first - .offset(offset) - .limit(limit) - ) - - -async def list_filter_with_partial_file_id( - conn: SAConnection, - *, - user_or_project_filter: UserOrProjectFilter, - file_id_prefix: str | None, - partial_file_id: str | None, - sha256_checksum: SHA256Str | None, - is_directory: bool | None, - limit: int | None = None, - offset: int | None = None, -) -> list[FileMetaDataAtDB]: - - stmt = _list_filter_with_partial_file_id_stmt( - user_or_project_filter=user_or_project_filter, - file_id_prefix=file_id_prefix, - partial_file_id=partial_file_id, - sha256_checksum=sha256_checksum, - is_directory=is_directory, - limit=limit, - offset=offset, - ) - - return [ - FileMetaDataAtDB.model_validate(row) async for row in await conn.execute(stmt) - ] - - -async def list_fmds( - conn: SAConnection, - *, - user_id: UserID | None = None, - project_ids: list[ProjectID] | None = None, - file_ids: list[SimcoreS3FileID] | None = None, - expired_after: datetime.datetime | None = None, -) -> list[FileMetaDataAtDB]: - stmt = sa.select(file_meta_data).where( - and_( - (file_meta_data.c.user_id == f"{user_id}") if user_id else True, - ( - (file_meta_data.c.project_id.in_([f"{p}" for p in project_ids])) - if project_ids - else True - ), - (file_meta_data.c.file_id.in_(file_ids)) if file_ids else True, - ( - (file_meta_data.c.upload_expires_at < expired_after) - if expired_after - else True - ), - ) - ) - - return [ - FileMetaDataAtDB.model_validate(row) async for row in await conn.execute(stmt) - ] - - -async def total(conn: SAConnection) -> int: - """returns the number of uploaded file entries""" - return ( - await conn.scalar(sa.select(sa.func.count()).select_from(file_meta_data)) or 0 - ) - - -async def list_valid_uploads( - conn: SAConnection, -) -> AsyncGenerator[FileMetaDataAtDB, None]: - """returns all the theoretically valid fmds (e.g. 
upload_expires_at column is null)""" - async for row in conn.execute( - sa.select(file_meta_data).where( - file_meta_data.c.upload_expires_at == None # lgtm [py/test-equals-none] - ) - ): - fmd_at_db = FileMetaDataAtDB.model_validate(row) - yield fmd_at_db - - -async def delete(conn: SAConnection, file_ids: list[SimcoreS3FileID]) -> None: - await conn.execute( - file_meta_data.delete().where(file_meta_data.c.file_id.in_(file_ids)) - ) - - -async def delete_all_from_project(conn: SAConnection, project_id: ProjectID) -> None: - await conn.execute( - file_meta_data.delete().where(file_meta_data.c.project_id == f"{project_id}") - ) - - -async def delete_all_from_node(conn: SAConnection, node_id: NodeID) -> None: - await conn.execute( - file_meta_data.delete().where(file_meta_data.c.node_id == f"{node_id}") - ) diff --git a/services/storage/src/simcore_service_storage/db_projects.py b/services/storage/src/simcore_service_storage/db_projects.py deleted file mode 100644 index dc680c491ee..00000000000 --- a/services/storage/src/simcore_service_storage/db_projects.py +++ /dev/null @@ -1,39 +0,0 @@ -from collections.abc import AsyncIterator -from contextlib import suppress - -import sqlalchemy as sa -from aiopg.sa.connection import SAConnection -from models_library.projects import ProjectAtDB, ProjectID -from pydantic import ValidationError -from simcore_postgres_database.storage_models import projects - - -async def list_valid_projects_in( - conn: SAConnection, - include_uuids: list[ProjectID], -) -> AsyncIterator[ProjectAtDB]: - """ - - NOTE that it lists ONLY validated projects in 'project_uuids' - """ - async for row in conn.execute( - sa.select(projects).where( - projects.c.uuid.in_(f"{pid}" for pid in include_uuids) - ) - ): - with suppress(ValidationError): - yield ProjectAtDB.model_validate(row) - - -async def project_exists( - conn: SAConnection, - project_uuid: ProjectID, -) -> bool: - return bool( - await conn.scalar( - sa.select(sa.func.count()) - .select_from(projects) - .where(projects.c.uuid == f"{project_uuid}") - ) - == 1 - ) diff --git a/services/storage/src/simcore_service_storage/db_tokens.py b/services/storage/src/simcore_service_storage/db_tokens.py deleted file mode 100644 index 445a7c220d1..00000000000 --- a/services/storage/src/simcore_service_storage/db_tokens.py +++ /dev/null @@ -1,42 +0,0 @@ -import logging -from typing import Any - -import sqlalchemy as sa -from aiohttp import web -from aiopg.sa.engine import Engine -from models_library.users import UserID -from simcore_postgres_database.storage_models import tokens - -from .constants import APP_AIOPG_ENGINE_KEY, APP_CONFIG_KEY - -log = logging.getLogger(__name__) - - -async def _get_tokens_from_db(engine: Engine, user_id: UserID) -> dict[str, Any]: - async with engine.acquire() as conn: - result = await conn.execute( - sa.select( - tokens, - ).where(tokens.c.user_id == user_id) - ) - row = await result.first() - return dict(row) if row else {} - - -async def get_api_token_and_secret( - app: web.Application, user_id: UserID -) -> tuple[str, str]: - # from the client side together with the userid? 
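The aiopg-based token helper deleted below is superseded by the `TokenRepository` used from `datcore_dsm.py` above (`TokenRepository.instance(get_db_engine(app)).get_api_token_and_secret(user_id=...)`). A rough sketch of the repository shape that call site implies, under assumed names and a stand-in table definition, not the real implementation:

```python
from dataclasses import dataclass
from typing import Any, Self

import sqlalchemy as sa
from sqlalchemy.ext.asyncio import AsyncEngine

_metadata = sa.MetaData()
# stand-in for the real table in simcore_postgres_database.storage_models
tokens = sa.Table(
    "tokens",
    _metadata,
    sa.Column("user_id", sa.BigInteger, primary_key=True),
    sa.Column("token_data", sa.JSON),
)


@dataclass(frozen=True)
class TokenRepositorySketch:
    engine: AsyncEngine

    @classmethod
    def instance(cls, engine: AsyncEngine) -> Self:
        return cls(engine=engine)

    async def get_api_token_and_secret(
        self, *, user_id: int
    ) -> tuple[str | None, str | None]:
        # Returns (None, None) when the user never stored DatCore credentials;
        # unlike the deleted helper, there is no app-level fallback, and the
        # caller decides whether missing credentials are an error.
        async with self.engine.connect() as conn:
            row = (
                await conn.execute(
                    sa.select(tokens.c.token_data).where(tokens.c.user_id == user_id)
                )
            ).one_or_none()
        token_data: dict[str, Any] = row.token_data if row and row.token_data else {}
        return token_data.get("token_key"), token_data.get("token_secret")
```

Note the behavioral shift: the config-level default credentials disappear together with this module, and `_check_api_credentials` in `datcore_dsm.py` now raises `DatCoreCredentialsMissingError` instead of silently using globals.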
- engine = app[APP_AIOPG_ENGINE_KEY] - - # defaults from config if any, othewise None - api_token = app[APP_CONFIG_KEY].BF_API_KEY - api_secret = app[APP_CONFIG_KEY].BF_API_SECRET - - data = await _get_tokens_from_db(engine, user_id) - - data = data.get("token_data", {}) - api_token = data.get("token_key", api_token) - api_secret = data.get("token_secret", api_secret) - - return api_token, api_secret diff --git a/services/storage/src/simcore_service_storage/dsm.py b/services/storage/src/simcore_service_storage/dsm.py index 4d69f32bd99..64d72a29404 100644 --- a/services/storage/src/simcore_service_storage/dsm.py +++ b/services/storage/src/simcore_service_storage/dsm.py @@ -1,17 +1,18 @@ import logging +from typing import cast -from aiohttp import web +from fastapi import FastAPI -from .constants import APP_DSM_KEY from .datcore_dsm import DatCoreDataManager, create_datcore_data_manager from .dsm_factory import DataManagerProvider +from .exceptions.errors import ConfigurationError from .simcore_s3_dsm import SimcoreS3DataManager, create_simcore_s3_data_manager logger = logging.getLogger(__name__) -def setup_dsm(app: web.Application): - async def _cleanup_context(app: web.Application): +def setup_dsm(app: FastAPI) -> None: + async def _on_startup() -> None: dsm_provider = DataManagerProvider(app) dsm_provider.register_builder( SimcoreS3DataManager.get_location_id(), @@ -23,17 +24,22 @@ async def _cleanup_context(app: web.Application): create_datcore_data_manager, DatCoreDataManager, ) - app[APP_DSM_KEY] = dsm_provider + app.state.dsm_provider = dsm_provider - yield - - logger.info("Shuting down %s", f"{dsm_provider=}") + async def _on_shutdown() -> None: + if app.state.dsm_provider: + # nothing to do + ... # ------ - app.cleanup_ctx.append(_cleanup_context) + app.add_event_handler("startup", _on_startup) + app.add_event_handler("shutdown", _on_shutdown) -def get_dsm_provider(app: web.Application) -> DataManagerProvider: - dsm_provider: DataManagerProvider = app[APP_DSM_KEY] - return dsm_provider +def get_dsm_provider(app: FastAPI) -> DataManagerProvider: + if not app.state.dsm_provider: + raise ConfigurationError( + msg="DSM provider not available. Please check the configuration." 
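The `get_dsm_provider` accessor above shows the pattern the FastAPI port uses in place of aiohttp's `APP_*_KEY` dictionary entries: each module stores its singleton on `app.state` during startup and exposes a getter that fails loudly when setup was skipped or mis-ordered. A hedged sketch with illustrative names (the real code raises `ConfigurationError`; `RuntimeError` keeps the sketch dependency-free):

```python
from typing import cast

from fastapi import FastAPI


class ProviderSketch:  # stand-in for the real DataManagerProvider
    ...


def setup_provider(app: FastAPI) -> None:
    async def _on_startup() -> None:
        app.state.provider = ProviderSketch()

    app.add_event_handler("startup", _on_startup)


def get_provider(app: FastAPI) -> ProviderSketch:
    if getattr(app.state, "provider", None) is None:
        msg = "provider not available, check the application setup"
        raise RuntimeError(msg)
    return cast(ProviderSketch, app.state.provider)
```

The `cast` keeps type-checkers happy since `app.state` is untyped, while the explicit guard converts a silent `AttributeError` into an actionable configuration error.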
+ ) + return cast(DataManagerProvider, app.state.dsm_provider) diff --git a/services/storage/src/simcore_service_storage/dsm_cleaner.py b/services/storage/src/simcore_service_storage/dsm_cleaner.py index fe3fcf897ea..d09c83e4f5d 100644 --- a/services/storage/src/simcore_service_storage/dsm_cleaner.py +++ b/services/storage/src/simcore_service_storage/dsm_cleaner.py @@ -23,15 +23,14 @@ from datetime import timedelta from typing import cast -from aiohttp import web +from fastapi import FastAPI from servicelib.async_utils import cancel_wait_task from servicelib.background_task_utils import exclusive_periodic -from servicelib.logging_utils import log_catch, log_context +from servicelib.logging_utils import log_context -from .constants import APP_CONFIG_KEY, APP_DSM_KEY -from .dsm_factory import DataManagerProvider -from .redis import get_redis_client -from .settings import Settings +from .core.settings import get_application_settings +from .dsm import get_dsm_provider +from .modules.redis import get_redis_client from .simcore_s3_dsm import SimcoreS3DataManager _logger = logging.getLogger(__name__) @@ -39,37 +38,35 @@ _TASK_NAME_PERIODICALY_CLEAN_DSM = "periodic_cleanup_of_dsm" -async def dsm_cleaner_task(app: web.Application) -> None: - _logger.info("starting dsm cleaner task...") - dsm: DataManagerProvider = app[APP_DSM_KEY] - simcore_s3_dsm: SimcoreS3DataManager = cast( - SimcoreS3DataManager, dsm.get(SimcoreS3DataManager.get_location_id()) - ) - await simcore_s3_dsm.clean_expired_uploads() +async def dsm_cleaner_task(app: FastAPI) -> None: + with log_context(_logger, logging.INFO, "dsm cleaner task"): + dsm = get_dsm_provider(app) + simcore_s3_dsm: SimcoreS3DataManager = cast( + SimcoreS3DataManager, dsm.get(SimcoreS3DataManager.get_location_id()) + ) + await simcore_s3_dsm.clean_expired_uploads() -def setup_dsm_cleaner(app: web.Application): - async def _setup(app: web.Application): - with ( - log_context(_logger, logging.INFO, msg="setup dsm cleaner"), - log_catch(_logger, reraise=False), - ): - cfg: Settings = app[APP_CONFIG_KEY] - assert cfg.STORAGE_CLEANER_INTERVAL_S # nosec +def setup_dsm_cleaner(app: FastAPI) -> None: + async def _on_startup() -> None: + cfg = get_application_settings(app) + assert cfg.STORAGE_CLEANER_INTERVAL_S # nosec - @exclusive_periodic( - get_redis_client(app), - task_interval=timedelta(seconds=cfg.STORAGE_CLEANER_INTERVAL_S), - retry_after=timedelta(minutes=5), - ) - async def _periodic_dsm_clean() -> None: - await dsm_cleaner_task(app) + @exclusive_periodic( + get_redis_client(app), + task_interval=timedelta(seconds=cfg.STORAGE_CLEANER_INTERVAL_S), + retry_after=timedelta(minutes=5), + ) + async def _periodic_dsm_clean() -> None: + await dsm_cleaner_task(app) - storage_background_task = asyncio.create_task( - _periodic_dsm_clean(), name=_TASK_NAME_PERIODICALY_CLEAN_DSM - ) - yield + app.state.dsm_cleaner_task = asyncio.create_task( + _periodic_dsm_clean(), name=_TASK_NAME_PERIODICALY_CLEAN_DSM + ) - await cancel_wait_task(storage_background_task) + async def _on_shutdown() -> None: + assert isinstance(app.state.dsm_cleaner_task, asyncio.Task) # nosec + await cancel_wait_task(app.state.dsm_cleaner_task) - app.cleanup_ctx.append(_setup) + app.add_event_handler("startup", _on_startup) + app.add_event_handler("shutdown", _on_shutdown) diff --git a/services/storage/src/simcore_service_storage/dsm_factory.py b/services/storage/src/simcore_service_storage/dsm_factory.py index c84ca4f15db..749bbf9a5e6 100644 --- 
a/services/storage/src/simcore_service_storage/dsm_factory.py +++ b/services/storage/src/simcore_service_storage/dsm_factory.py @@ -1,16 +1,24 @@ from abc import ABC, abstractmethod from collections.abc import Callable from dataclasses import dataclass, field +from pathlib import Path -from aiohttp import web -from models_library.api_schemas_storage import LinkType, UploadedPart +from fastapi import FastAPI +from models_library.api_schemas_storage.storage_schemas import LinkType, UploadedPart from models_library.basic_types import SHA256Str from models_library.projects import ProjectID from models_library.projects_nodes_io import LocationID, LocationName, StorageFileID from models_library.users import UserID -from pydantic import AnyUrl, ByteSize +from pydantic import AnyUrl, ByteSize, NonNegativeInt -from .models import DatasetMetaData, FileMetaData, UploadLinks +from .models import ( + DatasetMetaData, + FileMetaData, + GenericCursor, + PathMetaData, + TotalNumber, + UploadLinks, +) class BaseDataManager(ABC): @@ -61,6 +69,21 @@ async def list_files( """returns all the file meta data a user has access to (uuid_filter and or project_id may be used)""" # NOTE: expand_dirs will be replaced by pagination in the future + @abstractmethod + async def list_paths( + self, + user_id: UserID, + *, + file_filter: Path | None, + cursor: GenericCursor | None, + limit: NonNegativeInt, + ) -> tuple[list[PathMetaData], GenericCursor | None, TotalNumber | None]: + """returns a page of the file meta data a user has access to""" + + @abstractmethod + async def compute_path_size(self, user_id: UserID, *, path: Path) -> ByteSize: + """returns the total size of an arbitrary path""" + @abstractmethod async def get_file(self, user_id: UserID, file_id: StorageFileID) -> FileMetaData: """returns the file meta data of file_id if user_id has the rights to""" @@ -105,17 +128,17 @@ async def delete_file(self, user_id: UserID, file_id: StorageFileID) -> None: @dataclass class DataManagerProvider: - app: web.Application + app: FastAPI _builders: dict[ LocationID, - tuple[Callable[[web.Application], BaseDataManager], type[BaseDataManager]], + tuple[Callable[[FastAPI], BaseDataManager], type[BaseDataManager]], ] = field(default_factory=dict) _services: list[BaseDataManager] = field(default_factory=list) def register_builder( self, location_id: LocationID, - builder: Callable[[web.Application], BaseDataManager], + builder: Callable[[FastAPI], BaseDataManager], dsm_type: type[BaseDataManager], ): self._builders[location_id] = (builder, dsm_type) diff --git a/services/storage/src/simcore_service_storage/exceptions/__init__.py b/services/storage/src/simcore_service_storage/exceptions/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/services/storage/src/simcore_service_storage/exceptions.py b/services/storage/src/simcore_service_storage/exceptions/errors.py similarity index 65% rename from services/storage/src/simcore_service_storage/exceptions.py rename to services/storage/src/simcore_service_storage/exceptions/errors.py index 937a3afdd06..5856a2fec5b 100644 --- a/services/storage/src/simcore_service_storage/exceptions.py +++ b/services/storage/src/simcore_service_storage/exceptions/errors.py @@ -5,6 +5,10 @@ class StorageRuntimeError(OsparcErrorMixin, RuntimeError): ... 
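The error classes in this module all lean on the `msg_template` convention of `OsparcErrorMixin` (defined in `common_library`, not shown in this diff). A rough stand-in that only mimics the observable behavior: keyword arguments given at raise-time fill the class-level template.

```python
class ErrorMixinSketch:  # illustrative stand-in for OsparcErrorMixin
    msg_template: str = "{msg}"

    def __init__(self, **ctx) -> None:
        self.error_context = ctx  # kept for logging / diagnostics
        super().__init__(self.msg_template.format(**ctx))


class InvalidFileIdentifierSketch(ErrorMixinSketch, RuntimeError):
    msg_template: str = "Error in {identifier}: {details}"


err = InvalidFileIdentifierSketch(identifier="api/0/foo", details="not found")
assert str(err) == "Error in api/0/foo: not found"
```

This is why the handlers module below can map each exception class to an HTTP status generically: the message is already fully rendered by the time the exception reaches the handler.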
+class ConfigurationError(StorageRuntimeError): + msg_template: str = "Application misconfiguration: {msg}" + + class DatabaseAccessError(StorageRuntimeError): msg_template: str = "Unexpected error while accessing database backend" @@ -29,3 +33,15 @@ class ProjectNotFoundError(DatabaseAccessError): class LinkAlreadyExistsError(DatabaseAccessError): msg_template: str = "The link {file_id} already exists" + + +class AccessLayerError(StorageRuntimeError): + msg_template: str = "Database access layer error" + + +class InvalidFileIdentifierError(AccessLayerError): + msg_template: str = "Error in {identifier}: {details}" + + +class DatCoreCredentialsMissingError(StorageRuntimeError): + msg_template: str = "DatCore credentials are incomplete. TIP: Check your settings" diff --git a/services/storage/src/simcore_service_storage/exceptions/handlers.py b/services/storage/src/simcore_service_storage/exceptions/handlers.py new file mode 100644 index 00000000000..78a93d8f46c --- /dev/null +++ b/services/storage/src/simcore_service_storage/exceptions/handlers.py @@ -0,0 +1,98 @@ +import logging + +from asyncpg.exceptions import PostgresError +from aws_library.s3 import S3AccessError, S3KeyNotFoundError +from fastapi import FastAPI, status +from servicelib.fastapi.http_error import ( + make_http_error_handler_for_exception, + set_app_default_http_error_handlers, +) + +from ..modules.datcore_adapter.datcore_adapter_exceptions import ( + DatcoreAdapterFileNotFoundError, + DatcoreAdapterTimeoutError, +) +from .errors import ( + DatCoreCredentialsMissingError, + FileAccessRightError, + FileMetaDataNotFoundError, + InvalidFileIdentifierError, + LinkAlreadyExistsError, + ProjectAccessRightError, + ProjectNotFoundError, +) + +_logger = logging.getLogger(__name__) + + +def set_exception_handlers(app: FastAPI) -> None: + set_app_default_http_error_handlers(app) + + # + # add custom exception handlers + # + app.add_exception_handler( + InvalidFileIdentifierError, + make_http_error_handler_for_exception( + status.HTTP_422_UNPROCESSABLE_ENTITY, + InvalidFileIdentifierError, + envelope_error=True, + ), + ) + for exc_not_found in ( + FileMetaDataNotFoundError, + S3KeyNotFoundError, + ProjectNotFoundError, + DatcoreAdapterFileNotFoundError, + ): + app.add_exception_handler( + exc_not_found, + make_http_error_handler_for_exception( + status.HTTP_404_NOT_FOUND, exc_not_found, envelope_error=True + ), + ) + for exc_access in ( + FileAccessRightError, + ProjectAccessRightError, + ): + app.add_exception_handler( + exc_access, + make_http_error_handler_for_exception( + status.HTTP_403_FORBIDDEN, exc_access, envelope_error=True + ), + ) + app.add_exception_handler( + LinkAlreadyExistsError, + make_http_error_handler_for_exception( + status.HTTP_422_UNPROCESSABLE_ENTITY, + LinkAlreadyExistsError, + envelope_error=True, + ), + ) + for exc_3rd_party in ( + PostgresError, + S3AccessError, + ): + app.add_exception_handler( + exc_3rd_party, + make_http_error_handler_for_exception( + status.HTTP_503_SERVICE_UNAVAILABLE, exc_3rd_party, envelope_error=True + ), + ) + + app.add_exception_handler( + DatcoreAdapterTimeoutError, + make_http_error_handler_for_exception( + status.HTTP_504_GATEWAY_TIMEOUT, + DatcoreAdapterTimeoutError, + envelope_error=True, + ), + ) + app.add_exception_handler( + DatCoreCredentialsMissingError, + make_http_error_handler_for_exception( + status.HTTP_401_UNAUTHORIZED, + DatCoreCredentialsMissingError, + envelope_error=True, + ), + ) diff --git a/services/storage/src/simcore_service_storage/handlers_datasets.py 
b/services/storage/src/simcore_service_storage/handlers_datasets.py deleted file mode 100644 index 2cd510bcf79..00000000000 --- a/services/storage/src/simcore_service_storage/handlers_datasets.py +++ /dev/null @@ -1,74 +0,0 @@ -import logging - -from aiohttp import web -from aiohttp.web import RouteTableDef -from common_library.json_serialization import json_dumps -from models_library.api_schemas_storage import FileMetaDataGet -from models_library.utils.fastapi_encoders import jsonable_encoder -from servicelib.aiohttp.requests_validation import ( - parse_request_path_parameters_as, - parse_request_query_parameters_as, -) - -# Exclusive for simcore-s3 storage ----------------------- -from ._meta import API_VTAG -from .dsm import get_dsm_provider -from .models import ( - FileMetaData, - FilesMetadataDatasetPathParams, - FilesMetadataDatasetQueryParams, - LocationPathParams, - StorageQueryParamsBase, -) - -log = logging.getLogger(__name__) - -routes = RouteTableDef() - -UPLOAD_TASKS_KEY = f"{__name__}.upload_tasks" - - -@routes.get( - f"/{API_VTAG}/locations/{{location_id}}/datasets", name="get_datasets_metadata" -) -async def get_datasets_metadata(request: web.Request) -> web.Response: - query_params: StorageQueryParamsBase = parse_request_query_parameters_as( - StorageQueryParamsBase, request - ) - path_params = parse_request_path_parameters_as(LocationPathParams, request) - log.debug( - "received call to get_datasets_metadata with %s", - f"{path_params=}, {query_params=}", - ) - - dsm = get_dsm_provider(request.app).get(path_params.location_id) - return web.json_response( - {"data": await dsm.list_datasets(query_params.user_id)}, dumps=json_dumps - ) - - -@routes.get( - f"/{API_VTAG}/locations/{{location_id}}/datasets/{{dataset_id}}/metadata", - name="get_files_metadata_dataset", -) -async def get_files_metadata_dataset(request: web.Request) -> web.Response: - query_params: FilesMetadataDatasetQueryParams = parse_request_query_parameters_as( - FilesMetadataDatasetQueryParams, request - ) - path_params = parse_request_path_parameters_as( - FilesMetadataDatasetPathParams, request - ) - log.debug( - "received call to get_files_metadata_dataset with %s", - f"{path_params=}, {query_params=}", - ) - dsm = get_dsm_provider(request.app).get(path_params.location_id) - data: list[FileMetaData] = await dsm.list_files_in_dataset( - user_id=query_params.user_id, - dataset_id=path_params.dataset_id, - expand_dirs=query_params.expand_dirs, - ) - return web.json_response( - {"data": [jsonable_encoder(FileMetaDataGet(**d.model_dump())) for d in data]}, - dumps=json_dumps, - ) diff --git a/services/storage/src/simcore_service_storage/handlers_files.py b/services/storage/src/simcore_service_storage/handlers_files.py deleted file mode 100644 index 0f78fdaeea5..00000000000 --- a/services/storage/src/simcore_service_storage/handlers_files.py +++ /dev/null @@ -1,414 +0,0 @@ -import asyncio -import logging -import urllib.parse -from typing import cast - -from aiohttp import web -from aiohttp.web import RouteTableDef -from common_library.json_serialization import json_dumps -from models_library.api_schemas_storage import ( - FileMetaDataGet, - FileUploadCompleteFutureResponse, - FileUploadCompleteLinks, - FileUploadCompleteResponse, - FileUploadCompleteState, - FileUploadCompletionBody, - FileUploadLinks, - FileUploadSchema, - SoftCopyBody, -) -from models_library.utils.fastapi_encoders import jsonable_encoder -from pydantic import AnyUrl, ByteSize, TypeAdapter -from servicelib.aiohttp import status -from 
servicelib.aiohttp.requests_validation import ( - parse_request_body_as, - parse_request_path_parameters_as, - parse_request_query_parameters_as, -) - -from ._meta import API_VTAG -from .dsm import get_dsm_provider -from .exceptions import FileMetaDataNotFoundError -from .models import ( - CopyAsSoftLinkParams, - FileDownloadQueryParams, - FileMetaData, - FilePathIsUploadCompletedParams, - FilePathParams, - FilesMetadataQueryParams, - FileUploadQueryParams, - LocationPathParams, - StorageQueryParamsBase, - UploadLinks, -) -from .simcore_s3_dsm import SimcoreS3DataManager -from .utils import create_upload_completion_task_name - -log = logging.getLogger(__name__) - -routes = RouteTableDef() - -UPLOAD_TASKS_KEY = f"{__name__}.upload_tasks" - - -@routes.get( - f"/{API_VTAG}/locations/{{location_id}}/files/metadata", name="get_files_metadata" -) -async def get_files_metadata(request: web.Request) -> web.Response: - query_params: FilesMetadataQueryParams = parse_request_query_parameters_as( - FilesMetadataQueryParams, request - ) - path_params = parse_request_path_parameters_as(LocationPathParams, request) - log.debug( - "received call to get_files_metadata with %s", - f"{path_params=}, {query_params=}", - ) - dsm = get_dsm_provider(request.app).get(path_params.location_id) - data: list[FileMetaData] = await dsm.list_files( - user_id=query_params.user_id, - expand_dirs=query_params.expand_dirs, - uuid_filter=query_params.uuid_filter - or f"{query_params.project_id or ''}", # NOTE: https://github.com/ITISFoundation/osparc-issues/issues/1593 - project_id=query_params.project_id, - ) - return web.json_response( - {"data": [jsonable_encoder(FileMetaDataGet(**d.model_dump())) for d in data]}, - dumps=json_dumps, - ) - - -@routes.get( - f"/{API_VTAG}/locations/{{location_id}}/files/{{file_id}}/metadata", - name="get_file_metadata", -) -async def get_file_metadata(request: web.Request) -> web.Response: - query_params: StorageQueryParamsBase = parse_request_query_parameters_as( - StorageQueryParamsBase, request - ) - path_params = parse_request_path_parameters_as(FilePathParams, request) - log.debug( - "received call to get_file_metadata_dataset with %s", - f"{path_params=}, {query_params=}", - ) - - dsm = get_dsm_provider(request.app).get(path_params.location_id) - try: - data = await dsm.get_file( - user_id=query_params.user_id, - file_id=path_params.file_id, - ) - except FileMetaDataNotFoundError: - # NOTE: LEGACY compatibility - # This is what happens Larry... 
data must be an empty {} or else some old dynamic services will FAIL (sic) - # Cannot remove until we retire all legacy services - # https://github.com/ITISFoundation/osparc-simcore/issues/5676 - # https://github.com/ITISFoundation/osparc-simcore/blob/cfdf4f86d844ebb362f4f39e9c6571d561b72897/services/storage/client-sdk/python/simcore_service_storage_sdk/models/file_meta_data_enveloped.py#L34 - - return web.json_response( - {"error": "No result found", "data": {}}, dumps=json_dumps - ) - - if request.headers.get("User-Agent") == "OpenAPI-Generator/0.1.0/python": - # NOTE: LEGACY compatiblity with API v0.1.0 - # SEE models used in sdk in: - # https://github.com/ITISFoundation/osparc-simcore/blob/cfdf4f86d844ebb362f4f39e9c6571d561b72897/services/storage/client-sdk/python/simcore_service_storage_sdk/models/file_meta_data_enveloped.py#L34 - # https://github.com/ITISFoundation/osparc-simcore/blob/cfdf4f86d844ebb362f4f39e9c6571d561b72897/services/storage/client-sdk/python/simcore_service_storage_sdk/models/file_meta_data_type.py#L34 - return web.json_response( - { - "data": { - "file_uuid": data.file_uuid, - "location_id": data.location_id, - "location": data.location, - "bucket_name": data.bucket_name, - "object_name": data.object_name, - "project_id": data.project_id, - "project_name": data.project_name, - "node_id": data.node_id, - "node_name": data.node_name, - "file_name": data.file_name, - "user_id": data.user_id, - "user_name": None, - }, - "error": None, - }, - dumps=json_dumps, - ) - - return web.json_response( - {"data": jsonable_encoder(FileMetaDataGet(**data.model_dump()))}, - dumps=json_dumps, - ) - - -@routes.get( - f"/{API_VTAG}/locations/{{location_id}}/files/{{file_id}}", name="download_file" -) -async def download_file(request: web.Request) -> web.Response: - query_params: FileDownloadQueryParams = parse_request_query_parameters_as( - FileDownloadQueryParams, request - ) - path_params = parse_request_path_parameters_as(FilePathParams, request) - log.debug( - "received call to download_file with %s", - f"{path_params=}, {query_params=}", - ) - dsm = get_dsm_provider(request.app).get(path_params.location_id) - link = await dsm.create_file_download_link( - query_params.user_id, path_params.file_id, query_params.link_type - ) - return web.json_response({"data": {"link": link}}, dumps=json_dumps) - - -@routes.put( - f"/{API_VTAG}/locations/{{location_id}}/files/{{file_id}}", name="upload_file" -) -async def upload_file(request: web.Request) -> web.Response: - """creates upload file links: - - This function covers v1 and v2 versions of the handler. - Note: calling this entrypoint on an already existing file will overwrite that file. That file will be deleted - before the upload takes place. 
- - v1 rationale: - - client calls this handler, which returns a single link (either direct S3 or presigned) to the S3 backend - - client uploads the file - - storage relies on lazy update to find if the file is finished uploaded (when client calls get_file_meta_data, or if the dsm_cleaner goes over it after the upload time is expired) - - v2 rationale: - - client calls this handler, which returns a FileUploadSchema object containing 1 or more links (either S3/presigned links) - - client uploads the file (by chunking it if there are more than 1 presigned link) - - client calls complete_upload handle which will reconstruct the file on S3 backend - - client waits for completion to finish and then the file is accessible on S3 backend - - - Use-case v1: query.file_size is not defined, returns a PresignedLink model (backward compatibility) - Use-case v1.1: if query.link_type=presigned or None, returns a presigned link (limited to a single 5GB file) - Use-case v1.2: if query.link_type=s3, returns a s3 direct link (limited to a single 5TB file) - - User-case v2: query.is_directory is True (query.file_size is forced to -1), returns an s3 path where to upload all the content of the directory - User-case v2: if query.file_size is defined, returns a FileUploadSchema model, expects client to call "complete_upload" when the file is finished uploading - Use-case v2.1: if query.file_size == 0 and query.link_type=presigned or None, returns a single presigned link inside FileUploadSchema (limited to a single 5Gb file) - Use-case v2.2: if query.file_size > 0 and query.link_type=presigned or None, returns 1 or more presigned links depending on the file size (limited to a single 5TB file) - Use-case v2.3: if query.link_type=s3 and query.file_size>=0, returns a single s3 direct link (limited to a single 5TB file) - """ - query_params: FileUploadQueryParams = parse_request_query_parameters_as( - FileUploadQueryParams, request - ) - path_params = parse_request_path_parameters_as(FilePathParams, request) - log.debug( - "received call to upload_file with %s", - f"{path_params=}, {query_params=}", - ) - - dsm = get_dsm_provider(request.app).get(path_params.location_id) - links: UploadLinks = await dsm.create_file_upload_links( - user_id=query_params.user_id, - file_id=path_params.file_id, - link_type=query_params.link_type, - file_size_bytes=query_params.file_size or ByteSize(0), - is_directory=query_params.is_directory, - sha256_checksum=query_params.sha256_checksum, - ) - if query_params.is_v1_upload: - # return v1 response - assert len(links.urls) == 1 # nosec - response = { - "data": {"link": jsonable_encoder(f"{links.urls[0]}", by_alias=True)} - } - log.debug("Returning v1 response: %s", response) - return web.json_response(response, dumps=json_dumps) - - # v2 response - abort_url = request.url.join( - request.app.router["abort_upload_file"] - .url_for( - location_id=f"{path_params.location_id}", - file_id=urllib.parse.quote(path_params.file_id, safe=""), - ) - .with_query(user_id=query_params.user_id) - ) - complete_url = request.url.join( - request.app.router["complete_upload_file"] - .url_for( - location_id=f"{path_params.location_id}", - file_id=urllib.parse.quote(path_params.file_id, safe=""), - ) - .with_query(user_id=query_params.user_id) - ) - v2_response = FileUploadSchema( - chunk_size=links.chunk_size, - urls=links.urls, - links=FileUploadLinks( - abort_upload=TypeAdapter(AnyUrl).validate_python(f"{abort_url}"), - complete_upload=TypeAdapter(AnyUrl).validate_python(f"{complete_url}"), - ), - ) - 
log.debug("returning v2 response: %s", v2_response) - return jsonable_encoder(v2_response, by_alias=True) # type: ignore[no-any-return] # middleware takes care of enveloping - - -@routes.post( - f"/{API_VTAG}/locations/{{location_id}}/files/{{file_id}}:abort", - name="abort_upload_file", -) -async def abort_upload_file(request: web.Request) -> web.Response: - query_params: StorageQueryParamsBase = parse_request_query_parameters_as( - StorageQueryParamsBase, request - ) - path_params = parse_request_path_parameters_as(FilePathParams, request) - log.debug( - "received call to abort_upload_file with %s", - f"{path_params=}, {query_params=}", - ) - - dsm = get_dsm_provider(request.app).get(path_params.location_id) - await dsm.abort_file_upload(query_params.user_id, path_params.file_id) - return web.json_response(status=status.HTTP_204_NO_CONTENT) - - -@routes.post( - f"/{API_VTAG}/locations/{{location_id}}/files/{{file_id}}:complete", - name="complete_upload_file", -) -async def complete_upload_file(request: web.Request) -> web.Response: - query_params: StorageQueryParamsBase = parse_request_query_parameters_as( - StorageQueryParamsBase, request - ) - path_params = parse_request_path_parameters_as(FilePathParams, request) - body = await parse_request_body_as(FileUploadCompletionBody, request) - log.debug( - "received call to complete_upload_file with %s", - f"{path_params=}, {query_params=}", - ) - - dsm = get_dsm_provider(request.app).get(path_params.location_id) - # NOTE: completing a multipart upload on AWS can take up to several minutes - # therefore we wait a bit to see if it completes fast and return a 204 - # if it returns slow we return a 202 - Accepted, the client will have to check later - # for completeness - task = asyncio.create_task( - dsm.complete_file_upload(path_params.file_id, query_params.user_id, body.parts), - name=create_upload_completion_task_name( - query_params.user_id, path_params.file_id - ), - ) - request.app[UPLOAD_TASKS_KEY][task.get_name()] = task - assert request.transport # nosec - ip_addr, port = request.transport.get_extra_info( - "sockname" - ) # https://docs.python.org/3/library/asyncio-protocol.html#asyncio.BaseTransport.get_extra_info - route = ( - request.app.router["is_completed_upload_file"] - .url_for( - location_id=f"{path_params.location_id}", - file_id=urllib.parse.quote(path_params.file_id, safe=""), - future_id=task.get_name(), - ) - .with_query(user_id=query_params.user_id) - ) - complete_task_state_url = f"{request.url.scheme}://{ip_addr}:{port}{route}" - response = FileUploadCompleteResponse( - links=FileUploadCompleteLinks( - state=TypeAdapter(AnyUrl).validate_python(complete_task_state_url) - ) - ) - return web.json_response( - status=status.HTTP_202_ACCEPTED, - data={"data": jsonable_encoder(response, by_alias=True)}, - dumps=json_dumps, - ) - - -@routes.post( - f"/{API_VTAG}/locations/{{location_id}}/files/{{file_id}}:complete/futures/{{future_id}}", - name="is_completed_upload_file", -) -async def is_completed_upload_file(request: web.Request) -> web.Response: - query_params: StorageQueryParamsBase = parse_request_query_parameters_as( - StorageQueryParamsBase, request - ) - path_params = parse_request_path_parameters_as( - FilePathIsUploadCompletedParams, request - ) - log.debug( - "received call to is completed upload file with %s", - f"{path_params=}, {query_params=}", - ) - - # NOTE: completing a multipart upload on AWS can take up to several minutes - # therefore we wait a bit to see if it completes fast and return a 204 - # if it 
returns slow we return a 202 - Accepted, the client will have to check later - # for completeness - task_name = create_upload_completion_task_name( - query_params.user_id, path_params.file_id - ) - assert task_name == path_params.future_id # nosec - # first check if the task is in the app - if task := request.app[UPLOAD_TASKS_KEY].get(task_name): - if task.done(): - new_fmd: FileMetaData = task.result() - request.app[UPLOAD_TASKS_KEY].pop(task_name) - response = FileUploadCompleteFutureResponse( - state=FileUploadCompleteState.OK, e_tag=new_fmd.entity_tag - ) - else: - # the task is still running - response = FileUploadCompleteFutureResponse( - state=FileUploadCompleteState.NOK - ) - return jsonable_encoder(response, by_alias=True) # type: ignore[no-any-return] # middleware takes care of enveloping - # there is no task, either wrong call or storage was restarted - # we try to get the file to see if it exists in S3 - dsm = get_dsm_provider(request.app).get(path_params.location_id) - if fmd := await dsm.get_file( - user_id=query_params.user_id, - file_id=path_params.file_id, - ): - response = FileUploadCompleteFutureResponse( - state=FileUploadCompleteState.OK, e_tag=fmd.entity_tag - ) - return jsonable_encoder(response, by_alias=True) # type: ignore[no-any-return] # middleware takes care of enveloping - raise web.HTTPNotFound( - reason="Not found. Upload could not be completed. Please try again and contact support if it fails again." - ) - - -@routes.delete( - f"/{API_VTAG}/locations/{{location_id}}/files/{{file_id}}", name="delete_file" -) -async def delete_file(request: web.Request) -> web.Response: - query_params: StorageQueryParamsBase = parse_request_query_parameters_as( - StorageQueryParamsBase, request - ) - path_params = parse_request_path_parameters_as(FilePathParams, request) - log.debug( - "received call to delete_file with %s", - f"{path_params=}, {query_params=}", - ) - - dsm = get_dsm_provider(request.app).get(path_params.location_id) - await dsm.delete_file(query_params.user_id, path_params.file_id) - return web.json_response(status=status.HTTP_204_NO_CONTENT) - - -@routes.post(f"/{API_VTAG}/files/{{file_id}}:soft-copy", name="copy_as_soft_link") -async def copy_as_soft_link(request: web.Request): - query_params: StorageQueryParamsBase = parse_request_query_parameters_as( - StorageQueryParamsBase, request - ) - path_params = parse_request_path_parameters_as(CopyAsSoftLinkParams, request) - body = await parse_request_body_as(SoftCopyBody, request) - log.debug( - "received call to copy_as_soft_link with %s", - f"{path_params=}, {query_params=}, {body=}", - ) - - dsm = cast( - SimcoreS3DataManager, - get_dsm_provider(request.app).get(SimcoreS3DataManager.get_location_id()), - ) - file_link: FileMetaData = await dsm.create_soft_link( - query_params.user_id, path_params.file_id, body.link_id - ) - - return jsonable_encoder(FileMetaDataGet(**file_link.model_dump())) diff --git a/services/storage/src/simcore_service_storage/handlers_health.py b/services/storage/src/simcore_service_storage/handlers_health.py deleted file mode 100644 index eb97bafe1ba..00000000000 --- a/services/storage/src/simcore_service_storage/handlers_health.py +++ /dev/null @@ -1,87 +0,0 @@ -""" - - - Checks connectivity with other services in the backend - -""" - -import logging - -from aiohttp import web -from aws_library.s3 import S3AccessError -from common_library.json_serialization import json_dumps -from models_library.api_schemas_storage import HealthCheck, S3BucketName -from 
models_library.app_diagnostics import AppStatusCheck -from pydantic import TypeAdapter -from servicelib.rest_constants import RESPONSE_MODEL_POLICY - -from ._meta import API_VERSION, API_VTAG, PROJECT_NAME, VERSION -from .constants import APP_CONFIG_KEY -from .db import get_engine_state -from .db import is_service_responsive as is_pg_responsive -from .s3 import get_s3_client -from .settings import Settings - -log = logging.getLogger(__name__) - -routes = web.RouteTableDef() - - -@routes.get(f"/{API_VTAG}/", name="health_check") -async def get_health(request: web.Request) -> web.Response: - assert request # nosec - return web.json_response( - { - "data": HealthCheck( - name=PROJECT_NAME, - version=f"{VERSION}", - api_version=API_VERSION, - status=None, - ).model_dump(**RESPONSE_MODEL_POLICY) - }, - dumps=json_dumps, - ) - - -@routes.get(f"/{API_VTAG}/status", name="get_status") -async def get_status(request: web.Request) -> web.Response: - # NOTE: all calls here must NOT raise - assert request.app # nosec - app_settings: Settings = request.app[APP_CONFIG_KEY] - s3_state = "disabled" - if app_settings.STORAGE_S3: - try: - s3_state = ( - "connected" - if await get_s3_client(request.app).bucket_exists( - bucket=TypeAdapter(S3BucketName).validate_python( - app_settings.STORAGE_S3.S3_BUCKET_NAME - ) - ) - else "no access to S3 bucket" - ) - except S3AccessError: - s3_state = "failed" - - postgres_state = "disabled" - if app_settings.STORAGE_POSTGRES: - postgres_state = ( - "connected" if await is_pg_responsive(request.app) else "failed" - ) - - status = AppStatusCheck.model_validate( - { - "app_name": PROJECT_NAME, - "version": f"{VERSION}", - "services": { - "postgres": { - "healthy": postgres_state, - "pool": get_engine_state(request.app), - }, - "s3": {"healthy": s3_state}, - }, - } - ) - - return web.json_response( - {"data": status.model_dump(exclude_unset=True)}, dumps=json_dumps - ) diff --git a/services/storage/src/simcore_service_storage/handlers_locations.py b/services/storage/src/simcore_service_storage/handlers_locations.py deleted file mode 100644 index 494c1821f15..00000000000 --- a/services/storage/src/simcore_service_storage/handlers_locations.py +++ /dev/null @@ -1,105 +0,0 @@ -import asyncio -import logging -from typing import cast - -from aiohttp import web -from aiohttp.web import RouteTableDef -from common_library.json_serialization import json_dumps -from models_library.api_schemas_storage import FileLocation -from models_library.projects_nodes_io import StorageFileID -from servicelib.aiohttp.application_keys import ( - APP_CONFIG_KEY, - APP_FIRE_AND_FORGET_TASKS_KEY, -) -from servicelib.aiohttp.requests_validation import ( - parse_request_path_parameters_as, - parse_request_query_parameters_as, -) -from servicelib.utils import fire_and_forget_task - -# Exclusive for simcore-s3 storage ----------------------- -from ._meta import API_VTAG -from .dsm import get_dsm_provider -from .models import LocationPathParams, StorageQueryParamsBase, SyncMetadataQueryParams -from .settings import Settings -from .simcore_s3_dsm import SimcoreS3DataManager - -log = logging.getLogger(__name__) - -routes = RouteTableDef() - - -# HANDLERS --------------------------------------------------- -@routes.get(f"/{API_VTAG}/locations", name="get_storage_locations") -async def get_storage_locations(request: web.Request) -> web.Response: - query_params: StorageQueryParamsBase = parse_request_query_parameters_as( - StorageQueryParamsBase, request - ) - log.debug( - "received call to get_storage_locations 
with %s", - f"{query_params=}", - ) - dsm_provider = get_dsm_provider(request.app) - location_ids = dsm_provider.locations() - locs: list[FileLocation] = [] - for loc_id in location_ids: - dsm = dsm_provider.get(loc_id) - if await dsm.authorized(query_params.user_id): - locs.append(FileLocation(name=dsm.location_name, id=dsm.location_id)) - - return web.json_response({"error": None, "data": locs}, dumps=json_dumps) - - -@routes.post( - f"/{API_VTAG}/locations/{{location_id}}:sync", name="synchronise_meta_data_table" -) -async def synchronise_meta_data_table(request: web.Request) -> web.Response: - query_params: SyncMetadataQueryParams = parse_request_query_parameters_as( - SyncMetadataQueryParams, request - ) - path_params = parse_request_path_parameters_as(LocationPathParams, request) - log.debug( - "received call to synchronise_meta_data_table with %s", - f"{path_params=}, {query_params=}", - ) - - dsm = cast( - SimcoreS3DataManager, - get_dsm_provider(request.app).get(SimcoreS3DataManager.get_location_id()), - ) - sync_results: list[StorageFileID] = [] - sync_coro = dsm.synchronise_meta_data_table(dry_run=query_params.dry_run) - - if query_params.fire_and_forget: - settings: Settings = request.app[APP_CONFIG_KEY] - - async def _go(): - timeout = settings.STORAGE_SYNC_METADATA_TIMEOUT - try: - result = await asyncio.wait_for(sync_coro, timeout=timeout) - log.info( - "Sync metadata table completed: %d entries removed", - len(result), - ) - except asyncio.TimeoutError: - log.exception("Sync metadata table timed out (%s seconds)", timeout) - - fire_and_forget_task( - _go(), - task_suffix_name="synchronise_meta_data_table", - fire_and_forget_tasks_collection=request.app[APP_FIRE_AND_FORGET_TASKS_KEY], - ) - else: - sync_results = await sync_coro - - return web.json_response( - { - "error": None, - "data": { - "removed": sync_results, - "fire_and_forget": query_params.fire_and_forget, - "dry_run": query_params.dry_run, - }, - }, - dumps=json_dumps, - ) diff --git a/services/storage/src/simcore_service_storage/handlers_simcore_s3.py b/services/storage/src/simcore_service_storage/handlers_simcore_s3.py deleted file mode 100644 index bfbe41e1806..00000000000 --- a/services/storage/src/simcore_service_storage/handlers_simcore_s3.py +++ /dev/null @@ -1,166 +0,0 @@ -import logging -from typing import cast - -from aiohttp import web -from aiohttp.web import RouteTableDef -from common_library.json_serialization import json_dumps -from models_library.api_schemas_storage import FileMetaDataGet, FoldersBody -from models_library.projects import ProjectID -from models_library.utils.fastapi_encoders import jsonable_encoder -from servicelib.aiohttp import status -from servicelib.aiohttp.long_running_tasks.server import ( - TaskProgress, - start_long_running_task, -) -from servicelib.aiohttp.requests_validation import ( - parse_request_body_as, - parse_request_path_parameters_as, - parse_request_query_parameters_as, -) -from servicelib.logging_utils import log_context -from settings_library.s3 import S3Settings - -from . 
import sts -from ._meta import API_VTAG -from .dsm import get_dsm_provider -from .models import ( - DeleteFolderQueryParams, - FileMetaData, - SearchFilesQueryParams, - SimcoreS3FoldersParams, - StorageQueryParamsBase, -) -from .simcore_s3_dsm import SimcoreS3DataManager - -_logger = logging.getLogger(__name__) - -routes = RouteTableDef() - - -@routes.post(f"/{API_VTAG}/simcore-s3:access", name="get_or_create_temporary_s3_access") -async def get_or_create_temporary_s3_access(request: web.Request) -> web.Response: - # NOTE: the name of the method is not accurate, these are not temporary at all - # it returns the credentials of the s3 backend! - query_params: StorageQueryParamsBase = parse_request_query_parameters_as( - StorageQueryParamsBase, request - ) - _logger.debug( - "received call to get_or_create_temporary_s3_access with %s", - f"{query_params=}", - ) - - s3_settings: S3Settings = await sts.get_or_create_temporary_token_for_user( - request.app, query_params.user_id - ) - return web.json_response({"data": s3_settings.model_dump()}, dumps=json_dumps) - - -async def _copy_folders_from_project( - task_progress: TaskProgress, - app: web.Application, - query_params: StorageQueryParamsBase, - body: FoldersBody, -) -> web.Response: - dsm = cast( - SimcoreS3DataManager, - get_dsm_provider(app).get(SimcoreS3DataManager.get_location_id()), - ) - with log_context( - _logger, - logging.INFO, - msg=f"copying {body.source['uuid']} -> {body.destination['uuid']}", - ): - await dsm.deep_copy_project_simcore_s3( - query_params.user_id, - body.source, - body.destination, - body.nodes_map, - task_progress=task_progress, - ) - - return web.json_response( - {"data": jsonable_encoder(body.destination)}, - status=status.HTTP_201_CREATED, - dumps=json_dumps, - ) - - -@routes.post(f"/{API_VTAG}/simcore-s3/folders", name="copy_folders_from_project") -async def copy_folders_from_project(request: web.Request) -> web.Response: - query_params: StorageQueryParamsBase = parse_request_query_parameters_as( - StorageQueryParamsBase, request - ) - body = await parse_request_body_as(FoldersBody, request) - _logger.debug( - "received call to create_folders_from_project with %s", - f"{body=}, {query_params=}", - ) - return await start_long_running_task( - request, - _copy_folders_from_project, # type: ignore[arg-type] - task_context={}, - app=request.app, - query_params=query_params, - body=body, - ) - - -@routes.delete( - f"/{API_VTAG}/simcore-s3/folders/{{folder_id}}", name="delete_folders_of_project" -) -async def delete_folders_of_project(request: web.Request) -> web.Response: - query_params: DeleteFolderQueryParams = parse_request_query_parameters_as( - DeleteFolderQueryParams, request - ) - path_params = parse_request_path_parameters_as(SimcoreS3FoldersParams, request) - _logger.debug( - "received call to delete_folders_of_project with %s", - f"{path_params=}, {query_params=}", - ) - - dsm = cast( - SimcoreS3DataManager, - get_dsm_provider(request.app).get(SimcoreS3DataManager.get_location_id()), - ) - await dsm.delete_project_simcore_s3( - query_params.user_id, - ProjectID(path_params.folder_id), - query_params.node_id, - ) - - return web.json_response(status=status.HTTP_204_NO_CONTENT) - - -@routes.post(f"/{API_VTAG}/simcore-s3/files/metadata:search", name="search_files") -async def search_files(request: web.Request) -> web.Response: - query_params: SearchFilesQueryParams = parse_request_query_parameters_as( - SearchFilesQueryParams, request - ) - - _logger.debug( - "received call to search_files with %s", - 
f"{query_params=}", - ) - - dsm = cast( - SimcoreS3DataManager, - get_dsm_provider(request.app).get(SimcoreS3DataManager.get_location_id()), - ) - - data: list[FileMetaData] = await dsm.search_owned_files( - user_id=query_params.user_id, - file_id_prefix=query_params.startswith, - sha256_checksum=query_params.sha256_checksum, - limit=query_params.limit, - offset=query_params.offset, - ) - _logger.debug( - "Found %d files starting with '%s'", - len(data), - f"{query_params.startswith=}, {query_params.sha256_checksum=}", - ) - - return web.json_response( - {"data": [jsonable_encoder(FileMetaDataGet(**d.model_dump())) for d in data]}, - dumps=json_dumps, - ) diff --git a/services/storage/src/simcore_service_storage/long_running_tasks.py b/services/storage/src/simcore_service_storage/long_running_tasks.py deleted file mode 100644 index 6f0f43c9a8a..00000000000 --- a/services/storage/src/simcore_service_storage/long_running_tasks.py +++ /dev/null @@ -1,11 +0,0 @@ -from aiohttp import web -from servicelib.aiohttp.long_running_tasks.server import setup - -from ._meta import API_VTAG - - -def setup_rest_api_long_running_tasks(app: web.Application) -> None: - setup( - app, - router_prefix=f"/{API_VTAG}/futures", - ) diff --git a/services/storage/src/simcore_service_storage/main.py b/services/storage/src/simcore_service_storage/main.py new file mode 100644 index 00000000000..abf94338662 --- /dev/null +++ b/services/storage/src/simcore_service_storage/main.py @@ -0,0 +1,22 @@ +"""Main application to be deployed in for example uvicorn.""" + +import logging + +from servicelib.logging_utils import config_all_loggers +from simcore_service_storage.core.application import create_app +from simcore_service_storage.core.settings import ApplicationSettings + +_settings = ApplicationSettings.create_from_envs() + +# SEE https://github.com/ITISFoundation/osparc-simcore/issues/3148 +logging.basicConfig(level=_settings.log_level) # NOSONAR +logging.root.setLevel(_settings.log_level) +config_all_loggers( + log_format_local_dev_enabled=_settings.STORAGE_LOG_FORMAT_LOCAL_DEV_ENABLED, + logger_filter_mapping=_settings.STORAGE_LOG_FILTER_MAPPING, + tracing_settings=_settings.STORAGE_TRACING, +) + +_logger = logging.getLogger(__name__) + +app = create_app(_settings) diff --git a/services/storage/src/simcore_service_storage/models.py b/services/storage/src/simcore_service_storage/models.py index 672694b4fc7..1e4166c89c9 100644 --- a/services/storage/src/simcore_service_storage/models.py +++ b/services/storage/src/simcore_service_storage/models.py @@ -1,18 +1,21 @@ import datetime import urllib.parse from dataclasses import dataclass -from typing import Any, Literal, NamedTuple +from pathlib import Path +from typing import Annotated, Any, Literal, NamedTuple, TypeAlias from uuid import UUID import arrow from aws_library.s3 import UploadID -from models_library.api_schemas_storage import ( +from aws_library.s3._models import S3DirectoryMetaData, S3MetaData +from models_library.api_schemas_storage.storage_schemas import ( UNDEFINED_SIZE, UNDEFINED_SIZE_TYPE, DatasetMetaDataGet, ETag, FileMetaDataGet, LinkType, + PathMetaDataGet, S3BucketName, ) from models_library.basic_types import SHA256Str @@ -36,6 +39,8 @@ ByteSize, ConfigDict, Field, + NonNegativeInt, + PlainSerializer, TypeAdapter, field_validator, model_validator, @@ -56,17 +61,31 @@ def is_uuid(value: str) -> bool: class FileMetaDataAtDB(BaseModel): - location_id: LocationID + location_id: Annotated[ + LocationID, PlainSerializer(lambda x: f"{x}", return_type=str) + ] 
location: LocationName bucket_name: S3BucketName object_name: SimcoreS3FileID - project_id: ProjectID | None = None - node_id: NodeID | None = None - user_id: UserID - created_at: datetime.datetime + project_id: Annotated[ + ProjectID | None, + PlainSerializer( + lambda x: f"{x}" if x is not None else None, return_type=str | None + ), + ] = None + node_id: Annotated[ + NodeID | None, + PlainSerializer( + lambda x: f"{x}" if x is not None else None, return_type=str | None + ), + ] = None + user_id: Annotated[UserID, PlainSerializer(lambda x: f"{x}", return_type=str)] + created_at: Annotated[datetime.datetime, PlainSerializer(lambda x: x.isoformat())] file_id: SimcoreS3FileID file_size: UNDEFINED_SIZE_TYPE | ByteSize - last_modified: datetime.datetime + last_modified: Annotated[ + datetime.datetime, PlainSerializer(lambda x: x.isoformat()) + ] entity_tag: ETag | None = None is_soft_link: bool upload_id: UploadID | None = None @@ -89,6 +108,16 @@ class FileMetaData(FileMetaDataGet): user_id: UserID | None sha256_checksum: SHA256Str | None + def update_display_fields(self, id_name_mapping: dict[str, str]) -> None: + if self.project_id: + # NOTE: this is disabled because the project_name is defined in FileMetaDataGet + # pylint: disable=attribute-defined-outside-init + self.project_name = id_name_mapping.get(f"{self.project_id}") + if self.node_id: + # NOTE: this is disabled because the node_name is defined in FileMetaDataGet + # pylint: disable=attribute-defined-outside-init + self.node_name = id_name_mapping.get(f"{self.node_id}") + @classmethod @validate_call def from_simcore_node( @@ -135,6 +164,30 @@ def from_simcore_node( fmd_kwargs.update(**file_meta_data_kwargs) return cls.model_validate(fmd_kwargs) + @classmethod + def from_db_model(cls, x: FileMetaDataAtDB) -> "FileMetaData": + return cls.model_validate( + x.model_dump() + | {"file_uuid": x.file_id, "file_name": x.file_id.split("/")[-1]} + ) + + @classmethod + def from_s3_object_in_dir( + cls, x: S3MetaData, dir_fmd: "FileMetaData" + ) -> "FileMetaData": + return dir_fmd.model_copy( + update={ + "object_name": x.object_key, + "file_id": x.object_key, + "file_size": x.size, + "entity_tag": x.e_tag, + "sha256_checksum": x.sha256_checksum, + "is_directory": False, + "created_at": x.last_modified, + "last_modified": x.last_modified, + } + ) + @dataclass class UploadLinks: @@ -144,14 +197,18 @@ class UploadLinks: class StorageQueryParamsBase(BaseModel): user_id: UserID - model_config = ConfigDict(populate_by_name=True, extra="forbid") + model_config = ConfigDict(populate_by_name=True) + + +class ListPathsQueryParams(StorageQueryParamsBase): + file_filter: Path | None = None class FilesMetadataDatasetQueryParams(StorageQueryParamsBase): expand_dirs: bool = True -class FilesMetadataQueryParams(StorageQueryParamsBase): +class FileMetadataListQueryParams(StorageQueryParamsBase): project_id: ProjectID | None = None uuid_filter: str = "" expand_dirs: bool = True @@ -162,15 +219,18 @@ class SyncMetadataQueryParams(BaseModel): fire_and_forget: bool = False +class SyncMetadataResponse(BaseModel): + removed: list[StorageFileID] + fire_and_forget: bool + dry_run: bool + + class FileDownloadQueryParams(StorageQueryParamsBase): link_type: LinkType = LinkType.PRESIGNED - @field_validator("link_type", mode="before") - @classmethod - def convert_from_lower_case(cls, v: str) -> str: - if v is not None: - return f"{v}".upper() - return v + +class FileDownloadResponse(BaseModel): + link: AnyUrl class FileUploadQueryParams(StorageQueryParamsBase): @@ -179,13 
+239,6 @@ class FileUploadQueryParams(StorageQueryParamsBase): is_directory: bool = False sha256_checksum: SHA256Str | None = None - @field_validator("link_type", mode="before") - @classmethod - def convert_from_lower_case(cls, v: str) -> str: - if v is not None: - return f"{v}".upper() - return v - @model_validator(mode="before") @classmethod def when_directory_force_link_type_and_file_size(cls, data: Any) -> Any: @@ -210,6 +263,10 @@ def is_v1_upload(self) -> bool: return self.file_size is None and self.is_directory is False +class FileUploadResponseV1(BaseModel): + link: AnyUrl + + class DeleteFolderQueryParams(StorageQueryParamsBase): node_id: NodeID | None = None @@ -275,13 +332,77 @@ class UserOrProjectFilter(NamedTuple): project_ids: list[ProjectID] -__all__ = ( - "ETag", - "FileMetaData", - "FileMetaDataAtDB", - "S3BucketName", - "SimcoreS3FileID", - "StorageFileID", - "UploadID", - "UploadLinks", -) +@dataclass(frozen=True) +class AccessRights: + read: bool + write: bool + delete: bool + + @classmethod + def all(cls) -> "AccessRights": + return cls(read=True, write=True, delete=True) + + @classmethod + def none(cls) -> "AccessRights": + return cls(read=False, write=False, delete=False) + + +TotalNumber: TypeAlias = NonNegativeInt +GenericCursor: TypeAlias = str | bytes + + +class PathMetaData(BaseModel): + path: Path + display_path: Annotated[ + Path, + Field( + description="Path with names instead of IDs (URL Encoded by parts as names may contain '/')" + ), + ] + location_id: LocationID + location: LocationName + bucket_name: str + + project_id: ProjectID | None + node_id: NodeID | None + user_id: UserID | None + created_at: datetime.datetime + last_modified: datetime.datetime + + file_meta_data: FileMetaData | None + + def update_display_fields(self, id_name_mapping: dict[str, str]) -> None: + display_path = f"{self.path}" + for old, new in id_name_mapping.items(): + display_path = display_path.replace(old, urllib.parse.quote(new, safe="")) + self.display_path = Path(display_path) + + if self.file_meta_data: + self.file_meta_data.update_display_fields(id_name_mapping) + + @classmethod + def from_s3_object_in_dir( + cls, s3_object: S3MetaData | S3DirectoryMetaData, dir_fmd: FileMetaData + ) -> "PathMetaData": + return cls( + path=s3_object.as_path(), + display_path=s3_object.as_path(), + location_id=dir_fmd.location_id, + location=dir_fmd.location, + bucket_name=dir_fmd.bucket_name, + user_id=dir_fmd.user_id, + project_id=dir_fmd.project_id, + node_id=dir_fmd.node_id, + created_at=dir_fmd.created_at, + last_modified=dir_fmd.last_modified, + file_meta_data=None + if isinstance(s3_object, S3DirectoryMetaData) + else FileMetaData.from_s3_object_in_dir(s3_object, dir_fmd), + ) + + def to_api_model(self) -> PathMetaDataGet: + return PathMetaDataGet.model_construct( + path=self.path, + display_path=self.display_path, + file_meta_data=self.file_meta_data, + ) diff --git a/services/storage/src/simcore_service_storage/modules/__init__.py b/services/storage/src/simcore_service_storage/modules/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/services/storage/src/simcore_service_storage/modules/celery/__init__.py b/services/storage/src/simcore_service_storage/modules/celery/__init__.py new file mode 100644 index 00000000000..fc6ed86c7b5 --- /dev/null +++ b/services/storage/src/simcore_service_storage/modules/celery/__init__.py @@ -0,0 +1,40 @@ +import logging +from asyncio import AbstractEventLoop + +from fastapi import FastAPI + +from ...core.settings import 
get_application_settings +from ._celery_types import register_celery_types +from ._common import create_app +from .client import CeleryTaskQueueClient + +_logger = logging.getLogger(__name__) + + +def setup_celery_client(app: FastAPI) -> None: + async def on_startup() -> None: + celery_settings = get_application_settings(app).STORAGE_CELERY + assert celery_settings # nosec + celery_app = create_app(celery_settings) + app.state.celery_client = CeleryTaskQueueClient(celery_app) + + register_celery_types() + + app.add_event_handler("startup", on_startup) + + +def get_celery_client(app: FastAPI) -> CeleryTaskQueueClient: + assert hasattr(app.state, "celery_client") # nosec + celery_client = app.state.celery_client + assert isinstance(celery_client, CeleryTaskQueueClient) + return celery_client + + +def get_event_loop(app: FastAPI) -> AbstractEventLoop: + event_loop = app.state.event_loop + assert isinstance(event_loop, AbstractEventLoop) + return event_loop + + +def set_event_loop(app: FastAPI, event_loop: AbstractEventLoop) -> None: + app.state.event_loop = event_loop diff --git a/services/storage/src/simcore_service_storage/modules/celery/_celery_types.py b/services/storage/src/simcore_service_storage/modules/celery/_celery_types.py new file mode 100644 index 00000000000..7fb44e087d5 --- /dev/null +++ b/services/storage/src/simcore_service_storage/modules/celery/_celery_types.py @@ -0,0 +1,61 @@ +from functools import partial +from pathlib import Path +from typing import Any + +from kombu.utils.json import register_type # type: ignore[import-untyped] +from models_library.api_schemas_storage.storage_schemas import ( + FileUploadCompletionBody, + FoldersBody, +) +from pydantic import BaseModel + +from ...models import FileMetaData +from ...modules.celery.models import TaskError + + +def _path_encoder(obj): + if isinstance(obj, Path): + return {"__path__": True, "path": str(obj)} + return obj + + +# Define how Path objects are deserialized +def _path_decoder(obj): + if "__path__" in obj: + return Path(obj["path"]) + return obj + + +def _class_full_name(clz: type) -> str: + return ".".join([clz.__module__, clz.__qualname__]) + + +def _encoder(obj: BaseModel, *args, **kwargs) -> dict[str, Any]: + return obj.model_dump(*args, **kwargs, mode="json") + + +def _decoder(clz: type[BaseModel], data: dict[str, Any]) -> BaseModel: + return clz(**data) + + +def _register_pydantic_types(*models: type[BaseModel]) -> None: + for model in models: + register_type( + model, + _class_full_name(model), + encoder=_encoder, + decoder=partial(_decoder, model), + ) + + +def register_celery_types() -> None: + register_type( + Path, + _class_full_name(Path), + _path_encoder, + _path_decoder, + ) + _register_pydantic_types(FileUploadCompletionBody) + _register_pydantic_types(FileMetaData) + _register_pydantic_types(FoldersBody) + _register_pydantic_types(TaskError) diff --git a/services/storage/src/simcore_service_storage/modules/celery/_common.py b/services/storage/src/simcore_service_storage/modules/celery/_common.py new file mode 100644 index 00000000000..b479408053f --- /dev/null +++ b/services/storage/src/simcore_service_storage/modules/celery/_common.py @@ -0,0 +1,31 @@ +import logging +import ssl + +from celery import Celery # type: ignore[import-untyped] +from settings_library.celery import CelerySettings +from settings_library.redis import RedisDatabase + +_logger = logging.getLogger(__name__) + + +def create_app(celery_settings: CelerySettings) -> Celery: + assert celery_settings + + app = Celery( + 
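+        # NOTE: RabbitMQ serves as the message broker, Redis as the result backend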
broker=celery_settings.CELERY_RABBIT_BROKER.dsn, + backend=celery_settings.CELERY_REDIS_RESULT_BACKEND.build_redis_dsn( + RedisDatabase.CELERY_TASKS, + ), + ) + app.conf.broker_connection_retry_on_startup = True + # NOTE: disable SSL cert validation (https://github.com/ITISFoundation/osparc-simcore/pull/7407) + if celery_settings.CELERY_REDIS_RESULT_BACKEND.REDIS_SECURE: + app.conf.redis_backend_use_ssl = {"ssl_cert_reqs": ssl.CERT_NONE} + app.conf.result_expires = celery_settings.CELERY_RESULT_EXPIRES + app.conf.result_extended = True # original args are included in the results + app.conf.result_serializer = "json" + app.conf.task_send_sent_event = True + app.conf.task_track_started = True + app.conf.worker_send_task_events = True # enable tasks monitoring + + return app diff --git a/services/storage/src/simcore_service_storage/modules/celery/_task.py b/services/storage/src/simcore_service_storage/modules/celery/_task.py new file mode 100644 index 00000000000..6e735a8be81 --- /dev/null +++ b/services/storage/src/simcore_service_storage/modules/celery/_task.py @@ -0,0 +1,117 @@ +import asyncio +import inspect +import logging +import traceback +from collections.abc import Callable, Coroutine +from functools import wraps +from typing import Any, Concatenate, ParamSpec, TypeVar, overload + +from celery import Celery # type: ignore[import-untyped] +from celery.contrib.abortable import AbortableTask # type: ignore[import-untyped] +from celery.exceptions import Ignore # type: ignore[import-untyped] + +from . import get_event_loop +from .models import TaskError, TaskId, TaskState +from .utils import get_fastapi_app + +_logger = logging.getLogger(__name__) + + +def error_handling(func: Callable[..., Any]) -> Callable[..., Any]: + @wraps(func) + def wrapper(task: AbortableTask, *args: Any, **kwargs: Any) -> Any: + try: + return func(task, *args, **kwargs) + except Exception as exc: + exc_type = type(exc).__name__ + exc_message = f"{exc}" + exc_traceback = traceback.format_exc().split("\n") + + _logger.exception( + "Task %s failed with exception: %s:%s", + task.request.id, + exc_type, + exc_message, + ) + + task.update_state( + state=TaskState.ERROR.upper(), + meta=TaskError( + exc_type=exc_type, + exc_msg=exc_message, + ).model_dump(mode="json"), + traceback=exc_traceback, + ) + raise Ignore from exc # ignore doing state updates + + return wrapper + + +T = TypeVar("T") +P = ParamSpec("P") +R = TypeVar("R") + + +def _async_task_wrapper( + app: Celery, +) -> Callable[ + [Callable[Concatenate[AbortableTask, TaskId, P], Coroutine[Any, Any, R]]], + Callable[Concatenate[AbortableTask, P], R], +]: + def decorator( + coro: Callable[Concatenate[AbortableTask, TaskId, P], Coroutine[Any, Any, R]], + ) -> Callable[Concatenate[AbortableTask, P], R]: + @wraps(coro) + def wrapper(task: AbortableTask, *args: P.args, **kwargs: P.kwargs) -> R: + fastapi_app = get_fastapi_app(app) + _logger.debug("task id: %s", task.request.id) + # NOTE: task.request is a thread local object, so we need to pass the id explicitly + assert task.request.id is not None # nosec + return asyncio.run_coroutine_threadsafe( + coro(task, task.request.id, *args, **kwargs), + get_event_loop(fastapi_app), + ).result() + + return wrapper + + return decorator + + +@overload +def define_task( + app: Celery, + fn: Callable[Concatenate[AbortableTask, TaskId, P], Coroutine[Any, Any, R]], + task_name: str | None = None, +) -> None: ... 
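+# NOTE: two overloads: `define_task` accepts either a coroutine (which also
+# receives the TaskId and runs on the app's event loop) or a plain sync
+# callable; both get wrapped with `error_handling` and are registered under
+# `task_name or fn.__name__`, e.g. `define_task(celery_app, export_data)`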
+ + +@overload +def define_task( + app: Celery, + fn: Callable[Concatenate[AbortableTask, P], R], + task_name: str | None = None, +) -> None: ... + + +def define_task( # type: ignore[misc] + app: Celery, + fn: ( + Callable[Concatenate[AbortableTask, TaskId, P], Coroutine[Any, Any, R]] + | Callable[Concatenate[AbortableTask, P], R] + ), + task_name: str | None = None, +) -> None: + """Decorator to define a celery task with error handling and abortable support""" + wrapped_fn: Callable[Concatenate[AbortableTask, P], R] + if asyncio.iscoroutinefunction(fn): + wrapped_fn = _async_task_wrapper(app)(fn) + else: + assert inspect.isfunction(fn) # nosec + wrapped_fn = fn + + wrapped_fn = error_handling(wrapped_fn) + app.task( + name=task_name or fn.__name__, + bind=True, + base=AbortableTask, + )(wrapped_fn) diff --git a/services/storage/src/simcore_service_storage/modules/celery/client.py b/services/storage/src/simcore_service_storage/modules/celery/client.py new file mode 100644 index 00000000000..d5b1d1b88af --- /dev/null +++ b/services/storage/src/simcore_service_storage/modules/celery/client.py @@ -0,0 +1,176 @@ +import contextlib +import logging +from dataclasses import dataclass +from typing import Any, Final +from uuid import uuid4 + +from celery import Celery # type: ignore[import-untyped] +from celery.contrib.abortable import ( # type: ignore[import-untyped] + AbortableAsyncResult, +) +from common_library.async_tools import make_async +from models_library.progress_bar import ProgressReport +from pydantic import ValidationError +from servicelib.logging_utils import log_context + +from ...exceptions.errors import ConfigurationError +from .models import TaskContext, TaskID, TaskState, TaskStatus, TaskUUID + +_logger = logging.getLogger(__name__) + +_CELERY_INSPECT_TASK_STATUSES: Final[tuple[str, ...]] = ( + "active", + "scheduled", + "revoked", +) +_CELERY_TASK_META_PREFIX: Final[str] = "celery-task-meta-" +_CELERY_STATES_MAPPING: Final[dict[str, TaskState]] = { + "PENDING": TaskState.PENDING, + "STARTED": TaskState.PENDING, + "RETRY": TaskState.PENDING, + "RUNNING": TaskState.RUNNING, + "SUCCESS": TaskState.SUCCESS, + "ABORTED": TaskState.ABORTED, + "FAILURE": TaskState.ERROR, + "ERROR": TaskState.ERROR, +} +_CELERY_TASK_ID_KEY_SEPARATOR: Final[str] = ":" +_CELERY_TASK_ID_KEY_ENCODING = "utf-8" + +_MIN_PROGRESS_VALUE = 0.0 +_MAX_PROGRESS_VALUE = 100.0 + + +def _build_context_prefix(task_context: TaskContext) -> list[str]: + return [f"{task_context[key]}" for key in sorted(task_context)] + + +def _build_task_id_prefix(task_context: TaskContext) -> str: + return _CELERY_TASK_ID_KEY_SEPARATOR.join(_build_context_prefix(task_context)) + + +def _build_task_id(task_context: TaskContext, task_uuid: TaskUUID) -> TaskID: + return _CELERY_TASK_ID_KEY_SEPARATOR.join( + [_build_task_id_prefix(task_context), f"{task_uuid}"] + ) + + +@dataclass +class CeleryTaskQueueClient: + _celery_app: Celery + + @make_async() + def send_task( + self, task_name: str, *, task_context: TaskContext, **task_params + ) -> TaskUUID: + with log_context( + _logger, + logging.DEBUG, + msg=f"Submit {task_name=}: {task_context=} {task_params=}", + ): + task_uuid = uuid4() + task_id = _build_task_id(task_context, task_uuid) + self._celery_app.send_task(task_name, task_id=task_id, kwargs=task_params) + return task_uuid + + @staticmethod + @make_async() + def abort_task(task_context: TaskContext, task_uuid: TaskUUID) -> None: + with log_context( + _logger, + logging.DEBUG, + msg=f"Abort task {task_uuid=}: {task_context=}", + ): + 
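+            # NOTE: aborting only flags the task in the result backend;
+            # the task body must poll `self.is_aborted()` to stop early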
task_id = _build_task_id(task_context, task_uuid) + AbortableAsyncResult(task_id).abort() + + @make_async() + def get_task_result(self, task_context: TaskContext, task_uuid: TaskUUID) -> Any: + with log_context( + _logger, + logging.DEBUG, + msg=f"Get task {task_uuid=}: {task_context=} result", + ): + task_id = _build_task_id(task_context, task_uuid) + return self._celery_app.AsyncResult(task_id).result + + def _get_progress_report( + self, task_context: TaskContext, task_uuid: TaskUUID + ) -> ProgressReport: + task_id = _build_task_id(task_context, task_uuid) + result = self._celery_app.AsyncResult(task_id).result + state = self._get_state(task_context, task_uuid) + if result and state == TaskState.RUNNING: + with contextlib.suppress(ValidationError): + # avoids exception if result is not a ProgressReport (or overwritten by a Celery's state update) + return ProgressReport.model_validate(result) + if state in ( + TaskState.ABORTED, + TaskState.ERROR, + TaskState.SUCCESS, + ): + return ProgressReport(actual_value=_MAX_PROGRESS_VALUE) + return ProgressReport(actual_value=_MIN_PROGRESS_VALUE) + + def _get_state(self, task_context: TaskContext, task_uuid: TaskUUID) -> TaskState: + task_id = _build_task_id(task_context, task_uuid) + return _CELERY_STATES_MAPPING[self._celery_app.AsyncResult(task_id).state] + + @make_async() + def get_task_status( + self, task_context: TaskContext, task_uuid: TaskUUID + ) -> TaskStatus: + return TaskStatus( + task_uuid=task_uuid, + task_state=self._get_state(task_context, task_uuid), + progress_report=self._get_progress_report(task_context, task_uuid), + ) + + def _get_completed_task_uuids(self, task_context: TaskContext) -> set[TaskUUID]: + search_key = _CELERY_TASK_META_PREFIX + _build_task_id_prefix(task_context) + backend_client = self._celery_app.backend.client + if hasattr(backend_client, "keys") and ( + keys := backend_client.keys(f"{search_key}*") + ): + return { + TaskUUID( + f"{key.decode(_CELERY_TASK_ID_KEY_ENCODING).removeprefix(search_key + _CELERY_TASK_ID_KEY_SEPARATOR)}" + ) + for key in keys + } + if hasattr(backend_client, "cache"): + # NOTE: backend used in testing. 
It is a dict-like object + found_keys = set() + for key in backend_client.cache: + str_key = key.decode(_CELERY_TASK_ID_KEY_ENCODING) + if str_key.startswith(search_key): + found_keys.add( + TaskUUID( + f"{str_key.removeprefix(search_key + _CELERY_TASK_ID_KEY_SEPARATOR)}" + ) + ) + return found_keys + msg = f"Unsupported backend {self._celery_app.backend.__class__.__name__}" + raise ConfigurationError(msg=msg) + + @make_async() + def get_task_uuids(self, task_context: TaskContext) -> set[TaskUUID]: + task_uuids = self._get_completed_task_uuids(task_context) + + task_id_prefix = _build_task_id_prefix(task_context) + inspect = self._celery_app.control.inspect() + for task_inspect_status in _CELERY_INSPECT_TASK_STATUSES: + tasks = getattr(inspect, task_inspect_status)() or {} + + task_uuids.update( + TaskUUID( + task_info["id"].removeprefix( + task_id_prefix + _CELERY_TASK_ID_KEY_SEPARATOR + ) + ) + for tasks_per_worker in tasks.values() + for task_info in tasks_per_worker + if "id" in task_info + ) + + return task_uuids diff --git a/services/storage/src/simcore_service_storage/modules/celery/models.py b/services/storage/src/simcore_service_storage/modules/celery/models.py new file mode 100644 index 00000000000..6b72a6e0019 --- /dev/null +++ b/services/storage/src/simcore_service_storage/modules/celery/models.py @@ -0,0 +1,60 @@ +from enum import StrEnum, auto +from typing import Any, Final, Self, TypeAlias +from uuid import UUID + +from models_library.progress_bar import ProgressReport +from pydantic import BaseModel, model_validator + +TaskContext: TypeAlias = dict[str, Any] +TaskID: TypeAlias = str +TaskUUID: TypeAlias = UUID + +_MIN_PROGRESS: Final[float] = 0.0 +_MAX_PROGRESS: Final[float] = 100.0 + + +class TaskState(StrEnum): + PENDING = auto() + RUNNING = auto() + SUCCESS = auto() + ERROR = auto() + ABORTED = auto() + + +_TASK_DONE = {TaskState.SUCCESS, TaskState.ERROR, TaskState.ABORTED} + + +class TaskStatus(BaseModel): + task_uuid: TaskUUID + task_state: TaskState + progress_report: ProgressReport + + @property + def is_done(self) -> bool: + return self.task_state in _TASK_DONE + + @model_validator(mode="after") + def _check_consistency(self) -> Self: + value = self.progress_report.actual_value + + valid_states = { + TaskState.PENDING: value == _MIN_PROGRESS, + TaskState.RUNNING: _MIN_PROGRESS <= value <= _MAX_PROGRESS, + TaskState.SUCCESS: value == _MAX_PROGRESS, + TaskState.ABORTED: value == _MAX_PROGRESS, + TaskState.ERROR: value == _MAX_PROGRESS, + } + + if not valid_states.get(self.task_state, True): + msg = f"Inconsistent progress actual value for state={self.task_state}: {value}" + raise ValueError(msg) + + return self + + +class TaskError(BaseModel): + exc_type: str + exc_msg: str + + +TaskId: TypeAlias = str diff --git a/services/storage/src/simcore_service_storage/modules/celery/signals.py b/services/storage/src/simcore_service_storage/modules/celery/signals.py new file mode 100644 index 00000000000..7f3835286bc --- /dev/null +++ b/services/storage/src/simcore_service_storage/modules/celery/signals.py @@ -0,0 +1,71 @@ +import asyncio +import logging +import threading +from typing import Final + +from asgi_lifespan import LifespanManager +from celery import Celery # type: ignore[import-untyped] +from fastapi import FastAPI +from servicelib.async_utils import cancel_wait_task + +from ...core.application import create_app +from ...core.settings import ApplicationSettings +from ...modules.celery import get_event_loop, set_event_loop +from ...modules.celery.utils import ( + 
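+    # helpers that stash/retrieve the FastAPI app and the Celery worker on `app.conf`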
get_fastapi_app, + set_celery_worker, + set_fastapi_app, +) +from ...modules.celery.worker import CeleryTaskQueueWorker + +_logger = logging.getLogger(__name__) + +_LIFESPAN_TIMEOUT: Final[int] = 10 + + +def on_worker_init(sender, **_kwargs) -> None: + def _init_fastapi() -> None: + loop = asyncio.new_event_loop() + asyncio.set_event_loop(loop) + shutdown_event = asyncio.Event() + + fastapi_app = create_app(ApplicationSettings.create_from_envs()) + + async def lifespan(): + async with LifespanManager( + fastapi_app, + startup_timeout=_LIFESPAN_TIMEOUT, + shutdown_timeout=_LIFESPAN_TIMEOUT, + ): + try: + await shutdown_event.wait() + except asyncio.CancelledError: + _logger.warning("Lifespan task cancelled") + + lifespan_task = loop.create_task(lifespan()) + fastapi_app.state.lifespan_task = lifespan_task + fastapi_app.state.shutdown_event = shutdown_event + set_event_loop(fastapi_app, loop) + + set_fastapi_app(sender.app, fastapi_app) + set_celery_worker(sender.app, CeleryTaskQueueWorker(sender.app)) + + loop.run_forever() + + thread = threading.Thread(target=_init_fastapi, daemon=True) + thread.start() + + +def on_worker_shutdown(sender, **_kwargs): + assert isinstance(sender.app, Celery) + + fastapi_app = get_fastapi_app(sender.app) + assert isinstance(fastapi_app, FastAPI) + event_loop = get_event_loop(fastapi_app) + + async def shutdown(): + fastapi_app.state.shutdown_event.set() + + await cancel_wait_task(fastapi_app.state.lifespan_task, max_delay=5) + + asyncio.run_coroutine_threadsafe(shutdown(), event_loop) diff --git a/services/storage/src/simcore_service_storage/modules/celery/tasks.py b/services/storage/src/simcore_service_storage/modules/celery/tasks.py new file mode 100644 index 00000000000..014151acd74 --- /dev/null +++ b/services/storage/src/simcore_service_storage/modules/celery/tasks.py @@ -0,0 +1,29 @@ +import logging +import time + +from celery import Task # type: ignore[import-untyped] +from models_library.progress_bar import ProgressReport +from models_library.projects_nodes_io import StorageFileID +from servicelib.logging_utils import log_context + +from .utils import get_celery_worker + +_logger = logging.getLogger(__name__) + + +def export_data(task: Task, files: list[StorageFileID]): + _logger.info("Exporting files: %s", files) + for n, file in enumerate(files, start=1): + with log_context( + _logger, + logging.INFO, + msg=f"Exporting {file=} ({n}/{len(files)})", + ): + assert task.name + get_celery_worker(task.app).set_task_progress( + task_name=task.name, + task_id=task.request.id, + report=ProgressReport(actual_value=n / len(files) * 100), + ) + time.sleep(10) + return "done" diff --git a/services/storage/src/simcore_service_storage/modules/celery/utils.py b/services/storage/src/simcore_service_storage/modules/celery/utils.py new file mode 100644 index 00000000000..88c346ea211 --- /dev/null +++ b/services/storage/src/simcore_service_storage/modules/celery/utils.py @@ -0,0 +1,27 @@ +from celery import Celery # type: ignore[import-untyped] +from fastapi import FastAPI + +from .worker import CeleryTaskQueueWorker + +_WORKER_KEY = "celery_worker" +_FASTAPI_APP_KEY = "fastapi_app" + + +def get_celery_worker(celery_app: Celery) -> CeleryTaskQueueWorker: + worker = celery_app.conf[_WORKER_KEY] + assert isinstance(worker, CeleryTaskQueueWorker) + return worker + + +def get_fastapi_app(celery_app: Celery) -> FastAPI: + fastapi_app = celery_app.conf[_FASTAPI_APP_KEY] + assert isinstance(fastapi_app, FastAPI) + return fastapi_app + + +def set_celery_worker(celery_app: 
Celery, worker: CeleryTaskQueueWorker) -> None: + celery_app.conf[_WORKER_KEY] = worker + + +def set_fastapi_app(celery_app: Celery, fastapi_app: FastAPI) -> None: + celery_app.conf[_FASTAPI_APP_KEY] = fastapi_app diff --git a/services/storage/src/simcore_service_storage/modules/celery/worker.py b/services/storage/src/simcore_service_storage/modules/celery/worker.py new file mode 100644 index 00000000000..c0983734905 --- /dev/null +++ b/services/storage/src/simcore_service_storage/modules/celery/worker.py @@ -0,0 +1,28 @@ +import logging + +from celery import Celery # type: ignore[import-untyped] +from models_library.progress_bar import ProgressReport +from servicelib.logging_utils import log_context + +from .models import TaskID + +_logger = logging.getLogger(__name__) + + +class CeleryTaskQueueWorker: + def __init__(self, celery_app: Celery) -> None: + self.celery_app = celery_app + + def set_task_progress( + self, task_name: str, task_id: TaskID, report: ProgressReport + ) -> None: + with log_context( + _logger, + logging.DEBUG, + msg=f"Setting progress for {task_name}: {report.model_dump_json()}", + ): + self.celery_app.tasks[task_name].update_state( + task_id=task_id, + state="RUNNING", + meta=report.model_dump(mode="json"), + ) diff --git a/services/storage/src/simcore_service_storage/modules/celery/worker_main.py b/services/storage/src/simcore_service_storage/modules/celery/worker_main.py new file mode 100644 index 00000000000..99b9a53676e --- /dev/null +++ b/services/storage/src/simcore_service_storage/modules/celery/worker_main.py @@ -0,0 +1,34 @@ +"""Celery worker main application: connects the worker signals and registers tasks.""" + +import logging + +from celery.signals import worker_init, worker_shutdown # type: ignore[import-untyped] +from servicelib.logging_utils import config_all_loggers +from simcore_service_storage.api._worker_tasks.tasks import setup_worker_tasks + +from ...core.settings import ApplicationSettings +from ._common import create_app as create_celery_app +from .signals import ( + on_worker_init, + on_worker_shutdown, +) + +_settings = ApplicationSettings.create_from_envs() + +logging.basicConfig(level=_settings.log_level) # NOSONAR +logging.root.setLevel(_settings.log_level) +config_all_loggers( + log_format_local_dev_enabled=_settings.STORAGE_LOG_FORMAT_LOCAL_DEV_ENABLED, + logger_filter_mapping=_settings.STORAGE_LOG_FILTER_MAPPING, + tracing_settings=_settings.STORAGE_TRACING, +) + +_logger = logging.getLogger(__name__) + +assert _settings.STORAGE_CELERY +app = create_celery_app(_settings.STORAGE_CELERY) +worker_init.connect(on_worker_init) +worker_shutdown.connect(on_worker_shutdown) + + +setup_worker_tasks(app) diff --git a/services/storage/src/simcore_service_storage/modules/datcore_adapter/__init__.py b/services/storage/src/simcore_service_storage/modules/datcore_adapter/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/services/storage/src/simcore_service_storage/modules/datcore_adapter/datcore_adapter.py b/services/storage/src/simcore_service_storage/modules/datcore_adapter/datcore_adapter.py new file mode 100644 index 00000000000..0f5ec1dae12 --- /dev/null +++ b/services/storage/src/simcore_service_storage/modules/datcore_adapter/datcore_adapter.py @@ -0,0 +1,377 @@ +import logging +from typing import Any, TypeAlias, cast + +import httpx +from fastapi import FastAPI, status +from fastapi_pagination import Page +from models_library.api_schemas_datcore_adapter.datasets import ( + DatasetMetaData as DatCoreDatasetMetaData, +) +from 
models_library.api_schemas_datcore_adapter.datasets import ( + FileMetaData as DatCoreFileMetaData, +) +from models_library.api_schemas_datcore_adapter.datasets import ( + PackageMetaData, +) +from models_library.api_schemas_storage.storage_schemas import ( + DatCoreCollectionName, + DatCoreDatasetName, + DatCorePackageName, +) +from models_library.users import UserID +from pydantic import AnyUrl, BaseModel, ByteSize, NonNegativeInt, TypeAdapter +from servicelib.fastapi.client_session import get_client_session +from servicelib.utils import logged_gather + +from ...constants import DATCORE_ID, DATCORE_STR, MAX_CONCURRENT_REST_CALLS +from ...core.settings import get_application_settings +from ...models import ( + DatasetMetaData, + FileMetaData, + GenericCursor, + PathMetaData, + TotalNumber, +) +from .datcore_adapter_client_utils import request, retrieve_all_pages +from .datcore_adapter_exceptions import ( + DatcoreAdapterError, + DatcoreAdapterFileNotFoundError, + DatcoreAdapterResponseError, +) +from .utils import ( + create_path_meta_data_from_datcore_fmd, + create_path_meta_data_from_datcore_package, +) + +_logger = logging.getLogger(__name__) + + +async def check_service_health(app: FastAPI) -> bool: + datcore_adapter_settings = get_application_settings(app).DATCORE_ADAPTER + url = datcore_adapter_settings.endpoint + "/ready" + session = get_client_session(app) + try: + response = await session.get(url) + response.raise_for_status() + except (TimeoutError, httpx.HTTPError):  # NOTE: also covers connection errors + return False + return True + + +async def check_user_can_connect(app: FastAPI, api_key: str, api_secret: str) -> bool: + if not api_key or not api_secret: + # no need to ask, datcore is an authenticated service + return False + + try: + await request(app, api_key, api_secret, "GET", "/user/profile") + return True + except DatcoreAdapterError: + return False + + +async def list_all_datasets_files_metadatas( + app: FastAPI, user_id: UserID, api_key: str, api_secret: str +) -> list[FileMetaData]: + all_datasets: list[DatasetMetaData] = await list_all_datasets( + app, api_key, api_secret + ) + results = await logged_gather( + *( + list_all_files_metadatas_in_dataset( + app, + user_id, + api_key, + api_secret, + cast(DatCoreDatasetName, d.dataset_id), + ) + for d in all_datasets + ), + log=_logger, + max_concurrency=MAX_CONCURRENT_REST_CALLS, + ) + all_files_of_all_datasets: list[FileMetaData] = [] + for data in results: + all_files_of_all_datasets += data + return all_files_of_all_datasets + + +_LIST_ALL_DATASETS_TIMEOUT_S = 60 + + +async def list_all_files_metadatas_in_dataset( + app: FastAPI, + user_id: UserID, + api_key: str, + api_secret: str, + dataset_id: DatCoreDatasetName, +) -> list[FileMetaData]: + all_files: list[dict[str, Any]] = cast( + list[dict[str, Any]], + await request( + app, + api_key, + api_secret, + "GET", + f"/datasets/{dataset_id}/files_legacy", + timeout=_LIST_ALL_DATASETS_TIMEOUT_S, + ), + ) + return [ + FileMetaData.model_construct( + file_uuid=d["path"], + location_id=DATCORE_ID, + location=DATCORE_STR, + bucket_name=d["dataset_id"], + object_name=d["path"], + file_name=d["name"], + file_id=d["package_id"], + file_size=d["size"], + created_at=d["created_at"], + last_modified=d["last_modified_at"], + project_id=None, + node_id=None, + user_id=user_id, + is_soft_link=False, + ) + for d in all_files + ] + + +_Size: TypeAlias = NonNegativeInt +_Page: TypeAlias = NonNegativeInt + + +class CursorParameters(BaseModel): + next_page: _Page + size: _Size
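+ + +# NOTE: sketch added for clarity (values illustrative): the opaque cursor is simply +# this model serialized as JSON, so a follow-up call resumes where the last one stopped: +#     cursor = CursorParameters(next_page=2, size=10).model_dump_json() +#     _init_pagination(cursor, limit=10)  # -> (2, 10) +#     _init_pagination(None, limit=10)    # -> (1, 10), i.e. the first page +def _init_pagination( + cursor: 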
GenericCursor | None, limit: NonNegativeInt +) -> tuple[_Page, _Size]: + if cursor is not None: + cursor_params = CursorParameters.model_validate_json(cursor) + return cursor_params.next_page, cursor_params.size + return 1, limit + + +def _create_next_cursor( + total: TotalNumber, page: _Page, size: _Size +) -> GenericCursor | None: + if total > page * size: + return CursorParameters.model_validate( + {"next_page": page + 1, "size": size} + ).model_dump_json() + return None + + +async def _list_top_level_objects( + app: FastAPI, + *, + user_id: UserID, + api_key: str, + api_secret: str, + cursor: GenericCursor | None, + limit: NonNegativeInt, + request_path: str, +) -> tuple[list[PathMetaData], GenericCursor | None, TotalNumber]: + page, size = _init_pagination(cursor, limit) + response = await request( + app, + api_key, + api_secret, + "GET", + request_path, + params={"size": size, "page": page}, + ) + assert isinstance(response, dict) # nosec + file_metadata_page = Page[DatCoreFileMetaData](**response) + entries = file_metadata_page.items + total = file_metadata_page.total + assert isinstance(total, int) # nosec + next_cursor = _create_next_cursor(total, page, size) + + return ( + [create_path_meta_data_from_datcore_fmd(user_id, e) for e in entries], + next_cursor, + total, + ) + + +async def list_top_level_objects_in_dataset( + app: FastAPI, + *, + user_id: UserID, + api_key: str, + api_secret: str, + dataset_id: DatCoreDatasetName, + cursor: GenericCursor | None, + limit: NonNegativeInt, +) -> tuple[list[PathMetaData], GenericCursor | None, TotalNumber]: + return await _list_top_level_objects( + app, + user_id=user_id, + api_key=api_key, + api_secret=api_secret, + cursor=cursor, + limit=limit, + request_path=f"/datasets/{dataset_id}/files", + ) + + +async def list_top_level_objects_in_collection( + app: FastAPI, + *, + user_id: UserID, + api_key: str, + api_secret: str, + dataset_id: DatCoreDatasetName, + collection_id: DatCoreCollectionName, + cursor: GenericCursor | None, + limit: NonNegativeInt, +) -> tuple[list[PathMetaData], GenericCursor | None, TotalNumber]: + return await _list_top_level_objects( + app, + user_id=user_id, + api_key=api_key, + api_secret=api_secret, + cursor=cursor, + limit=limit, + request_path=f"/datasets/{dataset_id}/files/{collection_id}", + ) + + +async def get_package_file_as_path( + app: FastAPI, + *, + user_id: UserID, + api_key: str, + api_secret: str, + dataset_id: DatCoreDatasetName, + package_id: DatCorePackageName, +) -> PathMetaData: + pck_files = await get_package_files( + app, + api_key=api_key, + api_secret=api_secret, + package_id=package_id, + ) + + assert len(pck_files) == 1 # nosec + return create_path_meta_data_from_datcore_package( + user_id, + dataset_id, + pck_files[0], + ) + + +async def list_all_datasets( + app: FastAPI, api_key: str, api_secret: str +) -> list[DatasetMetaData]: + all_datasets: list[DatasetMetaData] = await retrieve_all_pages( + app, + api_key, + api_secret, + "GET", + "/datasets", + lambda d: DatasetMetaData(dataset_id=d["id"], display_name=d["display_name"]), + ) + + return all_datasets + + +async def list_datasets( + app: FastAPI, + *, + api_key: str, + api_secret: str, + cursor: GenericCursor | None, + limit: NonNegativeInt, +) -> tuple[list[DatasetMetaData], GenericCursor | None, TotalNumber]: + page, size = _init_pagination(cursor, limit) + + response = await request( + app, + api_key, + api_secret, + "GET", + "/datasets", + params={"size": size, "page": page}, + ) + assert isinstance(response, dict) # nosec + 
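# NOTE: the response is a serialized fastapi_pagination Page; its items/total drive the next-cursor computation below + 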
datasets_page = Page[DatCoreDatasetMetaData](**response) + datasets = datasets_page.items + total = datasets_page.total + + assert isinstance(total, int) # nosec + next_cursor = _create_next_cursor(total, page, size) + + return ( + [ + DatasetMetaData(dataset_id=d.id, display_name=d.display_name) + for d in datasets + ], + next_cursor, + total, + ) + + +async def get_dataset( + app: FastAPI, + *, + api_key: str, + api_secret: str, + dataset_id: DatCoreDatasetName, +) -> tuple[DatasetMetaData, ByteSize | None]: + response = await request( + app, + api_key, + api_secret, + "GET", + f"/datasets/{dataset_id}", + ) + assert isinstance(response, dict) # nosec + datcore_dataset = DatCoreDatasetMetaData(**response) + + return ( + DatasetMetaData( + dataset_id=datcore_dataset.id, display_name=datcore_dataset.display_name + ), + datcore_dataset.size, + ) + + +async def get_file_download_presigned_link( + app: FastAPI, api_key: str, api_secret: str, file_id: str +) -> AnyUrl: + try: + file_download_data = cast( + dict[str, Any], + await request(app, api_key, api_secret, "GET", f"/files/{file_id}"), + ) + return TypeAdapter(AnyUrl).validate_python(file_download_data["link"]) + except DatcoreAdapterResponseError as exc: + if exc.status == status.HTTP_404_NOT_FOUND: + raise DatcoreAdapterFileNotFoundError(file_id=file_id) from exc + raise + + +async def get_package_files( + app: FastAPI, *, api_key: str, api_secret: str, package_id: str +) -> list[PackageMetaData]: + return TypeAdapter(list[PackageMetaData]).validate_python( + await request( + app, + api_key, + api_secret, + "GET", + f"/packages/{package_id}/files", + ) + ) + + +async def delete_file( + app: FastAPI, api_key: str, api_secret: str, file_id: str +) -> None: + await request(app, api_key, api_secret, "DELETE", f"/files/{file_id}") diff --git a/services/storage/src/simcore_service_storage/modules/datcore_adapter/datcore_adapter_client_utils.py b/services/storage/src/simcore_service_storage/modules/datcore_adapter/datcore_adapter_client_utils.py new file mode 100644 index 00000000000..3972d07d72d --- /dev/null +++ b/services/storage/src/simcore_service_storage/modules/datcore_adapter/datcore_adapter_client_utils.py @@ -0,0 +1,99 @@ +import logging +from collections.abc import Callable +from math import ceil +from typing import Any, TypeVar, cast + +import httpx +from fastapi import FastAPI +from servicelib.fastapi.client_session import get_client_session + +from ...core.settings import get_application_settings +from .datcore_adapter_exceptions import ( + DatcoreAdapterClientError, + DatcoreAdapterResponseError, + DatcoreAdapterTimeoutError, +) + +_logger = logging.getLogger(__name__) + + +async def request( + app: FastAPI, + api_key: str, + api_secret: str, + method: str, + path: str, + *, + json: dict[str, Any] | None = None, + params: dict[str, Any] | None = None, + **request_kwargs, +) -> dict[str, Any] | list[dict[str, Any]]: + datcore_adapter_settings = get_application_settings(app).DATCORE_ADAPTER + url = datcore_adapter_settings.endpoint + path + session = get_client_session(app) + + try: + response = await session.request( + method.upper(), + url, + headers={ + "x-datcore-api-key": api_key, + "x-datcore-api-secret": api_secret, + }, + json=json, + params=params, + **request_kwargs, + ) + response.raise_for_status() + response_data = response.json() + assert isinstance(response_data, dict | list) # nosec + return response_data
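+ + # NOTE: sketch added for clarity (illustrative): a call such as + #     await request(app, api_key, api_secret, "GET", "/user/profile") + # either returns the parsed JSON body above or raises one of the Datcore* + # errors mapped below (e.g. a 404 response becomes DatcoreAdapterResponseError) + except httpx.HTTPStatusError as exc: + raise 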
DatcoreAdapterResponseError( + status=exc.response.status_code, reason=f"{exc}" + ) from exc + + except TimeoutError as exc: + msg = f"datcore-adapter server timed-out: {exc}" + raise DatcoreAdapterTimeoutError(msg) from exc + + except httpx.RequestError as exc: + msg = f"unexpected request error: {exc}" + raise DatcoreAdapterClientError(msg) from exc + + +_T = TypeVar("_T") + + +async def retrieve_all_pages( + app: FastAPI, + api_key: str, + api_secret: str, + method: str, + path: str, + return_type_creator: Callable[..., _T], +) -> list[_T]: + page = 1 + objs = [] + while ( + response := cast( + dict[str, Any], + await request( + app, api_key, api_secret, method, path, params={"page": page} + ), + ) + ) and response.get("items"): + _logger.debug( + "called %s [%d/%d], received %d objects", + path, + page, + ceil(response.get("total", -1) / response.get("size", 1)), + len(response.get("items", [])), + ) + + objs += [return_type_creator(d) for d in response.get("items", [])] + page += 1 + return objs diff --git a/services/storage/src/simcore_service_storage/datcore_adapter/datcore_adapter_exceptions.py b/services/storage/src/simcore_service_storage/modules/datcore_adapter/datcore_adapter_exceptions.py similarity index 63% rename from services/storage/src/simcore_service_storage/datcore_adapter/datcore_adapter_exceptions.py rename to services/storage/src/simcore_service_storage/modules/datcore_adapter/datcore_adapter_exceptions.py index 03ba1aa603c..f4643380ab0 100644 --- a/services/storage/src/simcore_service_storage/datcore_adapter/datcore_adapter_exceptions.py +++ b/services/storage/src/simcore_service_storage/modules/datcore_adapter/datcore_adapter_exceptions.py @@ -33,3 +33,22 @@ class DatcoreAdapterMultipleFilesError(DatcoreAdapterError): def __init__(self, msg: str) -> None: super().__init__(msg=msg) + + +class DatcoreAdapterResponseError(DatcoreAdapterError): + """Basic exception for response errors""" + + def __init__(self, status: int, reason: str) -> None: + self.status = status + self.reason = reason + super().__init__( + msg=f"forwarded call failed with status {status}, reason {reason}" + ) + + +class DatcoreAdapterFileNotFoundError(DatcoreAdapterError): + """raised when a file is not found on datcore""" + + def __init__(self, file_id: str) -> None: + self.file_id = file_id + super().__init__(msg=f"file {file_id} not found!") diff --git a/services/storage/src/simcore_service_storage/datcore_adapter/datcore_adapter_settings.py b/services/storage/src/simcore_service_storage/modules/datcore_adapter/datcore_adapter_settings.py similarity index 100% rename from services/storage/src/simcore_service_storage/datcore_adapter/datcore_adapter_settings.py rename to services/storage/src/simcore_service_storage/modules/datcore_adapter/datcore_adapter_settings.py diff --git a/services/storage/src/simcore_service_storage/modules/datcore_adapter/utils.py b/services/storage/src/simcore_service_storage/modules/datcore_adapter/utils.py new file mode 100644 index 00000000000..34a1379f8c0 --- /dev/null +++ b/services/storage/src/simcore_service_storage/modules/datcore_adapter/utils.py @@ -0,0 +1,97 @@ +from pathlib import Path + +from models_library.api_schemas_datcore_adapter.datasets import ( + DataType as DatCoreDataType, +) +from models_library.api_schemas_datcore_adapter.datasets import ( + FileMetaData as DatCoreFileMetaData, +) +from models_library.api_schemas_datcore_adapter.datasets import PackageMetaData +from 
models_library.api_schemas_storage.storage_schemas import DatCoreDatasetName +from models_library.users import UserID +from pydantic import ByteSize + +from ...constants import DATCORE_ID, DATCORE_STR +from ...models import FileMetaData, PathMetaData + + +def create_fmd_from_datcore_package( + user_id: UserID, pck_metadata: PackageMetaData +) -> FileMetaData: + return FileMetaData( + file_uuid=f"{pck_metadata.package_id}", + location_id=DATCORE_ID, + location=DATCORE_STR, + bucket_name=pck_metadata.s3_bucket, + object_name=f"{pck_metadata.package_id}", + file_name=pck_metadata.name, + file_id=pck_metadata.package_id, + file_size=ByteSize(pck_metadata.size), + created_at=pck_metadata.created_at, + last_modified=pck_metadata.updated_at, + project_id=None, + node_id=None, + user_id=user_id, + is_soft_link=False, + sha256_checksum=None, + ) + + +def create_fmd_from_datcore_fmd( + user_id: UserID, dat_core_fmd: DatCoreFileMetaData +) -> FileMetaData: + return FileMetaData( + file_uuid=f"{dat_core_fmd.path}", + location_id=DATCORE_ID, + location=DATCORE_STR, + bucket_name=dat_core_fmd.dataset_id, + object_name=f"{dat_core_fmd.package_id}", + file_name=dat_core_fmd.name, + file_id=dat_core_fmd.package_id, + file_size=ByteSize(dat_core_fmd.size), + created_at=dat_core_fmd.created_at, + last_modified=dat_core_fmd.last_modified_at, + project_id=None, + node_id=None, + user_id=user_id, + is_soft_link=False, + sha256_checksum=None, + ) + + +def create_path_meta_data_from_datcore_package( + user_id: UserID, dataset_id: DatCoreDatasetName, pck_metadata: PackageMetaData +) -> PathMetaData: + return PathMetaData( + path=Path(dataset_id) / pck_metadata.package_id, + display_path=pck_metadata.display_path, + location_id=DATCORE_ID, + location=DATCORE_STR, + bucket_name=pck_metadata.s3_bucket, + project_id=None, + node_id=None, + user_id=user_id, + created_at=pck_metadata.created_at, + last_modified=pck_metadata.updated_at, + file_meta_data=create_fmd_from_datcore_package(user_id, pck_metadata), + ) + + +def create_path_meta_data_from_datcore_fmd( + user_id: UserID, dat_core_fmd: DatCoreFileMetaData +) -> PathMetaData: + return PathMetaData( + path=Path(dat_core_fmd.dataset_id) / dat_core_fmd.id, + display_path=dat_core_fmd.path, + location_id=DATCORE_ID, + location=DATCORE_STR, + bucket_name=dat_core_fmd.dataset_id, + project_id=None, + node_id=None, + user_id=user_id, + created_at=dat_core_fmd.created_at, + last_modified=dat_core_fmd.last_modified_at, + file_meta_data=None + if dat_core_fmd.data_type == DatCoreDataType.FOLDER + else create_fmd_from_datcore_fmd(user_id, dat_core_fmd), + ) diff --git a/services/storage/src/simcore_service_storage/modules/db/__init__.py b/services/storage/src/simcore_service_storage/modules/db/__init__.py new file mode 100644 index 00000000000..41372f5a2c2 --- /dev/null +++ b/services/storage/src/simcore_service_storage/modules/db/__init__.py @@ -0,0 +1,31 @@ +import logging + +from fastapi import FastAPI +from servicelib.db_async_engine import close_db_connection +from servicelib.fastapi.db_asyncpg_engine import connect_to_db +from servicelib.retry_policies import PostgresRetryPolicyUponInitialization +from sqlalchemy.ext.asyncio import AsyncEngine +from tenacity import retry + +from ...core.settings import get_application_settings + +_logger = logging.getLogger(__name__) + + +def setup_db(app: FastAPI) -> None: + @retry(**PostgresRetryPolicyUponInitialization(_logger).kwargs) + async def _on_startup() -> None: + app_settings = get_application_settings(app) + assert 
app_settings.STORAGE_POSTGRES is not None # nosec + await connect_to_db(app, app_settings.STORAGE_POSTGRES) + + async def _on_shutdown() -> None: + await close_db_connection(app) + + app.add_event_handler("startup", _on_startup) + app.add_event_handler("shutdown", _on_shutdown) + + +def get_db_engine(app: FastAPI) -> AsyncEngine: + assert isinstance(app.state.engine, AsyncEngine) # nosec + return app.state.engine diff --git a/services/storage/src/simcore_service_storage/modules/db/_base.py b/services/storage/src/simcore_service_storage/modules/db/_base.py new file mode 100644 index 00000000000..5bacb6def41 --- /dev/null +++ b/services/storage/src/simcore_service_storage/modules/db/_base.py @@ -0,0 +1,15 @@ +from dataclasses import dataclass +from typing import TypeVar + +from sqlalchemy.ext.asyncio import AsyncEngine + +RepositoryType = TypeVar("RepositoryType", bound="BaseRepository") + + +@dataclass +class BaseRepository: + db_engine: AsyncEngine + + @classmethod + def instance(cls: type[RepositoryType], db_engine: AsyncEngine) -> RepositoryType: + return cls(db_engine=db_engine) diff --git a/services/storage/src/simcore_service_storage/db_access_layer.py b/services/storage/src/simcore_service_storage/modules/db/access_layer.py similarity index 52% rename from services/storage/src/simcore_service_storage/db_access_layer.py rename to services/storage/src/simcore_service_storage/modules/db/access_layer.py index 27f9dfb9214..0f70f0e959a 100644 --- a/services/storage/src/simcore_service_storage/db_access_layer.py +++ b/services/storage/src/simcore_service_storage/modules/db/access_layer.py @@ -1,4 +1,4 @@ -""" Helper functions to determin access-rights on stored data +"""Helper functions to determine access-rights on stored data # DRAFT Rationale: @@ -37,11 +37,8 @@ """ import logging -from dataclasses import dataclass import sqlalchemy as sa -from aiopg.sa.connection import SAConnection -from aiopg.sa.result import ResultProxy, RowProxy from models_library.groups import GroupID from models_library.projects import ProjectID from models_library.projects_nodes_io import StorageFileID @@ -52,49 +49,22 @@ workspaces_access_rights, ) from simcore_postgres_database.storage_models import file_meta_data, user_to_groups +from simcore_postgres_database.utils_repos import pass_or_acquire_connection from simcore_postgres_database.utils_sql import assemble_array_groups +from sqlalchemy.ext.asyncio import AsyncConnection -logger = logging.getLogger(__name__) +from ...exceptions.errors import InvalidFileIdentifierError +from ...models import AccessRights +from ._base import BaseRepository +_logger = logging.getLogger(__name__) -@dataclass(frozen=True) -class AccessRights: - read: bool - write: bool - delete: bool - @classmethod - def all(cls) -> "AccessRights": - return cls(read=True, write=True, delete=True) - - @classmethod - def none(cls) -> "AccessRights": - return cls(read=False, write=False, delete=False) - - -class AccessLayerError(Exception): - """Base class for access-layer related errors""" - - -class InvalidFileIdentifierError(AccessLayerError): - """Identifier does not follow the criteria to - be a file identifier (see naming criteria below) - """ - - def __init__(self, identifier, reason=None, details=None): - self.identifier = identifier - self.reason = reason or "Invalid file identifier" - self.details = details - - super().__init__(self.reason, self.details) - - def __str__(self): - return f"Error in {self.identifier}: {self.reason} [{self.details}]" - - -async def _get_user_groups_ids(conn: 
SAConnection, user_id: UserID) -> list[GroupID]: +async def _get_user_groups_ids( + connection: AsyncConnection, user_id: UserID +) -> list[GroupID]: stmt = sa.select(user_to_groups.c.gid).where(user_to_groups.c.uid == user_id) - rows = await (await conn.execute(stmt)).fetchall() + rows = (await connection.execute(stmt)).fetchall() assert rows is not None # nosec return [g.gid for g in rows] @@ -112,7 +82,7 @@ def _aggregate_access_rights( return AccessRights(**prj_access) except KeyError: # NOTE: database does NOT include schema for json access_rights column! - logger.warning( + _logger.warning( "Invalid entry in projects.access_rights. Revoking all rights [%s]", access_rights, ) @@ -159,14 +129,14 @@ def _aggregate_access_rights( ).subquery("workspace_access_rights_subquery") -async def list_projects_access_rights( - conn: SAConnection, user_id: UserID +async def _list_projects_access_rights( + connection: AsyncConnection, user_id: UserID ) -> dict[ProjectID, AccessRights]: """ Returns access-rights of user (user_id) over all OWNED or SHARED projects """ - user_group_ids: list[GroupID] = await _get_user_groups_ids(conn, user_id) + user_group_ids: list[GroupID] = await _get_user_groups_ids(connection, user_id) private_workspace_query = ( sa.select( @@ -211,12 +181,12 @@ async def list_projects_access_rights( projects_access_rights = {} - async for row in conn.execute(combined_query): + async for row in await connection.stream(combined_query): assert isinstance(row.access_rights, dict) # nosec assert isinstance(row.uuid, str) # nosec if row.access_rights: - # TODO: access_rights should be direclty filtered from result in stm instead calling again user_group_ids + # NOTE: access_rights should be directly filtered from the result in the statement instead of calling user_group_ids again projects_access_rights[ProjectID(row.uuid)] = _aggregate_access_rights( row.access_rights, user_group_ids ) @@ -229,156 +199,164 @@ async def list_projects_access_rights( return projects_access_rights -async def get_project_access_rights( - conn: SAConnection, user_id: UserID, project_id: ProjectID -) -> AccessRights: - """ - Returns access-rights of user (user_id) over a project resource (project_id) - """ - user_group_ids: list[GroupID] = await _get_user_groups_ids(conn, user_id) - - private_workspace_query = ( - sa.select( - projects.c.prj_owner, - access_rights_subquery.c.access_rights, - ) - .select_from(projects.join(access_rights_subquery, isouter=True)) - .where( - (projects.c.uuid == f"{project_id}") - & ( - (projects.c.prj_owner == user_id) - | sa.text( - f"jsonb_exists_any(access_rights_subquery.access_rights, {assemble_array_groups(user_group_ids)})" +class AccessLayerRepository(BaseRepository): + async def get_project_access_rights( + self, + *, + connection: AsyncConnection | None = None, + user_id: UserID, + project_id: ProjectID, + ) -> AccessRights: + """ + Returns access-rights of user (user_id) over a project resource (project_id) + """ + + async with pass_or_acquire_connection(self.db_engine, connection) as conn: + user_group_ids = await _get_user_groups_ids(conn, user_id) + + private_workspace_query = ( + sa.select( + projects.c.prj_owner, + access_rights_subquery.c.access_rights, + ) + .select_from(projects.join(access_rights_subquery, isouter=True)) + .where( + (projects.c.uuid == f"{project_id}") + & ( + (projects.c.prj_owner == user_id) + | sa.text( + f"jsonb_exists_any(access_rights_subquery.access_rights, {assemble_array_groups(user_group_ids)})" + ) + ) + & (projects.c.workspace_id.is_(None)) ) ) - 
& (projects.c.workspace_id.is_(None)) - ) - ) - shared_workspace_query = ( - sa.select( - projects.c.prj_owner, - workspace_access_rights_subquery.c.access_rights, - ) - .select_from( - projects.join( - workspace_access_rights_subquery, - projects.c.workspace_id - == workspace_access_rights_subquery.c.workspace_id, - ) - ) - .where( - (projects.c.uuid == f"{project_id}") - & ( - sa.text( - f"jsonb_exists_any(workspace_access_rights_subquery.access_rights, {assemble_array_groups(user_group_ids)})" + shared_workspace_query = ( + sa.select( + projects.c.prj_owner, + workspace_access_rights_subquery.c.access_rights, + ) + .select_from( + projects.join( + workspace_access_rights_subquery, + projects.c.workspace_id + == workspace_access_rights_subquery.c.workspace_id, + ) + ) + .where( + (projects.c.uuid == f"{project_id}") + & ( + sa.text( + f"jsonb_exists_any(workspace_access_rights_subquery.access_rights, {assemble_array_groups(user_group_ids)})" + ) + ) + & (projects.c.workspace_id.is_not(None)) ) ) - & (projects.c.workspace_id.is_not(None)) - ) - ) - - combined_query = sa.union_all(private_workspace_query, shared_workspace_query) - - result: ResultProxy = await conn.execute(combined_query) - row: RowProxy | None = await result.first() - if not row: - # Either project does not exists OR user_id has NO access - return AccessRights.none() - - assert row.prj_owner is None or isinstance(row.prj_owner, int) # nosec - assert isinstance(row.access_rights, dict) # nosec - - if row.prj_owner == user_id: - return AccessRights.all() - - # determine user's access rights by aggregating AR of all groups - return _aggregate_access_rights(row.access_rights, user_group_ids) - - -async def get_file_access_rights( - conn: SAConnection, user_id: UserID, file_id: StorageFileID -) -> AccessRights: - """ - Returns access-rights of user (user_id) over data file resource (file_id) + combined_query = sa.union_all( + private_workspace_query, shared_workspace_query + ) + result = await conn.execute(combined_query) + row = result.one_or_none() - raises InvalidFileIdentifier - """ + if not row: + # Either project does not exist OR user_id has NO access + return AccessRights.none() - # - # 1. file registered in file_meta_data table - # - stmt = sa.select(file_meta_data.c.project_id, file_meta_data.c.user_id).where( - file_meta_data.c.file_id == f"{file_id}" - ) - result: ResultProxy = await conn.execute(stmt) - row: RowProxy | None = await result.first() + assert row.prj_owner is None or isinstance(row.prj_owner, int) # nosec + assert isinstance(row.access_rights, dict) # nosec - if row: - if int(row.user_id) == user_id: - # is owner + if row.prj_owner == user_id: return AccessRights.all() - if not row.project_id: - # not owner and not shared via project - return AccessRights.none() + # determine user's access rights by aggregating AR of all groups + return _aggregate_access_rights(row.access_rights, user_group_ids) - # has associated project - access_rights = await get_project_access_rights( - conn, user_id, project_id=row.project_id - ) - if not access_rights: - logger.warning( - "File %s references a project %s that does not exists in db." 
- "TIP: Audit sync between files_meta_data and projects tables", - file_id, - row.project_id, - ) - return AccessRights.none() + async def get_file_access_rights( + self, + *, + connection: AsyncConnection | None = None, + user_id: UserID, + file_id: StorageFileID, + ) -> AccessRights: + """ + Returns access-rights of user (user_id) over data file resource (file_id) + + raises InvalidFileIdentifier + """ - else: - # - # 2. file is NOT registered in meta-data table e.g. it is about to be uploaded or it was deleted - # We rely on the assumption that file_id is formatted either as # - # - project's data: {project_id}/{node_id}/{filename/with/possible/folders} - # - API data: api/{file_id}/{filename/with/possible/folders} + # 1. file registered in file_meta_data table # - try: - parent, _, _ = file_id.split("/", maxsplit=2) + stmt = sa.select(file_meta_data.c.project_id, file_meta_data.c.user_id).where( + file_meta_data.c.file_id == f"{file_id}" + ) + async with pass_or_acquire_connection(self.db_engine, connection) as conn: + result = await conn.execute(stmt) + row = result.one_or_none() - if parent == "api": - # FIXME: this is wrong, all api data must be registered and OWNED - # ownership still not defined, so we assume it is user_id + if row: + if int(row.user_id) == user_id: + # is owner return AccessRights.all() - # otherwise assert 'parent' string corresponds to a valid UUID - access_rights = await get_project_access_rights( - conn, user_id, project_id=ProjectID(parent) + if not row.project_id: + # not owner and not shared via project + return AccessRights.none() + + # has associated project + access_rights = await self.get_project_access_rights( + user_id=user_id, project_id=row.project_id ) if not access_rights: - logger.warning( - "File %s references a project that does not exists in db", + _logger.warning( + "File %s references a project %s that does not exists in db." + "TIP: Audit sync between files_meta_data and projects tables", file_id, + row.project_id, ) return AccessRights.none() - except (ValueError, AttributeError) as err: - raise InvalidFileIdentifierError( - identifier=file_id, - details=str(err), - ) from err - - return access_rights - - -# HELPERS ----------------------------------------------- - - -async def get_readable_project_ids( - conn: SAConnection, user_id: UserID -) -> list[ProjectID]: - """Returns a list of projects where user has granted read-access""" - projects_access_rights = await list_projects_access_rights(conn, user_id) - return [pid for pid, access in projects_access_rights.items() if access.read] + else: + # + # 2. file is NOT registered in meta-data table e.g. 
+ try: + parent, _, _ = file_id.split("/", maxsplit=2) + + if parent == "api": + # ownership still not defined, so we assume it is user_id + return AccessRights.all() + + # otherwise assert 'parent' string corresponds to a valid UUID + access_rights = await self.get_project_access_rights( + user_id=user_id, project_id=ProjectID(parent) + ) + if not access_rights: + _logger.warning( + "File %s references a project that does not exist in db", + file_id, + ) + return AccessRights.none() + + except (ValueError, AttributeError) as err: + raise InvalidFileIdentifierError( + identifier=file_id, + details=str(err), + ) from err + + return access_rights + + async def get_readable_project_ids( + self, *, connection: AsyncConnection | None = None, user_id: UserID + ) -> list[ProjectID]: + """Returns a list of projects where user has granted read-access""" + async with pass_or_acquire_connection(self.db_engine, connection) as conn: + projects_access_rights = await _list_projects_access_rights(conn, user_id) + return [pid for pid, access in projects_access_rights.items() if access.read] diff --git a/services/storage/src/simcore_service_storage/modules/db/file_meta_data.py b/services/storage/src/simcore_service_storage/modules/db/file_meta_data.py new file mode 100644 index 00000000000..1c942b4f436 --- /dev/null +++ b/services/storage/src/simcore_service_storage/modules/db/file_meta_data.py @@ -0,0 +1,442 @@ +import contextlib +import datetime +from collections.abc import AsyncGenerator +from pathlib import Path +from typing import TypeAlias + +import sqlalchemy as sa +from models_library.basic_types import SHA256Str +from models_library.projects import ProjectID +from models_library.projects_nodes_io import NodeID, SimcoreS3FileID +from models_library.users import UserID +from models_library.utils.fastapi_encoders import jsonable_encoder +from pydantic import BaseModel +from simcore_postgres_database.storage_models import file_meta_data +from simcore_postgres_database.utils_repos import ( + pass_or_acquire_connection, + transaction_context, +) +from sqlalchemy import and_, literal_column +from sqlalchemy.dialects.postgresql import insert as pg_insert +from sqlalchemy.exc import MultipleResultsFound +from sqlalchemy.ext.asyncio import AsyncConnection + +from ...exceptions.errors import FileMetaDataNotFoundError +from ...models import ( + FileMetaData, + FileMetaDataAtDB, + GenericCursor, + PathMetaData, + UserOrProjectFilter, +) +from ._base import BaseRepository + +TotalChildren: TypeAlias = int + + +class _PathsCursorParameters(BaseModel): + offset: int + file_prefix: Path | None + project_ids: list[ProjectID] | None + partial: bool + + +def _init_pagination( + cursor: GenericCursor | None, + *, + filter_by_project_ids: list[ProjectID] | None, + filter_by_file_prefix: Path | None, + is_partial_prefix: bool, +) -> _PathsCursorParameters: + if cursor: + return _PathsCursorParameters.model_validate_json(cursor) + return _PathsCursorParameters( + offset=0, + file_prefix=filter_by_file_prefix, + project_ids=filter_by_project_ids, + partial=is_partial_prefix, + ) + + +def _create_next_cursor( + total_count: TotalChildren, limit: int, cursor_params: _PathsCursorParameters +) -> GenericCursor | None: + if cursor_params.offset + limit < total_count: + 
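# e.g. (illustrative) offset=10, limit=10, total_count=25: more items remain, so a next cursor is emitted + 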
return cursor_params.model_copy( + update={"offset": cursor_params.offset + limit} + ).model_dump_json() + return None + + +def _list_filter_with_partial_file_id_stmt( + *, + user_or_project_filter: UserOrProjectFilter, + file_id_prefix: str | None, + partial_file_id: str | None, + sha256_checksum: SHA256Str | None, + is_directory: bool | None, + limit: int | None = None, + offset: int | None = None, +): + conditions: list = [] + + # Checks access rights (project can be owned or shared) + user_id = user_or_project_filter.user_id + if user_id is not None: + project_ids = user_or_project_filter.project_ids + conditions.append( + sa.or_( + file_meta_data.c.user_id == f"{user_id}", + ( + file_meta_data.c.project_id.in_(f"{_}" for _ in project_ids) + if project_ids + else False + ), + ) + ) + + # Optional filters + if file_id_prefix: + conditions.append(file_meta_data.c.file_id.startswith(file_id_prefix)) + if partial_file_id: + conditions.append(file_meta_data.c.file_id.ilike(f"%{partial_file_id}%")) + if is_directory is not None: + conditions.append(file_meta_data.c.is_directory.is_(is_directory)) + if sha256_checksum: + conditions.append(file_meta_data.c.sha256_checksum == sha256_checksum) + + return ( + sa.select(file_meta_data) + .where(sa.and_(*conditions)) + .order_by(file_meta_data.c.created_at.asc()) # sorted as oldest first + .offset(offset) + .limit(limit) + ) + + +class FileMetaDataRepository(BaseRepository): + async def exists( + self, *, connection: AsyncConnection | None = None, file_id: SimcoreS3FileID + ) -> bool: + async with pass_or_acquire_connection(self.db_engine, connection) as conn: + return bool( + await conn.scalar( + sa.select(sa.func.count()) + .select_from(file_meta_data) + .where(file_meta_data.c.file_id == file_id) + ) + == 1 + ) + + async def upsert( + self, + *, + connection: AsyncConnection | None = None, + fmd: FileMetaData | FileMetaDataAtDB, + ) -> FileMetaDataAtDB: + # NOTE: upsert file_meta_data, if the file already exists, we update the whole row + # so we get the correct time stamps + fmd_db = ( + FileMetaDataAtDB.model_validate(fmd) + if isinstance(fmd, FileMetaData) + else fmd + ) + insert_statement = pg_insert(file_meta_data).values(**fmd_db.model_dump()) + on_update_statement = insert_statement.on_conflict_do_update( + index_elements=[file_meta_data.c.file_id], set_=fmd_db.model_dump() + ).returning(literal_column("*")) + async with transaction_context(self.db_engine, connection) as conn: + result = await conn.execute(on_update_statement) + row = result.one() + return FileMetaDataAtDB.model_validate(row) + + async def insert( + self, *, connection: AsyncConnection | None = None, fmd: FileMetaData + ) -> FileMetaDataAtDB: + fmd_db = FileMetaDataAtDB.model_validate(fmd) + async with transaction_context(self.db_engine, connection) as conn: + result = await conn.execute( + file_meta_data.insert() + .values(jsonable_encoder(fmd_db)) + .returning(literal_column("*")) + ) + row = result.one() + return FileMetaDataAtDB.model_validate(row) + + async def get( + self, *, connection: AsyncConnection | None = None, file_id: SimcoreS3FileID + ) -> FileMetaDataAtDB: + async with pass_or_acquire_connection(self.db_engine, connection) as conn: + result = await conn.execute( + sa.select(file_meta_data).where(file_meta_data.c.file_id == file_id) + ) + if row := result.one_or_none(): + return FileMetaDataAtDB.model_validate(row) + raise FileMetaDataNotFoundError(file_id=file_id) + + async def list_filter_with_partial_file_id( + self, + *, + connection: AsyncConnection | 
None = None, + user_or_project_filter: UserOrProjectFilter, + file_id_prefix: str | None, + partial_file_id: str | None, + sha256_checksum: SHA256Str | None, + is_directory: bool | None, + limit: int | None = None, + offset: int | None = None, + ) -> list[FileMetaDataAtDB]: + stmt = _list_filter_with_partial_file_id_stmt( + user_or_project_filter=user_or_project_filter, + file_id_prefix=file_id_prefix, + partial_file_id=partial_file_id, + sha256_checksum=sha256_checksum, + is_directory=is_directory, + limit=limit, + offset=offset, + ) + async with pass_or_acquire_connection(self.db_engine, connection) as conn: + return [ + FileMetaDataAtDB.model_validate(row) + async for row in await conn.stream(stmt) + ] + + async def try_get_directory( + self, *, connection: AsyncConnection | None = None, file_filter: Path + ) -> FileMetaData | None: + """Check if the given file filter is a directory or is inside a directory.""" + # we might be exactly on a directory or inside it + potential_directories = (file_filter, *file_filter.parents) + with contextlib.suppress(MultipleResultsFound): + async with pass_or_acquire_connection(self.db_engine, connection) as conn: + for file_id in potential_directories: + # there should be only 1 entry if this is a directory + result = await conn.execute( + sa.select(file_meta_data).where( + file_meta_data.c.file_id == f"{file_id}" + ) + ) + if row := result.one_or_none(): + fmd = FileMetaDataAtDB.model_validate(row) + if fmd.is_directory: + return FileMetaData.from_db_model(fmd) + return None + return None + + async def list_child_paths( + self, + *, + connection: AsyncConnection | None = None, + filter_by_project_ids: list[ProjectID] | None, + filter_by_file_prefix: Path | None, + cursor: GenericCursor | None, + limit: int, + is_partial_prefix: bool, + ) -> tuple[list[PathMetaData], GenericCursor | None, TotalChildren]: + """returns a list of PathMetaData that are one level deep. + e.g. 
when no filter is used, these are top level objects + """ + + cursor_params = _init_pagination( + cursor, + filter_by_project_ids=filter_by_project_ids, + filter_by_file_prefix=filter_by_file_prefix, + is_partial_prefix=is_partial_prefix, + ) + + if cursor_params.file_prefix: + prefix_levels = len(cursor_params.file_prefix.parts) - 1 + search_prefix = ( + f"{cursor_params.file_prefix}%" + if cursor_params.partial + else f"{cursor_params.file_prefix / '%'}" + ) + search_regex = rf"^[^/]+(?:/[^/]+){{{prefix_levels}}}{'' if cursor_params.partial else '/[^/]+'}" + ranked_files = ( + sa.select( + file_meta_data.c.file_id, + sa.func.substring(file_meta_data.c.file_id, search_regex).label( + "path" + ), + sa.func.row_number() + .over( + partition_by=sa.func.substring( + file_meta_data.c.file_id, search_regex + ), + order_by=(file_meta_data.c.file_id.asc(),), + ) + .label("row_num"), + ) + .where( + and_( + file_meta_data.c.file_id.like(search_prefix), + ( + file_meta_data.c.project_id.in_( + [f"{_}" for _ in cursor_params.project_ids] + ) + if cursor_params.project_ids + else True + ), + ) + ) + .cte("ranked_files") + ) + else: + ranked_files = ( + sa.select( + file_meta_data.c.file_id, + sa.func.split_part(file_meta_data.c.file_id, "/", 1).label("path"), + sa.func.row_number() + .over( + partition_by=sa.func.split_part( + file_meta_data.c.file_id, "/", 1 + ), + order_by=(file_meta_data.c.file_id.asc(),), + ) + .label("row_num"), + ) + .where( + file_meta_data.c.project_id.in_( + [f"{_}" for _ in cursor_params.project_ids] + ) + if cursor_params.project_ids + else True + ) + .cte("ranked_files") + ) + + files_query = ( + ( + sa.select(ranked_files, file_meta_data) + .where( + and_( + ranked_files.c.row_num == 1, + ranked_files.c.file_id == file_meta_data.c.file_id, + ) + ) + .order_by(file_meta_data.c.file_id.asc()) + ) + .limit(limit) + .offset(cursor_params.offset) + ) + async with pass_or_acquire_connection(self.db_engine, connection) as conn: + total_count = await conn.scalar( + sa.select(sa.func.count()) + .select_from(ranked_files) + .where(ranked_files.c.row_num == 1) + ) + + items = [ + PathMetaData( + path=row.path + or row.file_id, # NOTE: if path_prefix is partial then path is None + display_path=row.path or row.file_id, + location_id=row.location_id, + location=row.location, + bucket_name=row.bucket_name, + project_id=row.project_id, + node_id=row.node_id, + user_id=row.user_id, + created_at=row.created_at, + last_modified=row.last_modified, + file_meta_data=( + FileMetaData.from_db_model(FileMetaDataAtDB.model_validate(row)) + if row.file_id == row.path and not row.is_directory + else None + ), + ) + async for row in await conn.stream(files_query) + ] + + return ( + items, + _create_next_cursor(total_count, limit, cursor_params), + total_count, + ) + + async def list_fmds( + self, + *, + connection: AsyncConnection | None = None, + user_id: UserID | None = None, + project_ids: list[ProjectID] | None = None, + file_ids: list[SimcoreS3FileID] | None = None, + expired_after: datetime.datetime | None = None, + ) -> list[FileMetaDataAtDB]: + stmt = sa.select(file_meta_data).where( + and_( + (file_meta_data.c.user_id == f"{user_id}") if user_id else True, + ( + (file_meta_data.c.project_id.in_([f"{p}" for p in project_ids])) + if project_ids + else True + ), + (file_meta_data.c.file_id.in_(file_ids)) if file_ids else True, + ( + (file_meta_data.c.upload_expires_at < expired_after) + if expired_after + else True + ), + ) + ) + async with pass_or_acquire_connection(self.db_engine, 
connection) as conn: + total_count = await conn.scalar( + sa.select(sa.func.count()) + .select_from(ranked_files) + .where(ranked_files.c.row_num == 1) + ) + + items = [ + PathMetaData( + path=row.path + or row.file_id, # NOTE: if path_prefix is partial then path is None + display_path=row.path or row.file_id, + location_id=row.location_id, + location=row.location, + bucket_name=row.bucket_name, + project_id=row.project_id, + node_id=row.node_id, + user_id=row.user_id, + created_at=row.created_at, + last_modified=row.last_modified, + file_meta_data=( + FileMetaData.from_db_model(FileMetaDataAtDB.model_validate(row)) + if row.file_id == row.path and not row.is_directory + else None + ), + ) + async for row in await conn.stream(files_query) + ] + + return ( + items, + _create_next_cursor(total_count, limit, cursor_params), + total_count, + ) + + async def list_fmds( + self, + *, + connection: AsyncConnection | None = None, + user_id: UserID | None = None, + project_ids: list[ProjectID] | None = None, + file_ids: list[SimcoreS3FileID] | None = None, + expired_after: datetime.datetime | None = None, + ) -> list[FileMetaDataAtDB]: + stmt = sa.select(file_meta_data).where( + and_( + (file_meta_data.c.user_id == f"{user_id}") if user_id else True, + ( + (file_meta_data.c.project_id.in_([f"{p}" for p in project_ids])) + if project_ids + else True + ), + (file_meta_data.c.file_id.in_(file_ids)) if file_ids else True, + ( + (file_meta_data.c.upload_expires_at < expired_after) + if expired_after + else True + ), + ) + ) + async with pass_or_acquire_connection(self.db_engine, connection) as conn: + return [ + FileMetaDataAtDB.model_validate(row) + async for row in await conn.stream(stmt) + ] + + async def total(self, *, connection: AsyncConnection | None = None) -> int: + """returns the number of uploaded file entries""" + async with pass_or_acquire_connection(self.db_engine, connection) as conn: + return ( + await conn.scalar( + sa.select(sa.func.count()).select_from(file_meta_data) + ) + or 0 + ) + + async def list_valid_uploads( + self, + *, + connection: AsyncConnection | None = None, + ) -> AsyncGenerator[FileMetaDataAtDB, None]: + """returns all the theoretically valid fmds (e.g. upload_expires_at column is null)""" + async with pass_or_acquire_connection(self.db_engine, connection) as conn: + async for row in await conn.stream( + sa.select(file_meta_data).where( + file_meta_data.c.upload_expires_at.is_( + None + ) # lgtm [py/test-equals-none] + ) + ): + fmd_at_db = FileMetaDataAtDB.model_validate(row) + yield fmd_at_db + + async def delete( + self, + *, + connection: AsyncConnection | None = None, + file_ids: list[SimcoreS3FileID], + ) -> None: + async with transaction_context(self.db_engine, connection) as conn: + await conn.execute( + file_meta_data.delete().where(file_meta_data.c.file_id.in_(file_ids)) + ) + + async def delete_all_from_project( + self, *, connection: AsyncConnection | None = None, project_id: ProjectID + ) -> None: + async with transaction_context(self.db_engine, connection) as conn: + await conn.execute( + file_meta_data.delete().where( + file_meta_data.c.project_id == f"{project_id}" + ) + ) + + async def delete_all_from_node( + self, *, connection: AsyncConnection | None = None, node_id: NodeID + ) -> None: + async with transaction_context(self.db_engine, connection) as conn: + await conn.execute( + file_meta_data.delete().where(file_meta_data.c.node_id == f"{node_id}") + ) diff --git a/services/storage/src/simcore_service_storage/modules/db/projects.py b/services/storage/src/simcore_service_storage/modules/db/projects.py new file mode 100644 index 00000000000..765430a6dd1 --- /dev/null +++ b/services/storage/src/simcore_service_storage/modules/db/projects.py @@ -0,0 +1,69 @@ +from collections.abc import AsyncIterator +from contextlib import suppress + +import sqlalchemy as sa +from models_library.projects import ProjectAtDB, ProjectID, ProjectIDStr +from models_library.projects_nodes_io import NodeIDStr +from pydantic import ValidationError +from simcore_postgres_database.storage_models import projects +from simcore_postgres_database.utils_repos import pass_or_acquire_connection +from sqlalchemy.ext.asyncio import AsyncConnection + +from ._base import BaseRepository + + +class ProjectRepository(BaseRepository): + async def list_valid_projects_in( + self, + *, + connection: AsyncConnection | None = None, + include_uuids: list[ProjectID], + ) -> AsyncIterator[ProjectAtDB]: + """ + + NOTE that it lists ONLY validated projects in 'include_uuids' + """ + async with pass_or_acquire_connection(self.db_engine, connection) as conn: + async for row in await conn.stream( + sa.select(projects).where( + projects.c.uuid.in_(f"{pid}" for pid in include_uuids) + ) + ): + with suppress(ValidationError): + yield ProjectAtDB.model_validate(row) + + async def project_exists( + self, + *, + connection: AsyncConnection | None = None, + project_uuid: ProjectID, + ) -> bool: + async with pass_or_acquire_connection(self.db_engine, connection) as conn: + return bool( + await conn.scalar( + sa.select(sa.func.count()) + .select_from(projects) + 
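# NOTE: at most one matching row is expected here (project uuids are assumed unique) + 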
.where(projects.c.uuid == f"{project_uuid}") + ) + == 1 + ) + + async def get_project_id_and_node_id_to_names_map( + self, + *, + connection: AsyncConnection | None = None, + project_uuids: list[ProjectID], + ) -> dict[ProjectID, dict[ProjectIDStr | NodeIDStr, str]]: + mapping = {} + async with pass_or_acquire_connection(self.db_engine, connection) as conn: + async for row in await conn.stream( + sa.select(projects.c.uuid, projects.c.name, projects.c.workbench).where( + projects.c.uuid.in_(f"{pid}" for pid in project_uuids) + ) + ): + mapping[ProjectID(f"{row.uuid}")] = {f"{row.uuid}": row.name} | { + f"{node_id}": node["label"] + for node_id, node in row.workbench.items() + } + + return mapping diff --git a/services/storage/src/simcore_service_storage/modules/db/tokens.py b/services/storage/src/simcore_service_storage/modules/db/tokens.py new file mode 100644 index 00000000000..cf331ea3ca6 --- /dev/null +++ b/services/storage/src/simcore_service_storage/modules/db/tokens.py @@ -0,0 +1,31 @@ +import logging + +import sqlalchemy as sa +from models_library.users import UserID +from simcore_postgres_database.storage_models import tokens +from simcore_postgres_database.utils_repos import pass_or_acquire_connection +from sqlalchemy.ext.asyncio import AsyncConnection + +from ._base import BaseRepository + +_logger = logging.getLogger(__name__) + + +class TokenRepository(BaseRepository): + async def get_api_token_and_secret( + self, *, connection: AsyncConnection | None = None, user_id: UserID + ) -> tuple[str | None, str | None]: + async with pass_or_acquire_connection(self.db_engine, connection) as conn: + result = await conn.execute( + sa.select( + tokens, + ).where(tokens.c.user_id == user_id) + ) + row = result.one_or_none() + data = row._asdict() if row else {} + + data = data.get("token_data", {}) + api_token = data.get("token_key") + api_secret = data.get("token_secret") + + return api_token, api_secret diff --git a/services/storage/src/simcore_service_storage/modules/long_running_tasks.py b/services/storage/src/simcore_service_storage/modules/long_running_tasks.py new file mode 100644 index 00000000000..d0c929f7adc --- /dev/null +++ b/services/storage/src/simcore_service_storage/modules/long_running_tasks.py @@ -0,0 +1,20 @@ +import asyncio + +from fastapi import FastAPI +from servicelib.fastapi.long_running_tasks._server import setup + +from .._meta import API_VTAG + + +def setup_rest_api_long_running_tasks_for_uploads(app: FastAPI) -> None: + setup( + app, + router_prefix=f"/{API_VTAG}/futures", + ) + + app.state.completed_upload_tasks = {} + + +def get_completed_upload_tasks(app: FastAPI) -> dict[str, asyncio.Task]: + assert isinstance(app.state.completed_upload_tasks, dict) # nosec + return app.state.completed_upload_tasks diff --git a/services/storage/src/simcore_service_storage/modules/rabbitmq.py b/services/storage/src/simcore_service_storage/modules/rabbitmq.py new file mode 100644 index 00000000000..27b2f291630 --- /dev/null +++ b/services/storage/src/simcore_service_storage/modules/rabbitmq.py @@ -0,0 +1,49 @@ +import logging +from typing import cast + +from fastapi import FastAPI +from servicelib.logging_utils import log_context +from servicelib.rabbitmq import ( + RabbitMQRPCClient, + wait_till_rabbitmq_responsive, +) +from settings_library.rabbit import RabbitSettings + +from ..exceptions.errors import ConfigurationError + +_logger = logging.getLogger(__name__) + + +def setup(app: FastAPI) -> None: + async def on_startup() -> None: + with log_context( + _logger, + 
logging.INFO, + msg="Storage startup Rabbitmq", + ): + rabbit_settings: RabbitSettings | None = app.state.settings.STORAGE_RABBITMQ + if not rabbit_settings: + raise ConfigurationError( + msg="RabbitMQ client is de-activated in the settings" + ) + await wait_till_rabbitmq_responsive(rabbit_settings.dsn) + app.state.rabbitmq_rpc_server = await RabbitMQRPCClient.create( + client_name="storage_rpc_server", settings=rabbit_settings + ) + + async def on_shutdown() -> None: + with log_context( + _logger, + logging.INFO, + msg="Storage shutdown Rabbitmq", + ): + if app.state.rabbitmq_rpc_server: + await app.state.rabbitmq_rpc_server.close() + + app.add_event_handler("startup", on_startup) + app.add_event_handler("shutdown", on_shutdown) + + +def get_rabbitmq_rpc_server(app: FastAPI) -> RabbitMQRPCClient: + assert app.state.rabbitmq_rpc_server # nosec + return cast(RabbitMQRPCClient, app.state.rabbitmq_rpc_server) diff --git a/services/storage/src/simcore_service_storage/modules/redis.py b/services/storage/src/simcore_service_storage/modules/redis.py new file mode 100644 index 00000000000..6b2c15476ec --- /dev/null +++ b/services/storage/src/simcore_service_storage/modules/redis.py @@ -0,0 +1,34 @@ +import logging +from typing import cast + +from fastapi import FastAPI +from servicelib.redis import RedisClientSDK +from settings_library.redis import RedisDatabase + +from .._meta import APP_NAME +from ..core.settings import get_application_settings + +_logger = logging.getLogger(__name__) + + +def setup(app: FastAPI) -> None: + async def on_startup() -> None: + app.state.redis_client_sdk = None + redis_settings = get_application_settings(app).STORAGE_REDIS + assert redis_settings # nosec + redis_locks_dsn = redis_settings.build_redis_dsn(RedisDatabase.LOCKS) + app.state.redis_client_sdk = RedisClientSDK( + redis_locks_dsn, client_name=APP_NAME + ) + + async def on_shutdown() -> None: + redis_client_sdk = app.state.redis_client_sdk + if redis_client_sdk: + await cast(RedisClientSDK, app.state.redis_client_sdk).shutdown() + + app.add_event_handler("startup", on_startup) + app.add_event_handler("shutdown", on_shutdown) + + +def get_redis_client(app: FastAPI) -> RedisClientSDK: + return cast(RedisClientSDK, app.state.redis_client_sdk) diff --git a/services/storage/src/simcore_service_storage/modules/s3.py b/services/storage/src/simcore_service_storage/modules/s3.py new file mode 100644 index 00000000000..7fb81b29973 --- /dev/null +++ b/services/storage/src/simcore_service_storage/modules/s3.py @@ -0,0 +1,69 @@ +"""Module to access s3 service""" + +import logging +from typing import Literal, cast + +from aws_library.s3 import SimcoreS3API +from common_library.json_serialization import json_dumps +from fastapi import FastAPI +from pydantic import TypeAdapter +from servicelib.logging_utils import log_context +from tenacity.asyncio import AsyncRetrying +from tenacity.before_sleep import before_sleep_log +from tenacity.wait import wait_fixed +from types_aiobotocore_s3.literals import BucketLocationConstraintType + +from ..constants import RETRY_WAIT_SECS +from ..core.settings import get_application_settings +from ..exceptions.errors import ConfigurationError + +_logger = logging.getLogger(__name__) + + +def setup_s3(app: FastAPI) -> None: + async def _on_startup() -> None: + app.state.s3_client = None + settings = get_application_settings(app) + + async for attempt in AsyncRetrying( + wait=wait_fixed(RETRY_WAIT_SECS), + before_sleep=before_sleep_log(_logger, logging.WARNING), + reraise=True, + ): + with 
attempt: + assert settings.STORAGE_S3 # nosec + client = await SimcoreS3API.create( + settings.STORAGE_S3, + settings.STORAGE_S3_CLIENT_MAX_TRANSFER_CONCURRENCY, + ) + _logger.info( + "S3 client %s successfully created [%s]", + f"{client=}", + json_dumps(attempt.retry_state.retry_object.statistics), + ) + assert client # nosec + app.state.s3_client = client + + with log_context(_logger, logging.DEBUG, msg="setup.s3_bucket.cleanup_ctx"): + assert settings.STORAGE_S3 # nosec + await client.create_bucket( + bucket=settings.STORAGE_S3.S3_BUCKET_NAME, + region=TypeAdapter( + BucketLocationConstraintType | Literal["us-east-1"] + ).validate_python(settings.STORAGE_S3.S3_REGION), + ) + + async def _on_shutdown() -> None: + if app.state.s3_client: + await cast(SimcoreS3API, app.state.s3_client).close() + + app.add_event_handler("startup", _on_startup) + app.add_event_handler("shutdown", _on_shutdown) + + +def get_s3_client(app: FastAPI) -> SimcoreS3API: + if not app.state.s3_client: + raise ConfigurationError( + msg="S3 client is not available. Please check the configuration." + ) + return cast(SimcoreS3API, app.state.s3_client) diff --git a/services/storage/src/simcore_service_storage/sts.py b/services/storage/src/simcore_service_storage/modules/sts.py similarity index 62% rename from services/storage/src/simcore_service_storage/sts.py rename to services/storage/src/simcore_service_storage/modules/sts.py index 85b9ffb4ecc..3bd5dcd2d27 100644 --- a/services/storage/src/simcore_service_storage/sts.py +++ b/services/storage/src/simcore_service_storage/modules/sts.py @@ -3,16 +3,16 @@ https://docs.aws.amazon.com/STS/latest/APIReference/welcome.html """ -from aiohttp import web +from fastapi import FastAPI from models_library.users import UserID -from servicelib.aiohttp.application_keys import APP_CONFIG_KEY from settings_library.s3 import S3Settings -from .settings import Settings +from ..core.settings import get_application_settings async def get_or_create_temporary_token_for_user( - app: web.Application, _user_id: UserID + app: FastAPI, _user_id: UserID ) -> S3Settings: - app_settings: Settings = app[APP_CONFIG_KEY] + app_settings = get_application_settings(app) + assert app_settings.STORAGE_S3 # nosec return app_settings.STORAGE_S3 diff --git a/services/storage/src/simcore_service_storage/redis.py b/services/storage/src/simcore_service_storage/redis.py deleted file mode 100644 index 9bf600e3907..00000000000 --- a/services/storage/src/simcore_service_storage/redis.py +++ /dev/null @@ -1,37 +0,0 @@ -import logging -from typing import cast - -from aiohttp import web -from servicelib.redis import RedisClientSDK -from settings_library.redis import RedisDatabase, RedisSettings - -from ._meta import APP_NAME -from .constants import APP_CONFIG_KEY -from .settings import Settings - -_logger = logging.getLogger(__name__) - -_APP_REDIS_KEY = "APP_REDIS_KEY" - - -def setup_redis(app: web.Application): - async def _setup(app: web.Application): - app[_APP_REDIS_KEY] = None - settings: Settings = app[APP_CONFIG_KEY] - assert settings.STORAGE_REDIS # nosec - redis_settings: RedisSettings = settings.STORAGE_REDIS - redis_locks_dsn = redis_settings.build_redis_dsn(RedisDatabase.LOCKS) - app[_APP_REDIS_KEY] = client = RedisClientSDK( - redis_locks_dsn, client_name=APP_NAME - ) - - yield - - if client: - await client.shutdown() - - app.cleanup_ctx.append(_setup) - - -def get_redis_client(app: web.Application) -> RedisClientSDK: - return cast(RedisClientSDK, app[_APP_REDIS_KEY]) diff --git 
a/services/storage/src/simcore_service_storage/rest.py b/services/storage/src/simcore_service_storage/rest.py deleted file mode 100644 index 8fc8ca74986..00000000000 --- a/services/storage/src/simcore_service_storage/rest.py +++ /dev/null @@ -1,70 +0,0 @@ -""" RESTful API for simcore_service_storage - -""" - -import logging -from pathlib import Path - -from aiohttp import web -from aiohttp_swagger import setup_swagger # type: ignore[import-untyped] -from servicelib.aiohttp.rest_middlewares import append_rest_middlewares -from servicelib.aiohttp.rest_utils import ( - get_named_routes_as_message, - set_default_route_names, -) - -from . import ( - handlers_datasets, - handlers_files, - handlers_health, - handlers_locations, - handlers_simcore_s3, -) -from ._meta import API_VTAG -from .handlers_files import UPLOAD_TASKS_KEY -from .resources import storage_resources - -_logger = logging.getLogger(__name__) - - -def setup_rest(app: web.Application): - """Setup the rest API module in the application in aiohttp fashion. - - - loads and validate openapi specs from a remote (e.g. apihub) or local location - - connects openapi specs paths to handlers (see rest_routes.py) - - enables error, validation and envelope middlewares on API routes - - - IMPORTANT: this is a critical subsystem. Any failure should stop - the system startup. It CANNOT be simply disabled & continue - """ - _logger.debug("Setting up %s ...", __name__) - - spec_path: Path = storage_resources.get_path("api/v0/openapi.yaml") - - # Connects handlers - for routes in [ - handlers_health.routes, - handlers_locations.routes, - handlers_datasets.routes, - handlers_files.routes, - handlers_simcore_s3.routes, - ]: - set_default_route_names(routes) - app.router.add_routes(routes) - - _logger.debug("routes: %s", get_named_routes_as_message(app)) - - # prepare container for upload tasks - app[UPLOAD_TASKS_KEY] = {} - - # Enable error, validation and envelop middleware on API routes - append_rest_middlewares(app, api_version=f"/{API_VTAG}") - - # Adds swagger doc UI - setup_swagger( - app, - swagger_url="/dev/doc", - swagger_from_file=f"{spec_path}", - ui_version=3, - ) diff --git a/services/storage/src/simcore_service_storage/s3.py b/services/storage/src/simcore_service_storage/s3.py deleted file mode 100644 index e96782b54ca..00000000000 --- a/services/storage/src/simcore_service_storage/s3.py +++ /dev/null @@ -1,77 +0,0 @@ -""" Module to access s3 service - -""" - -import logging -from collections.abc import AsyncGenerator -from typing import cast - -from aiohttp import web -from aws_library.s3 import SimcoreS3API -from common_library.json_serialization import json_dumps -from servicelib.logging_utils import log_context -from tenacity.asyncio import AsyncRetrying -from tenacity.before_sleep import before_sleep_log -from tenacity.wait import wait_fixed - -from .constants import APP_CONFIG_KEY, APP_S3_KEY, RETRY_WAIT_SECS -from .settings import Settings - -log = logging.getLogger(__name__) - - -async def setup_s3_client(app) -> AsyncGenerator[None, None]: - client = None - - with log_context(log, logging.DEBUG, msg="setup.s3_client.cleanup_ctx"): - storage_settings: Settings = app[APP_CONFIG_KEY] - storage_s3_settings = storage_settings.STORAGE_S3 - assert storage_s3_settings # nosec - - async for attempt in AsyncRetrying( - wait=wait_fixed(RETRY_WAIT_SECS), - before_sleep=before_sleep_log(log, logging.WARNING), - reraise=True, - ): - with attempt: - client = await SimcoreS3API.create( - storage_s3_settings, - 
storage_settings.STORAGE_S3_CLIENT_MAX_TRANSFER_CONCURRENCY, - ) - log.info( - "S3 client %s successfully created [%s]", - f"{client=}", - json_dumps(attempt.retry_state.retry_object.statistics), - ) - assert client # nosec - app[APP_S3_KEY] = client - - yield - - with log_context(log, logging.DEBUG, msg="teardown.s3_client.cleanup_ctx"): - if client: - await client.close() - - -async def setup_s3_bucket(app: web.Application): - with log_context(log, logging.DEBUG, msg="setup.s3_bucket.cleanup_ctx"): - storage_s3_settings = app[APP_CONFIG_KEY].STORAGE_S3 - client = get_s3_client(app) - await client.create_bucket( - bucket=storage_s3_settings.S3_BUCKET_NAME, - region=storage_s3_settings.S3_REGION, - ) - yield - - -def setup_s3(app: web.Application): - if setup_s3_client not in app.cleanup_ctx: - app.cleanup_ctx.append(setup_s3_client) - if setup_s3_bucket not in app.cleanup_ctx: - app.cleanup_ctx.append(setup_s3_bucket) - - -def get_s3_client(app: web.Application) -> SimcoreS3API: - assert app[APP_S3_KEY] # nosec - assert isinstance(app[APP_S3_KEY], SimcoreS3API) # nosec - return cast(SimcoreS3API, app[APP_S3_KEY]) diff --git a/services/storage/src/simcore_service_storage/settings.py b/services/storage/src/simcore_service_storage/settings.py deleted file mode 100644 index 75d25311fcd..00000000000 --- a/services/storage/src/simcore_service_storage/settings.py +++ /dev/null @@ -1,115 +0,0 @@ -from typing import Annotated, Self - -from pydantic import ( - AliasChoices, - Field, - PositiveInt, - TypeAdapter, - field_validator, - model_validator, -) -from servicelib.logging_utils_filtering import LoggerName, MessageSubstring -from settings_library.base import BaseCustomSettings -from settings_library.basic_types import LogLevel, PortInt -from settings_library.postgres import PostgresSettings -from settings_library.redis import RedisSettings -from settings_library.s3 import S3Settings -from settings_library.tracing import TracingSettings -from settings_library.utils_logging import MixinLoggingSettings - -from .datcore_adapter.datcore_adapter_settings import DatcoreAdapterSettings - - -class Settings(BaseCustomSettings, MixinLoggingSettings): - STORAGE_HOST: str = "0.0.0.0" # nosec - STORAGE_PORT: PortInt = TypeAdapter(PortInt).validate_python(8080) - - LOG_LEVEL: Annotated[ - LogLevel, - Field( - validation_alias=AliasChoices("STORAGE_LOGLEVEL", "LOG_LEVEL", "LOGLEVEL"), - ), - ] = LogLevel.INFO - - STORAGE_MAX_WORKERS: PositiveInt = Field( - 8, - description="Number of workers for the thead executor pool used in DatcoreWrapper", - ) - - STORAGE_MONITORING_ENABLED: bool = False - STORAGE_PROFILING: bool = False - - BF_API_KEY: str | None = Field( - None, description="Pennsieve API key ONLY for testing purposes" - ) - BF_API_SECRET: str | None = Field( - None, description="Pennsieve API secret ONLY for testing purposes" - ) - - STORAGE_POSTGRES: PostgresSettings = Field( - json_schema_extra={"auto_default_from_env": True} - ) - - STORAGE_REDIS: RedisSettings | None = Field( - json_schema_extra={"auto_default_from_env": True} - ) - - STORAGE_S3: S3Settings = Field(json_schema_extra={"auto_default_from_env": True}) - - STORAGE_TRACING: TracingSettings | None = Field( - json_schema_extra={"auto_default_from_env": True} - ) - - DATCORE_ADAPTER: DatcoreAdapterSettings = Field( - json_schema_extra={"auto_default_from_env": True} - ) - - STORAGE_SYNC_METADATA_TIMEOUT: PositiveInt = Field( - 180, description="Timeout (seconds) for metadata sync task" - ) - - 
STORAGE_DEFAULT_PRESIGNED_LINK_EXPIRATION_SECONDS: int = Field( - 3600, description="Default expiration time in seconds for presigned links" - ) - - STORAGE_CLEANER_INTERVAL_S: int | None = Field( - 30, - description="Interval in seconds when task cleaning pending uploads runs. setting to NULL disables the cleaner.", - ) - - STORAGE_S3_CLIENT_MAX_TRANSFER_CONCURRENCY: int = Field( - 4, - description="Maximal amount of threads used by underlying S3 client to transfer data to S3 backend", - ) - - STORAGE_LOG_FORMAT_LOCAL_DEV_ENABLED: bool = Field( - default=False, - validation_alias=AliasChoices( - "STORAGE_LOG_FORMAT_LOCAL_DEV_ENABLED", - "LOG_FORMAT_LOCAL_DEV_ENABLED", - ), - description="Enables local development log format. WARNING: make sure it is disabled if you want to have structured logs!", - ) - STORAGE_LOG_FILTER_MAPPING: dict[LoggerName, list[MessageSubstring]] = Field( - default_factory=dict, - validation_alias=AliasChoices( - "STORAGE_LOG_FILTER_MAPPING", "LOG_FILTER_MAPPING" - ), - description="is a dictionary that maps specific loggers (such as 'uvicorn.access' or 'gunicorn.access') to a list of log message patterns that should be filtered out.", - ) - - @field_validator("LOG_LEVEL", mode="before") - @classmethod - def _validate_loglevel(cls, value: str) -> str: - log_level: str = cls.validate_log_level(value) - return log_level - - @model_validator(mode="after") - def _ensure_settings_consistency(self) -> Self: - if self.STORAGE_CLEANER_INTERVAL_S is not None and not self.STORAGE_REDIS: - msg = ( - "STORAGE_CLEANER_INTERVAL_S cleaner cannot be set without STORAGE_REDIS! " - "Please correct settings." - ) - raise ValueError(msg) - return self diff --git a/services/storage/src/simcore_service_storage/simcore_s3_dsm.py b/services/storage/src/simcore_service_storage/simcore_s3_dsm.py index d41630b5230..6448cc0cc59 100644 --- a/services/storage/src/simcore_service_storage/simcore_s3_dsm.py +++ b/services/storage/src/simcore_service_storage/simcore_s3_dsm.py @@ -9,18 +9,16 @@ from pathlib import Path from typing import Any, Final, cast -import arrow -from aiohttp import web -from aiopg.sa import Engine -from aiopg.sa.connection import SAConnection from aws_library.s3 import ( CopiedBytesTransferredCallback, S3DirectoryMetaData, S3KeyNotFoundError, S3MetaData, UploadedBytesTransferredCallback, + UploadID, ) -from models_library.api_schemas_storage import ( +from fastapi import FastAPI +from models_library.api_schemas_storage.storage_schemas import ( UNDEFINED_SIZE, UNDEFINED_SIZE_TYPE, LinkType, @@ -36,16 +34,15 @@ StorageFileID, ) from models_library.users import UserID -from pydantic import AnyUrl, ByteSize, NonNegativeInt, TypeAdapter -from servicelib.aiohttp.client_session import get_client_session +from pydantic import AnyUrl, ByteSize, NonNegativeInt, TypeAdapter, ValidationError from servicelib.aiohttp.long_running_tasks.server import TaskProgress +from servicelib.fastapi.client_session import get_client_session from servicelib.logging_utils import log_context from servicelib.utils import ensure_ends_with, limited_gather +from simcore_postgres_database.utils_repos import transaction_context +from sqlalchemy.ext.asyncio import AsyncEngine -from . 
import db_file_meta_data, db_projects, db_tokens from .constants import ( - APP_AIOPG_ENGINE_KEY, - APP_CONFIG_KEY, DATCORE_ID, EXPAND_DIR_MAX_ITEM_COUNT, MAX_CONCURRENT_S3_TASKS, @@ -54,15 +51,9 @@ SIMCORE_S3_ID, SIMCORE_S3_STR, ) -from .datcore_adapter import datcore_adapter -from .db_access_layer import ( - AccessRights, - get_file_access_rights, - get_project_access_rights, - get_readable_project_ids, -) +from .core.settings import get_application_settings from .dsm_factory import BaseDataManager -from .exceptions import ( +from .exceptions.errors import ( FileAccessRightError, FileMetaDataNotFoundError, LinkAlreadyExistsError, @@ -73,19 +64,29 @@ DatasetMetaData, FileMetaData, FileMetaDataAtDB, - UploadID, + GenericCursor, + PathMetaData, + TotalNumber, UploadLinks, UserOrProjectFilter, ) -from .s3 import get_s3_client -from .s3_utils import S3TransferDataCB, update_task_progress -from .settings import Settings -from .simcore_s3_dsm_utils import ( +from .modules.datcore_adapter import datcore_adapter +from .modules.db import get_db_engine +from .modules.db.access_layer import AccessLayerRepository +from .modules.db.file_meta_data import FileMetaDataRepository +from .modules.db.projects import ProjectRepository +from .modules.db.tokens import TokenRepository +from .modules.s3 import get_s3_client +from .utils.s3_utils import S3TransferDataCB, update_task_progress +from .utils.simcore_s3_dsm_utils import ( compute_file_id_prefix, expand_directory, + get_accessible_project_ids, get_directory_file_id, + list_child_paths_from_repository, + list_child_paths_from_s3, ) -from .utils import ( +from .utils.utils import ( convert_db_to_model, download_to_file_or_raise, is_file_entry_valid, @@ -98,12 +99,45 @@ _logger = logging.getLogger(__name__) +async def _add_frontend_needed_data( + engine: AsyncEngine, + *, + project_ids: list[ProjectID], + data: list[FileMetaData], +) -> list[FileMetaData]: + # artificially fills ['project_name', 'node_name', 'file_id', 'raw_file_path', 'display_file_path'] + # with information from the projects table! + # NOTE: This part with the projects should be done in the client code, not here!
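+    # A rough sketch (hypothetical IDs and names, for illustration only) of the
+    # mapping built below from the projects table:
+    #     prj_names_mapping == {
+    #         ProjectID("6fba..."): "My Project",  # project uuid -> project name
+    #         NodeID("a3c9..."): "JupyterLab",     # node uuid -> node label
+    #     }
+    # Entries whose project or node name cannot be resolved this way are
+    # dropped from the returned list.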
+ + prj_names_mapping: dict[ProjectID | NodeID, str] = {} + + async for proj_data in ProjectRepository.instance(engine).list_valid_projects_in( + include_uuids=project_ids + ): + prj_names_mapping |= {proj_data.uuid: proj_data.name} | { + NodeID(node_id): node_data.label + for node_id, node_data in proj_data.workbench.items() + } + + clean_data: list[FileMetaData] = [] + for d in data: + if d.project_id not in prj_names_mapping: + continue + assert d.project_id # nosec + d.project_name = prj_names_mapping[d.project_id] + if d.node_id in prj_names_mapping: + assert d.node_id # nosec + d.node_name = prj_names_mapping[d.node_id] + if d.node_name and d.project_name: + clean_data.append(d) + + return clean_data + + @dataclass class SimcoreS3DataManager(BaseDataManager): - engine: Engine simcore_bucket_name: S3BucketName - app: web.Application - settings: Settings + app: FastAPI @classmethod def get_location_id(cls) -> LocationID: @@ -117,17 +151,19 @@ async def authorized(self, _user_id: UserID) -> bool: return True # always true for now async def list_datasets(self, user_id: UserID) -> list[DatasetMetaData]: - async with self.engine.acquire() as conn: - readable_projects_ids = await get_readable_project_ids(conn, user_id) - return [ - DatasetMetaData( - dataset_id=prj_data.uuid, - display_name=prj_data.name, - ) - async for prj_data in db_projects.list_valid_projects_in( - conn, readable_projects_ids - ) - ] + readable_projects_ids = await AccessLayerRepository.instance( + get_db_engine(self.app) + ).get_readable_project_ids(user_id=user_id) + + return [ + DatasetMetaData( + dataset_id=prj_data.uuid, + display_name=prj_data.name, + ) + async for prj_data in ProjectRepository.instance( + get_db_engine(self.app) + ).list_valid_projects_in(include_uuids=readable_projects_ids) + ] async def list_files_in_dataset( self, user_id: UserID, dataset_id: str, *, expand_dirs: bool @@ -141,7 +177,143 @@ async def list_files_in_dataset( ) return data - async def list_files( # noqa C901 + async def list_paths( + self, + user_id: UserID, + *, + file_filter: Path | None, + cursor: GenericCursor | None, + limit: NonNegativeInt, + ) -> tuple[list[PathMetaData], GenericCursor | None, TotalNumber | None]: + """returns a page of the file meta data a user has access to""" + + next_cursor: GenericCursor | None = None + total: TotalNumber | None = None + # if we have a file_filter, it may start with a project ID + project_id = None + with contextlib.suppress(ValueError): + # NOTE: we currently do not support anything other than project_id/node_id/file_path here, sorry chap + project_id = ProjectID(file_filter.parts[0]) if file_filter else None + + accessible_projects_ids = await get_accessible_project_ids( + get_db_engine(self.app), user_id=user_id, project_id=project_id + ) + + # check if the file_filter is a directory or inside one + dir_fmd = None + if file_filter: + dir_fmd = await FileMetaDataRepository.instance( + get_db_engine(self.app) + ).try_get_directory(file_filter=file_filter) + + if dir_fmd: + # NOTE: files are not listed in the DB but in S3 only + assert file_filter # nosec + assert project_id # nosec + (paths_metadata, next_cursor) = await list_child_paths_from_s3( + get_s3_client(self.app), + dir_fmd=dir_fmd, + bucket=self.simcore_bucket_name, + file_filter=file_filter, + limit=limit, + cursor=cursor, + ) + else: + # NOTE: files are DB-based + ( + paths_metadata, + next_cursor, + total, + ) = await list_child_paths_from_repository( + get_db_engine(self.app),
filter_by_project_ids=accessible_projects_ids, + filter_by_file_prefix=file_filter, + limit=limit, + cursor=cursor, + ) + + # extract the returned project_ids + project_ids = list( + {path.project_id for path in paths_metadata if path.project_id is not None} + ) + + ids_names_map = await ProjectRepository.instance( + get_db_engine(self.app) + ).get_project_id_and_node_id_to_names_map(project_uuids=project_ids) + + for path in paths_metadata: + if path.project_id is not None: + id_name_map = ids_names_map.get(path.project_id, {}) + path.update_display_fields(id_name_map) + + return paths_metadata, next_cursor, total + + async def compute_path_size(self, user_id: UserID, *, path: Path) -> ByteSize: + """returns the total size of an arbitrary path""" + # check access rights first + project_id = None + with contextlib.suppress(ValueError): + # NOTE: we currently do not support anything other than project_id/node_id/file_path here, sorry chap + project_id = ProjectID(path.parts[0]) + + accessible_projects_ids = await get_accessible_project_ids( + get_db_engine(self.app), user_id=user_id, project_id=project_id + ) + + # use-cases: + # 1. path is not a valid StorageFileID (e.g. a project or project/node) --> all entries are in the DB (files and folders) + # 2. path is valid StorageFileID and not in the DB --> entries are only in S3 + # 3. path is valid StorageFileID and in the DB --> return directly from the DB + + use_db_data = True + with contextlib.suppress(ValidationError): + file_id: StorageFileID = TypeAdapter(StorageFileID).validate_python( + f"{path}" + ) + # path is a valid StorageFileID + + if ( + dir_fmd := await FileMetaDataRepository.instance( + get_db_engine(self.app) + ).try_get_directory(file_filter=path) + ) and dir_fmd.file_id != file_id: + # this is pure S3 aka use-case 2 + use_db_data = False + + if not use_db_data: + assert file_id # nosec + s3_metadata = await get_s3_client(self.app).get_directory_metadata( + bucket=self.simcore_bucket_name, prefix=file_id + ) + assert s3_metadata.size # nosec + return s3_metadata.size + + # all other use-cases are in the DB + fmds = await FileMetaDataRepository.instance( + get_db_engine(self.app) + ).list_filter_with_partial_file_id( + user_or_project_filter=UserOrProjectFilter( + user_id=user_id, project_ids=accessible_projects_ids + ), + file_id_prefix=f"{path}", + partial_file_id=None, + sha256_checksum=None, + is_directory=None, + ) + + # ensure file sizes are up to date + updated_fmds = [] + for metadata in fmds: + if is_file_entry_valid(metadata): + updated_fmds.append(convert_db_to_model(metadata)) + continue + updated_fmds.append( + convert_db_to_model(await self._update_database_from_storage(metadata)) + ) + + return ByteSize(sum(fmd.file_size for fmd in updated_fmds)) + + async def list_files( self, user_id: UserID, *, @@ -163,32 +335,33 @@ async def list_files( # noqa C901 data: list[FileMetaData] = [] accessible_projects_ids = [] uid: UserID | None = None - async with self.engine.acquire() as conn: - if project_id is not None: - project_access_rights = await get_project_access_rights( - conn=conn, user_id=user_id, project_id=project_id + access_layer_repo = AccessLayerRepository.instance(get_db_engine(self.app)) + if project_id is not None: + project_access_rights = await access_layer_repo.get_project_access_rights( + user_id=user_id, project_id=project_id + ) + if not project_access_rights.read: + raise ProjectAccessRightError( + access_right="read", project_id=project_id ) - if not project_access_rights.read: - raise
ProjectAccessRightError( - access_right="read", project_id=project_id - ) - accessible_projects_ids = [project_id] - uid = None - else: - accessible_projects_ids = await get_readable_project_ids(conn, user_id) - uid = user_id - file_and_directory_meta_data: list[ - FileMetaDataAtDB - ] = await db_file_meta_data.list_filter_with_partial_file_id( - conn, - user_or_project_filter=UserOrProjectFilter( - user_id=uid, project_ids=accessible_projects_ids - ), - file_id_prefix=None, - is_directory=None, - partial_file_id=uuid_filter, - sha256_checksum=None, + accessible_projects_ids = [project_id] + uid = None + else: + accessible_projects_ids = await access_layer_repo.get_readable_project_ids( + user_id=user_id ) + uid = user_id + file_and_directory_meta_data = await FileMetaDataRepository.instance( + get_db_engine(self.app) + ).list_filter_with_partial_file_id( + user_or_project_filter=UserOrProjectFilter( + user_id=uid, project_ids=accessible_projects_ids + ), + file_id_prefix=None, + is_directory=None, + partial_file_id=uuid_filter, + sha256_checksum=None, + ) # add all the entries from file_meta_data without for metadata in file_and_directory_meta_data: @@ -204,17 +377,6 @@ async def list_files( # noqa C901 updated_fmd = await self._update_database_from_storage(metadata) data.append(convert_db_to_model(updated_fmd)) - # now parse the project to search for node/project names - async with self.engine.acquire() as conn: - prj_names_mapping: dict[ProjectID | NodeID, str] = {} - async for proj_data in db_projects.list_valid_projects_in( - conn, accessible_projects_ids - ): - prj_names_mapping |= {proj_data.uuid: proj_data.name} | { - NodeID(node_id): node_data.label - for node_id, node_data in proj_data.workbench.items() - } - # expand directories until the max number of files to return is reached directory_expands: list[Coroutine] = [] for metadata in file_and_directory_meta_data: @@ -237,39 +399,33 @@ async def list_files( # noqa C901 ): data.extend(files_in_directory) - # artifically fills ['project_name', 'node_name', 'file_id', 'raw_file_path', 'display_file_path'] - # with information from the projects table! - # NOTE: This part with the projects, should be done in the client code not here! 
- clean_data: list[FileMetaData] = [] - for d in data: - if d.project_id not in prj_names_mapping: - continue - d.project_name = prj_names_mapping[d.project_id] - if d.node_id in prj_names_mapping: - d.node_name = prj_names_mapping[d.node_id] - if d.node_name and d.project_name: - clean_data.append(d) - - data = clean_data - return data + return await _add_frontend_needed_data( + get_db_engine(self.app), project_ids=accessible_projects_ids, data=data + ) async def get_file(self, user_id: UserID, file_id: StorageFileID) -> FileMetaData: - async with self.engine.acquire() as conn: - can: AccessRights = await get_file_access_rights( - conn, int(user_id), file_id - ) - if not can.read: - raise FileAccessRightError(access_right="read", file_id=file_id) + can = await AccessLayerRepository.instance( + get_db_engine(self.app) + ).get_file_access_rights(user_id=user_id, file_id=file_id) + if not can.read: + raise FileAccessRightError(access_right="read", file_id=file_id) - fmd = await db_file_meta_data.get( - conn, TypeAdapter(SimcoreS3FileID).validate_python(file_id) - ) + fmd = await FileMetaDataRepository.instance(get_db_engine(self.app)).get( + file_id=TypeAdapter(SimcoreS3FileID).validate_python(file_id) + ) if is_file_entry_valid(fmd): return convert_db_to_model(fmd) # get file from storage if available fmd = await self._update_database_from_storage(fmd) return convert_db_to_model(fmd) + async def can_read_file(self, user_id: UserID, file_id: StorageFileID): + can = await AccessLayerRepository.instance( + get_db_engine(self.app) + ).get_file_access_rights(user_id=user_id, file_id=file_id) + if not can.read: + raise FileAccessRightError(access_right="read", file_id=file_id) + async def create_file_upload_links( self, user_id: UserID, @@ -280,17 +436,18 @@ async def create_file_upload_links( sha256_checksum: SHA256Str | None, is_directory: bool, ) -> UploadLinks: - async with self.engine.acquire() as conn: - can: AccessRights = await get_file_access_rights(conn, user_id, file_id) - if not can.write: - raise FileAccessRightError(access_right="write", file_id=file_id) - - # NOTE: if this gets called successively with the same file_id, and - # there was a multipart upload in progress beforehand, it MUST be - # cancelled to prevent unwanted costs in AWS - await self._clean_pending_upload( - conn, TypeAdapter(SimcoreS3FileID).validate_python(file_id) - ) + can = await AccessLayerRepository.instance( + get_db_engine(self.app) + ).get_file_access_rights(user_id=user_id, file_id=file_id) + if not can.write: + raise FileAccessRightError(access_right="write", file_id=file_id) + + # NOTE: if this gets called successively with the same file_id, and + # there was a multipart upload in progress beforehand, it MUST be + # cancelled to prevent unwanted costs in AWS + await self._clean_pending_upload( + TypeAdapter(SimcoreS3FileID).validate_python(file_id) + ) if ( not is_directory @@ -305,23 +462,21 @@ async def create_file_upload_links( # SEE https://github.com/ITISFoundation/osparc-simcore/issues/5159 enforce_access_rights=False, ) - async with self.engine.acquire() as conn: - # initiate the file meta data table - fmd = await self._create_fmd_for_upload( - conn, - user_id, - file_id, - upload_id=( - S3_UNDEFINED_OR_EXTERNAL_MULTIPART_ID - if ( - get_s3_client(self.app).is_multipart(file_size_bytes) - or link_type == LinkType.S3 - ) - else None - ), - is_directory=is_directory, - sha256_checksum=sha256_checksum, - ) + # initiate the file meta data table + fmd = await self._create_fmd_for_upload( + user_id, + 
file_id, + upload_id=( + S3_UNDEFINED_OR_EXTERNAL_MULTIPART_ID + if ( + get_s3_client(self.app).is_multipart(file_size_bytes) + or link_type == LinkType.S3 + ) + else None + ), + is_directory=is_directory, + sha256_checksum=sha256_checksum, + ) if link_type == LinkType.PRESIGNED and get_s3_client(self.app).is_multipart( file_size_bytes @@ -334,13 +489,16 @@ async def create_file_upload_links( bucket=fmd.bucket_name, object_key=fmd.file_id, file_size=file_size_bytes, - expiration_secs=self.settings.STORAGE_DEFAULT_PRESIGNED_LINK_EXPIRATION_SECONDS, + expiration_secs=get_application_settings( + self.app + ).STORAGE_DEFAULT_PRESIGNED_LINK_EXPIRATION_SECONDS, sha256_checksum=fmd.sha256_checksum, ) # update the database so we keep the upload id fmd.upload_id = multipart_presigned_links.upload_id - async with self.engine.acquire() as conn: - await db_file_meta_data.upsert(conn, fmd) + await FileMetaDataRepository.instance(get_db_engine(self.app)).upsert( + fmd=fmd + ) return UploadLinks( multipart_presigned_links.urls, multipart_presigned_links.chunk_size, @@ -352,7 +510,9 @@ async def create_file_upload_links( ).create_single_presigned_upload_link( bucket=self.simcore_bucket_name, object_key=fmd.file_id, - expiration_secs=self.settings.STORAGE_DEFAULT_PRESIGNED_LINK_EXPIRATION_SECONDS, + expiration_secs=get_application_settings( + self.app + ).STORAGE_DEFAULT_PRESIGNED_LINK_EXPIRATION_SECONDS, ) return UploadLinks( [single_presigned_link], @@ -373,16 +533,15 @@ async def abort_file_upload( user_id: UserID, file_id: StorageFileID, ) -> None: - async with self.engine.acquire() as conn: - can: AccessRights = await get_file_access_rights( - conn, int(user_id), file_id - ) - if not can.delete or not can.write: - raise FileAccessRightError(access_right="write/delete", file_id=file_id) - - fmd: FileMetaDataAtDB = await db_file_meta_data.get( - conn, TypeAdapter(SimcoreS3FileID).validate_python(file_id) - ) + can = await AccessLayerRepository.instance( + get_db_engine(self.app) + ).get_file_access_rights(user_id=user_id, file_id=file_id) + if not can.delete or not can.write: + raise FileAccessRightError(access_right="write/delete", file_id=file_id) + + fmd = await FileMetaDataRepository.instance(get_db_engine(self.app)).get( + file_id=TypeAdapter(SimcoreS3FileID).validate_python(file_id) + ) if is_valid_managed_multipart_upload(fmd.upload_id): assert fmd.upload_id # nosec await get_s3_client(self.app).abort_multipart_upload( @@ -401,8 +560,9 @@ async def abort_file_upload( await self._update_database_from_storage(fmd) except S3KeyNotFoundError: # the file does not exist, so we delete the entry in the db - async with self.engine.acquire() as conn: - await db_file_meta_data.delete(conn, [fmd.file_id]) + await FileMetaDataRepository.instance(get_db_engine(self.app)).delete( + file_ids=[fmd.file_id] + ) async def complete_file_upload( self, @@ -410,15 +570,14 @@ async def complete_file_upload( user_id: UserID, uploaded_parts: list[UploadedPart], ) -> FileMetaData: - async with self.engine.acquire() as conn: - can: AccessRights = await get_file_access_rights( - conn, int(user_id), file_id - ) - if not can.write: - raise FileAccessRightError(access_right="write", file_id=file_id) - fmd = await db_file_meta_data.get( - conn, TypeAdapter(SimcoreS3FileID).validate_python(file_id) - ) + can = await AccessLayerRepository.instance( + get_db_engine(self.app) + ).get_file_access_rights(user_id=user_id, file_id=file_id) + if not can.write: + raise FileAccessRightError(access_right="write", file_id=file_id) + fmd = 
await FileMetaDataRepository.instance(get_db_engine(self.app)).get( + file_id=TypeAdapter(SimcoreS3FileID).validate_python(file_id) + ) if is_valid_managed_multipart_upload(fmd.upload_id): # NOTE: Processing of a Complete Multipart Upload request @@ -452,44 +611,41 @@ async def create_file_download_link( 3. Raises FileNotFoundError if the file does not exist 4. Raises FileAccessRightError if the user does not have access to the file """ - async with self.engine.acquire() as conn: - directory_file_id: SimcoreS3FileID | None = await get_directory_file_id( - conn, cast(SimcoreS3FileID, file_id) - ) - await self.__ensure_read_access_rights( - conn, user_id, directory_file_id if directory_file_id else file_id + directory_file_id = await get_directory_file_id( + get_db_engine(self.app), cast(SimcoreS3FileID, file_id) + ) + can = await AccessLayerRepository.instance( + get_db_engine(self.app) + ).get_file_access_rights( + user_id=user_id, file_id=directory_file_id if directory_file_id else file_id + ) + if not can.read: + # NOTE: this is tricky. A user with read access can download any data! + # If write permission would be required, then shared projects as views cannot + # recover data in nodes (e.g. jupyter cannot pull work data) + # + raise FileAccessRightError( + access_right="read", + file_id=directory_file_id if directory_file_id else file_id, ) if directory_file_id: if not await get_s3_client(self.app).object_exists( bucket=self.simcore_bucket_name, object_key=f"{file_id}" ): raise S3KeyNotFoundError(key=file_id, bucket=self.simcore_bucket_name) - return await self.__get_link( + return await self._get_link( TypeAdapter(SimcoreS3FileID).validate_python(file_id), link_type ) # standard file link - async with self.engine.acquire() as conn: - fmd = await db_file_meta_data.get( - conn, TypeAdapter(SimcoreS3FileID).validate_python(file_id) - ) + fmd = await FileMetaDataRepository.instance(get_db_engine(self.app)).get( + file_id=TypeAdapter(SimcoreS3FileID).validate_python(file_id) + ) if not is_file_entry_valid(fmd): # try lazy update fmd = await self._update_database_from_storage(fmd) - return await self.__get_link(fmd.object_name, link_type) + return await self._get_link(fmd.object_name, link_type) - @staticmethod - async def __ensure_read_access_rights( - conn: SAConnection, user_id: UserID, storage_file_id: StorageFileID - ) -> None: - can: AccessRights = await get_file_access_rights(conn, user_id, storage_file_id) - if not can.read: - # NOTE: this is tricky. A user with read access can download and data! - # If write permission would be required, then shared projects as views cannot - # recover data in nodes (e.g.
jupyter cannot pull work data) - # - raise FileAccessRightError(access_right="read", file_id=storage_file_id) - - async def __get_link( + async def _get_link( self, s3_file_id: SimcoreS3FileID, link_type: LinkType ) -> AnyUrl: link: AnyUrl = TypeAdapter(AnyUrl).validate_python( @@ -499,7 +655,9 @@ async def __get_link( link = await get_s3_client(self.app).create_single_presigned_download_link( bucket=self.simcore_bucket_name, object_key=s3_file_id, - expiration_secs=self.settings.STORAGE_DEFAULT_PRESIGNED_LINK_EXPIRATION_SECONDS, + expiration_secs=get_application_settings( + self.app + ).STORAGE_DEFAULT_PRESIGNED_LINK_EXPIRATION_SECONDS, ) return link @@ -522,11 +680,13 @@ async def delete_file( # Only use this in those circumstances where a collaborator requires to delete a file (the current # permissions model will not allow him to do so, even though this is a legitimate action) # SEE https://github.com/ITISFoundation/osparc-simcore/issues/5159 - async with self.engine.acquire() as conn: - if enforce_access_rights: - can: AccessRights = await get_file_access_rights(conn, user_id, file_id) - if not can.delete: - raise FileAccessRightError(access_right="delete", file_id=file_id) + + if enforce_access_rights: + can = await AccessLayerRepository.instance( + get_db_engine(self.app) + ).get_file_access_rights(user_id=user_id, file_id=file_id) + if not can.delete: + raise FileAccessRightError(access_right="delete", file_id=file_id) try: await get_s3_client(self.app).delete_objects_recursively( @@ -538,11 +698,14 @@ async def delete_file( # we still need to clean up the database entry (it exists) # and to invalidate the size of the parent directory - async with self.engine.acquire() as conn: - await db_file_meta_data.delete(conn, [file_id]) + async with transaction_context(get_db_engine(self.app)) as connection: + file_meta_data_repo = FileMetaDataRepository.instance( + get_db_engine(self.app) + ) + await file_meta_data_repo.delete(connection=connection, file_ids=[file_id]) - if parent_dir_fmds := await db_file_meta_data.list_filter_with_partial_file_id( - conn, + if parent_dir_fmds := await file_meta_data_repo.list_filter_with_partial_file_id( + connection=connection, user_or_project_filter=UserOrProjectFilter( user_id=user_id, project_ids=[] ), @@ -553,25 +716,27 @@ async def delete_file( ): parent_dir_fmd = max(parent_dir_fmds, key=lambda fmd: len(fmd.file_id)) parent_dir_fmd.file_size = UNDEFINED_SIZE - await db_file_meta_data.upsert(conn, parent_dir_fmd) + await file_meta_data_repo.upsert( + connection=connection, fmd=parent_dir_fmd + ) async def delete_project_simcore_s3( self, user_id: UserID, project_id: ProjectID, node_id: NodeID | None = None ) -> None: - async with self.engine.acquire() as conn: - can: AccessRights = await get_project_access_rights( - conn, user_id, project_id - ) - if not can.delete: - raise ProjectAccessRightError( - access_right="delete", project_id=project_id - ) - - # we can do it this way, since we are in a transaction, it will rollback in case of error - if not node_id: - await db_file_meta_data.delete_all_from_project(conn, project_id) - else: - await db_file_meta_data.delete_all_from_node(conn, node_id) + can = await AccessLayerRepository.instance( + get_db_engine(self.app) + ).get_project_access_rights(user_id=user_id, project_id=project_id) + if not can.delete: + raise ProjectAccessRightError(access_right="delete", project_id=project_id) + + if not node_id: + await FileMetaDataRepository.instance( + get_db_engine(self.app) + 
).delete_all_from_project(project_id=project_id) + else: + await FileMetaDataRepository.instance( + get_db_engine(self.app) + ).delete_all_from_node(node_id=node_id) await get_s3_client(self.app).delete_objects_recursively( bucket=self.simcore_bucket_name, @@ -597,16 +762,18 @@ async def deep_copy_project_simcore_s3( "Step 1: check access rights (read of src and write of dst)", ): update_task_progress(task_progress, "Checking study access rights...") - async with self.engine.acquire() as conn: - for prj_uuid in [src_project_uuid, dst_project_uuid]: - if not await db_projects.project_exists(conn, prj_uuid): - raise ProjectNotFoundError(project_id=prj_uuid) - source_access_rights = await get_project_access_rights( - conn, user_id, project_id=src_project_uuid - ) - dest_access_rights = await get_project_access_rights( - conn, user_id, project_id=dst_project_uuid - ) + + for prj_uuid in [src_project_uuid, dst_project_uuid]: + if not await ProjectRepository.instance( + get_db_engine(self.app) + ).project_exists(project_uuid=prj_uuid): + raise ProjectNotFoundError(project_id=prj_uuid) + source_access_rights = await AccessLayerRepository.instance( + get_db_engine(self.app) + ).get_project_access_rights(user_id=user_id, project_id=src_project_uuid) + dest_access_rights = await AccessLayerRepository.instance( + get_db_engine(self.app) + ).get_project_access_rights(user_id=user_id, project_id=dst_project_uuid) if not source_access_rights.read: raise ProjectAccessRightError( access_right="read", project_id=src_project_uuid @@ -625,12 +792,10 @@ async def deep_copy_project_simcore_s3( update_task_progress( task_progress, f"Collecting files of '{src_project['name']}'..." ) - async with self.engine.acquire() as conn: - src_project_files: list[ - FileMetaDataAtDB - ] = await db_file_meta_data.list_fmds( - conn, project_ids=[src_project_uuid] - ) + + src_project_files = await FileMetaDataRepository.instance( + get_db_engine(self.app) + ).list_fmds(project_ids=[src_project_uuid]) with log_context( _logger, @@ -743,21 +908,17 @@ async def search_owned_files( limit: int | None = None, offset: int | None = None, ) -> list[FileMetaData]: - async with self.engine.acquire() as conn: - file_metadatas: list[ - FileMetaDataAtDB - ] = await db_file_meta_data.list_filter_with_partial_file_id( - conn, - user_or_project_filter=UserOrProjectFilter( - user_id=user_id, project_ids=[] - ), - file_id_prefix=file_id_prefix, - partial_file_id=None, - is_directory=False, - sha256_checksum=sha256_checksum, - limit=limit, - offset=offset, - ) + file_metadatas = await FileMetaDataRepository.instance( + get_db_engine(self.app) + ).list_filter_with_partial_file_id( + user_or_project_filter=UserOrProjectFilter(user_id=user_id, project_ids=[]), + file_id_prefix=file_id_prefix, + partial_file_id=None, + is_directory=False, + sha256_checksum=sha256_checksum, + limit=limit, + offset=offset, + ) resolved_fmds = [] for fmd in file_metadatas: if is_file_entry_valid(fmd): @@ -771,11 +932,11 @@ async def search_owned_files( async def create_soft_link( self, user_id: int, target_file_id: StorageFileID, link_file_id: StorageFileID ) -> FileMetaData: - async with self.engine.acquire() as conn: - if await db_file_meta_data.exists( - conn, TypeAdapter(SimcoreS3FileID).validate_python(link_file_id) - ): - raise LinkAlreadyExistsError(file_id=link_file_id) + file_meta_data_repo = FileMetaDataRepository.instance(get_db_engine(self.app)) + if await file_meta_data_repo.exists( + file_id=TypeAdapter(SimcoreS3FileID).validate_python(link_file_id) + ): + 
raise LinkAlreadyExistsError(file_id=link_file_id) # validate target_uuid target = await self.get_file(user_id, target_file_id) # duplicate target and change the following columns: @@ -783,43 +944,13 @@ async def create_soft_link( target.file_id = link_file_id # NOTE: api-server relies on this id target.is_soft_link = True - async with self.engine.acquire() as conn: - return convert_db_to_model(await db_file_meta_data.insert(conn, target)) - - async def synchronise_meta_data_table( - self, *, dry_run: bool - ) -> list[StorageFileID]: + return convert_db_to_model(await file_meta_data_repo.insert(fmd=target)) - async with self.engine.acquire() as conn: - _logger.warning( - "Total number of entries to check %d", - await db_file_meta_data.total(conn), - ) - # iterate over all entries to check if there is a file in the S3 backend - file_ids_to_remove = [ - fmd.file_id - async for fmd in db_file_meta_data.list_valid_uploads(conn) - if not await get_s3_client(self.app).object_exists( - bucket=self.simcore_bucket_name, object_key=fmd.object_name - ) - ] - - if not dry_run: - await db_file_meta_data.delete(conn, file_ids_to_remove) - - _logger.info( - "%s %d entries ", - "Would delete" if dry_run else "Deleted", - len(file_ids_to_remove), - ) - - return cast(list[StorageFileID], file_ids_to_remove) - - async def _clean_pending_upload( - self, conn: SAConnection, file_id: SimcoreS3FileID - ) -> None: + async def _clean_pending_upload(self, file_id: SimcoreS3FileID) -> None: with suppress(FileMetaDataNotFoundError): - fmd = await db_file_meta_data.get(conn, file_id) + fmd = await FileMetaDataRepository.instance(get_db_engine(self.app)).get( + file_id=file_id + ) if is_valid_managed_multipart_upload(fmd.upload_id): assert fmd.upload_id # nosec await get_s3_client(self.app).abort_multipart_upload( @@ -834,11 +965,11 @@ async def _clean_expired_uploads(self) -> None: 1. will try to update the entry from S3 backend if exists 2. will delete the entry if nothing exists in S3 backend. 
""" - now = arrow.utcnow().datetime - async with self.engine.acquire() as conn: - list_of_expired_uploads = await db_file_meta_data.list_fmds( - conn, expired_after=now - ) + now = datetime.datetime.utcnow() + + list_of_expired_uploads = await FileMetaDataRepository.instance( + get_db_engine(self.app) + ).list_fmds(expired_after=now) if not list_of_expired_uploads: return @@ -916,7 +1047,10 @@ async def clean_expired_uploads(self) -> None: await self._clean_expired_uploads() async def _update_fmd_from_other( - self, conn: SAConnection, *, fmd: FileMetaDataAtDB, copy_from: FileMetaDataAtDB + self, + *, + fmd: FileMetaDataAtDB, + copy_from: FileMetaDataAtDB, ) -> FileMetaDataAtDB: if not fmd.is_directory: s3_metadata = await get_s3_client(self.app).get_object_metadata( @@ -931,10 +1065,10 @@ async def _update_fmd_from_other( fmd.upload_expires_at = None fmd.upload_id = None - updated_fmd: FileMetaDataAtDB = await db_file_meta_data.upsert( - conn, convert_db_to_model(fmd) + + return await FileMetaDataRepository.instance(get_db_engine(self.app)).upsert( + fmd=convert_db_to_model(fmd) ) - return updated_fmd async def _get_s3_metadata( self, fmd: FileMetaDataAtDB @@ -967,11 +1101,10 @@ async def _update_database_from_storage( fmd.file_size = TypeAdapter(ByteSize).validate_python(s3_metadata.size) fmd.upload_expires_at = None fmd.upload_id = None - async with self.engine.acquire() as conn: - updated_fmd: FileMetaDataAtDB = await db_file_meta_data.upsert( - conn, convert_db_to_model(fmd) - ) - return updated_fmd + + return await FileMetaDataRepository.instance(get_db_engine(self.app)).upsert( + fmd=convert_db_to_model(fmd) + ) async def _copy_file_datcore_s3( self, @@ -984,9 +1117,11 @@ async def _copy_file_datcore_s3( ) -> FileMetaData: session = get_client_session(self.app) # 2 steps: Get download link for local copy, then upload to S3 - api_token, api_secret = await db_tokens.get_api_token_and_secret( - self.app, user_id - ) + api_token, api_secret = await TokenRepository.instance( + get_db_engine(self.app) + ).get_api_token_and_secret(user_id=user_id) + assert api_token # nosec + assert api_secret # nosec dc_link = await datcore_adapter.get_file_download_presigned_link( self.app, api_token, api_secret, source_uuid ) @@ -1003,15 +1138,13 @@ async def _copy_file_datcore_s3( await download_to_file_or_raise(session, f"{dc_link}", local_file_path) # copying will happen using aioboto3, therefore multipart might happen - async with self.engine.acquire() as conn: - new_fmd = await self._create_fmd_for_upload( - conn, - user_id, - dst_file_id, - upload_id=S3_UNDEFINED_OR_EXTERNAL_MULTIPART_ID, - is_directory=False, - sha256_checksum=None, - ) + new_fmd = await self._create_fmd_for_upload( + user_id, + dst_file_id, + upload_id=S3_UNDEFINED_OR_EXTERNAL_MULTIPART_ID, + is_directory=False, + sha256_checksum=None, + ) # Uploads local -> S3 await get_s3_client(self.app).upload_file( bucket=self.simcore_bucket_name, @@ -1042,15 +1175,13 @@ async def _copy_path_s3_s3( ): # copying will happen using aioboto3, therefore multipart might happen # NOTE: connection must be released to ensure database update - async with self.engine.acquire() as conn: - new_fmd = await self._create_fmd_for_upload( - conn, - user_id, - dst_file_id, - upload_id=S3_UNDEFINED_OR_EXTERNAL_MULTIPART_ID, - is_directory=src_fmd.is_directory, - sha256_checksum=src_fmd.sha256_checksum, - ) + new_fmd = await self._create_fmd_for_upload( + user_id, + dst_file_id, + upload_id=S3_UNDEFINED_OR_EXTERNAL_MULTIPART_ID, + 
is_directory=src_fmd.is_directory, + sha256_checksum=src_fmd.sha256_checksum, + ) s3_client = get_s3_client(self.app) @@ -1069,15 +1200,13 @@ async def _copy_path_s3_s3( bytes_transfered_cb=bytes_transfered_cb, ) # we are done, let's update the copy with the src - async with self.engine.acquire() as conn: - updated_fmd = await self._update_fmd_from_other( - conn, fmd=new_fmd, copy_from=src_fmd - ) + updated_fmd = await self._update_fmd_from_other( + fmd=new_fmd, copy_from=src_fmd + ) return convert_db_to_model(updated_fmd) async def _create_fmd_for_upload( self, - conn: SAConnection, user_id: UserID, file_id: StorageFileID, upload_id: UploadID | None, @@ -1085,9 +1214,11 @@ async def _create_fmd_for_upload( is_directory: bool, sha256_checksum: SHA256Str | None, ) -> FileMetaDataAtDB: - now = arrow.utcnow().datetime + now = datetime.datetime.utcnow() upload_expiration_date = now + datetime.timedelta( - seconds=self.settings.STORAGE_DEFAULT_PRESIGNED_LINK_EXPIRATION_SECONDS + seconds=get_application_settings( + self.app + ).STORAGE_DEFAULT_PRESIGNED_LINK_EXPIRATION_SECONDS ) fmd = FileMetaData.from_simcore_node( user_id=user_id, @@ -1100,17 +1231,18 @@ async def _create_fmd_for_upload( is_directory=is_directory, sha256_checksum=sha256_checksum, ) - return await db_file_meta_data.upsert(conn, fmd) + + return await FileMetaDataRepository.instance(get_db_engine(self.app)).upsert( + fmd=fmd + ) -def create_simcore_s3_data_manager(app: web.Application) -> SimcoreS3DataManager: - cfg: Settings = app[APP_CONFIG_KEY] +def create_simcore_s3_data_manager(app: FastAPI) -> SimcoreS3DataManager: + cfg = get_application_settings(app) assert cfg.STORAGE_S3 # nosec return SimcoreS3DataManager( - engine=app[APP_AIOPG_ENGINE_KEY], simcore_bucket_name=TypeAdapter(S3BucketName).validate_python( cfg.STORAGE_S3.S3_BUCKET_NAME ), app=app, - settings=cfg, ) diff --git a/services/storage/src/simcore_service_storage/simcore_s3_dsm_utils.py b/services/storage/src/simcore_service_storage/simcore_s3_dsm_utils.py deleted file mode 100644 index eb5f2f1240c..00000000000 --- a/services/storage/src/simcore_service_storage/simcore_s3_dsm_utils.py +++ /dev/null @@ -1,126 +0,0 @@ -from contextlib import suppress -from pathlib import Path - -from aiopg.sa.connection import SAConnection -from aws_library.s3 import S3MetaData, SimcoreS3API -from models_library.api_schemas_storage import S3BucketName -from models_library.projects_nodes_io import ( - SimcoreS3DirectoryID, - SimcoreS3FileID, - StorageFileID, -) -from pydantic import ByteSize, NonNegativeInt, TypeAdapter -from servicelib.utils import ensure_ends_with - -from . import db_file_meta_data -from .exceptions import FileMetaDataNotFoundError -from .models import FileMetaData, FileMetaDataAtDB -from .utils import convert_db_to_model - - -async def _list_all_files_in_folder( - *, - s3_client: SimcoreS3API, - bucket: S3BucketName, - prefix: str, - max_files_to_list: int, -) -> list[S3MetaData]: - async for s3_objects in s3_client.list_objects_paginated( - bucket, prefix, items_per_page=max_files_to_list - ): - # NOTE: stop immediately after listing after `max_files_to_list` - return s3_objects - return [] - - -async def expand_directory( - s3_client: SimcoreS3API, - simcore_bucket_name: S3BucketName, - fmd: FileMetaDataAtDB, - max_items_to_include: NonNegativeInt, -) -> list[FileMetaData]: - """ - Scans S3 backend and returns a list S3MetaData entries which get mapped - to FileMetaData entry. 
- """ - files_in_folder: list[S3MetaData] = await _list_all_files_in_folder( - s3_client=s3_client, - bucket=simcore_bucket_name, - prefix=ensure_ends_with(fmd.file_id, "/"), - max_files_to_list=max_items_to_include, - ) - result: list[FileMetaData] = [ - convert_db_to_model( - FileMetaDataAtDB( - location_id=fmd.location_id, - location=fmd.location, - bucket_name=fmd.bucket_name, - object_name=x.object_key, - user_id=fmd.user_id, - # NOTE: to ensure users have a consistent experience the - # `created_at` field is inherited from the last_modified - # coming from S3. This way if a file is created 1 month after the - # creation of the directory, the file's creation date - # will not be 1 month in the passed. - created_at=x.last_modified, - file_id=x.object_key, - file_size=TypeAdapter(ByteSize).validate_python(x.size), - last_modified=x.last_modified, - entity_tag=x.e_tag, - is_soft_link=False, - is_directory=False, - project_id=fmd.project_id, - node_id=fmd.node_id, - ) - ) - for x in files_in_folder - ] - return result - - -def get_simcore_directory(file_id: SimcoreS3FileID) -> str: - try: - directory_id = SimcoreS3DirectoryID.from_simcore_s3_object(file_id) - except ValueError: - return "" - return f"{Path(directory_id)}" - - -async def get_directory_file_id( - conn: SAConnection, file_id: SimcoreS3FileID -) -> SimcoreS3FileID | None: - """ - returns the containing file's `directory_file_id` if the entry exists - in the `file_meta_data` table - """ - - async def _get_fmd( - conn: SAConnection, s3_file_id: StorageFileID - ) -> FileMetaDataAtDB | None: - with suppress(FileMetaDataNotFoundError): - return await db_file_meta_data.get( - conn, TypeAdapter(SimcoreS3FileID).validate_python(s3_file_id) - ) - return None - - provided_file_id_fmd = await _get_fmd(conn, file_id) - if provided_file_id_fmd: - # file_meta_data exists it is not a directory - return None - - directory_file_id_str: str = get_simcore_directory(file_id) - if directory_file_id_str == "": - # could not extract a directory name from the provided path - return None - - directory_file_id = TypeAdapter(SimcoreS3FileID).validate_python( - directory_file_id_str - ) - directory_file_id_fmd = await _get_fmd(conn, directory_file_id) - - return directory_file_id if directory_file_id_fmd else None - - -def compute_file_id_prefix(file_id: str, levels: int): - components = file_id.strip("/").split("/") - return "/".join(components[:levels]) diff --git a/services/storage/src/simcore_service_storage/utils/__init__.py b/services/storage/src/simcore_service_storage/utils/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/services/storage/src/simcore_service_storage/s3_utils.py b/services/storage/src/simcore_service_storage/utils/s3_utils.py similarity index 100% rename from services/storage/src/simcore_service_storage/s3_utils.py rename to services/storage/src/simcore_service_storage/utils/s3_utils.py diff --git a/services/storage/src/simcore_service_storage/utils/simcore_s3_dsm_utils.py b/services/storage/src/simcore_service_storage/utils/simcore_s3_dsm_utils.py new file mode 100644 index 00000000000..a50bdbff1ed --- /dev/null +++ b/services/storage/src/simcore_service_storage/utils/simcore_s3_dsm_utils.py @@ -0,0 +1,225 @@ +from contextlib import suppress +from pathlib import Path + +import orjson +from aws_library.s3 import S3MetaData, SimcoreS3API +from models_library.api_schemas_storage.storage_schemas import S3BucketName +from models_library.projects import ProjectID +from models_library.projects_nodes_io 
import ( + SimcoreS3DirectoryID, + SimcoreS3FileID, + StorageFileID, +) +from models_library.users import UserID +from pydantic import ByteSize, NonNegativeInt, TypeAdapter +from servicelib.utils import ensure_ends_with +from sqlalchemy.ext.asyncio import AsyncEngine + +from ..exceptions.errors import FileMetaDataNotFoundError, ProjectAccessRightError +from ..models import FileMetaData, FileMetaDataAtDB, GenericCursor, PathMetaData +from ..modules.db.access_layer import AccessLayerRepository +from ..modules.db.file_meta_data import FileMetaDataRepository, TotalChildren +from .utils import convert_db_to_model + + +async def _list_all_files_in_folder( + *, + s3_client: SimcoreS3API, + bucket: S3BucketName, + prefix: str, + max_files_to_list: int, +) -> list[S3MetaData]: + async for s3_objects in s3_client.list_objects_paginated( + bucket, prefix, items_per_page=max_files_to_list + ): + # NOTE: stop immediately after listing the first `max_files_to_list` objects + return s3_objects + return [] + + +async def expand_directory( + s3_client: SimcoreS3API, + simcore_bucket_name: S3BucketName, + fmd: FileMetaDataAtDB, + max_items_to_include: NonNegativeInt, +) -> list[FileMetaData]: + """ + Scans the S3 backend and returns a list of S3MetaData entries, which get mapped + to FileMetaData entries. + """ + files_in_folder: list[S3MetaData] = await _list_all_files_in_folder( + s3_client=s3_client, + bucket=simcore_bucket_name, + prefix=ensure_ends_with(fmd.file_id, "/"), + max_files_to_list=max_items_to_include, + ) + result: list[FileMetaData] = [ + convert_db_to_model( + FileMetaDataAtDB( + location_id=fmd.location_id, + location=fmd.location, + bucket_name=fmd.bucket_name, + object_name=x.object_key, + user_id=fmd.user_id, + # NOTE: to ensure users have a consistent experience the + # `created_at` field is inherited from the last_modified + # coming from S3. This way if a file is created 1 month after the + # creation of the directory, the file's creation date + # will not be 1 month in the past.
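+                # Illustration (hypothetical dates): a directory created on
+                # 2024-01-01 containing a file uploaded on 2024-02-01 reports
+                # the file's created_at as 2024-02-01 (its S3 last_modified),
+                # not the directory's creation date.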
+ created_at=x.last_modified, + file_id=x.object_key, + file_size=TypeAdapter(ByteSize).validate_python(x.size), + last_modified=x.last_modified, + entity_tag=x.e_tag, + is_soft_link=False, + is_directory=False, + project_id=fmd.project_id, + node_id=fmd.node_id, + ) + ) + for x in files_in_folder + ] + return result + + +def get_simcore_directory(file_id: SimcoreS3FileID) -> str: + try: + directory_id = SimcoreS3DirectoryID.from_simcore_s3_object(file_id) + except ValueError: + return "" + return f"{Path(directory_id)}" + + +async def _try_get_fmd( + db_engine: AsyncEngine, s3_file_id: StorageFileID +) -> FileMetaDataAtDB | None: + with suppress(FileMetaDataNotFoundError): + return await FileMetaDataRepository.instance(db_engine).get( + file_id=TypeAdapter(SimcoreS3FileID).validate_python(s3_file_id) + ) + return None + + +async def get_directory_file_id( + db_engine: AsyncEngine, file_id: SimcoreS3FileID +) -> SimcoreS3FileID | None: + """ + returns the containing file's `directory_file_id` if the entry exists + in the `file_meta_data` table + """ + + provided_file_id_fmd = await _try_get_fmd(db_engine, file_id) + if provided_file_id_fmd: + # file_meta_data exists, so it is not a directory + return None + + directory_file_id_str: str = get_simcore_directory(file_id) + if directory_file_id_str == "": + # could not extract a directory name from the provided path + return None + + directory_file_id = TypeAdapter(SimcoreS3FileID).validate_python( + directory_file_id_str + ) + directory_file_id_fmd = await _try_get_fmd(db_engine, directory_file_id) + + return directory_file_id if directory_file_id_fmd else None + + +def compute_file_id_prefix(file_id: str, levels: int): + components = file_id.strip("/").split("/") + return "/".join(components[:levels]) + + +async def list_child_paths_from_s3( + s3_client: SimcoreS3API, + *, + dir_fmd: FileMetaData, + bucket: S3BucketName, + file_filter: Path, + limit: int, + cursor: GenericCursor | None, +) -> tuple[list[PathMetaData], GenericCursor | None]: + """list direct children given by `file_filter` of a directory. + Tries first using file_filter as a full path; if no results are found, it will + try using file_filter as a partial prefix.
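+
+    A minimal usage sketch (values are illustrative, not real identifiers):
+
+        paths, next_cursor = await list_child_paths_from_s3(
+            s3_client,
+            dir_fmd=dir_fmd,
+            bucket=bucket,
+            file_filter=Path("<project_id>/<node_id>/outputs"),
+            limit=50,
+            cursor=None,  # None requests the first page
+        )
+        # feed `next_cursor` back in to page through the remaining children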
+ """ + objects_cursor = None + if cursor is not None: + cursor_params = orjson.loads(cursor) + assert cursor_params["file_filter"] == f"{file_filter}" # nosec + objects_cursor = cursor_params["objects_next_cursor"] + list_s3_objects, objects_next_cursor = await s3_client.list_objects( + bucket=bucket, + prefix=file_filter, + start_after=None, + limit=limit, + next_cursor=objects_cursor, + is_partial_prefix=False, + ) + if not list_s3_objects: + list_s3_objects, objects_next_cursor = await s3_client.list_objects( + bucket=bucket, + prefix=file_filter, + start_after=None, + limit=limit, + next_cursor=objects_cursor, + is_partial_prefix=True, + ) + + paths_metadata = [ + PathMetaData.from_s3_object_in_dir(s3_object, dir_fmd) + for s3_object in list_s3_objects + ] + next_cursor = None + if objects_next_cursor: + next_cursor = orjson.dumps( + { + "file_filter": f"{file_filter}", + "objects_next_cursor": objects_next_cursor, + } + ) + + return paths_metadata, next_cursor + + +async def list_child_paths_from_repository( + db_engine: AsyncEngine, + *, + filter_by_project_ids: list[ProjectID] | None, + filter_by_file_prefix: Path | None, + cursor: GenericCursor | None, + limit: int, +) -> tuple[list[PathMetaData], GenericCursor | None, TotalChildren]: + file_meta_data_repo = FileMetaDataRepository.instance(db_engine) + paths_metadata, next_cursor, total = await file_meta_data_repo.list_child_paths( + filter_by_project_ids=filter_by_project_ids, + filter_by_file_prefix=filter_by_file_prefix, + limit=limit, + cursor=cursor, + is_partial_prefix=False, + ) + if not paths_metadata: + paths_metadata, next_cursor, total = await file_meta_data_repo.list_child_paths( + filter_by_project_ids=filter_by_project_ids, + filter_by_file_prefix=filter_by_file_prefix, + limit=limit, + cursor=cursor, + is_partial_prefix=True, + ) + + return paths_metadata, next_cursor, total + + +async def get_accessible_project_ids( + db_engine: AsyncEngine, *, user_id: UserID, project_id: ProjectID | None +) -> list[ProjectID]: + access_layer_repo = AccessLayerRepository.instance(db_engine) + if project_id: + project_access_rights = await access_layer_repo.get_project_access_rights( + user_id=user_id, project_id=project_id + ) + if not project_access_rights.read: + raise ProjectAccessRightError(access_right="read", project_id=project_id) + return [project_id] + return await access_layer_repo.get_readable_project_ids(user_id=user_id) diff --git a/services/storage/src/simcore_service_storage/utils.py b/services/storage/src/simcore_service_storage/utils/utils.py similarity index 73% rename from services/storage/src/simcore_service_storage/utils.py rename to services/storage/src/simcore_service_storage/utils/utils.py index 7abc18ed552..36fef50d268 100644 --- a/services/storage/src/simcore_service_storage/utils.py +++ b/services/storage/src/simcore_service_storage/utils/utils.py @@ -1,15 +1,16 @@ +import hashlib import logging -import urllib.parse from pathlib import Path import aiofiles -from aiohttp import ClientSession +import httpx from aiohttp.typedefs import StrOrURL +from aws_library.s3 import UploadID from models_library.projects_nodes_io import StorageFileID from models_library.users import UserID -from .constants import MAX_CHUNK_SIZE, S3_UNDEFINED_OR_EXTERNAL_MULTIPART_ID -from .models import FileMetaData, FileMetaDataAtDB, UploadID +from ..constants import MAX_CHUNK_SIZE, S3_UNDEFINED_OR_EXTERNAL_MULTIPART_ID +from ..models import FileMetaData, FileMetaDataAtDB logger = logging.getLogger(__name__) @@ -26,7 +27,7 @@ def 
convert_db_to_model(x: FileMetaDataAtDB) -> FileMetaData: async def download_to_file_or_raise( - session: ClientSession, + session: httpx.AsyncClient, url: StrOrURL, destination_path: Path, *, @@ -46,12 +47,13 @@ async def download_to_file_or_raise( dest_file = Path(destination_path) total_size = 0 - async with session.get(url, raise_for_status=True) as response: - dest_file.parent.mkdir(parents=True, exist_ok=True) - async with aiofiles.open(dest_file, mode="wb") as fh: - async for chunk in response.content.iter_chunked(chunk_size): - await fh.write(chunk) - total_size += len(chunk) + response = await session.get(f"{url}") + response.raise_for_status() + dest_file.parent.mkdir(parents=True, exist_ok=True) + async with aiofiles.open(dest_file, mode="wb") as fh: + async for chunk in response.aiter_bytes(chunk_size): + await fh.write(chunk) + total_size += len(chunk) return total_size @@ -67,7 +69,8 @@ def is_file_entry_valid(file_metadata: FileMetaData | FileMetaDataAtDB) -> bool: def create_upload_completion_task_name(user_id: UserID, file_id: StorageFileID) -> str: - return f"upload_complete_task_{user_id}_{urllib.parse.quote(file_id, safe='')}" + the_hash = hashlib.sha256(f"{user_id}_{file_id}".encode()).hexdigest() + return f"upload_complete_task_{the_hash}" def is_valid_managed_multipart_upload(upload_id: UploadID | None) -> bool: diff --git a/services/storage/src/simcore_service_storage/utils_handlers.py b/services/storage/src/simcore_service_storage/utils_handlers.py deleted file mode 100644 index e0438cc0c92..00000000000 --- a/services/storage/src/simcore_service_storage/utils_handlers.py +++ /dev/null @@ -1,53 +0,0 @@ -import logging - -from aiohttp import web -from aiohttp.typedefs import Handler -from aiohttp.web_request import Request -from aws_library.s3 import S3AccessError, S3KeyNotFoundError -from pydantic import ValidationError -from servicelib.aiohttp.aiopg_utils import DBAPIError - -from .datcore_adapter.datcore_adapter_exceptions import DatcoreAdapterTimeoutError -from .db_access_layer import InvalidFileIdentifierError -from .exceptions import ( - FileAccessRightError, - FileMetaDataNotFoundError, - LinkAlreadyExistsError, - ProjectAccessRightError, - ProjectNotFoundError, -) - -_logger = logging.getLogger(__name__) - - -@web.middleware -async def dsm_exception_handler( - request: Request, handler: Handler -) -> web.StreamResponse: - try: - return await handler(request) - except InvalidFileIdentifierError as err: - raise web.HTTPUnprocessableEntity( - reason=f"{err} is an invalid file identifier" - ) from err - except (FileMetaDataNotFoundError, S3KeyNotFoundError, ProjectNotFoundError) as err: - raise web.HTTPNotFound(reason=f"{err}") from err - except (FileAccessRightError, ProjectAccessRightError) as err: - raise web.HTTPForbidden(reason=f"{err}") from err - except LinkAlreadyExistsError as err: - raise web.HTTPUnprocessableEntity(reason=f"{err}") from err - except ValidationError as err: - raise web.HTTPUnprocessableEntity(reason=f"{err}") from err - except DBAPIError as err: - _logger.exception("Unexpected error while accessing DB:") - raise web.HTTPServiceUnavailable( - reason=f"Unexpected error while accessing the database: {err}" - ) from err - except S3AccessError as err: - _logger.exception("Unexpected error while accessing S3:") - raise web.HTTPServiceUnavailable( - reason=f"Unexpected error while accessing S3 backend: {err}" - ) from err - except DatcoreAdapterTimeoutError as err: - _logger.exception("Unexpected error while accessing Datcore-Adapter:") - 
raise web.HTTPGatewayTimeout(reason=f"{err}") from err diff --git a/services/storage/tests/conftest.py b/services/storage/tests/conftest.py index 8dc4a95b488..03c73f5bfd0 100644 --- a/services/storage/tests/conftest.py +++ b/services/storage/tests/conftest.py @@ -8,67 +8,82 @@ import asyncio import logging +import random import sys -import urllib.parse from collections.abc import AsyncIterator, Awaitable, Callable -from contextlib import AbstractAsyncContextManager, asynccontextmanager from pathlib import Path -from typing import cast +from typing import Any, Final, cast -import aioresponses -import dotenv +import httpx import pytest +import respx import simcore_service_storage -from aiohttp.test_utils import TestClient -from aiopg.sa import Engine +from asgi_lifespan import LifespanManager from aws_library.s3 import SimcoreS3API from faker import Faker from fakeredis.aioredis import FakeRedis -from models_library.api_schemas_storage import ( +from fastapi import FastAPI +from models_library.api_schemas_storage.storage_schemas import ( FileMetaDataGet, FileUploadCompleteFutureResponse, FileUploadCompleteResponse, FileUploadCompleteState, FileUploadCompletionBody, FileUploadSchema, + LinkType, UploadedPart, ) from models_library.basic_types import SHA256Str from models_library.projects import ProjectID from models_library.projects_nodes import NodeID -from models_library.projects_nodes_io import LocationID, SimcoreS3FileID +from models_library.projects_nodes_io import LocationID, SimcoreS3FileID, StorageFileID from models_library.users import UserID from models_library.utils.fastapi_encoders import jsonable_encoder from pydantic import ByteSize, TypeAdapter from pytest_mock import MockerFixture -from pytest_simcore.helpers.assert_checks import assert_status +from pytest_simcore.helpers.fastapi import url_from_operation_id +from pytest_simcore.helpers.httpx_assert_checks import assert_status from pytest_simcore.helpers.logging_tools import log_context -from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict +from pytest_simcore.helpers.monkeypatch_envs import delenvs_from_dict, setenvs_from_dict from pytest_simcore.helpers.s3 import upload_file_to_presigned_link +from pytest_simcore.helpers.storage_utils import ( + FileIDDict, + ProjectWithFilesParams, + get_updated_project, +) +from pytest_simcore.helpers.storage_utils_file_meta_data import ( + assert_file_meta_data_in_db, +) from pytest_simcore.helpers.typing_env import EnvVarsDict from servicelib.aiohttp import status -from settings_library.s3 import S3Settings +from servicelib.utils import limited_gather +from settings_library.rabbit import RabbitSettings +from simcore_postgres_database.models.tokens import tokens from simcore_postgres_database.storage_models import file_meta_data, projects, users -from simcore_service_storage.application import create +from simcore_service_storage.core.application import create_app +from simcore_service_storage.core.settings import ApplicationSettings +from simcore_service_storage.datcore_dsm import DatCoreDataManager from simcore_service_storage.dsm import get_dsm_provider -from simcore_service_storage.handlers_files import UPLOAD_TASKS_KEY -from simcore_service_storage.models import S3BucketName -from simcore_service_storage.s3 import get_s3_client -from simcore_service_storage.settings import Settings +from simcore_service_storage.models import FileMetaData, FileMetaDataAtDB, S3BucketName +from simcore_service_storage.modules.long_running_tasks import ( + get_completed_upload_tasks, +) 
+from simcore_service_storage.modules.s3 import get_s3_client from simcore_service_storage.simcore_s3_dsm import SimcoreS3DataManager +from sqlalchemy import literal_column +from sqlalchemy.ext.asyncio import AsyncEngine from tenacity.asyncio import AsyncRetrying from tenacity.retry import retry_if_exception_type from tenacity.stop import stop_after_delay from tenacity.wait import wait_fixed -from tests.helpers.utils_file_meta_data import assert_file_meta_data_in_db from types_aiobotocore_s3 import S3Client from yarl import URL pytest_plugins = [ - "pytest_simcore.aioresponses_mocker", "pytest_simcore.aws_s3_service", "pytest_simcore.aws_server", "pytest_simcore.cli_runner", + "pytest_simcore.disk_usage_monitoring", "pytest_simcore.docker_compose", "pytest_simcore.docker_swarm", "pytest_simcore.environment_configs", @@ -78,9 +93,11 @@ "pytest_simcore.openapi_specs", "pytest_simcore.postgres_service", "pytest_simcore.pytest_global_environs", + "pytest_simcore.rabbit_service", "pytest_simcore.repository_paths", - "tests.fixtures.data_models", - "tests.fixtures.datcore_adapter", + "pytest_simcore.simcore_storage_data_models", + "pytest_simcore.simcore_storage_datcore_adapter", + "pytest_simcore.simcore_storage_service", ] CURRENT_DIR = Path(sys.argv[0] if __name__ == "__main__" else __file__).resolve().parent @@ -109,82 +126,79 @@ def project_slug_dir(osparc_simcore_root_dir: Path) -> Path: return service_folder -@pytest.fixture(scope="session") -def project_env_devel_dict(project_slug_dir: Path) -> dict[str, str | None]: - env_devel_file = project_slug_dir / ".env-devel" - assert env_devel_file.exists() - return dotenv.dotenv_values(env_devel_file, verbose=True, interpolate=True) - - -@pytest.fixture -def project_env_devel_environment( - project_env_devel_dict: dict[str, str], monkeypatch: pytest.MonkeyPatch -) -> None: - for key, value in project_env_devel_dict.items(): - monkeypatch.setenv(key, value) - - -## FAKE DATA FIXTURES ---------------------------------------------- - - @pytest.fixture -async def cleanup_user_projects_file_metadata(aiopg_engine: Engine): +async def cleanup_user_projects_file_metadata(sqlalchemy_async_engine: AsyncEngine): yield # cleanup - async with aiopg_engine.acquire() as conn: + async with sqlalchemy_async_engine.begin() as conn: await conn.execute(file_meta_data.delete()) await conn.execute(projects.delete()) await conn.execute(users.delete()) @pytest.fixture -def simcore_s3_dsm(client: TestClient) -> SimcoreS3DataManager: - assert client.app +def simcore_s3_dsm(initialized_app: FastAPI) -> SimcoreS3DataManager: return cast( SimcoreS3DataManager, - get_dsm_provider(client.app).get(SimcoreS3DataManager.get_location_id()), + get_dsm_provider(initialized_app).get(SimcoreS3DataManager.get_location_id()), ) @pytest.fixture -async def storage_s3_client( - client: TestClient, -) -> SimcoreS3API: - assert client.app - return get_s3_client(client.app) +async def storage_s3_client(initialized_app: FastAPI) -> SimcoreS3API: + return get_s3_client(initialized_app) @pytest.fixture -async def storage_s3_bucket(app_settings: Settings) -> str: +async def storage_s3_bucket(app_settings: ApplicationSettings) -> str: assert app_settings.STORAGE_S3 return app_settings.STORAGE_S3.S3_BUCKET_NAME +@pytest.fixture +async def mock_rabbit_setup(mocker: MockerFixture) -> MockerFixture: + mocker.patch("simcore_service_storage.core.application.setup_rabbitmq") + mocker.patch("simcore_service_storage.core.application.setup_rpc_api_routes") + return mocker + + +@pytest.fixture +def 
app_environment( + mock_env_devel_environment: EnvVarsDict, + monkeypatch: pytest.MonkeyPatch, + external_envfile_dict: EnvVarsDict, +) -> EnvVarsDict: + if external_envfile_dict: + delenvs_from_dict(monkeypatch, mock_env_devel_environment, raising=False) + return setenvs_from_dict(monkeypatch, {**external_envfile_dict}) + + envs = setenvs_from_dict(monkeypatch, {}) + return mock_env_devel_environment | envs + + +@pytest.fixture +def disabled_rabbitmq(app_environment: EnvVarsDict, monkeypatch: pytest.MonkeyPatch): + monkeypatch.setenv("STORAGE_RABBITMQ", "null") + + +@pytest.fixture +def enabled_rabbitmq( + app_environment: EnvVarsDict, rabbit_service: RabbitSettings +) -> RabbitSettings: + return rabbit_service + + +@pytest.fixture def app_settings( - aiopg_engine: Engine, + app_environment: EnvVarsDict, + enabled_rabbitmq: RabbitSettings, + sqlalchemy_async_engine: AsyncEngine, postgres_host_config: dict[str, str], mocked_s3_server_envs: EnvVarsDict, - external_envfile_dict: EnvVarsDict, - datcore_adapter_service_mock: aioresponses.aioresponses, - monkeypatch: pytest.MonkeyPatch, -) -> Settings: - s3_settings_dict = {} - if external_envfile_dict: - s3_settings = S3Settings.create_from_envs(**external_envfile_dict) - if s3_settings.S3_ENDPOINT is None: - monkeypatch.delenv("S3_ENDPOINT") - s3_settings_dict = s3_settings.model_dump(exclude={"S3_ENDPOINT"}) - else: - s3_settings_dict = s3_settings.model_dump() - setenvs_from_dict( - monkeypatch, - { - **s3_settings_dict, - "STORAGE_TRACING": "null", - }, - ) - test_app_settings = Settings.create_from_envs() + datcore_adapter_service_mock: respx.MockRouter, + mocked_redis_server, +) -> ApplicationSettings: + test_app_settings = ApplicationSettings.create_from_envs() print(f"{test_app_settings.model_dump_json(indent=2)=}") return test_app_settings @@ -195,18 +209,30 @@ async def mocked_redis_server(mocker: MockerFixture) -> None: mocker.patch("redis.asyncio.from_url", return_value=mock_redis) +_LIFESPAN_TIMEOUT: Final[int] = 10 + + @pytest.fixture -def client( - event_loop: asyncio.AbstractEventLoop, - aiohttp_client: Callable, - unused_tcp_port_factory: Callable[..., int], - app_settings: Settings, - mocked_redis_server, -) -> TestClient: - app = create(app_settings) - return event_loop.run_until_complete( - aiohttp_client(app, server_kwargs={"port": unused_tcp_port_factory()}) - ) +async def initialized_app(app_settings: ApplicationSettings) -> AsyncIterator[FastAPI]: + settings = ApplicationSettings.create_from_envs() + app = create_app(settings) + # NOTE: the timeout is sometimes too small for CI machines, even for the larger ones + async with LifespanManager( + app, startup_timeout=_LIFESPAN_TIMEOUT, shutdown_timeout=_LIFESPAN_TIMEOUT + ): + yield app + + +@pytest.fixture +async def client( + initialized_app: FastAPI, +) -> AsyncIterator[httpx.AsyncClient]: + async with httpx.AsyncClient( + transport=httpx.ASGITransport(app=initialized_app), + base_url=f"http://{initialized_app.title}.testserver.io", + headers={"Content-Type": "application/json"}, + ) as client: + yield client @pytest.fixture @@ -233,11 +259,11 @@ def simcore_file_id( @pytest.fixture( params=[ SimcoreS3DataManager.get_location_id(), - # DatCoreDataManager.get_location_id(), + DatCoreDataManager.get_location_id(), ], ids=[ SimcoreS3DataManager.get_location_name(), - # DatCoreDataManager.get_location_name(), + DatCoreDataManager.get_location_name(), ], ) def location_id(request: pytest.FixtureRequest) -> LocationID: @@ -246,23 +272,25 @@ def location_id(request:
pytest.FixtureRequest) -> LocationID: @pytest.fixture async def get_file_meta_data( - client: TestClient, user_id: UserID, location_id: LocationID + initialized_app: FastAPI, + client: httpx.AsyncClient, + user_id: UserID, + location_id: LocationID, ) -> Callable[..., Awaitable[FileMetaDataGet]]: async def _getter(file_id: SimcoreS3FileID) -> FileMetaDataGet: - assert client.app - url = ( - client.app.router["get_file_metadata"] - .url_for( - location_id=f"{location_id}", - file_id=urllib.parse.quote(file_id, safe=""), - ) - .with_query(user_id=user_id) - ) + url = url_from_operation_id( + client, + initialized_app, + "get_file_metadata", + location_id=f"{location_id}", + file_id=file_id, + ).with_query(user_id=user_id) + response = await client.get(f"{url}") - data, error = await assert_status(response, status.HTTP_200_OK) + received_fmd, error = assert_status( + response, status.HTTP_200_OK, FileMetaDataGet + ) assert not error - assert data - received_fmd = TypeAdapter(FileMetaDataGet).validate_python(data) assert received_fmd return received_fmd @@ -271,30 +299,31 @@ async def _getter(file_id: SimcoreS3FileID) -> FileMetaDataGet: @pytest.fixture async def create_upload_file_link_v2( - client: TestClient, user_id: UserID, location_id: LocationID + initialized_app: FastAPI, + client: httpx.AsyncClient, + user_id: UserID, + location_id: LocationID, ) -> AsyncIterator[Callable[..., Awaitable[FileUploadSchema]]]: file_params: list[tuple[UserID, int, SimcoreS3FileID]] = [] async def _link_creator( file_id: SimcoreS3FileID, **query_kwargs ) -> FileUploadSchema: - assert client.app - url = ( - client.app.router["upload_file"] - .url_for( - location_id=f"{location_id}", - file_id=urllib.parse.quote(file_id, safe=""), - ) - .with_query(**query_kwargs, user_id=user_id) - ) + url = url_from_operation_id( + client, + initialized_app, + "upload_file", + location_id=f"{location_id}", + file_id=file_id, + ).with_query(**query_kwargs, user_id=user_id) assert ( "file_size" in url.query ), "V2 call to upload file must contain file_size field!" 
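+ # NOTE: the file_size query parameter is what marks this as a V2 upload request (hence the assert above)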
response = await client.put(f"{url}") - data, error = await assert_status(response, status.HTTP_200_OK) + received_file_upload, error = assert_status( + response, status.HTTP_200_OK, FileUploadSchema + ) assert not error - assert data - received_file_upload = TypeAdapter(FileUploadSchema).validate_python(data) assert received_file_upload file_params.append((user_id, location_id, file_id)) return received_file_upload @@ -302,27 +331,26 @@ async def _link_creator( yield _link_creator # cleanup - assert client.app clean_tasks = [] for u_id, loc_id, file_id in file_params: - url = ( - client.app.router["delete_file"] - .url_for( - location_id=f"{loc_id}", - file_id=urllib.parse.quote(file_id, safe=""), - ) - .with_query(user_id=u_id) - ) + url = url_from_operation_id( + client, + initialized_app, + "delete_file", + location_id=f"{loc_id}", + file_id=file_id, + ).with_query(user_id=u_id) clean_tasks.append(client.delete(f"{url}")) await asyncio.gather(*clean_tasks) @pytest.fixture def upload_file( - aiopg_engine: Engine, + sqlalchemy_async_engine: AsyncEngine, storage_s3_client: SimcoreS3API, storage_s3_bucket: S3BucketName, - client: TestClient, + initialized_app: FastAPI, + client: httpx.AsyncClient, project_id: ProjectID, node_id: NodeID, create_upload_file_link_v2: Callable[..., Awaitable[FileUploadSchema]], @@ -338,7 +366,6 @@ async def _uploader( sha256_checksum: SHA256Str | None = None, project_id: ProjectID = project_id, ) -> tuple[Path, SimcoreS3FileID]: - assert client.app # create a file file = create_file_of_size(file_size, file_name) if not file_id: @@ -348,7 +375,10 @@ async def _uploader( if sha256_checksum: query_params["sha256_checksum"] = f"{sha256_checksum}" file_upload_link = await create_upload_file_link_v2( - file_id, link_type="presigned", file_size=file_size, **query_params + file_id, + link_type=LinkType.PRESIGNED.value, + file_size=file_size, + **query_params, ) # upload the file @@ -363,12 +393,11 @@ async def _uploader( json=jsonable_encoder(FileUploadCompletionBody(parts=part_to_etag)), ) response.raise_for_status() - data, error = await assert_status(response, status.HTTP_202_ACCEPTED) - assert not error - assert data - file_upload_complete_response = FileUploadCompleteResponse.model_validate( - data + file_upload_complete_response, error = assert_status( + response, status.HTTP_202_ACCEPTED, FileUploadCompleteResponse ) + assert not error + assert file_upload_complete_response state_url = URL(f"{file_upload_complete_response.links.state}").relative() completion_etag = None @@ -378,18 +407,22 @@ async def _uploader( stop=stop_after_delay(60), retry=retry_if_exception_type(ValueError), ): - with attempt, log_context( - logging.INFO, - f"waiting for upload completion {state_url=}, {attempt.retry_state.attempt_number}", - ) as ctx: + with ( + attempt, + log_context( + logging.INFO, + f"waiting for upload completion {state_url=}, {attempt.retry_state.attempt_number}", + ) as ctx, + ): response = await client.post(f"{state_url}") response.raise_for_status() - data, error = await assert_status(response, status.HTTP_200_OK) + future, error = assert_status( + response, status.HTTP_200_OK, FileUploadCompleteFutureResponse + ) assert not error - assert data - future = FileUploadCompleteFutureResponse.model_validate(data) + assert future if future.state == FileUploadCompleteState.NOK: - msg = f"{data=}" + msg = f"{future=}" raise ValueError(msg) assert future.state == FileUploadCompleteState.OK assert future.e_tag is not None @@ -401,7 +434,7 @@ async def _uploader( # check 
the entry in db now has the correct file size, and the upload id is gone await assert_file_meta_data_in_db( - aiopg_engine, + sqlalchemy_async_engine, file_id=file_id, expected_entry_exists=True, expected_file_size=file_size, @@ -421,25 +454,6 @@ async def _uploader( return _uploader -@pytest.fixture -def create_simcore_file_id( - faker: Faker, -) -> Callable[[ProjectID, NodeID, str, Path | None], SimcoreS3FileID]: - def _creator( - project_id: ProjectID, - node_id: NodeID, - file_name: str, - file_base_path: Path | None = None, - ) -> SimcoreS3FileID: - s3_file_name = file_name - if file_base_path: - s3_file_name = f"{file_base_path / file_name}" - clean_path = Path(f"{project_id}/{node_id}/{s3_file_name}") - return TypeAdapter(SimcoreS3FileID).validate_python(f"{clean_path}") - - return _creator - - @pytest.fixture async def with_versioning_enabled( s3_client: S3Client, @@ -455,11 +469,12 @@ async def with_versioning_enabled( async def create_empty_directory( create_simcore_file_id: Callable[[ProjectID, NodeID, str], SimcoreS3FileID], create_upload_file_link_v2: Callable[..., Awaitable[FileUploadSchema]], - client: TestClient, - project_id: ProjectID, - node_id: NodeID, -) -> Callable[..., Awaitable[FileUploadSchema]]: - async def _directory_creator(dir_name: str): + initialized_app: FastAPI, + client: httpx.AsyncClient, +) -> Callable[[str, ProjectID, NodeID], Awaitable[SimcoreS3FileID]]: + async def _directory_creator( + dir_name: str, project_id: ProjectID, node_id: NodeID + ) -> SimcoreS3FileID: # creating an empty directory goes through the same procedure as uploading a multipart file # done by using 3 calls: # 1. create the link as a directory @@ -467,8 +482,8 @@ async def _directory_creator(dir_name: str): # 3. call file_upload_complete_response until it replies OK directory_file_id = create_simcore_file_id(project_id, node_id, dir_name) - directory_file_upload: FileUploadSchema = await create_upload_file_link_v2( - directory_file_id, link_type="s3", is_directory="true", file_size=-1 + directory_file_upload = await create_upload_file_link_v2( + directory_file_id, link_type="S3", is_directory="true", file_size=0 ) # always returns a v2 link when dealing with directories assert isinstance(directory_file_upload, FileUploadSchema) @@ -481,15 +496,15 @@ async def _directory_creator(dir_name: str): json=jsonable_encoder(FileUploadCompletionBody(parts=[])), ) response.raise_for_status() - data, error = await assert_status(response, status.HTTP_202_ACCEPTED) + file_upload_complete_response, error = assert_status( + response, status.HTTP_202_ACCEPTED, FileUploadCompleteResponse + ) assert not error - assert data - file_upload_complete_response = FileUploadCompleteResponse.model_validate(data) + assert file_upload_complete_response state_url = URL(f"{file_upload_complete_response.links.state}").relative() # check that it finished updating - assert client.app - client.app[UPLOAD_TASKS_KEY].clear() + get_completed_upload_tasks(initialized_app).clear() # now check for the completion async for attempt in AsyncRetrying( reraise=True, @@ -497,15 +512,19 @@ async def _directory_creator(dir_name: str): stop=stop_after_delay(60), retry=retry_if_exception_type(AssertionError), ): - with attempt, log_context( - logging.INFO, - f"waiting for upload completion {state_url=}, {attempt.retry_state.attempt_number}", - ) as ctx: + with ( + attempt, + log_context( + logging.INFO, + f"waiting for upload completion {state_url=}, {attempt.retry_state.attempt_number}", + ) as ctx, + ): response = await 
client.post(f"{state_url}") - data, error = await assert_status(response, status.HTTP_200_OK) + future, error = assert_status( + response, status.HTTP_200_OK, FileUploadCompleteFutureResponse + ) assert not error - assert data - future = FileUploadCompleteFutureResponse.model_validate(data) + assert future assert future.state == FileUploadCompleteState.OK assert future.e_tag is None ctx.logger.info( @@ -513,82 +532,127 @@ async def _directory_creator(dir_name: str): f"--> done waiting, data is completely uploaded [{attempt.retry_state.retry_object.statistics}]", ) - return directory_file_upload + return directory_file_id return _directory_creator +async def _upload_file_to_s3( + s3_client: SimcoreS3API, + faker: Faker, + *, + s3_bucket: S3BucketName, + local_file: Path, + file_id: SimcoreS3FileID, +) -> dict[SimcoreS3FileID, FileIDDict]: + await s3_client.upload_file( + bucket=s3_bucket, + file=local_file, + object_key=file_id, + bytes_transfered_cb=None, + ) + return {file_id: FileIDDict(path=local_file, sha256_checksum=f"{faker.sha256()}")} + + +@pytest.fixture +async def populate_directory( + create_file_of_size: Callable[[ByteSize, str | None], Path], + storage_s3_client: SimcoreS3API, + storage_s3_bucket: S3BucketName, - project_id: ProjectID, - node_id: NodeID, -) -> Callable[..., Awaitable[None]]: + faker: Faker, +) -> Callable[ + [ByteSize, str, ProjectID, NodeID, int, int], + Awaitable[tuple[NodeID, dict[SimcoreS3FileID, FileIDDict]]], +]: async def _create_content( file_size_in_dir: ByteSize, dir_name: str, - subdir_count: int = 4, - file_count: int = 5, - ) -> None: - file = create_file_of_size(file_size_in_dir, "some_file") - - async def _create_file(s: int, f: int): - file_name = f"{dir_name}/sub-dir-{s}/file-{f}" - clean_path = Path(f"{project_id}/{node_id}/{file_name}") - await storage_s3_client.upload_file( - bucket=storage_s3_bucket, - file=file, - object_key=TypeAdapter(SimcoreS3FileID).validate_python( - f"{clean_path}" - ), - bytes_transfered_cb=None, + project_id: ProjectID, + node_id: NodeID, + subdir_count: int, + file_count: int, + ) -> tuple[NodeID, dict[SimcoreS3FileID, FileIDDict]]: + assert subdir_count >= 1, "cannot use fixture with subdir_count < 1!" + assert file_count >= 1, "cannot use fixture with file_count < 1!"
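+ # NOTE: a single local file is reused for every upload below; only the S3 object keys differ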
+ + local_file = create_file_of_size(file_size_in_dir, None) + + # Create subdirectories + s3_base_path = Path(f"{project_id}") / f"{node_id}" / dir_name + # NOTE: add a space in the sub-directory names + s3_subdirs = [ + s3_base_path / f"sub-dir_ect ory-{i}" for i in range(subdir_count) + ] + # Randomly distribute files across subdirectories + selected_subdirs = random.choices(s3_subdirs, k=file_count) # noqa: S311 + # Upload to S3 + with log_context( + logging.INFO, + msg=f"Uploading {file_count} files to S3 (each {file_size_in_dir.human_readable()}, total: {ByteSize(file_count * file_size_in_dir).human_readable()})", + ): + # we ensure the file name contains a space + def _file_name_with_space(): + file_name = faker.unique.file_name() + return f"{file_name[:1]} {file_name[1:]}" + + results = await asyncio.gather( + *( + _upload_file_to_s3( + storage_s3_client, + faker, + s3_bucket=storage_s3_bucket, + local_file=local_file, + file_id=TypeAdapter(SimcoreS3FileID).validate_python( + f"{selected_subdir / _file_name_with_space()}" + ), + ) + for selected_subdir in selected_subdirs + ) ) - tasks = [ - _create_file(s, f) for f in range(file_count) for s in range(subdir_count) - ] + assert len(results) == file_count - await asyncio.gather(*tasks) + # double-check on S3 that exactly file_count objects were uploaded + counted_uploaded_objects = await storage_s3_client.count_objects( + bucket=storage_s3_bucket, + prefix=s3_base_path, + is_partial_prefix=True, + start_after=None, + use_delimiter=False, + ) + assert counted_uploaded_objects == file_count - file.unlink() + return node_id, {k: v for r in results for k, v in r.items()} return _create_content @pytest.fixture async def delete_directory( - client: TestClient, - storage_s3_client: SimcoreS3API, - storage_s3_bucket: S3BucketName, + initialized_app: FastAPI, + client: httpx.AsyncClient, user_id: UserID, location_id: LocationID, ) -> Callable[..., Awaitable[None]]: - async def _dir_remover(directory_file_upload: FileUploadSchema) -> None: - assert directory_file_upload.urls[0].path - directory_file_id = directory_file_upload.urls[0].path.strip("/") - assert client.app - delete_url = ( - client.app.router["delete_file"] - .url_for( - location_id=f"{location_id}", - file_id=urllib.parse.quote(directory_file_id, safe=""), - ) - .with_query(user_id=user_id) - ) + async def _dir_remover(directory_s3: StorageFileID) -> None: + delete_url = url_from_operation_id( + client, + initialized_app, + "delete_file", + location_id=f"{location_id}", + file_id=directory_s3, + ).with_query(user_id=user_id) + response = await client.delete(f"{delete_url}") - await assert_status(response, status.HTTP_204_NO_CONTENT) + assert_status(response, status.HTTP_204_NO_CONTENT, None) # NOTE: ensures no more files are left in the directory; # even a single leftover file will be detected - list_files_metadata_url = ( - client.app.router["get_files_metadata"] - .url_for(location_id=f"{location_id}") - .with_query(user_id=user_id, uuid_filter=directory_file_id) - ) + list_files_metadata_url = url_from_operation_id( + client, initialized_app, "list_files_metadata", location_id=f"{location_id}" + ).with_query(user_id=user_id, uuid_filter=directory_s3) response = await client.get(f"{list_files_metadata_url}") - data, error = await assert_status(response, status.HTTP_200_OK) + data, error = assert_status(response, status.HTTP_200_OK, list[FileMetaDataGet]) assert error is None assert data == [] @@ -597,27 +661,287 @@ async def _dir_remover(directory_file_upload: FileUploadSchema) -> None: @pytest.fixture async def
create_directory_with_files( - create_empty_directory: Callable[..., Awaitable[FileUploadSchema]], - populate_directory: Callable[..., Awaitable[None]], + create_empty_directory: Callable[ + [str, ProjectID, NodeID], Awaitable[SimcoreS3FileID] + ], + populate_directory: Callable[ + [ByteSize, str, ProjectID, NodeID, int, int], + Awaitable[tuple[NodeID, dict[SimcoreS3FileID, FileIDDict]]], + ], delete_directory: Callable[..., Awaitable[None]], -) -> Callable[..., AbstractAsyncContextManager[FileUploadSchema]]: - @asynccontextmanager - async def _create_context( - dir_name: str, file_size_in_dir: ByteSize, subdir_count: int, file_count: int - ) -> AsyncIterator[FileUploadSchema]: - directory_file_upload: FileUploadSchema = await create_empty_directory( - dir_name=dir_name +) -> AsyncIterator[ + Callable[ + [str, ByteSize, int, int, ProjectID, NodeID], + Awaitable[ + tuple[SimcoreS3FileID, tuple[NodeID, dict[SimcoreS3FileID, FileIDDict]]] + ], + ] +]: + uploaded_directories = [] + + async def _( + dir_name: str, + file_size_in_dir: ByteSize, + subdir_count: int, + file_count: int, + project_id: ProjectID, + node_id: NodeID, + ) -> tuple[SimcoreS3FileID, tuple[NodeID, dict[SimcoreS3FileID, FileIDDict]]]: + directory_file_id = await create_empty_directory(dir_name, project_id, node_id) + + uploaded_files = await populate_directory( + file_size_in_dir, + dir_name, + project_id, + node_id, + subdir_count, + file_count, + ) + + uploaded_directories.append(directory_file_id) + + return directory_file_id, uploaded_files + + yield _ + + await asyncio.gather(*(delete_directory(_) for _ in uploaded_directories)) + + +async def _upload_one_file_task( + upload_file: Callable[..., Awaitable[tuple[Path, SimcoreS3FileID]]], + allowed_file_sizes: tuple[ByteSize, ...], + allowed_file_checksums: tuple[SHA256Str, ...], + *, + file_name: str, + file_id: SimcoreS3FileID, + node_id: NodeID, +) -> tuple[NodeID, dict[SimcoreS3FileID, FileIDDict]]: + selected_checksum = random.choice(allowed_file_checksums) # noqa: S311 + uploaded_file, uploaded_file_id = await upload_file( + file_size=random.choice(allowed_file_sizes), # noqa: S311 + file_name=file_name, + file_id=file_id, + sha256_checksum=selected_checksum, + ) + assert uploaded_file_id == file_id + return ( + node_id, + { + uploaded_file_id: FileIDDict( + path=uploaded_file, sha256_checksum=selected_checksum + ) + }, + ) + + +async def _upload_folder_task( + create_directory_with_files: Callable[ + ..., + Awaitable[ + tuple[SimcoreS3FileID, tuple[NodeID, dict[SimcoreS3FileID, FileIDDict]]] + ], + ], + allowed_file_sizes: tuple[ByteSize, ...], + *, + dir_name: str, + project_id: ProjectID, + node_id: NodeID, + workspace_file_count: int, +) -> tuple[NodeID, dict[SimcoreS3FileID, FileIDDict]]: + dir_file_id, node_files_map = await create_directory_with_files( + dir_name=dir_name, + file_size_in_dir=random.choice(allowed_file_sizes), # noqa: S311 + subdir_count=3, + file_count=workspace_file_count, + project_id=project_id, + node_id=node_id, + ) + assert dir_file_id + return node_files_map + + +@pytest.fixture +async def random_project_with_files( + sqlalchemy_async_engine: AsyncEngine, + create_project: Callable[..., Awaitable[dict[str, Any]]], + create_project_node: Callable[..., Awaitable[NodeID]], + create_simcore_file_id: Callable[ + [ProjectID, NodeID, str, Path | None], SimcoreS3FileID + ], + faker: Faker, + create_directory_with_files: Callable[ + ..., + Awaitable[ + tuple[SimcoreS3FileID, tuple[NodeID, dict[SimcoreS3FileID, FileIDDict]]] + ], + ], + 
upload_file: Callable[..., Awaitable[tuple[Path, SimcoreS3FileID]]], +) -> Callable[ + [ProjectWithFilesParams], + Awaitable[tuple[dict[str, Any], dict[NodeID, dict[SimcoreS3FileID, FileIDDict]]]], +]: + async def _creator( + project_params: ProjectWithFilesParams, + ) -> tuple[dict[str, Any], dict[NodeID, dict[SimcoreS3FileID, FileIDDict]]]: + assert len(project_params.allowed_file_sizes) == len( + project_params.allowed_file_checksums ) + project = await create_project(name="random-project") + node_to_files_mapping: dict[NodeID, dict[SimcoreS3FileID, FileIDDict]] = {} + upload_tasks = [] + for _ in range(project_params.num_nodes): + # Create a node with outputs (files and others) + project_id = ProjectID(project["uuid"]) + node_id = cast(NodeID, faker.uuid4(cast_to=None)) + node_to_files_mapping[node_id] = {} + output3_file_name = faker.file_name() + output3_file_id = create_simcore_file_id( + project_id, node_id, output3_file_name, Path("outputs/output_3") + ) + created_node_id = await create_project_node( + ProjectID(project["uuid"]), + node_id, + outputs={ + "output_1": faker.pyint(), + "output_2": faker.pystr(), + "output_3": f"{output3_file_id}", + }, + ) + assert created_node_id == node_id + + upload_tasks.append( + _upload_one_file_task( + upload_file, + project_params.allowed_file_sizes, + project_params.allowed_file_checksums, + file_name=output3_file_name, + file_id=output3_file_id, + node_id=node_id, + ) + ) + + # some workspace files (these are not referenced in the file_meta_data, only as a folder) + if project_params.workspace_files_count > 0: + upload_tasks.append( + _upload_folder_task( + create_directory_with_files, + project_params.allowed_file_sizes, + dir_name="workspace", + project_id=project_id, + node_id=node_id, + workspace_file_count=project_params.workspace_files_count, + ) + ) + + # add a few random files in the node root space for good measure + for _ in range(random.randint(1, 3)): # noqa: S311 + root_file_name = faker.file_name() + root_file_id = create_simcore_file_id( + project_id, node_id, root_file_name, None + ) + upload_tasks.append( + _upload_one_file_task( + upload_file, + project_params.allowed_file_sizes, + project_params.allowed_file_checksums, + file_name=root_file_name, + file_id=root_file_id, + node_id=node_id, + ), + ) - await populate_directory( - file_size_in_dir=file_size_in_dir, - dir_name=dir_name, - subdir_count=subdir_count, - file_count=file_count, + # upload everything of the node + results = await limited_gather(*upload_tasks, limit=10) + + for node_id, file_id_to_dict_mapping in results: + for file_id, file_dict in file_id_to_dict_mapping.items(): + node_to_files_mapping[node_id][file_id] = file_dict + + project = await get_updated_project(sqlalchemy_async_engine, project["uuid"]) + return project, node_to_files_mapping + + return _creator + + +@pytest.fixture +async def with_random_project_with_files( + random_project_with_files: Callable[ + [ProjectWithFilesParams], + Awaitable[ + tuple[dict[str, Any], dict[NodeID, dict[SimcoreS3FileID, FileIDDict]]] + ], + ], + project_params: ProjectWithFilesParams, +) -> tuple[ + dict[str, Any], + dict[NodeID, dict[SimcoreS3FileID, FileIDDict]], +]: + return await random_project_with_files(project_params) + + +@pytest.fixture() +async def output_file( + user_id: UserID, project_id: str, sqlalchemy_async_engine: AsyncEngine, faker: Faker +) -> AsyncIterator[FileMetaData]: + node_id = "fd6f9737-1988-341b-b4ac-0614b646fa82" + + # pylint: disable=no-value-for-parameter + + file = 
FileMetaData.from_simcore_node( + user_id=user_id, + file_id=f"{project_id}/{node_id}/filename.txt", + bucket=TypeAdapter(S3BucketName).validate_python("master-simcore"), + location_id=SimcoreS3DataManager.get_location_id(), + location_name=SimcoreS3DataManager.get_location_name(), + sha256_checksum=faker.sha256(), + ) + file.entity_tag = "df9d868b94e53d18009066ca5cd90e9f" + file.file_size = ByteSize(12) + file.user_id = user_id + async with sqlalchemy_async_engine.begin() as conn: + stmt = ( + file_meta_data.insert() + .values(jsonable_encoder(FileMetaDataAtDB.model_validate(file))) + .returning(literal_column("*")) + ) + result = await conn.execute(stmt) + row = result.one() + assert row + + yield file + + async with sqlalchemy_async_engine.begin() as conn: + result = await conn.execute( + file_meta_data.delete().where(file_meta_data.c.file_id == row.file_id) ) - yield directory_file_upload - await delete_directory(directory_file_upload=directory_file_upload) +@pytest.fixture +async def fake_datcore_tokens( + user_id: UserID, sqlalchemy_async_engine: AsyncEngine, faker: Faker +) -> AsyncIterator[tuple[str, str]]: + token_key = cast(str, faker.uuid4()) + token_secret = cast(str, faker.uuid4()) + created_token_ids = [] + async with sqlalchemy_async_engine.begin() as conn: + result = await conn.execute( + tokens.insert() + .values( + user_id=user_id, + token_service="pytest", # noqa: S106 + token_data={ + "service": "pytest", + "token_secret": token_secret, + "token_key": token_key, + }, + ) + .returning(tokens.c.token_id) + ) + row = result.one() + created_token_ids.append(row.token_id) + yield token_key, token_secret - return _create_context + async with sqlalchemy_async_engine.begin() as conn: + await conn.execute( + tokens.delete().where(tokens.c.token_id.in_(created_token_ids)) + ) diff --git a/services/storage/tests/data/file_meta_data.csv b/services/storage/tests/data/file_meta_data.csv deleted file mode 100644 index 24bf7dc5ef8..00000000000 --- a/services/storage/tests/data/file_meta_data.csv +++ /dev/null @@ -1,3 +0,0 @@ -file_id,location_id,location,bucket_name,object_name,project_id,node_id,user_id -161b8782-b13e-5840-9ae2-e2250c231001/ad9bda7f-1dc5-5480-ab22-5fef4fc53eac/outputController.dat,0,simcore.s3,pytestbucket,161b8782-b13e-5840-9ae2-e2250c231001/ad9bda7f-1dc5-5480-ab22-5fef4fc53eac/outputController.dat,161b8782-b13e-5840-9ae2-e2250c231001,ad9bda7f-1dc5-5480-ab22-5fef4fc53eac,21 -161b8782-b13e-5840-9ae2-e2250c231001/a3941ea0-37c4-5c1d-a7b3-01b5fd8a80c8/notebooks.zip,0,simcore.s3,pytestbucket,161b8782-b13e-5840-9ae2-e2250c231001/a3941ea0-37c4-5c1d-a7b3-01b5fd8a80c8/notebooks.zip,161b8782-b13e-5840-9ae2-e2250c231001,a3941ea0-37c4-5c1d-a7b3-01b5fd8a80c8,21 diff --git a/services/storage/tests/data/notebooks.zip b/services/storage/tests/data/notebooks.zip deleted file mode 100644 index 94ebaf90016..00000000000 --- a/services/storage/tests/data/notebooks.zip +++ /dev/null @@ -1,4 +0,0 @@ -1 -2 -3 -4 diff --git a/services/storage/tests/data/outputController.dat b/services/storage/tests/data/outputController.dat deleted file mode 100644 index 94ebaf90016..00000000000 --- a/services/storage/tests/data/outputController.dat +++ /dev/null @@ -1,4 +0,0 @@ -1 -2 -3 -4 diff --git a/services/storage/tests/data/projects.csv b/services/storage/tests/data/projects.csv deleted file mode 100644 index 9ea1d463545..00000000000 --- a/services/storage/tests/data/projects.csv +++ /dev/null @@ -1,2 +0,0 @@ 
-id,type,uuid,name,description,thumbnail,prj_owner,creation_date,last_change_date,workbench,published,access_rights,hidden -151,STANDARD,161b8782-b13e-5840-9ae2-e2250c231001,Kember use case,Kember Cordiac Model with PostPro Viewer,"",21,2019-06-27 11:42:03.168,2019-06-27 11:43:49.128,"{""ad9bda7f-1dc5-5480-ab22-5fef4fc53eac"": {""key"": ""simcore/services/comp/kember-cardiac-model"", ""version"": ""1.0.0"", ""label"": ""Kember cardiac model"", ""inputs"": {""dt"": 0.01, ""T"": 1000, ""forcing_factor"": 0}, ""inputNodes"": [], ""outputs"": {}, ""progress"": 100, ""thumbnail"": """", ""position"": {""x"": 50, ""y"": 100}}, ""a3941ea0-37c4-5c1d-a7b3-01b5fd8a80c8"": {""key"": ""simcore/services/dynamic/kember-viewer"", ""version"": ""2.9.0"", ""label"": ""kember-viewer"", ""inputs"": {""outputController"": {""nodeUuid"": ""ad9bda7f-1dc5-5480-ab22-5fef4fc53eac"", ""output"": ""out_1""}}, ""inputNodes"": [""ad9bda7f-1dc5-5480-ab22-5fef4fc53eac""], ""outputs"": {}, ""progress"": 100, ""thumbnail"": """", ""position"": {""x"": 300, ""y"": 100}}}",false,"{}",false diff --git a/services/storage/tests/data/users.csv b/services/storage/tests/data/users.csv deleted file mode 100644 index d35b80d9626..00000000000 --- a/services/storage/tests/data/users.csv +++ /dev/null @@ -1,2 +0,0 @@ -id,name,email,password_hash,status,role,created_at -21,devops,devops@itis.swiss,$5$rounds=1000$jjUWjHSG5F2dMKw.$9VRlE4YLl4bPfIrWkDz/8GtEx1XkzTpuZzyc/uiBFE4,ACTIVE,USER,2019-06-27 11:35:44.828696 diff --git a/services/storage/tests/fixtures/data_models.py b/services/storage/tests/fixtures/data_models.py deleted file mode 100644 index ab225928f62..00000000000 --- a/services/storage/tests/fixtures/data_models.py +++ /dev/null @@ -1,369 +0,0 @@ -# pylint: disable=redefined-outer-name -# pylint: disable=unused-argument -# pylint: disable=unused-variable - -from collections import deque -from collections.abc import AsyncIterator, Awaitable, Callable -from contextlib import asynccontextmanager -from pathlib import Path -from random import choice, randint -from typing import Any - -import pytest -import sqlalchemy as sa -from aiopg.sa.connection import SAConnection -from aiopg.sa.engine import Engine -from faker import Faker -from models_library.basic_types import SHA256Str -from models_library.projects import ProjectID -from models_library.projects_nodes_io import NodeID, SimcoreS3FileID -from models_library.users import UserID -from pydantic import ByteSize, TypeAdapter -from pytest_simcore.helpers.faker_factories import random_project, random_user -from servicelib.utils import limited_gather -from simcore_postgres_database.models.project_to_groups import project_to_groups -from simcore_postgres_database.storage_models import projects, users -from sqlalchemy.dialects.postgresql import insert as pg_insert - -from ..helpers.utils import get_updated_project - - -@asynccontextmanager -async def _user_context(aiopg_engine: Engine, *, name: str) -> AsyncIterator[UserID]: - # inject a random user in db - - # NOTE: Ideally this (and next fixture) should be done via webserver API but at this point - # in time, the webserver service would bring more dependencies to other services - # which would turn this test too complex. 
- - # pylint: disable=no-value-for-parameter - stmt = users.insert().values(**random_user(name=name)).returning(users.c.id) - async with aiopg_engine.acquire() as conn: - result = await conn.execute(stmt) - row = await result.fetchone() - assert row - assert isinstance(row.id, int) - - try: - yield TypeAdapter(UserID).validate_python(row.id) - finally: - async with aiopg_engine.acquire() as conn: - await conn.execute(users.delete().where(users.c.id == row.id)) - - -@pytest.fixture -async def user_id(aiopg_engine: Engine) -> AsyncIterator[UserID]: - async with _user_context(aiopg_engine, name="test-user") as new_user_id: - yield new_user_id - - -@pytest.fixture -async def other_user_id(aiopg_engine: Engine) -> AsyncIterator[UserID]: - async with _user_context(aiopg_engine, name="test-other-user") as new_user_id: - yield new_user_id - - -@pytest.fixture -async def create_project( - user_id: UserID, aiopg_engine: Engine -) -> AsyncIterator[Callable[[], Awaitable[dict[str, Any]]]]: - created_project_uuids = [] - - async def _creator(**kwargs) -> dict[str, Any]: - prj_config = {"prj_owner": user_id} - prj_config.update(kwargs) - async with aiopg_engine.acquire() as conn: - result = await conn.execute( - projects.insert() - .values(**random_project(**prj_config)) - .returning(sa.literal_column("*")) - ) - row = await result.fetchone() - assert row - created_project_uuids.append(row[projects.c.uuid]) - return dict(row) - - yield _creator - # cleanup - async with aiopg_engine.acquire() as conn: - await conn.execute( - projects.delete().where(projects.c.uuid.in_(created_project_uuids)) - ) - - -@pytest.fixture -async def create_project_access_rights( - aiopg_engine: Engine, -) -> AsyncIterator[Callable[[ProjectID, UserID, bool, bool, bool], Awaitable[None]]]: - _created = [] - - async def _creator( - project_id: ProjectID, user_id: UserID, read: bool, write: bool, delete: bool - ) -> None: - async with aiopg_engine.acquire() as conn: - result = await conn.execute( - project_to_groups.insert() - .values( - project_uuid=f"{project_id}", - gid=sa.select(users.c.primary_gid) - .where(users.c.id == f"{user_id}") - .scalar_subquery(), - read=read, - write=write, - delete=delete, - ) - .returning(sa.literal_column("*")) - ) - row = await result.fetchone() - assert row - _created.append( - (row[project_to_groups.c.project_uuid], row[project_to_groups.c.gid]) - ) - - yield _creator - - # cleanup - async with aiopg_engine.acquire() as conn: - await conn.execute( - project_to_groups.delete().where( - sa.or_( - *( - (project_to_groups.c.project_uuid == pid) - & (project_to_groups.c.gid == gid) - for pid, gid in _created - ) - ) - ) - ) - - -@pytest.fixture -async def project_id( - create_project: Callable[[], Awaitable[dict[str, Any]]] -) -> ProjectID: - project = await create_project() - return ProjectID(project["uuid"]) - - -@pytest.fixture -async def collaborator_id(aiopg_engine: Engine) -> AsyncIterator[UserID]: - - async with _user_context(aiopg_engine, name="collaborator") as new_user_id: - yield TypeAdapter(UserID).validate_python(new_user_id) - - -@pytest.fixture -def share_with_collaborator( - aiopg_engine: Engine, - collaborator_id: UserID, - user_id: UserID, - project_id: ProjectID, -) -> Callable[[], Awaitable[None]]: - async def _get_user_group(conn: SAConnection, query_user: int) -> int: - result = await conn.execute( - sa.select(users.c.primary_gid).where(users.c.id == query_user) - ) - row = await result.fetchone() - assert row - primary_gid: int = row[users.c.primary_gid] - return primary_gid 
- - async def _() -> None: - async with aiopg_engine.acquire() as conn: - result = await conn.execute( - sa.select(projects.c.access_rights).where( - projects.c.uuid == f"{project_id}" - ) - ) - row = await result.fetchone() - assert row - access_rights: dict[str | int, Any] = row[projects.c.access_rights] - - access_rights[await _get_user_group(conn, user_id)] = { - "read": True, - "write": True, - "delete": True, - } - access_rights[await _get_user_group(conn, collaborator_id)] = { - "read": True, - "write": True, - "delete": False, - } - - await conn.execute( - projects.update() - .where(projects.c.uuid == f"{project_id}") - .values(access_rights=access_rights) - ) - - # project_to_groups needs to be updated - for group_id, permissions in access_rights.items(): - insert_stmt = pg_insert(project_to_groups).values( - project_uuid=f"{project_id}", - gid=int(group_id), - read=permissions["read"], - write=permissions["write"], - delete=permissions["delete"], - created=sa.func.now(), - modified=sa.func.now(), - ) - on_update_stmt = insert_stmt.on_conflict_do_update( - index_elements=[ - project_to_groups.c.project_uuid, - project_to_groups.c.gid, - ], - set_={ - "read": insert_stmt.excluded.read, - "write": insert_stmt.excluded.write, - "delete": insert_stmt.excluded.delete, - "modified": sa.func.now(), - }, - ) - await conn.execute(on_update_stmt) - - return _ - - -@pytest.fixture -async def create_project_node( - user_id: UserID, aiopg_engine: Engine, faker: Faker -) -> Callable[..., Awaitable[NodeID]]: - async def _creator( - project_id: ProjectID, node_id: NodeID | None = None, **kwargs - ) -> NodeID: - async with aiopg_engine.acquire() as conn: - result = await conn.execute( - sa.select(projects.c.workbench).where( - projects.c.uuid == f"{project_id}" - ) - ) - row = await result.fetchone() - assert row - project_workbench: dict[str, Any] = row[projects.c.workbench] - new_node_id = node_id or NodeID(f"{faker.uuid4()}") - node_data = { - "key": "simcore/services/frontend/file-picker", - "version": "1.0.0", - "label": "pytest_fake_node", - } - node_data.update(**kwargs) - project_workbench.update({f"{new_node_id}": node_data}) - await conn.execute( - projects.update() - .where(projects.c.uuid == f"{project_id}") - .values(workbench=project_workbench) - ) - return new_node_id - - return _creator - - -@pytest.fixture -async def random_project_with_files( - aiopg_engine: Engine, - create_project: Callable[..., Awaitable[dict[str, Any]]], - create_project_node: Callable[..., Awaitable[NodeID]], - create_simcore_file_id: Callable[ - [ProjectID, NodeID, str, Path | None], SimcoreS3FileID - ], - upload_file: Callable[..., Awaitable[tuple[Path, SimcoreS3FileID]]], - faker: Faker, -) -> Callable[ - [int, tuple[ByteSize, ...], tuple[SHA256Str, ...]], - Awaitable[ - tuple[ - dict[str, Any], dict[NodeID, dict[SimcoreS3FileID, dict[str, Path | str]]] - ] - ], -]: - async def _creator( - num_nodes: int = 12, - file_sizes: tuple[ByteSize, ...] = ( - TypeAdapter(ByteSize).validate_python("7Mib"), - TypeAdapter(ByteSize).validate_python("110Mib"), - TypeAdapter(ByteSize).validate_python("1Mib"), - ), - file_checksums: tuple[SHA256Str, ...] 
= ( - TypeAdapter(SHA256Str).validate_python( - "311e2e130d83cfea9c3b7560699c221b0b7f9e5d58b02870bd52b695d8b4aabd" - ), - TypeAdapter(SHA256Str).validate_python( - "08e297db979d3c84f6b072c2a1e269e8aa04e82714ca7b295933a0c9c0f62b2e" - ), - TypeAdapter(SHA256Str).validate_python( - "488f3b57932803bbf644593bd46d95599b1d4da1d63bc020d7ebe6f1c255f7f3" - ), - ), - ) -> tuple[ - dict[str, Any], dict[NodeID, dict[SimcoreS3FileID, dict[str, Path | str]]] - ]: - assert len(file_sizes) == len(file_checksums) - project = await create_project(name="random-project") - src_projects_list: dict[ - NodeID, dict[SimcoreS3FileID, dict[str, Path | str]] - ] = {} - upload_tasks: deque[Awaitable] = deque() - for _node_index in range(num_nodes): - # NOTE: we put some more outputs in there to simulate a real case better - new_node_id = NodeID(f"{faker.uuid4()}") - output3_file_id = create_simcore_file_id( - ProjectID(project["uuid"]), - new_node_id, - faker.file_name(), - Path("outputs/output3"), - ) - src_node_id = await create_project_node( - ProjectID(project["uuid"]), - new_node_id, - outputs={ - "output_1": faker.pyint(), - "output_2": faker.pystr(), - "output_3": f"{output3_file_id}", - }, - ) - assert src_node_id == new_node_id - - # upload the output 3 and some random other files at the root of each node - src_projects_list[src_node_id] = {} - checksum: SHA256Str = choice(file_checksums) # noqa: S311 - src_file, _ = await upload_file( - file_size=choice(file_sizes), # noqa: S311 - file_name=Path(output3_file_id).name, - file_id=output3_file_id, - sha256_checksum=checksum, - ) - src_projects_list[src_node_id][output3_file_id] = { - "path": src_file, - "sha256_checksum": checksum, - } - - async def _upload_file_and_update_project(project, src_node_id): - src_file_name = faker.file_name() - src_file_uuid = create_simcore_file_id( - ProjectID(project["uuid"]), src_node_id, src_file_name, None - ) - checksum: SHA256Str = choice(file_checksums) # noqa: S311 - src_file, _ = await upload_file( - file_size=choice(file_sizes), # noqa: S311 - file_name=src_file_name, - file_id=src_file_uuid, - sha256_checksum=checksum, - ) - src_projects_list[src_node_id][src_file_uuid] = { - "path": src_file, - "sha256_checksum": checksum, - } - - # add a few random files in the node storage - upload_tasks.extend( - [ - _upload_file_and_update_project(project, src_node_id) - for _ in range(randint(0, 3)) # noqa: S311 - ] - ) - await limited_gather(*upload_tasks, limit=10) - - project = await get_updated_project(aiopg_engine, project["uuid"]) - return project, src_projects_list - - return _creator diff --git a/services/storage/tests/fixtures/datcore_adapter.py b/services/storage/tests/fixtures/datcore_adapter.py deleted file mode 100644 index ba7569b027b..00000000000 --- a/services/storage/tests/fixtures/datcore_adapter.py +++ /dev/null @@ -1,26 +0,0 @@ -import re - -import pytest -from aioresponses import aioresponses as AioResponsesMock -from servicelib.aiohttp import status -from simcore_service_storage.datcore_adapter.datcore_adapter_settings import ( - DatcoreAdapterSettings, -) - - -@pytest.fixture -def datcore_adapter_service_mock( - aioresponses_mocker: AioResponsesMock, -) -> AioResponsesMock: - dat_core_settings = DatcoreAdapterSettings.create_from_envs() - datcore_adapter_base_url = dat_core_settings.endpoint - # mock base endpoint - aioresponses_mocker.get( - datcore_adapter_base_url, status=status.HTTP_200_OK, repeat=True - ) - list_datasets_re = re.compile(rf"^{datcore_adapter_base_url}/datasets") - 
aioresponses_mocker.get(list_datasets_re, status=status.HTTP_200_OK, repeat=True) - aioresponses_mocker.get( - datcore_adapter_base_url, status=status.HTTP_200_OK, repeat=True, payload={} - ) - return aioresponses_mocker diff --git a/services/storage/tests/helpers/utils.py b/services/storage/tests/helpers/utils.py deleted file mode 100644 index dc98a400073..00000000000 --- a/services/storage/tests/helpers/utils.py +++ /dev/null @@ -1,29 +0,0 @@ -import logging -import os -from typing import Any - -import sqlalchemy as sa -from aiopg.sa.engine import Engine -from simcore_postgres_database.storage_models import projects - -log = logging.getLogger(__name__) - - -def has_datcore_tokens() -> bool: - # TODO: activate tests against BF services in the CI. - # - # CI shall add BF_API_KEY, BF_API_SECRET environs as secrets - # - if not os.environ.get("BF_API_KEY") or not os.environ.get("BF_API_SECRET"): - return False - return True - - -async def get_updated_project(aiopg_engine: Engine, project_id: str) -> dict[str, Any]: - async with aiopg_engine.acquire() as conn: - result = await conn.execute( - sa.select(projects).where(projects.c.uuid == project_id) - ) - row = await result.fetchone() - assert row - return dict(row) diff --git a/services/storage/tests/unit/modules/celery/conftest.py b/services/storage/tests/unit/modules/celery/conftest.py new file mode 100644 index 00000000000..8bbb621ef0b --- /dev/null +++ b/services/storage/tests/unit/modules/celery/conftest.py @@ -0,0 +1,102 @@ +from collections.abc import Callable, Iterable +from datetime import timedelta +from typing import Any + +import pytest +from celery import Celery +from celery.contrib.testing.worker import TestWorkController, start_worker +from celery.signals import worker_init, worker_shutdown +from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict +from pytest_simcore.helpers.typing_env import EnvVarsDict +from simcore_service_storage.modules.celery.client import CeleryTaskQueueClient +from simcore_service_storage.modules.celery.signals import ( + on_worker_init, + on_worker_shutdown, +) +from simcore_service_storage.modules.celery.worker import CeleryTaskQueueWorker + + +@pytest.fixture +def app_environment( + monkeypatch: pytest.MonkeyPatch, + app_environment: EnvVarsDict, +) -> EnvVarsDict: + return setenvs_from_dict( + monkeypatch, + { + **app_environment, + "SC_BOOT_MODE": "local-development", + "RABBIT_HOST": "localhost", + "RABBIT_PORT": "5672", + "RABBIT_USER": "mock", + "RABBIT_SECURE": "True", + "RABBIT_PASSWORD": "", + }, + ) + + +@pytest.fixture +def celery_conf() -> dict[str, Any]: + return { + "broker_url": "memory://", + "result_backend": "cache+memory://", + "result_expires": timedelta(days=7), + "result_extended": True, + "pool": "threads", + "worker_send_task_events": True, + "task_track_started": True, + "task_send_sent_event": True, + } + + +@pytest.fixture +def celery_app(celery_conf: dict[str, Any]): + return Celery(**celery_conf) + + +@pytest.fixture
def register_celery_tasks() -> Callable[[Celery], None]: + msg = "please define a callback that registers the tasks" + raise NotImplementedError(msg) + + +@pytest.fixture +def celery_client( + app_environment: EnvVarsDict, celery_app: Celery +) -> CeleryTaskQueueClient: + return CeleryTaskQueueClient(celery_app) + + +@pytest.fixture +def celery_worker_controller( + app_environment: EnvVarsDict, + register_celery_tasks: Callable[[Celery], None], + celery_app: Celery, +) -> Iterable[TestWorkController]: + + # Signals must be explicitly connected +
worker_init.connect(on_worker_init) + worker_shutdown.connect(on_worker_shutdown) + + register_celery_tasks(celery_app) + + with start_worker( + celery_app, + pool="threads", + loglevel="info", + perform_ping_check=False, + worker_kwargs={"hostname": "celery@worker1"}, + ) as worker: + worker_init.send(sender=worker) + + yield worker + + worker_shutdown.send(sender=worker) + + +@pytest.fixture +def celery_worker( + celery_worker_controller: TestWorkController, +) -> CeleryTaskQueueWorker: + assert isinstance(celery_worker_controller.app, Celery) + return CeleryTaskQueueWorker(celery_worker_controller.app) diff --git a/services/storage/tests/unit/modules/celery/test_celery.py b/services/storage/tests/unit/modules/celery/test_celery.py new file mode 100644 index 00000000000..77dd5cf3e2a --- /dev/null +++ b/services/storage/tests/unit/modules/celery/test_celery.py @@ -0,0 +1,191 @@ +import asyncio +import logging +import time +from collections.abc import Callable +from random import randint + +import pytest +from celery import Celery, Task +from celery.contrib.abortable import AbortableTask +from common_library.errors_classes import OsparcErrorMixin +from models_library.progress_bar import ProgressReport +from pydantic import TypeAdapter, ValidationError +from servicelib.logging_utils import log_context +from simcore_service_storage.modules.celery import get_event_loop +from simcore_service_storage.modules.celery._task import define_task +from simcore_service_storage.modules.celery.client import CeleryTaskQueueClient +from simcore_service_storage.modules.celery.models import ( + TaskContext, + TaskError, + TaskState, +) +from simcore_service_storage.modules.celery.utils import ( + get_celery_worker, + get_fastapi_app, +) +from tenacity import Retrying, retry_if_exception_type, stop_after_delay, wait_fixed + +_logger = logging.getLogger(__name__) + + +async def _async_archive( + celery_app: Celery, task_name: str, task_id: str, files: list[str] +) -> str: + worker = get_celery_worker(celery_app) + + def sleep_for(seconds: float) -> None: + time.sleep(seconds) + + for n, file in enumerate(files, start=1): + with log_context(_logger, logging.INFO, msg=f"Processing file {file}"): + worker.set_task_progress( + task_name=task_name, + task_id=task_id, + report=ProgressReport(actual_value=n / len(files) * 10), + ) + await asyncio.get_event_loop().run_in_executor(None, sleep_for, 1) + + return "archive.zip" + + +def sync_archive(task: Task, files: list[str]) -> str: + assert task.name + _logger.info("Calling async_archive") + return asyncio.run_coroutine_threadsafe( + _async_archive(task.app, task.name, task.request.id, files), + get_event_loop(get_fastapi_app(task.app)), + ).result() + + +class MyError(OsparcErrorMixin, Exception): + msg_template = "Something strange happened: {msg}" + + +def failure_task(task: Task): + msg = "BOOM!" 
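+ # the message is rendered through MyError's msg_template: "Something strange happened: {msg}"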
+ raise MyError(msg=msg) + + +def dreamer_task(task: AbortableTask) -> list[int]: + numbers = [] + for _ in range(30): + if task.is_aborted(): + _logger.warning("Alarm clock") + return numbers + numbers.append(randint(1, 90)) # noqa: S311 + time.sleep(1) + return numbers + + +@pytest.fixture +def register_celery_tasks() -> Callable[[Celery], None]: + def _(celery_app: Celery) -> None: + define_task(celery_app, sync_archive) + define_task(celery_app, failure_task) + define_task(celery_app, dreamer_task) + + return _ + + +@pytest.mark.usefixtures("celery_worker") +async def test_submitting_task_calling_async_function_results_with_success_state( + celery_client: CeleryTaskQueueClient, +): + task_context = TaskContext(user_id=42) + + task_uuid = await celery_client.send_task( + "sync_archive", + task_context=task_context, + files=[f"file{n}" for n in range(5)], + ) + + for attempt in Retrying( + retry=retry_if_exception_type(AssertionError), + wait=wait_fixed(1), + stop=stop_after_delay(30), + ): + with attempt: + status = await celery_client.get_task_status(task_context, task_uuid) + assert status.task_state == TaskState.SUCCESS + + assert ( + await celery_client.get_task_status(task_context, task_uuid) + ).task_state == TaskState.SUCCESS + assert ( + await celery_client.get_task_result(task_context, task_uuid) + ) == "archive.zip" + + +@pytest.mark.usefixtures("celery_worker") +async def test_submitting_task_with_failure_results_with_error( + celery_client: CeleryTaskQueueClient, +): + task_context = TaskContext(user_id=42) + + task_uuid = await celery_client.send_task("failure_task", task_context=task_context) + + for attempt in Retrying( + retry=retry_if_exception_type((AssertionError, ValidationError)), + wait=wait_fixed(1), + stop=stop_after_delay(30), + ): + with attempt: + raw_result = await celery_client.get_task_result(task_context, task_uuid) + result = TypeAdapter(TaskError).validate_python(raw_result) + assert isinstance(result, TaskError) + + assert ( + await celery_client.get_task_status(task_context, task_uuid) + ).task_state == TaskState.ERROR + raw_result = await celery_client.get_task_result(task_context, task_uuid) + result = TypeAdapter(TaskError).validate_python(raw_result) + assert f"{result.exc_msg}" == "Something strange happened: BOOM!" 
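The status-polling loops above and in the remaining tests of this module all repeat the same tenacity idiom. Purely as an editorial illustration (not part of this PR), the sketch below factors that idiom into a single hypothetical helper, `_wait_for_task_state`, reusing the `CeleryTaskQueueClient`, `TaskContext` and `TaskState` types imported above:

```python
# Editorial sketch, NOT part of this PR: the polling idiom the tests repeat
# inline, written once as a helper. `_wait_for_task_state` is a hypothetical name.
from tenacity import AsyncRetrying, retry_if_exception_type, stop_after_delay, wait_fixed

from simcore_service_storage.modules.celery.client import CeleryTaskQueueClient, TaskUUID
from simcore_service_storage.modules.celery.models import TaskContext, TaskState


async def _wait_for_task_state(
    celery_client: CeleryTaskQueueClient,
    task_context: TaskContext,
    task_uuid: TaskUUID,
    expected_state: TaskState,
    *,
    timeout_s: float = 30,
) -> None:
    # Re-check the task state every second until it matches the expectation;
    # reraise=True surfaces the last AssertionError if timeout_s expires first.
    async for attempt in AsyncRetrying(
        retry=retry_if_exception_type(AssertionError),
        wait=wait_fixed(1),
        stop=stop_after_delay(timeout_s),
        reraise=True,
    ):
        with attempt:
            status = await celery_client.get_task_status(task_context, task_uuid)
            assert status.task_state == expected_state
```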
+ + +@pytest.mark.usefixtures("celery_worker") +async def test_aborting_task_results_with_aborted_state( + celery_client: CeleryTaskQueueClient, +): + task_context = TaskContext(user_id=42) + + task_uuid = await celery_client.send_task( + "dreamer_task", + task_context=task_context, + ) + + await celery_client.abort_task(task_context, task_uuid) + + for attempt in Retrying( + retry=retry_if_exception_type(AssertionError), + wait=wait_fixed(1), + stop=stop_after_delay(30), + ): + with attempt: + progress = await celery_client.get_task_status(task_context, task_uuid) + assert progress.task_state == TaskState.ABORTED + + assert ( + await celery_client.get_task_status(task_context, task_uuid) + ).task_state == TaskState.ABORTED + + +@pytest.mark.usefixtures("celery_worker") +async def test_listing_task_uuids_contains_submitted_task( + celery_client: CeleryTaskQueueClient, +): + task_context = TaskContext(user_id=42) + + task_uuid = await celery_client.send_task( + "dreamer_task", + task_context=task_context, + ) + + for attempt in Retrying( + retry=retry_if_exception_type(AssertionError), + wait=wait_fixed(1), + stop=stop_after_delay(10), + ): + with attempt: + assert task_uuid in await celery_client.get_task_uuids(task_context) + + assert task_uuid in await celery_client.get_task_uuids(task_context) diff --git a/services/storage/tests/unit/test__legacy_storage_sdk_compatibility.py b/services/storage/tests/unit/test__legacy_storage_sdk_compatibility.py index 5e94a17d3bc..0531f02c6b1 100644 --- a/services/storage/tests/unit/test__legacy_storage_sdk_compatibility.py +++ b/services/storage/tests/unit/test__legacy_storage_sdk_compatibility.py @@ -11,27 +11,92 @@ used in simcore_sdk since legacy services are planned to be deprecated. """ +import logging +from collections.abc import AsyncIterator from pathlib import Path +from threading import Thread import aiohttp +import httpx import pytest -from aiohttp.test_utils import TestClient +import uvicorn from faker import Faker -from models_library.projects_nodes_io import LocationID, SimcoreS3FileID +from models_library.projects_nodes_io import SimcoreS3FileID from models_library.users import UserID +from pytest_simcore.helpers.logging_tools import log_context +from servicelib.utils import unused_port +from simcore_service_storage._meta import API_VTAG +from simcore_service_storage.core.application import create_app +from simcore_service_storage.core.settings import ApplicationSettings from simcore_service_storage.simcore_s3_dsm import SimcoreS3DataManager from simcore_service_storage_sdk import ApiClient, Configuration, UsersApi - -pytest_simcore_core_services_selection = [ - "postgres", -] +from tenacity import ( + before_sleep_log, + retry, + retry_if_exception_type, + stop_after_delay, + wait_fixed, +) +from yarl import URL + +pytest_simcore_core_services_selection = ["postgres", "rabbit"] pytest_simcore_ops_services_selection = [ "adminer", ] +_logger = logging.getLogger(__name__) + + +@retry( + wait=wait_fixed(1), + stop=stop_after_delay(10), + retry=retry_if_exception_type(), + reraise=True, + before_sleep=before_sleep_log(_logger, logging.WARNING), +) +async def _wait_for_server_ready(server: URL) -> None: + async with httpx.AsyncClient(follow_redirects=True) as client: + response = await client.get(f"{server}") + response.raise_for_status() + + +@pytest.fixture +async def real_storage_server(app_settings: ApplicationSettings) -> AsyncIterator[URL]: + settings = ApplicationSettings.create_from_envs() + app = create_app(settings) + storage_port 
= unused_port() + with log_context( + logging.INFO, + msg=f"with real storage server on 127.0.0.1:{storage_port}/{API_VTAG}", + ) as ctx: + config = uvicorn.Config( + app, + host="127.0.0.1", + port=storage_port, + log_level="error", + ) + server = uvicorn.Server(config) + + thread = Thread(target=server.run) + thread.daemon = True + thread.start() + + ctx.logger.info( + "health at : %s", + f"http://127.0.0.1:{storage_port}/{API_VTAG}", + ) + server_url = URL(f"http://127.0.0.1:{storage_port}") + + await _wait_for_server_ready(server_url / API_VTAG) + + yield server_url + + server.should_exit = True + thread.join(timeout=10) + @pytest.fixture -def user_id(user_id: UserID) -> str: +def str_user_id(user_id: UserID) -> str: """overrides tests/fixtures/data_models.py::user_id and adapts to simcore_service_storage_sdk API """ @@ -47,21 +112,22 @@ def file_id(simcore_file_id: SimcoreS3FileID) -> str: @pytest.fixture -def location_id() -> LocationID: - return SimcoreS3DataManager.get_location_id() - - -@pytest.fixture -def location_name() -> str: +def simcore_location_name() -> str: return SimcoreS3DataManager.get_location_name() -async def test_storage_client_used_in_simcore_sdk_0_3_2( # noqa: PLR0915 - client: TestClient, +@pytest.mark.parametrize( + "location_id", + [SimcoreS3DataManager.get_location_id()], + ids=[SimcoreS3DataManager.get_location_name()], + indirect=True, +) +async def test_storage_client_used_in_simcore_sdk_0_3_2( + real_storage_server: URL, + str_user_id: str, file_id: str, - user_id: str, location_id: int, - location_name: str, + simcore_location_name: str, tmp_path: Path, faker: Faker, ): @@ -76,14 +142,12 @@ async def test_storage_client_used_in_simcore_sdk_0_3_2( # noqa: PLR0915 the OAS had already changed!!! """ - assert client.app - assert client.server # -------- cfg = Configuration() - cfg.host = f"http://{client.host}:{client.port}/v0" + cfg.host = f"{real_storage_server / API_VTAG}" cfg.debug = True - assert cfg.host == f'{client.make_url("/v0")}' + # assert cfg.host == f"{client.make_url('/v0')}" print(f"{cfg=}") print(f"{cfg.to_debug_report()=}") @@ -101,7 +165,7 @@ async def test_storage_client_used_in_simcore_sdk_0_3_2( # noqa: PLR0915 response_payload, status_code, response_headers, - ) = await api.get_storage_locations_with_http_info(user_id) + ) = await api.get_storage_locations_with_http_info(str_user_id) print(f"{response_payload=}") print(f"{status_code=}") print(f"{response_headers=}") @@ -112,7 +176,7 @@ async def test_storage_client_used_in_simcore_sdk_0_3_2( # noqa: PLR0915 # https://github.com/ITISFoundation/osparc-simcore/blob/cfdf4f86d844ebb362f4f39e9c6571d561b72897/packages/simcore-sdk/src/simcore_sdk/node_ports/filemanager.py#L132 resp_model = await api.upload_file( location_id=location_id, - user_id=user_id, + user_id=str_user_id, file_id=file_id, _request_timeout=1000, ) @@ -138,24 +202,24 @@ # A bug in the response of this call was preventing downloading data # with the new storage API # - resp_model = await api.get_file_metadata(file_id, location_id, user_id) + resp_model = await api.get_file_metadata(file_id, location_id, str_user_id) print(type(resp_model), ":\n", resp_model) assert resp_model.data.object_name is not None assert resp_model.error is None # _get_location_id_from_location_name # https://github.com/ITISFoundation/osparc-simcore/blob/cfdf4f86d844ebb362f4f39e9c6571d561b72897/packages/simcore-sdk/src/simcore_sdk/node_ports/filemanager.py#L89 resp_model =
await api.get_storage_locations(user_id=user_id) + resp_model = await api.get_storage_locations(user_id=str_user_id) print(f"{resp_model=}") for location in resp_model.data: - assert location["name"] == location_name + assert location["name"] == simcore_location_name assert location["id"] == location_id # _get_download_link # https://github.com/ITISFoundation/osparc-simcore/blob/cfdf4f86d844ebb362f4f39e9c6571d561b72897/packages/simcore-sdk/src/simcore_sdk/node_ports/filemanager.py#L123 resp_model = await api.download_file( location_id=location_id, - user_id=user_id, + user_id=str_user_id, file_id=file_id, _request_timeout=1000, ) diff --git a/services/storage/tests/unit/test__openapi_specs.py b/services/storage/tests/unit/test__openapi_specs.py deleted file mode 100644 index a32ae3ace6e..00000000000 --- a/services/storage/tests/unit/test__openapi_specs.py +++ /dev/null @@ -1,72 +0,0 @@ -# pylint: disable=redefined-outer-name -# pylint: disable=too-many-arguments -# pylint: disable=unused-argument -# pylint: disable=unused-variable - -from collections.abc import Callable -from pathlib import Path - -import pytest -import simcore_service_storage.application -from aiohttp import web -from faker import Faker -from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict -from pytest_simcore.helpers.typing_env import EnvVarsDict -from pytest_simcore.openapi_specs import Entrypoint -from simcore_service_storage._meta import API_VTAG -from simcore_service_storage.resources import storage_resources -from simcore_service_storage.settings import Settings - - -@pytest.fixture(scope="session") -def openapi_specs_path() -> Path: - # overrides pytest_simcore.openapi_specs.app_openapi_specs_path fixture - spec_path: Path = storage_resources.get_path(f"api/{API_VTAG}/openapi.yaml") - return spec_path - - -@pytest.fixture -def app_environment( - mock_env_devel_environment: EnvVarsDict, - monkeypatch: pytest.MonkeyPatch, - faker: Faker, -) -> EnvVarsDict: - return mock_env_devel_environment | setenvs_from_dict( - monkeypatch, - { - # disable index and statics routings - "WEBSERVER_STATICWEB": "null", - }, - ) - - -@pytest.fixture -def app(app_environment: EnvVarsDict) -> web.Application: - assert app_environment - # Expects that: - # - routings happen during setup! 
- # - all plugins are setup but app is NOT started (i.e events are not triggered) - # - settings = Settings.create_from_envs() - return simcore_service_storage.application.create(settings) - - -@pytest.fixture -def app_rest_entrypoints( - app: web.Application, - create_aiohttp_app_rest_entrypoints: Callable[[web.Application], set[Entrypoint]], -) -> set[Entrypoint]: - # check whether exposed routes implements openapi.json contract - return create_aiohttp_app_rest_entrypoints(app) - - -def test_app_named_resources_against_openapi_specs( - openapi_specs_entrypoints: set[Entrypoint], - app_rest_entrypoints: set[Entrypoint], -): - assert app_rest_entrypoints == openapi_specs_entrypoints - - # NOTE: missing here is: - # - input schemas (path, query and body) - # - output schemas (success/error responses and bodies) - # diff --git a/services/storage/tests/unit/test__worker_tasks_paths.py b/services/storage/tests/unit/test__worker_tasks_paths.py new file mode 100644 index 00000000000..c8d508d4129 --- /dev/null +++ b/services/storage/tests/unit/test__worker_tasks_paths.py @@ -0,0 +1,216 @@ +# pylint:disable=no-name-in-module +# pylint:disable=protected-access +# pylint:disable=redefined-outer-name +# pylint:disable=too-many-arguments +# pylint:disable=too-many-positional-arguments +# pylint:disable=unused-argument +# pylint:disable=unused-variable + + +import random +from pathlib import Path +from typing import Any, TypeAlias + +import httpx +import pytest +from celery import Celery, Task +from faker import Faker +from fastapi import FastAPI +from models_library.projects_nodes_io import LocationID, NodeID, SimcoreS3FileID +from models_library.users import UserID +from pydantic import ByteSize, TypeAdapter +from pytest_simcore.helpers.storage_utils import FileIDDict, ProjectWithFilesParams +from simcore_service_storage.api._worker_tasks._paths import compute_path_size +from simcore_service_storage.modules.celery.utils import set_fastapi_app +from simcore_service_storage.simcore_s3_dsm import SimcoreS3DataManager + +pytest_simcore_core_services_selection = ["postgres"] +pytest_simcore_ops_services_selection = ["adminer"] + +_IsFile: TypeAlias = bool + + +def _filter_and_group_paths_one_level_deeper( + paths: list[Path], prefix: Path +) -> list[tuple[Path, _IsFile]]: + relative_paths = (path for path in paths if path.is_relative_to(prefix)) + return sorted( + { + ( + (path, len(path.relative_to(prefix).parts) == 1) + if len(path.relative_to(prefix).parts) == 1 + else (prefix / path.relative_to(prefix).parts[0], False) + ) + for path in relative_paths + }, + key=lambda x: x[0], + ) + + +async def _assert_compute_path_size( + celery_task: Task, + location_id: LocationID, + user_id: UserID, + *, + path: Path, + expected_total_size: int, +) -> ByteSize: + response = await compute_path_size( + celery_task, user_id=user_id, location_id=location_id, path=path + ) + assert isinstance(response, ByteSize) + assert response == expected_total_size + return response + + +@pytest.fixture +def fake_celery_task(celery_app: Celery, initialized_app: FastAPI) -> Task: + celery_task = Task() + celery_task.app = celery_app + set_fastapi_app(celery_app, initialized_app) + return celery_task + + +@pytest.mark.parametrize( + "location_id", + [SimcoreS3DataManager.get_location_id()], + ids=[SimcoreS3DataManager.get_location_name()], + indirect=True, +) +@pytest.mark.parametrize( + "project_params", + [ + ProjectWithFilesParams( + num_nodes=5, + allowed_file_sizes=(TypeAdapter(ByteSize).validate_python("1b"),), + 
workspace_files_count=10, + ) + ], + ids=str, +) +async def test_path_compute_size( + fake_celery_task: Task, + location_id: LocationID, + user_id: UserID, + with_random_project_with_files: tuple[ + dict[str, Any], + dict[NodeID, dict[SimcoreS3FileID, FileIDDict]], + ], + project_params: ProjectWithFilesParams, +): + assert ( + len(project_params.allowed_file_sizes) == 1 + ), "test preconditions are not filled! allowed file sizes should have only 1 option for this test" + project, list_of_files = with_random_project_with_files + + total_num_files = sum( + len(files_in_node) for files_in_node in list_of_files.values() + ) + + # get size of a full project + expected_total_size = project_params.allowed_file_sizes[0] * total_num_files + path = Path(project["uuid"]) + await _assert_compute_path_size( + fake_celery_task, + location_id, + user_id, + path=path, + expected_total_size=expected_total_size, + ) + + # get size of one of the nodes + selected_node_id = NodeID(random.choice(list(project["workbench"]))) # noqa: S311 + path = Path(project["uuid"]) / f"{selected_node_id}" + selected_node_s3_keys = [ + Path(s3_object_id) for s3_object_id in list_of_files[selected_node_id] + ] + expected_total_size = project_params.allowed_file_sizes[0] * len( + selected_node_s3_keys + ) + await _assert_compute_path_size( + fake_celery_task, + location_id, + user_id, + path=path, + expected_total_size=expected_total_size, + ) + + # get size of the outputs of one of the nodes + path = Path(project["uuid"]) / f"{selected_node_id}" / "outputs" + selected_node_s3_keys = [ + Path(s3_object_id) + for s3_object_id in list_of_files[selected_node_id] + if s3_object_id.startswith(f"{path}") + ] + expected_total_size = project_params.allowed_file_sizes[0] * len( + selected_node_s3_keys + ) + await _assert_compute_path_size( + fake_celery_task, + location_id, + user_id, + path=path, + expected_total_size=expected_total_size, + ) + + # get size of workspace in one of the nodes (this is semi-cached in the DB) + path = Path(project["uuid"]) / f"{selected_node_id}" / "workspace" + selected_node_s3_keys = [ + Path(s3_object_id) + for s3_object_id in list_of_files[selected_node_id] + if s3_object_id.startswith(f"{path}") + ] + expected_total_size = project_params.allowed_file_sizes[0] * len( + selected_node_s3_keys + ) + workspace_total_size = await _assert_compute_path_size( + fake_celery_task, + location_id, + user_id, + path=path, + expected_total_size=expected_total_size, + ) + + # get size of folders inside the workspace + folders_inside_workspace = [ + p[0] + for p in _filter_and_group_paths_one_level_deeper(selected_node_s3_keys, path) + if p[1] is False + ] + accumulated_subfolder_size = 0 + for workspace_subfolder in folders_inside_workspace: + selected_node_s3_keys = [ + Path(s3_object_id) + for s3_object_id in list_of_files[selected_node_id] + if s3_object_id.startswith(f"{workspace_subfolder}") + ] + expected_total_size = project_params.allowed_file_sizes[0] * len( + selected_node_s3_keys + ) + accumulated_subfolder_size += await _assert_compute_path_size( + fake_celery_task, + location_id, + user_id, + path=workspace_subfolder, + expected_total_size=expected_total_size, + ) + + assert workspace_total_size == accumulated_subfolder_size + + +async def test_path_compute_size_inexistent_path( + fake_celery_task: Task, + initialized_app: FastAPI, + client: httpx.AsyncClient, + location_id: LocationID, + user_id: UserID, + faker: Faker, + fake_datcore_tokens: tuple[str, str], +): + await _assert_compute_path_size( + 
fake_celery_task, + location_id, + user_id, + path=Path(faker.file_path(absolute=False)), + expected_total_size=0, + ) diff --git a/services/storage/tests/unit/test_cli.py b/services/storage/tests/unit/test_cli.py index ad31a85e31f..a81274745c4 100644 --- a/services/storage/tests/unit/test_cli.py +++ b/services/storage/tests/unit/test_cli.py @@ -2,48 +2,34 @@ # pylint:disable=unused-argument # pylint:disable=redefined-outer-name -import contextlib -import json import os -from io import StringIO -import pytest -from dotenv import dotenv_values +from pytest_simcore.helpers.typing_env import EnvVarsDict +from simcore_service_storage._meta import API_VERSION from simcore_service_storage.cli import main -from simcore_service_storage.settings import Settings +from simcore_service_storage.core.settings import ApplicationSettings from typer.testing import CliRunner -@pytest.mark.parametrize( - "arguments", ["--help", "run --help".split(), "settings --help".split()] -) -def test_cli_help(arguments: list[str] | str, cli_runner: CliRunner): - result = cli_runner.invoke(main, arguments) - assert result.exit_code == os.EX_OK, result +def test_cli_help_and_version(cli_runner: CliRunner): + result = cli_runner.invoke(main, "--help") + assert result.exit_code == os.EX_OK, result.output + result = cli_runner.invoke(main, "--version") + assert result.exit_code == os.EX_OK, result.output + assert result.stdout.strip() == API_VERSION -def test_cli_settings_as_json( - project_env_devel_environment: None, cli_runner: CliRunner -): - result = cli_runner.invoke(main, ["settings", "--as-json"]) - assert result.exit_code == os.EX_OK, result - # reuse resulting json to build settings - settings: dict = json.loads(result.stdout) - assert Settings(settings) +def test_settings(cli_runner: CliRunner, app_environment: EnvVarsDict): + result = cli_runner.invoke(main, ["settings", "--show-secrets", "--as-json"]) + assert result.exit_code == os.EX_OK -def test_cli_settings_env_file( - project_env_devel_environment: None, cli_runner: CliRunner -): - result = cli_runner.invoke(main, ["settings", "--compact"]) - assert result.exit_code == os.EX_OK, result + print(result.output) + settings = ApplicationSettings.model_validate_json(result.output) + assert settings.model_dump() == ApplicationSettings.create_from_envs().model_dump() - # reuse resulting env_file to build settings - env_file = StringIO(result.stdout) - settings = dotenv_values(stream=env_file) - for key, value in settings.items(): - with contextlib.suppress(json.decoder.JSONDecodeError): - settings[key] = json.loads(str(value)) - - assert Settings(settings) +def test_run(cli_runner: CliRunner): + result = cli_runner.invoke(main, ["run"]) + assert result.exit_code == 0 + assert "disabled" in result.stdout diff --git a/services/storage/tests/unit/test_data_export.py b/services/storage/tests/unit/test_data_export.py new file mode 100644 index 00000000000..05c0f99a176 --- /dev/null +++ b/services/storage/tests/unit/test_data_export.py @@ -0,0 +1,601 @@ +# pylint: disable=W0621 +# pylint: disable=W0613 +# pylint: disable=R6301 +from collections.abc import Awaitable, Callable +from dataclasses import dataclass +from pathlib import Path +from typing import Any, Literal, NamedTuple +from uuid import UUID + +import pytest +from celery.exceptions import CeleryError +from faker import Faker +from fastapi import FastAPI +from models_library.api_schemas_long_running_tasks.tasks import TaskResult +from models_library.api_schemas_rpc_async_jobs.async_jobs import ( + AsyncJobGet, + AsyncJobId,
AsyncJobNameData, + AsyncJobResult, + AsyncJobStatus, +) +from models_library.api_schemas_rpc_async_jobs.exceptions import ( + JobAbortedError, + JobError, + JobMissingError, + JobNotDoneError, + JobSchedulerError, +) +from models_library.api_schemas_storage import STORAGE_RPC_NAMESPACE +from models_library.api_schemas_storage.data_export_async_jobs import ( + DataExportTaskStartInput, +) +from models_library.progress_bar import ProgressReport +from models_library.projects_nodes_io import NodeID, SimcoreS3FileID +from models_library.users import UserID +from pydantic import ByteSize, TypeAdapter +from pytest_mock import MockerFixture +from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict +from pytest_simcore.helpers.storage_utils import FileIDDict, ProjectWithFilesParams +from pytest_simcore.helpers.typing_env import EnvVarsDict +from servicelib.rabbitmq import RabbitMQRPCClient +from servicelib.rabbitmq.rpc_interfaces.async_jobs import async_jobs +from servicelib.rabbitmq.rpc_interfaces.storage.data_export import start_data_export +from settings_library.rabbit import RabbitSettings +from simcore_service_storage.api.rpc._data_export import AccessRightError +from simcore_service_storage.core.settings import ApplicationSettings +from simcore_service_storage.modules.celery.client import TaskUUID +from simcore_service_storage.modules.celery.models import TaskState, TaskStatus +from simcore_service_storage.simcore_s3_dsm import SimcoreS3DataManager + +pytest_plugins = [ + "pytest_simcore.rabbit_service", +] + + +pytest_simcore_core_services_selection = [ + "rabbit", + "postgres", +] + +_faker = Faker() + + +@dataclass +class _MockCeleryClient: + send_task_object: UUID | Exception | None = None + get_task_status_object: TaskStatus | Exception | None = None + get_task_result_object: TaskResult | Exception | None = None + get_task_uuids_object: set[UUID] | Exception | None = None + abort_task_object: Exception | None = None + + async def send_task(self, *args, **kwargs) -> TaskUUID: + assert self.send_task_object is not None + if isinstance(self.send_task_object, Exception): + raise self.send_task_object + return self.send_task_object + + async def get_task_status(self, *args, **kwargs) -> TaskStatus: + assert self.get_task_status_object is not None + if isinstance(self.get_task_status_object, Exception): + raise self.get_task_status_object + return self.get_task_status_object + + async def get_task_result(self, *args, **kwargs) -> Any: + assert self.get_task_result_object is not None + if isinstance(self.get_task_result_object, Exception): + raise self.get_task_result_object + return self.get_task_result_object + + async def get_task_uuids(self, *args, **kwargs) -> set[TaskUUID]: + assert self.get_task_uuids_object is not None + if isinstance(self.get_task_uuids_object, Exception): + raise self.get_task_uuids_object + return self.get_task_uuids_object + + async def abort_task(self, *args, **kwargs) -> None: + if isinstance(self.abort_task_object, Exception): + raise self.abort_task_object + return self.abort_task_object + + +@pytest.fixture +async def mock_celery_client( + mocker: MockerFixture, + request: pytest.FixtureRequest, +) -> _MockCeleryClient: + params = request.param if hasattr(request, "param") else {} + _celery_client = _MockCeleryClient( + send_task_object=params.get("send_task_object", None), + get_task_status_object=params.get("get_task_status_object", None), + get_task_result_object=params.get("get_task_result_object", None), + 
get_task_uuids_object=params.get("get_task_uuids_object", None), + abort_task_object=params.get("abort_task_object", None), + ) + mocker.patch( + "simcore_service_storage.api.rpc._async_jobs.get_celery_client", + return_value=_celery_client, + ) + mocker.patch( + "simcore_service_storage.api.rpc._data_export.get_celery_client", + return_value=_celery_client, + ) + return _celery_client + + +@pytest.fixture +async def app_environment( + app_environment: EnvVarsDict, + rabbit_service: RabbitSettings, + monkeypatch: pytest.MonkeyPatch, +): + new_envs = setenvs_from_dict( + monkeypatch, + { + **app_environment, + "RABBIT_HOST": rabbit_service.RABBIT_HOST, + "RABBIT_PORT": f"{rabbit_service.RABBIT_PORT}", + "RABBIT_USER": rabbit_service.RABBIT_USER, + "RABBIT_SECURE": f"{rabbit_service.RABBIT_SECURE}", + "RABBIT_PASSWORD": rabbit_service.RABBIT_PASSWORD.get_secret_value(), + }, + ) + + settings = ApplicationSettings.create_from_envs() + assert settings.STORAGE_RABBITMQ + + return new_envs + + +@pytest.fixture +async def rpc_client( + initialized_app: FastAPI, + rabbitmq_rpc_client: Callable[[str], Awaitable[RabbitMQRPCClient]], +) -> RabbitMQRPCClient: + return await rabbitmq_rpc_client("client") + + +class UserWithFile(NamedTuple): + user: UserID + file: Path + + +@pytest.mark.parametrize( + "location_id", + [SimcoreS3DataManager.get_location_id()], + ids=[SimcoreS3DataManager.get_location_name()], + indirect=True, +) +@pytest.mark.parametrize( + "project_params,selection_type", + [ + ( + ProjectWithFilesParams( + num_nodes=1, + allowed_file_sizes=(TypeAdapter(ByteSize).validate_python("1b"),), + workspace_files_count=10, + ), + "file", + ), + ( + ProjectWithFilesParams( + num_nodes=1, + allowed_file_sizes=(TypeAdapter(ByteSize).validate_python("1b"),), + workspace_files_count=10, + ), + "folder", + ), + ], + ids=str, +) +@pytest.mark.parametrize( + "mock_celery_client", + [ + {"send_task_object": TaskUUID(_faker.uuid4())}, + ], + indirect=True, +) +async def test_start_data_export_success( + rpc_client: RabbitMQRPCClient, + mock_celery_client: _MockCeleryClient, + with_random_project_with_files: tuple[ + dict[str, Any], + dict[NodeID, dict[SimcoreS3FileID, FileIDDict]], + ], + user_id: UserID, + selection_type: Literal["file", "folder"], +): + _, list_of_files = with_random_project_with_files + workspace_files = [ + p for p in next(iter(list_of_files.values())) if "/workspace/" in p + ] + assert len(workspace_files) > 0 + file_or_folder_id: SimcoreS3FileID + if selection_type == "file": + file_or_folder_id = workspace_files[0] + elif selection_type == "folder": + parts = Path(workspace_files[0]).parts + parts = parts[0 : parts.index("workspace") + 1] + assert len(parts) > 0 + folder = Path(*parts) + assert folder.name == "workspace" + file_or_folder_id = f"{folder}" + else: + pytest.fail(f"invalid parameter: {selection_type=}") + + result = await start_data_export( + rpc_client, + job_id_data=AsyncJobNameData(user_id=user_id, product_name="osparc"), + data_export_start=DataExportTaskStartInput( + location_id=0, + file_and_folder_ids=[file_or_folder_id], + ), + ) + assert isinstance(result, AsyncJobGet) + + +@pytest.mark.parametrize( + "location_id", + [SimcoreS3DataManager.get_location_id()], + ids=[SimcoreS3DataManager.get_location_name()], + indirect=True, +) +@pytest.mark.parametrize( + "project_params", + [ + ProjectWithFilesParams( + num_nodes=1, + allowed_file_sizes=(TypeAdapter(ByteSize).validate_python("1b"),), + workspace_files_count=10, + ), + ], + ids=str, +) 
+@pytest.mark.parametrize( + "mock_celery_client", + [ + {"send_task_object": CeleryError("error")}, + ], + indirect=True, +) +async def test_start_data_export_scheduler_error( + rpc_client: RabbitMQRPCClient, + mock_celery_client: _MockCeleryClient, + with_random_project_with_files: tuple[ + dict[str, Any], + dict[NodeID, dict[SimcoreS3FileID, FileIDDict]], + ], + user_id: UserID, +): + + _, list_of_files = with_random_project_with_files + workspace_files = [ + p for p in list(list_of_files.values())[0].keys() if "/workspace/" in p + ] + assert len(workspace_files) > 0 + file_or_folder_id = workspace_files[0] + + with pytest.raises(JobSchedulerError): + _ = await start_data_export( + rpc_client, + job_id_data=AsyncJobNameData(user_id=user_id, product_name="osparc"), + data_export_start=DataExportTaskStartInput( + location_id=0, + file_and_folder_ids=[file_or_folder_id], + ), + ) + + +@pytest.mark.parametrize( + "mock_celery_client", + [ + {"send_task_object": TaskUUID(_faker.uuid4())}, + ], + indirect=True, +) +async def test_start_data_export_access_error( + rpc_client: RabbitMQRPCClient, + mock_celery_client: _MockCeleryClient, + user_id: UserID, + faker: Faker, +): + with pytest.raises(AccessRightError): + _ = await async_jobs.submit( + rpc_client, + rpc_namespace=STORAGE_RPC_NAMESPACE, + method_name="start_data_export", + job_id_data=AsyncJobNameData(user_id=user_id, product_name="osparc"), + data_export_start=DataExportTaskStartInput( + location_id=0, + file_and_folder_ids=[ + f"{faker.uuid4()}/{faker.uuid4()}/{faker.file_name()}" + ], + ), + ) + + +@pytest.mark.parametrize( + "mock_celery_client", + [ + { + "abort_task_object": None, + "get_task_uuids_object": [AsyncJobId(_faker.uuid4())], + }, + ], + indirect=True, +) +async def test_abort_data_export_success( + rpc_client: RabbitMQRPCClient, + mock_celery_client: _MockCeleryClient, +): + assert mock_celery_client.get_task_uuids_object is not None + assert not isinstance(mock_celery_client.get_task_uuids_object, Exception) + await async_jobs.cancel( + rpc_client, + rpc_namespace=STORAGE_RPC_NAMESPACE, + job_id_data=AsyncJobNameData( + user_id=_faker.pyint(min_value=1, max_value=100), product_name="osparc" + ), + job_id=next(iter(mock_celery_client.get_task_uuids_object)), + ) + + +@pytest.mark.parametrize( + "mock_celery_client, expected_exception_type", + [ + ({"abort_task_object": None, "get_task_uuids_object": []}, JobMissingError), + ( + { + "abort_task_object": CeleryError("error"), + "get_task_uuids_object": [AsyncJobId(_faker.uuid4())], + }, + JobSchedulerError, + ), + ], + indirect=["mock_celery_client"], +) +async def test_abort_data_export_error( + rpc_client: RabbitMQRPCClient, + mock_celery_client: _MockCeleryClient, + expected_exception_type: type[Exception], +): + job_ids = mock_celery_client.get_task_uuids_object + assert job_ids is not None + assert not isinstance(job_ids, Exception) + _job_id = next(iter(job_ids)) if len(job_ids) > 0 else AsyncJobId(_faker.uuid4()) + with pytest.raises(expected_exception_type): + await async_jobs.cancel( + rpc_client, + rpc_namespace=STORAGE_RPC_NAMESPACE, + job_id_data=AsyncJobNameData( + user_id=_faker.pyint(min_value=1, max_value=100), product_name="osparc" + ), + job_id=_job_id, + ) + + +@pytest.mark.parametrize( + "mock_celery_client", + [ + { + "get_task_status_object": TaskStatus( + task_uuid=TaskUUID(_faker.uuid4()), + task_state=TaskState.RUNNING, + progress_report=ProgressReport(actual_value=0), + ), + "get_task_uuids_object": [AsyncJobId(_faker.uuid4())], + }, + ], + 
indirect=True, +) +async def test_get_data_export_status( + rpc_client: RabbitMQRPCClient, + mock_celery_client: _MockCeleryClient, +): + job_ids = mock_celery_client.get_task_uuids_object + assert job_ids is not None + assert not isinstance(job_ids, Exception) + _job_id = next(iter(job_ids)) if len(job_ids) > 0 else AsyncJobId(_faker.uuid4()) + result = await async_jobs.status( + rpc_client, + rpc_namespace=STORAGE_RPC_NAMESPACE, + job_id=_job_id, + job_id_data=AsyncJobNameData( + user_id=_faker.pyint(min_value=1, max_value=100), product_name="osparc" + ), + ) + assert isinstance(result, AsyncJobStatus) + assert result.job_id == _job_id + + +@pytest.mark.parametrize( + "mock_celery_client, expected_exception_type", + [ + ( + {"get_task_status_object": None, "get_task_uuids_object": []}, + JobMissingError, + ), + ( + { + "get_task_status_object": CeleryError("error"), + "get_task_uuids_object": [AsyncJobId(_faker.uuid4())], + }, + JobSchedulerError, + ), + ], + indirect=["mock_celery_client"], +) +async def test_get_data_export_status_error( + rpc_client: RabbitMQRPCClient, + mock_celery_client: _MockCeleryClient, + expected_exception_type: type[Exception], +): + job_ids = mock_celery_client.get_task_uuids_object + assert job_ids is not None + assert not isinstance(job_ids, Exception) + _job_id = next(iter(job_ids)) if len(job_ids) > 0 else AsyncJobId(_faker.uuid4()) + with pytest.raises(expected_exception_type): + _ = await async_jobs.status( + rpc_client, + rpc_namespace=STORAGE_RPC_NAMESPACE, + job_id=_job_id, + job_id_data=AsyncJobNameData( + user_id=_faker.pyint(min_value=1, max_value=100), product_name="osparc" + ), + ) + + +@pytest.mark.parametrize( + "mock_celery_client", + [ + { + "get_task_status_object": TaskStatus( + task_uuid=TaskUUID(_faker.uuid4()), + task_state=TaskState.SUCCESS, + progress_report=ProgressReport(actual_value=100), + ), + "get_task_result_object": "result", + "get_task_uuids_object": [AsyncJobId(_faker.uuid4())], + }, + ], + indirect=True, +) +async def test_get_data_export_result_success( + rpc_client: RabbitMQRPCClient, + mock_celery_client: _MockCeleryClient, +): + job_ids = mock_celery_client.get_task_uuids_object + assert job_ids is not None + assert not isinstance(job_ids, Exception) + _job_id = next(iter(job_ids)) if len(job_ids) > 0 else AsyncJobId(_faker.uuid4()) + result = await async_jobs.result( + rpc_client, + rpc_namespace=STORAGE_RPC_NAMESPACE, + job_id=_job_id, + job_id_data=AsyncJobNameData( + user_id=_faker.pyint(min_value=1, max_value=100), product_name="osparc" + ), + ) + assert isinstance(result, AsyncJobResult) + + +@pytest.mark.parametrize( + "mock_celery_client, expected_exception", + [ + ( + { + "get_task_status_object": TaskStatus( + task_uuid=TaskUUID(_faker.uuid4()), + task_state=TaskState.RUNNING, + progress_report=ProgressReport(actual_value=50), + ), + "get_task_result_object": _faker.text(), + "get_task_uuids_object": [AsyncJobId(_faker.uuid4())], + }, + JobNotDoneError, + ), + ( + { + "get_task_status_object": TaskStatus( + task_uuid=TaskUUID(_faker.uuid4()), + task_state=TaskState.ABORTED, + progress_report=ProgressReport(actual_value=100), + ), + "get_task_result_object": _faker.text(), + "get_task_uuids_object": [AsyncJobId(_faker.uuid4())], + }, + JobAbortedError, + ), + ( + { + "get_task_status_object": TaskStatus( + task_uuid=TaskUUID(_faker.uuid4()), + task_state=TaskState.ERROR, + progress_report=ProgressReport(actual_value=100), + ), + "get_task_result_object": _faker.text(), + "get_task_uuids_object": 
[AsyncJobId(_faker.uuid4())], + }, + JobError, + ), + ( + { + "get_task_status_object": CeleryError("error"), + "get_task_result_object": _faker.text(), + "get_task_uuids_object": [AsyncJobId(_faker.uuid4())], + }, + JobSchedulerError, + ), + ( + { + "get_task_uuids_object": [], + }, + JobMissingError, + ), + ], + indirect=["mock_celery_client"], +) +async def test_get_data_export_result_error( + rpc_client: RabbitMQRPCClient, + mock_celery_client: _MockCeleryClient, + expected_exception: type[Exception], +): + job_ids = mock_celery_client.get_task_uuids_object + assert job_ids is not None + assert not isinstance(job_ids, Exception) + _job_id = next(iter(job_ids)) if len(job_ids) > 0 else AsyncJobId(_faker.uuid4()) + + with pytest.raises(expected_exception): + _ = await async_jobs.result( + rpc_client, + rpc_namespace=STORAGE_RPC_NAMESPACE, + job_id=_job_id, + job_id_data=AsyncJobNameData( + user_id=_faker.pyint(min_value=1, max_value=100), product_name="osparc" + ), + ) + + +@pytest.mark.parametrize( + "mock_celery_client", + [ + {"get_task_uuids_object": [_faker.uuid4() for _ in range(_faker.pyint(1, 10))]}, + ], + indirect=True, +) +async def test_list_jobs_success( + rpc_client: RabbitMQRPCClient, + mock_celery_client: _MockCeleryClient, +): + result = await async_jobs.list_jobs( + rpc_client, + rpc_namespace=STORAGE_RPC_NAMESPACE, + job_id_data=AsyncJobNameData( + user_id=_faker.pyint(min_value=1, max_value=100), product_name="osparc" + ), + filter_="", + ) + assert isinstance(result, list) + assert all(isinstance(elm, AsyncJobGet) for elm in result) + + +@pytest.mark.parametrize( + "mock_celery_client", + [ + {"get_task_uuids_object": CeleryError("error")}, + ], + indirect=True, +) +async def test_list_jobs_error( + rpc_client: RabbitMQRPCClient, + mock_celery_client: _MockCeleryClient, +): + with pytest.raises(JobSchedulerError): + _ = await async_jobs.list_jobs( + rpc_client, + rpc_namespace=STORAGE_RPC_NAMESPACE, + job_id_data=AsyncJobNameData( + user_id=_faker.pyint(min_value=1, max_value=100), product_name="osparc" + ), + filter_="", + ) diff --git a/services/storage/tests/unit/test_db_access_layer.py b/services/storage/tests/unit/test_db_access_layer.py index 452e09e1ead..4c268f38fb8 100644 --- a/services/storage/tests/unit/test_db_access_layer.py +++ b/services/storage/tests/unit/test_db_access_layer.py @@ -6,42 +6,50 @@ import pytest import sqlalchemy as sa -from aiopg.sa.engine import Engine from models_library.projects import ProjectID from models_library.users import UserID from simcore_postgres_database.models.projects import projects from simcore_postgres_database.models.users import users from simcore_postgres_database.models.workspaces import workspaces -from simcore_service_storage.db_access_layer import ( +from simcore_service_storage.modules.db.access_layer import ( + AccessLayerRepository, AccessRights, - get_file_access_rights, - get_project_access_rights, ) +from sqlalchemy.ext.asyncio import AsyncEngine pytest_simcore_core_services_selection = ["postgres"] +pytest_simcore_ops_services_selection = [ + "adminer", +] async def test_access_rights_on_owned_project( - user_id: UserID, project_id: ProjectID, aiopg_engine: Engine + user_id: UserID, project_id: ProjectID, sqlalchemy_async_engine: AsyncEngine ): - async with aiopg_engine.acquire() as conn: - access = await get_project_access_rights(conn, user_id, project_id) - assert access == AccessRights.all() + access = await AccessLayerRepository.instance( + sqlalchemy_async_engine
).get_project_access_rights(user_id=user_id, project_id=project_id) + assert access == AccessRights.all() - # still NOT registered in file_meta_data BUT with prefix {project_id} owned by user - access = await get_file_access_rights( - conn, user_id, f"{project_id}/node_id/not-in-file-metadata-table.txt" - ) - assert access == AccessRights.all() + # still NOT registered in file_meta_data BUT with prefix {project_id} owned by user + access = await AccessLayerRepository.instance( + sqlalchemy_async_engine + ).get_file_access_rights( + user_id=user_id, + file_id=f"{project_id}/node_id/not-in-file-metadata-table.txt", + ) + assert access == AccessRights.all() @pytest.fixture -async def prepare_db(user_id: UserID, project_id: ProjectID, aiopg_engine: Engine): - async with aiopg_engine.acquire() as conn: +async def prepare_db( + user_id: UserID, project_id: ProjectID, sqlalchemy_async_engine: AsyncEngine +): + async with sqlalchemy_async_engine.connect() as conn: result = await conn.execute( sa.select(users.c.primary_gid).where(users.c.id == user_id) ) - row = await result.first() + row = result.one() user_primary_id = row[0] result = await conn.execute( @@ -57,7 +65,7 @@ async def prepare_db(user_id: UserID, project_id: ProjectID, aiopg_engine: Engin ) .returning(workspaces.c.workspace_id) ) - row = await result.first() + row = result.one() workspace_id = row[0] await conn.execute( @@ -72,14 +80,20 @@ async def prepare_db(user_id: UserID, project_id: ProjectID, aiopg_engine: Engin async def test_access_rights_based_on_workspace( - user_id: UserID, project_id: ProjectID, aiopg_engine: Engine, prepare_db + user_id: UserID, + project_id: ProjectID, + sqlalchemy_async_engine: AsyncEngine, + prepare_db, ): - async with aiopg_engine.acquire() as conn: - access = await get_project_access_rights(conn, user_id, project_id) - assert access == AccessRights.all() + access = await AccessLayerRepository.instance( + sqlalchemy_async_engine + ).get_project_access_rights(user_id=user_id, project_id=project_id) + assert access == AccessRights.all() - # still NOT registered in file_meta_data BUT with prefix {project_id} owned by user - access = await get_file_access_rights( - conn, user_id, f"{project_id}/node_id/not-in-file-metadata-table.txt" - ) - assert access == AccessRights.all() + # still NOT registered in file_meta_data BUT with prefix {project_id} owned by user + access = await AccessLayerRepository.instance( + sqlalchemy_async_engine + ).get_file_access_rights( + user_id=user_id, file_id=f"{project_id}/node_id/not-in-file-metadata-table.txt" + ) + assert access == AccessRights.all() diff --git a/services/storage/tests/unit/test_db_file_meta_data.py b/services/storage/tests/unit/test_db_file_meta_data.py index c362fabe82f..5da3b9b0692 100644 --- a/services/storage/tests/unit/test_db_file_meta_data.py +++ b/services/storage/tests/unit/test_db_file_meta_data.py @@ -8,10 +8,10 @@ from faker import Faker from simcore_postgres_database.utils import as_postgres_sql_query_str -from simcore_service_storage.db_file_meta_data import ( +from simcore_service_storage.models import UserOrProjectFilter +from simcore_service_storage.modules.db.file_meta_data import ( _list_filter_with_partial_file_id_stmt, ) -from simcore_service_storage.models import UserOrProjectFilter def test_building_sql_statements(faker: Faker): diff --git a/services/storage/tests/unit/test_dsm.py b/services/storage/tests/unit/test_dsm.py index 22c78955581..e99dfda1916 100644 --- a/services/storage/tests/unit/test_dsm.py +++ 
b/services/storage/tests/unit/test_dsm.py @@ -12,9 +12,8 @@ from models_library.users import UserID from pydantic import ByteSize, TypeAdapter from servicelib.utils import limited_gather -from simcore_service_storage.models import FileMetaData, S3BucketName +from simcore_service_storage.models import FileMetaData from simcore_service_storage.simcore_s3_dsm import SimcoreS3DataManager -from types_aiobotocore_s3 import S3Client pytest_simcore_core_services_selection = ["postgres"] pytest_simcore_ops_services_selection = ["adminer"] @@ -43,31 +42,3 @@ async def dsm_mockup_complete_db( assert len(fmds) == 2 return (fmds[0], fmds[1]) - - -async def test_sync_table_meta_data( - simcore_s3_dsm: SimcoreS3DataManager, - dsm_mockup_complete_db: tuple[FileMetaData, FileMetaData], - storage_s3_bucket: S3BucketName, - s3_client: S3Client, -): - expected_removed_files = [] - # the list should be empty on start - list_changes = await simcore_s3_dsm.synchronise_meta_data_table(dry_run=True) - assert list_changes == expected_removed_files - - # now remove the files - for file_entry in dsm_mockup_complete_db: - s3_key = f"{file_entry.project_id}/{file_entry.node_id}/{file_entry.file_name}" - await s3_client.delete_object(Bucket=storage_s3_bucket, Key=s3_key) - expected_removed_files.append(s3_key) - - # the list should now contain the removed entries - list_changes = await simcore_s3_dsm.synchronise_meta_data_table(dry_run=True) - assert set(list_changes) == set(expected_removed_files) - - # now effectively call the function should really remove the files - list_changes = await simcore_s3_dsm.synchronise_meta_data_table(dry_run=False) - # listing again will show an empty list again - list_changes = await simcore_s3_dsm.synchronise_meta_data_table(dry_run=True) - assert list_changes == [] diff --git a/services/storage/tests/unit/test_dsm_cleaner.py b/services/storage/tests/unit/test_dsm_cleaner.py index 70134dbdb88..3e5f065a7f8 100644 --- a/services/storage/tests/unit/test_dsm_cleaner.py +++ b/services/storage/tests/unit/test_dsm_cleaner.py @@ -6,7 +6,7 @@ from unittest import mock import pytest -from aiohttp.test_utils import TestClient +from fastapi import FastAPI from pytest_mock import MockerFixture from simcore_service_storage.dsm_cleaner import _TASK_NAME_PERIODICALY_CLEAN_DSM @@ -34,7 +34,7 @@ def short_dsm_cleaner_interval(monkeypatch: pytest.MonkeyPatch) -> int: return 1 -async def test_setup_dsm_cleaner(client: TestClient): +async def test_setup_dsm_cleaner(initialized_app: FastAPI): all_tasks = asyncio.all_tasks() assert any( t.get_name().startswith(f"{_TASK_NAME_PERIODICALY_CLEAN_DSM}") @@ -42,7 +42,7 @@ async def test_setup_dsm_cleaner(client: TestClient): ) -async def test_disable_dsm_cleaner(disable_dsm_cleaner, client: TestClient): +async def test_disable_dsm_cleaner(disable_dsm_cleaner, initialized_app: FastAPI): all_tasks = asyncio.all_tasks() assert not any( t.get_name().startswith(f"{_TASK_NAME_PERIODICALY_CLEAN_DSM}") @@ -51,7 +51,9 @@ async def test_disable_dsm_cleaner(disable_dsm_cleaner, client: TestClient): async def test_dsm_cleaner_task_restarts_if_error( - mocked_dsm_clean: mock.Mock, short_dsm_cleaner_interval: int, client: TestClient + mocked_dsm_clean: mock.Mock, + short_dsm_cleaner_interval: int, + initialized_app: FastAPI, ): num_calls = mocked_dsm_clean.call_count await asyncio.sleep(short_dsm_cleaner_interval + 1) diff --git a/services/storage/tests/unit/test_dsm_dsmcleaner.py b/services/storage/tests/unit/test_dsm_dsmcleaner.py index 1683a9d0a0d..2f48b85bd03 100644 --- 
a/services/storage/tests/unit/test_dsm_dsmcleaner.py +++ b/services/storage/tests/unit/test_dsm_dsmcleaner.py @@ -13,27 +13,28 @@ from pathlib import Path from typing import Final -import arrow import pytest -from aiopg.sa.engine import Engine from aws_library.s3 import MultiPartUploadLinks, SimcoreS3API from faker import Faker -from models_library.api_schemas_storage import LinkType +from models_library.api_schemas_storage.storage_schemas import LinkType from models_library.basic_types import SHA256Str from models_library.projects_nodes_io import SimcoreS3DirectoryID, SimcoreS3FileID from models_library.users import UserID from pydantic import ByteSize, TypeAdapter from pytest_simcore.helpers.parametrizations import byte_size_ids -from simcore_postgres_database.storage_models import file_meta_data -from simcore_service_storage import db_file_meta_data -from simcore_service_storage.exceptions import ( +from simcore_postgres_database.storage_models import ( + file_meta_data as file_meta_data_table, +) +from simcore_service_storage.exceptions.errors import ( FileAccessRightError, FileMetaDataNotFoundError, ) from simcore_service_storage.models import FileMetaData, S3BucketName, UploadID +from simcore_service_storage.modules.db.file_meta_data import FileMetaDataRepository from simcore_service_storage.simcore_s3_dsm import SimcoreS3DataManager +from sqlalchemy.ext.asyncio import AsyncEngine -pytest_simcore_core_services_selection = ["postgres"] +pytest_simcore_core_services_selection = ["postgres", "rabbit"] pytest_simcore_ops_services_selection = ["adminer"] _faker: Faker = Faker() @@ -72,7 +73,7 @@ def simcore_directory_id(simcore_file_id: SimcoreS3FileID) -> SimcoreS3FileID: @pytest.mark.parametrize("checksum", [None, _faker.sha256()]) async def test_regression_collaborator_creates_file_upload_links( # pylint:disable=too-many-positional-arguments disabled_dsm_cleaner_task, - aiopg_engine: Engine, + sqlalchemy_async_engine: AsyncEngine, simcore_s3_dsm: SimcoreS3DataManager, simcore_file_id: SimcoreS3FileID, simcore_directory_id: SimcoreS3FileID, @@ -142,7 +143,7 @@ async def test_regression_collaborator_creates_file_upload_links( # pylint:disa @pytest.mark.parametrize("checksum", [None, _faker.sha256()]) async def test_clean_expired_uploads_deletes_expired_pending_uploads( disabled_dsm_cleaner_task, - aiopg_engine: Engine, + sqlalchemy_async_engine: AsyncEngine, simcore_s3_dsm: SimcoreS3DataManager, simcore_file_id: SimcoreS3FileID, simcore_directory_id: SimcoreS3FileID, @@ -168,8 +169,9 @@ async def test_clean_expired_uploads_deletes_expired_pending_uploads( is_directory=is_directory, ) # ensure the database is correctly set up - async with aiopg_engine.acquire() as conn: - fmd = await db_file_meta_data.get(conn, file_or_directory_id) + fmd = await FileMetaDataRepository.instance(sqlalchemy_async_engine).get( + file_id=file_or_directory_id + ) assert fmd assert fmd.upload_expires_at # ensure we have now an upload id IF the link was presigned ONLY @@ -184,8 +186,10 @@ async def test_clean_expired_uploads_deletes_expired_pending_uploads( # now run the cleaner, nothing should happen since the expiration was set to the default of 3600 await simcore_s3_dsm.clean_expired_uploads() # check the entries are still the same - async with aiopg_engine.acquire() as conn: - fmd_after_clean = await db_file_meta_data.get(conn, file_or_directory_id) + async with sqlalchemy_async_engine.connect() as conn: + fmd_after_clean = await FileMetaDataRepository.instance( + sqlalchemy_async_engine + 
).get(file_id=file_or_directory_id) assert fmd_after_clean == fmd assert ( await storage_s3_client.list_ongoing_multipart_uploads(bucket=storage_s3_bucket) @@ -193,25 +197,33 @@ ) # now change the upload_expires_at entry to simulate an expired entry - async with aiopg_engine.acquire() as conn: + async with sqlalchemy_async_engine.begin() as conn: await conn.execute( - file_meta_data.update() - .where(file_meta_data.c.file_id == file_or_directory_id) - .values(upload_expires_at=arrow.utcnow().datetime) + file_meta_data_table.update() + .where(file_meta_data_table.c.file_id == file_or_directory_id) + .values(upload_expires_at=datetime.datetime.utcnow()) ) - await asyncio.sleep(1) + await asyncio.sleep(5) await simcore_s3_dsm.clean_expired_uploads() # check the entries were removed - async with aiopg_engine.acquire() as conn: + async with sqlalchemy_async_engine.connect() as conn: with pytest.raises(FileMetaDataNotFoundError): - await db_file_meta_data.get(conn, simcore_file_id) + await FileMetaDataRepository.instance(sqlalchemy_async_engine).get( + file_id=simcore_file_id + ) # since there is no entry in the db, this upload shall be cleaned up assert not await storage_s3_client.list_ongoing_multipart_uploads( bucket=storage_s3_bucket ) +@pytest.mark.parametrize( + "location_id", + [SimcoreS3DataManager.get_location_id()], + ids=[SimcoreS3DataManager.get_location_name()], + indirect=True, +) @pytest.mark.parametrize( "file_size", [ @@ -228,7 +240,7 @@ async def test_clean_expired_uploads_reverts_to_last_known_version_expired_pendi ..., Awaitable[tuple[Path, SimcoreS3FileID]], ], - aiopg_engine: Engine, + sqlalchemy_async_engine: AsyncEngine, simcore_s3_dsm: SimcoreS3DataManager, user_id: UserID, link_type: LinkType, @@ -247,8 +259,10 @@ file_id=None, sha256_checksum=checksum, ) - async with aiopg_engine.acquire() as conn: - original_fmd = await db_file_meta_data.get(conn, file_id) + async with sqlalchemy_async_engine.connect() as conn: + original_fmd = await FileMetaDataRepository.instance( + sqlalchemy_async_engine + ).get(file_id=file_id) # now create a new link to the VERY SAME FILE UUID await simcore_s3_dsm.create_file_upload_links( @@ -260,8 +274,10 @@ is_directory=False, ) # ensure the database is correctly set up - async with aiopg_engine.acquire() as conn: - fmd = await db_file_meta_data.get(conn, file_id) + async with sqlalchemy_async_engine.connect() as conn: + fmd = await FileMetaDataRepository.instance(sqlalchemy_async_engine).get( + file_id=file_id + ) assert fmd assert fmd.upload_expires_at # ensure we have now an upload id IF the link was presigned ONLY @@ -276,8 +292,9 @@ # now run the cleaner, nothing should happen since the expiration was set to the default of 3600 await simcore_s3_dsm.clean_expired_uploads() # check the entries are still the same - async with aiopg_engine.acquire() as conn: - fmd_after_clean = await db_file_meta_data.get(conn, file_id) + fmd_after_clean = await FileMetaDataRepository.instance( + sqlalchemy_async_engine + ).get(file_id=file_id) assert fmd_after_clean == fmd assert ( await storage_s3_client.list_ongoing_multipart_uploads(bucket=storage_s3_bucket) @@ -285,18 +302,19 @@ async def
test_clean_expired_uploads_reverts_to_last_known_version_expired_pendi ) # now change the upload_expires_at entry to simulate an expired entry - async with aiopg_engine.acquire() as conn: + async with sqlalchemy_async_engine.begin() as conn: await conn.execute( - file_meta_data.update() - .where(file_meta_data.c.file_id == file_id) - .values(upload_expires_at=arrow.utcnow().datetime) + file_meta_data_table.update() + .where(file_meta_data_table.c.file_id == file_id) + .values(upload_expires_at=datetime.datetime.utcnow()) ) await asyncio.sleep(1) await simcore_s3_dsm.clean_expired_uploads() # check the entries were reverted - async with aiopg_engine.acquire() as conn: - reverted_fmd = await db_file_meta_data.get(conn, file_id) + reverted_fmd = await FileMetaDataRepository.instance(sqlalchemy_async_engine).get( + file_id=file_id + ) assert original_fmd.model_dump(exclude={"created_at"}) == reverted_fmd.model_dump( exclude={"created_at"} ) @@ -320,7 +338,7 @@ async def test_clean_expired_uploads_reverts_to_last_known_version_expired_pendi @pytest.mark.parametrize("checksum", [_faker.sha256(), None]) async def test_clean_expired_uploads_does_not_clean_multipart_upload_on_creation( disabled_dsm_cleaner_task, - aiopg_engine: Engine, + sqlalchemy_async_engine: AsyncEngine, simcore_s3_dsm: SimcoreS3DataManager, simcore_file_id: SimcoreS3FileID, simcore_directory_id: SimcoreS3FileID, @@ -335,7 +353,7 @@ async def test_clean_expired_uploads_does_not_clean_multipart_upload_on_creation the cleaner in between to ensure the cleaner does not break the mechanism""" file_or_directory_id = simcore_directory_id if is_directory else simcore_file_id - later_than_now = arrow.utcnow().datetime + datetime.timedelta(minutes=5) + later_than_now = datetime.datetime.utcnow() + datetime.timedelta(minutes=5) fmd = FileMetaData.from_simcore_node( user_id, file_or_directory_id, @@ -347,11 +365,12 @@ async def test_clean_expired_uploads_does_not_clean_multipart_upload_on_creation sha256_checksum=checksum, ) # we create the entry in the db - async with aiopg_engine.acquire() as conn: - await db_file_meta_data.upsert(conn, fmd) + await FileMetaDataRepository.instance(sqlalchemy_async_engine).upsert(fmd=fmd) - # ensure the database is correctly set up - fmd_in_db = await db_file_meta_data.get(conn, file_or_directory_id) + # ensure the database is correctly set up + fmd_in_db = await FileMetaDataRepository.instance(sqlalchemy_async_engine).get( + file_id=file_or_directory_id + ) assert fmd_in_db assert fmd_in_db.upload_expires_at # we create the multipart upload link @@ -388,9 +407,9 @@ async def test_clean_expired_uploads_does_not_clean_multipart_upload_on_creation assert len(started_multipart_uploads_upload_id) == len(file_ids_to_upload) # ensure we have now an upload id - all_ongoing_uploads: list[ - tuple[UploadID, SimcoreS3FileID] - ] = await storage_s3_client.list_ongoing_multipart_uploads(bucket=storage_s3_bucket) + all_ongoing_uploads: list[tuple[UploadID, SimcoreS3FileID]] = ( + await storage_s3_client.list_ongoing_multipart_uploads(bucket=storage_s3_bucket) + ) assert len(all_ongoing_uploads) == len(file_ids_to_upload) for ongoing_upload_id, ongoing_file_id in all_ongoing_uploads: diff --git a/services/storage/tests/unit/test_dsm_soft_links.py b/services/storage/tests/unit/test_dsm_soft_links.py index dd822ea2165..aa2d1be9161 100644 --- a/services/storage/tests/unit/test_dsm_soft_links.py +++ b/services/storage/tests/unit/test_dsm_soft_links.py @@ -4,62 +4,15 @@ import uuid from functools import lru_cache -from 
typing import AsyncIterator -import pytest -from aiopg.sa.engine import Engine -from faker import Faker -from models_library.api_schemas_storage import S3BucketName from models_library.projects_nodes_io import SimcoreS3FileID -from models_library.users import UserID -from models_library.utils.fastapi_encoders import jsonable_encoder -from pydantic import ByteSize, TypeAdapter -from simcore_postgres_database.storage_models import file_meta_data -from simcore_service_storage.models import FileMetaData, FileMetaDataAtDB +from simcore_service_storage.models import FileMetaData from simcore_service_storage.simcore_s3_dsm import SimcoreS3DataManager -from sqlalchemy.sql.expression import literal_column pytest_simcore_core_services_selection = ["postgres"] pytest_simcore_ops_services_selection = ["adminer"] -@pytest.fixture() -async def output_file( - user_id: UserID, project_id: str, aiopg_engine: Engine, faker: Faker -) -> AsyncIterator[FileMetaData]: - node_id = "fd6f9737-1988-341b-b4ac-0614b646fa82" - - # pylint: disable=no-value-for-parameter - - file = FileMetaData.from_simcore_node( - user_id=user_id, - file_id=SimcoreS3FileID(f"{project_id}/{node_id}/filename.txt"), - bucket=TypeAdapter(S3BucketName).validate_python("master-simcore"), - location_id=SimcoreS3DataManager.get_location_id(), - location_name=SimcoreS3DataManager.get_location_name(), - sha256_checksum=faker.sha256(), - ) - file.entity_tag = "df9d868b94e53d18009066ca5cd90e9f" - file.file_size = ByteSize(12) - file.user_id = user_id - - async with aiopg_engine.acquire() as conn: - stmt = ( - file_meta_data.insert() - .values(jsonable_encoder(FileMetaDataAtDB.model_validate(file))) - .returning(literal_column("*")) - ) - result = await conn.execute(stmt) - row = await result.fetchone() - assert row - - yield file - - result = await conn.execute( - file_meta_data.delete().where(file_meta_data.c.file_id == row.file_id) - ) - - def create_reverse_dns(*resource_name_parts) -> str: """ Returns a name for the resource following the reverse domain name notation diff --git a/services/storage/tests/unit/test_handlers_datasets.py b/services/storage/tests/unit/test_handlers_datasets.py index d207005c2d2..5808a63f1f1 100644 --- a/services/storage/tests/unit/test_handlers_datasets.py +++ b/services/storage/tests/unit/test_handlers_datasets.py @@ -4,76 +4,95 @@ # pylint:disable=too-many-arguments # pylint:disable=no-name-in-module - from collections.abc import Awaitable, Callable from pathlib import Path import pytest -from aiohttp.test_utils import TestClient from faker import Faker -from models_library.api_schemas_storage import DatasetMetaDataGet, FileMetaDataGet +from fastapi import FastAPI +from httpx import AsyncClient +from models_library.api_schemas_storage.storage_schemas import ( + DatasetMetaDataGet, + FileMetaDataGet, +) from models_library.projects import ProjectID -from models_library.projects_nodes_io import SimcoreS3FileID +from models_library.projects_nodes_io import LocationID, SimcoreS3FileID from models_library.users import UserID -from pydantic import ByteSize, TypeAdapter +from pydantic import ByteSize from pytest_mock import MockerFixture -from pytest_simcore.helpers.assert_checks import assert_status +from pytest_simcore.helpers.fastapi import url_from_operation_id +from pytest_simcore.helpers.httpx_assert_checks import assert_status from pytest_simcore.helpers.parametrizations import ( byte_size_ids, parametrized_file_size, ) from servicelib.aiohttp import status +from simcore_service_storage.simcore_s3_dsm import 
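The deleted `output_file` fixture above created and cleaned up a `file_meta_data` row through aiopg. For reference, a hedged sketch of the same fixture on top of the async engine; `row_values` is a hypothetical stand-in for the jsonable-encoded `FileMetaData` payload the old code built inline:

```python
from collections.abc import AsyncIterator

import pytest
from simcore_postgres_database.storage_models import file_meta_data
from sqlalchemy.ext.asyncio import AsyncEngine


@pytest.fixture
async def output_file(
    sqlalchemy_async_engine: AsyncEngine,
    row_values: dict,  # hypothetical fixture: encoded FileMetaData payload
) -> AsyncIterator[dict]:
    async with sqlalchemy_async_engine.begin() as conn:
        # begin() commits on exit, so the row is visible to the code under test
        await conn.execute(file_meta_data.insert().values(**row_values))

    yield row_values

    # teardown: each block is its own committed transaction
    async with sqlalchemy_async_engine.begin() as conn:
        await conn.execute(
            file_meta_data.delete().where(
                file_meta_data.c.file_id == row_values["file_id"]
            )
        )
```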
SimcoreS3DataManager pytest_simcore_core_services_selection = ["postgres"] pytest_simcore_ops_services_selection = ["adminer"] -async def test_get_files_metadata_dataset_with_no_files_returns_empty_array( - client: TestClient, +async def test_list_dataset_files_metadata_with_no_files_returns_empty_array( + initialized_app: FastAPI, + client: AsyncClient, user_id: UserID, project_id: ProjectID, - location_id: int, + location_id: LocationID, + fake_datcore_tokens: tuple[str, str], ): - assert client.app - url = ( - client.app.router["get_files_metadata_dataset"] - .url_for(location_id=f"{location_id}", dataset_id=f"{project_id}") - .with_query(user_id=user_id) - ) + url = url_from_operation_id( + client, + initialized_app, + "list_dataset_files_metadata", + location_id=location_id, + dataset_id=project_id, + ).with_query(user_id=user_id) + response = await client.get(f"{url}") - data, error = await assert_status(response, status.HTTP_200_OK) + data, error = assert_status(response, status.HTTP_200_OK, list[FileMetaDataGet]) assert data == [] assert not error +@pytest.mark.parametrize( + "location_id", + [SimcoreS3DataManager.get_location_id()], + ids=[SimcoreS3DataManager.get_location_name()], + indirect=True, +) @pytest.mark.parametrize( "file_size", [parametrized_file_size("100Mib")], ids=byte_size_ids, ) -async def test_get_files_metadata_dataset( +async def test_list_dataset_files_metadata( upload_file: Callable[[ByteSize, str], Awaitable[tuple[Path, SimcoreS3FileID]]], - client: TestClient, + initialized_app: FastAPI, + client: AsyncClient, user_id: UserID, project_id: ProjectID, - location_id: int, + location_id: LocationID, file_size: ByteSize, faker: Faker, ): - assert client.app NUM_FILES = 3 for n in range(NUM_FILES): file, file_id = await upload_file(file_size, faker.file_name()) - url = ( - client.app.router["get_files_metadata_dataset"] - .url_for(location_id=f"{location_id}", dataset_id=f"{project_id}") - .with_query(user_id=user_id) - ) + url = url_from_operation_id( + client, + initialized_app, + "list_dataset_files_metadata", + location_id=location_id, + dataset_id=project_id, + ).with_query(user_id=user_id) + response = await client.get(f"{url}") - data, error = await assert_status(response, status.HTTP_200_OK) - assert data + list_fmds, error = assert_status( + response, status.HTTP_200_OK, list[FileMetaDataGet] + ) assert not error - list_fmds = TypeAdapter(list[FileMetaDataGet]).validate_python(data) + assert list_fmds assert len(list_fmds) == (n + 1) fmd = list_fmds[n] assert fmd.file_name == file.name @@ -82,48 +101,64 @@ async def test_get_files_metadata_dataset( assert fmd.file_size == file.stat().st_size -async def test_get_datasets_metadata( - client: TestClient, +@pytest.mark.parametrize( + "location_id", + [SimcoreS3DataManager.get_location_id()], + ids=[SimcoreS3DataManager.get_location_name()], + indirect=True, +) +async def test_list_datasets_metadata( + initialized_app: FastAPI, + client: AsyncClient, user_id: UserID, - location_id: int, + location_id: LocationID, project_id: ProjectID, ): - assert client.app - - url = ( - client.app.router["get_datasets_metadata"] - .url_for(location_id=f"{location_id}") - .with_query(user_id=f"{user_id}") - ) + url = url_from_operation_id( + client, + initialized_app, + "list_datasets_metadata", + location_id=location_id, + ).with_query(user_id=user_id) response = await client.get(f"{url}") - data, error = await assert_status(response, status.HTTP_200_OK) - assert data - assert not error - list_datasets = 
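Throughout these files, aiohttp's named-route lookup (`client.app.router[...].url_for(...)`) gives way to `url_from_operation_id` from the pytest_simcore FastAPI helpers. A plausible sketch of such a helper, assuming every route carries a unique `operation_id` and the httpx client was built with a base URL; the real implementation may differ:

```python
import httpx
from fastapi import FastAPI
from fastapi.routing import APIRoute
from yarl import URL


def url_from_operation_id(
    client: httpx.AsyncClient,
    app: FastAPI,
    operation_id: str,
    **path_params: str | int,
) -> URL:
    """Resolve a route by operation_id and fill in its path params (sketch)."""
    for route in app.routes:
        if isinstance(route, APIRoute) and route.operation_id == operation_id:
            # url_path_for interpolates the {placeholders} of the route path
            path = app.url_path_for(route.name, **path_params)
            # assumes client.base_url carries no path prefix of its own
            return URL(f"{client.base_url}").with_path(path)
    msg = f"no route with operation_id={operation_id!r}"
    raise ValueError(msg)
```

Returning a yarl `URL` matches the call sites, which chain `.with_query(...)` onto the result.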
TypeAdapter(list[DatasetMetaDataGet]).validate_python(data) + list_datasets, error = assert_status( + response, status.HTTP_200_OK, list[DatasetMetaDataGet] + ) + assert response.status_code == status.HTTP_200_OK + assert list_datasets assert len(list_datasets) == 1 dataset = list_datasets[0] assert dataset.dataset_id == project_id +@pytest.mark.parametrize( + "location_id", + [SimcoreS3DataManager.get_location_id()], + ids=[SimcoreS3DataManager.get_location_name()], + indirect=True, +) async def test_ensure_expand_dirs_defaults_true( mocker: MockerFixture, - client: TestClient, + initialized_app: FastAPI, + client: AsyncClient, user_id: UserID, project_id: ProjectID, - location_id: int, + location_id: LocationID, ): mocked_object = mocker.patch( "simcore_service_storage.simcore_s3_dsm.SimcoreS3DataManager.list_files_in_dataset", autospec=True, ) - assert client.app - url = ( - client.app.router["get_files_metadata_dataset"] - .url_for(location_id=f"{location_id}", dataset_id=f"{project_id}") - .with_query(user_id=user_id) - ) + url = url_from_operation_id( + client, + initialized_app, + "list_dataset_files_metadata", + location_id=location_id, + dataset_id=project_id, + ).with_query(user_id=user_id) + await client.get(f"{url}") assert len(mocked_object.call_args_list) == 1 diff --git a/services/storage/tests/unit/test_handlers_datcore.py b/services/storage/tests/unit/test_handlers_datcore.py new file mode 100644 index 00000000000..a7162610031 --- /dev/null +++ b/services/storage/tests/unit/test_handlers_datcore.py @@ -0,0 +1,48 @@ +import httpx +import pytest +from fastapi import FastAPI, status +from models_library.projects_nodes_io import LocationID +from models_library.users import UserID +from pytest_simcore.helpers.fastapi import url_from_operation_id +from pytest_simcore.helpers.httpx_assert_checks import assert_status +from simcore_service_storage.datcore_dsm import DatCoreDataManager + +pytest_simcore_core_services_selection = ["postgres"] +pytest_simcore_ops_services_selection = ["adminer"] + + +@pytest.mark.parametrize( + "entrypoint", + [ + "list_datasets_metadata", + # "list_dataset_files_metadata", needs dataset_id + "list_files_metadata", + # "get_file_metadata", needs file_id + # "download_file", needs file_id + "list_paths", + ], +) +@pytest.mark.parametrize( + "location_id", + [DatCoreDataManager.get_location_id()], + ids=[DatCoreDataManager.get_location_name()], + indirect=True, +) +async def test_entrypoint_without_api_tokens_return_401( + initialized_app: FastAPI, + client: httpx.AsyncClient, + location_id: LocationID, + entrypoint: str, + user_id: UserID, +): + url = url_from_operation_id( + client, initialized_app, entrypoint, location_id=f"{location_id}" + ).with_query( + user_id=user_id, + ) + response = await client.get(f"{url}") + assert_status( + response, + status.HTTP_401_UNAUTHORIZED, + None, + ) diff --git a/services/storage/tests/unit/test_handlers_files.py b/services/storage/tests/unit/test_handlers_files.py index 40d4b72f15e..3304e226dd9 100644 --- a/services/storage/tests/unit/test_handlers_files.py +++ b/services/storage/tests/unit/test_handlers_files.py @@ -12,21 +12,20 @@ import logging import urllib.parse from collections.abc import AsyncIterator, Awaitable, Callable -from contextlib import AbstractAsyncContextManager from dataclasses import dataclass from pathlib import Path from random import choice from typing import Any, Literal from uuid import uuid4 +import httpx import pytest from aiohttp import ClientSession -from aiohttp.test_utils import 
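The httpx flavor of `assert_status` takes the expected schema as a third argument and returns already-validated data, which is what removes the per-test `TypeAdapter(...).validate_python(data)` boilerplate seen in the removed lines. A minimal sketch, assuming the usual `{"data": ..., "error": ...}` response envelope:

```python
from typing import Any

import httpx
from pydantic import TypeAdapter


def assert_status(
    response: httpx.Response,
    expected_status_code: int,
    response_model: Any | None,
) -> tuple[Any, Any]:
    """Check the status code and parse the enveloped payload (sketch)."""
    assert response.status_code == expected_status_code, response.text

    if response.status_code == 204:  # No Content: nothing to parse
        return None, None

    envelope = response.json()
    data, error = envelope.get("data"), envelope.get("error")
    if response_model is None or data is None:
        return data, error
    # TypeAdapter also accepts generic aliases such as list[FileMetaDataGet]
    return TypeAdapter(response_model).validate_python(data), error
```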
TestClient -from aiopg.sa import Engine from aws_library.s3 import S3KeyNotFoundError, S3ObjectKey, SimcoreS3API from aws_library.s3._constants import MULTIPART_UPLOADS_MIN_TOTAL_SIZE from faker import Faker -from models_library.api_schemas_storage import ( +from fastapi import FastAPI +from models_library.api_schemas_storage.storage_schemas import ( FileMetaDataGet, FileUploadCompleteFutureResponse, FileUploadCompleteResponse, @@ -38,26 +37,33 @@ SoftCopyBody, UploadedPart, ) -from models_library.basic_types import SHA256Str from models_library.projects import ProjectID from models_library.projects_nodes_io import LocationID, NodeID, SimcoreS3FileID from models_library.users import UserID from models_library.utils.fastapi_encoders import jsonable_encoder -from pydantic import AnyHttpUrl, ByteSize, HttpUrl, TypeAdapter +from pydantic import AnyUrl, ByteSize, TypeAdapter from pytest_mock import MockerFixture -from pytest_simcore.helpers.assert_checks import assert_status +from pytest_simcore.helpers.fastapi import url_from_operation_id +from pytest_simcore.helpers.httpx_assert_checks import assert_status from pytest_simcore.helpers.logging_tools import log_context from pytest_simcore.helpers.parametrizations import byte_size_ids from pytest_simcore.helpers.s3 import upload_file_part, upload_file_to_presigned_link +from pytest_simcore.helpers.storage_utils import FileIDDict, ProjectWithFilesParams +from pytest_simcore.helpers.storage_utils_file_meta_data import ( + assert_file_meta_data_in_db, +) from servicelib.aiohttp import status from simcore_service_storage.constants import S3_UNDEFINED_OR_EXTERNAL_MULTIPART_ID -from simcore_service_storage.handlers_files import UPLOAD_TASKS_KEY -from simcore_service_storage.models import S3BucketName, UploadID +from simcore_service_storage.models import FileDownloadResponse, S3BucketName, UploadID +from simcore_service_storage.modules.long_running_tasks import ( + get_completed_upload_tasks, +) +from simcore_service_storage.simcore_s3_dsm import SimcoreS3DataManager +from sqlalchemy.ext.asyncio import AsyncEngine from tenacity.asyncio import AsyncRetrying from tenacity.retry import retry_if_exception_type from tenacity.stop import stop_after_delay from tenacity.wait import wait_fixed -from tests.helpers.utils_file_meta_data import assert_file_meta_data_in_db from types_aiobotocore_s3 import S3Client from yarl import URL @@ -82,9 +88,9 @@ async def assert_multipart_uploads_in_progress( expected_upload_ids: list[str] | None, ): """if None is passed, then it checks that no uploads are in progress""" - list_uploads: list[ - tuple[UploadID, S3ObjectKey] - ] = await storage_s3_client.list_ongoing_multipart_uploads(bucket=storage_s3_bucket) + list_uploads: list[tuple[UploadID, S3ObjectKey]] = ( + await storage_s3_client.list_ongoing_multipart_uploads(bucket=storage_s3_bucket) + ) if expected_upload_ids is None: assert ( not list_uploads @@ -104,6 +110,12 @@ class SingleLinkParam: expected_chunk_size: ByteSize +@pytest.mark.parametrize( + "location_id", + [SimcoreS3DataManager.get_location_id()], + ids=[SimcoreS3DataManager.get_location_name()], + indirect=True, +) @pytest.mark.parametrize( "single_link_param", [ @@ -118,7 +130,7 @@ class SingleLinkParam: ), pytest.param( SingleLinkParam( - {"link_type": "presigned"}, + {"link_type": "PRESIGNED"}, "http", _HTTP_PRESIGNED_LINK_QUERY_KEYS, TypeAdapter(ByteSize).validate_python("5GiB"), @@ -127,7 +139,7 @@ class SingleLinkParam: ), pytest.param( SingleLinkParam( - {"link_type": "s3"}, + {"link_type": "S3"}, "s3", 
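Every test in this batch now pins `location_id` via parametrization with `indirect=True`, which only works if the `location_id` fixture consumes `request.param`. The fixture contract being assumed, in sketch form:

```python
import pytest


@pytest.fixture
def location_id(request: pytest.FixtureRequest) -> int:
    # with indirect=True the parametrize value arrives as request.param;
    # the fallback default for non-parametrized tests is an assumption
    return getattr(request, "param", 0)
```

Usage then mirrors the diff: `@pytest.mark.parametrize("location_id", [SimcoreS3DataManager.get_location_id()], ids=[SimcoreS3DataManager.get_location_name()], indirect=True)`.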
[], TypeAdapter(ByteSize).validate_python("5TiB"), @@ -141,7 +153,7 @@ async def test_create_upload_file_with_file_size_0_returns_single_link( storage_s3_bucket: S3BucketName, simcore_file_id: SimcoreS3FileID, single_link_param: SingleLinkParam, - aiopg_engine: Engine, + sqlalchemy_async_engine: AsyncEngine, create_upload_file_link_v2: Callable[..., Awaitable[FileUploadSchema]], cleanup_user_projects_file_metadata: None, ): @@ -171,7 +183,7 @@ async def test_create_upload_file_with_file_size_0_returns_single_link( # now check the entry in the database is correct, there should be only one await assert_file_meta_data_in_db( - aiopg_engine, + sqlalchemy_async_engine, file_id=simcore_file_id, expected_entry_exists=True, expected_file_size=-1, @@ -189,28 +201,29 @@ async def test_create_upload_file_with_file_size_0_returns_single_link( @pytest.fixture async def create_upload_file_link_v1( - client: TestClient, user_id: UserID, location_id: LocationID + initialized_app: FastAPI, + client: httpx.AsyncClient, + user_id: UserID, + location_id: LocationID, ) -> AsyncIterator[Callable[..., Awaitable[PresignedLink]]]: file_params: list[tuple[UserID, int, SimcoreS3FileID]] = [] async def _link_creator(file_id: SimcoreS3FileID, **query_kwargs) -> PresignedLink: - assert client.app - url = ( - client.app.router["upload_file"] - .url_for( - location_id=f"{location_id}", - file_id=urllib.parse.quote(file_id, safe=""), - ) - .with_query(**query_kwargs, user_id=user_id) - ) + url = url_from_operation_id( + client, + initialized_app, + "upload_file", + location_id=f"{location_id}", + file_id=file_id, + ).with_query(**query_kwargs, user_id=user_id) assert ( "file_size" not in url.query ), "v1 call to upload_file MUST NOT contain file_size field, this is reserved for v2 call" response = await client.put(f"{url}") - data, error = await assert_status(response, status.HTTP_200_OK) + received_file_upload_link, error = assert_status( + response, status.HTTP_200_OK, PresignedLink + ) assert not error - assert data - received_file_upload_link = TypeAdapter(PresignedLink).validate_python(data) assert received_file_upload_link file_params.append((user_id, location_id, file_id)) return received_file_upload_link @@ -218,21 +231,26 @@ async def _link_creator(file_id: SimcoreS3FileID, **query_kwargs) -> PresignedLi yield _link_creator # cleanup - assert client.app + clean_tasks = [] for u_id, loc_id, file_id in file_params: - url = ( - client.app.router["delete_file"] - .url_for( - location_id=f"{loc_id}", - file_id=urllib.parse.quote(file_id, safe=""), - ) - .with_query(user_id=u_id) - ) + url = url_from_operation_id( + client, + initialized_app, + "upload_file", + location_id=f"{location_id}", + file_id=file_id, + ).with_query(user_id=u_id) clean_tasks.append(client.delete(f"{url}")) await asyncio.gather(*clean_tasks) +@pytest.mark.parametrize( + "location_id", + [SimcoreS3DataManager.get_location_id()], + ids=[SimcoreS3DataManager.get_location_name()], + indirect=True, +) @pytest.mark.parametrize( "single_link_param", [ @@ -247,7 +265,7 @@ async def _link_creator(file_id: SimcoreS3FileID, **query_kwargs) -> PresignedLi ), pytest.param( SingleLinkParam( - {"link_type": "presigned"}, + {"link_type": "PRESIGNED"}, "http", _HTTP_PRESIGNED_LINK_QUERY_KEYS, TypeAdapter(ByteSize).validate_python("5GiB"), @@ -256,7 +274,7 @@ async def _link_creator(file_id: SimcoreS3FileID, **query_kwargs) -> PresignedLi ), pytest.param( SingleLinkParam( - {"link_type": "s3"}, + {"link_type": "S3"}, "s3", [], 
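The repeated `{"link_type": "presigned"}` to `{"link_type": "PRESIGNED"}` edits, together with call sites dropping `.value.lower()`, suggest the API now validates the `LinkType` enum value directly instead of a lower-cased alias. A hedged sketch of the enum this implies:

```python
from enum import Enum


class LinkType(str, Enum):
    # uppercase values, so LinkType.PRESIGNED.value can go straight into
    # a query string without .lower() (inferred from the diff)
    PRESIGNED = "PRESIGNED"
    S3 = "S3"


# old call site: link_type=link_type.value.lower()  -> "presigned"
# new call site: link_type=link_type.value          -> "PRESIGNED"
assert LinkType("PRESIGNED") is LinkType.PRESIGNED
```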
TypeAdapter(ByteSize).validate_python("5TiB"), @@ -270,7 +288,7 @@ async def test_create_upload_file_with_no_file_size_query_returns_v1_structure( storage_s3_bucket: S3BucketName, simcore_file_id: SimcoreS3FileID, single_link_param: SingleLinkParam, - aiopg_engine: Engine, + sqlalchemy_async_engine: AsyncEngine, create_upload_file_link_v1: Callable[..., Awaitable[PresignedLink]], cleanup_user_projects_file_metadata: None, ): @@ -289,7 +307,7 @@ async def test_create_upload_file_with_no_file_size_query_returns_v1_structure( ) # now check the entry in the database is correct, there should be only one await assert_file_meta_data_in_db( - aiopg_engine, + sqlalchemy_async_engine, file_id=simcore_file_id, expected_entry_exists=True, expected_file_size=-1, @@ -314,6 +332,12 @@ class MultiPartParam: expected_chunk_size: ByteSize +@pytest.mark.parametrize( + "location_id", + [SimcoreS3DataManager.get_location_id()], + ids=[SimcoreS3DataManager.get_location_name()], + indirect=True, +) @pytest.mark.parametrize( "test_param", [ @@ -374,14 +398,14 @@ async def test_create_upload_file_presigned_with_file_size_returns_multipart_lin storage_s3_bucket: S3BucketName, simcore_file_id: SimcoreS3FileID, test_param: MultiPartParam, - aiopg_engine: Engine, + sqlalchemy_async_engine: AsyncEngine, create_upload_file_link_v2: Callable[..., Awaitable[FileUploadSchema]], cleanup_user_projects_file_metadata: None, ): # create upload file link received_file_upload = await create_upload_file_link_v2( simcore_file_id, - link_type=test_param.link_type.value.lower(), + link_type=test_param.link_type.value, file_size=f"{test_param.file_size}", ) # number of links @@ -395,7 +419,7 @@ async def test_create_upload_file_presigned_with_file_size_returns_multipart_lin # now check the entry in the database is correct, there should be only one expect_upload_id = bool(test_param.file_size >= MULTIPART_UPLOADS_MIN_TOTAL_SIZE) upload_id: UploadID | None = await assert_file_meta_data_in_db( - aiopg_engine, + sqlalchemy_async_engine, file_id=simcore_file_id, expected_entry_exists=True, expected_file_size=-1, @@ -412,6 +436,12 @@ async def test_create_upload_file_presigned_with_file_size_returns_multipart_lin ) +@pytest.mark.parametrize( + "location_id", + [SimcoreS3DataManager.get_location_id()], + ids=[SimcoreS3DataManager.get_location_name()], + indirect=True, +) @pytest.mark.parametrize( "link_type, file_size", [ @@ -421,8 +451,8 @@ async def test_create_upload_file_presigned_with_file_size_returns_multipart_lin ids=byte_size_ids, ) async def test_delete_unuploaded_file_correctly_cleans_up_db_and_s3( - aiopg_engine: Engine, - client: TestClient, + sqlalchemy_async_engine: AsyncEngine, + client: httpx.AsyncClient, storage_s3_client: SimcoreS3API, storage_s3_bucket: S3BucketName, with_versioning_enabled: None, @@ -431,15 +461,14 @@ async def test_delete_unuploaded_file_correctly_cleans_up_db_and_s3( file_size: ByteSize, create_upload_file_link_v2: Callable[..., Awaitable[FileUploadSchema]], ): - assert client.app # create upload file link upload_link = await create_upload_file_link_v2( - simcore_file_id, link_type=link_type.value.lower(), file_size=file_size + simcore_file_id, link_type=link_type.value, file_size=file_size ) expect_upload_id = bool(file_size >= MULTIPART_UPLOADS_MIN_TOTAL_SIZE) # we shall have an entry in the db, waiting for upload upload_id = await assert_file_meta_data_in_db( - aiopg_engine, + sqlalchemy_async_engine, file_id=simcore_file_id, expected_entry_exists=True, expected_file_size=-1, @@ -457,11 +486,11 @@ 
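`assert_file_meta_data_in_db` also moves to the shared pytest_simcore helpers and now takes the async engine. A condensed sketch of the checks the call sites above ask of it; the signature follows those call sites, the body is an assumption:

```python
from simcore_postgres_database.storage_models import file_meta_data
from sqlalchemy import select
from sqlalchemy.ext.asyncio import AsyncEngine


async def assert_file_meta_data_in_db(
    engine: AsyncEngine,
    *,
    file_id: str,
    expected_entry_exists: bool,
    expected_file_size: int | None = None,
    expected_upload_id: bool | None = None,
    expected_upload_expiration_date: bool | None = None,
):
    async with engine.connect() as conn:
        result = await conn.execute(
            select(file_meta_data).where(file_meta_data.c.file_id == file_id)
        )
        row = result.one_or_none()

    if not expected_entry_exists:
        assert row is None, f"unexpected entry for {file_id=}"
        return None

    assert row is not None, f"missing entry for {file_id=}"
    assert row.file_size == expected_file_size
    if expected_upload_id is not None:
        assert bool(row.upload_id) is expected_upload_id
    if expected_upload_expiration_date is not None:
        assert bool(row.upload_expires_at) is expected_upload_expiration_date
    # returning the upload_id lets tests compare it across calls
    return row.upload_id
```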
async def test_delete_unuploaded_file_correctly_cleans_up_db_and_s3( # delete/abort file upload abort_url = URL(f"{upload_link.links.abort_upload}").relative() response = await client.post(f"{abort_url}") - await assert_status(response, status.HTTP_204_NO_CONTENT) + assert_status(response, status.HTTP_204_NO_CONTENT, None) # the DB shall be cleaned up await assert_file_meta_data_in_db( - aiopg_engine, + sqlalchemy_async_engine, file_id=simcore_file_id, expected_entry_exists=False, expected_file_size=None, @@ -477,6 +506,12 @@ async def test_delete_unuploaded_file_correctly_cleans_up_db_and_s3( ) +@pytest.mark.parametrize( + "location_id", + [SimcoreS3DataManager.get_location_id()], + ids=[SimcoreS3DataManager.get_location_name()], + indirect=True, +) @pytest.mark.parametrize( "link_type, file_size", [ @@ -488,8 +523,8 @@ async def test_delete_unuploaded_file_correctly_cleans_up_db_and_s3( ids=byte_size_ids, ) async def test_upload_same_file_uuid_aborts_previous_upload( - aiopg_engine: Engine, - client: TestClient, + sqlalchemy_async_engine: AsyncEngine, + client: httpx.AsyncClient, storage_s3_client: SimcoreS3API, storage_s3_bucket: S3BucketName, simcore_file_id: SimcoreS3FileID, @@ -497,17 +532,16 @@ async def test_upload_same_file_uuid_aborts_previous_upload( file_size: ByteSize, create_upload_file_link_v2: Callable[..., Awaitable[FileUploadSchema]], ): - assert client.app # create upload file link file_upload_link = await create_upload_file_link_v2( - simcore_file_id, link_type=link_type.value.lower(), file_size=file_size + simcore_file_id, link_type=link_type.value, file_size=file_size ) expect_upload_id = bool( file_size >= MULTIPART_UPLOADS_MIN_TOTAL_SIZE or link_type == LinkType.S3 ) # we shall have an entry in the db, waiting for upload upload_id = await assert_file_meta_data_in_db( - aiopg_engine, + sqlalchemy_async_engine, file_id=simcore_file_id, expected_entry_exists=True, expected_file_size=-1, @@ -527,7 +561,7 @@ async def test_upload_same_file_uuid_aborts_previous_upload( # we should abort the previous upload to prevent unwanted costs await asyncio.sleep(1) new_file_upload_link = await create_upload_file_link_v2( - simcore_file_id, link_type=link_type.value.lower(), file_size=file_size + simcore_file_id, link_type=link_type.value, file_size=file_size ) if link_type == LinkType.PRESIGNED: assert file_upload_link != new_file_upload_link @@ -535,7 +569,7 @@ async def test_upload_same_file_uuid_aborts_previous_upload( assert file_upload_link == new_file_upload_link # we shall have an entry in the db, waiting for upload new_upload_id = await assert_file_meta_data_in_db( - aiopg_engine, + sqlalchemy_async_engine, file_id=simcore_file_id, expected_entry_exists=True, expected_file_size=-1, @@ -564,6 +598,12 @@ def complex_file_name(faker: Faker) -> str: return f"subfolder_1/sub_folder 2/some file name with spaces and special characters -_ü!öäàé+|}} {{3245_{faker.file_name()}" +@pytest.mark.parametrize( + "location_id", + [SimcoreS3DataManager.get_location_id()], + ids=[SimcoreS3DataManager.get_location_name()], + indirect=True, +) @pytest.mark.parametrize( "file_size", [ @@ -583,6 +623,12 @@ async def test_upload_real_file( await upload_file(file_size, complex_file_name) +@pytest.mark.parametrize( + "location_id", + [SimcoreS3DataManager.get_location_id()], + ids=[SimcoreS3DataManager.get_location_name()], + indirect=True, +) @pytest.mark.parametrize( "file_size", [ @@ -594,7 +640,8 @@ async def test_upload_real_file( async def 
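The abort and complete endpoints are not rebuilt from route names; the tests take them from the upload schema's links and strip the origin with yarl's `relative()` before POSTing through the test client. A small illustration of that step:

```python
from yarl import URL

absolute = URL("http://storage:8080/v0/locations/0/files/some-file:abort?user_id=1")
relative = absolute.relative()
# scheme and host are dropped; path and query survive
assert not relative.is_absolute()
assert relative.path == "/v0/locations/0/files/some-file:abort"
assert relative.query["user_id"] == "1"
```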
test_upload_real_file_with_emulated_storage_restart_after_completion_was_called( complex_file_name: str, file_size: ByteSize, - client: TestClient, + initialized_app: FastAPI, + client: httpx.AsyncClient, user_id: UserID, project_id: ProjectID, node_id: NodeID, @@ -602,7 +649,7 @@ async def test_upload_real_file_with_emulated_storage_restart_after_completion_w create_simcore_file_id: Callable[[ProjectID, NodeID, str], SimcoreS3FileID], create_file_of_size: Callable[[ByteSize, str | None], Path], create_upload_file_link_v2: Callable[..., Awaitable[FileUploadSchema]], - aiopg_engine: Engine, + sqlalchemy_async_engine: AsyncEngine, storage_s3_client: SimcoreS3API, storage_s3_bucket: S3BucketName, ): @@ -611,11 +658,11 @@ async def test_upload_real_file_with_emulated_storage_restart_after_completion_w if after running the completion task, storage restarts then the task is lost. Nevertheless the client still has a reference to the completion future and shall be able to ask for its status""" - assert client.app + file = create_file_of_size(file_size, complex_file_name) file_id = create_simcore_file_id(project_id, node_id, complex_file_name) file_upload_link = await create_upload_file_link_v2( - file_id, link_type="presigned", file_size=file_size + file_id, link_type="PRESIGNED", file_size=file_size ) # upload the file part_to_etag: list[UploadedPart] = await upload_file_to_presigned_link( @@ -628,14 +675,15 @@ async def test_upload_real_file_with_emulated_storage_restart_after_completion_w json=jsonable_encoder(FileUploadCompletionBody(parts=part_to_etag)), ) response.raise_for_status() - data, error = await assert_status(response, status.HTTP_202_ACCEPTED) + file_upload_complete_response, error = assert_status( + response, status.HTTP_202_ACCEPTED, FileUploadCompleteResponse + ) assert not error - assert data - file_upload_complete_response = FileUploadCompleteResponse.model_validate(data) + assert file_upload_complete_response state_url = URL(f"{file_upload_complete_response.links.state}").relative() # here we do not check now for the state completion. 
instead we simulate a restart where the tasks disappear - client.app[UPLOAD_TASKS_KEY].clear() + get_completed_upload_tasks(initialized_app).clear() # now check for the completion completion_etag = None async for attempt in AsyncRetrying( @@ -644,15 +692,19 @@ async def test_upload_real_file_with_emulated_storage_restart_after_completion_w stop=stop_after_delay(60), retry=retry_if_exception_type(AssertionError), ): - with attempt, log_context( - logging.INFO, - f"waiting for upload completion {state_url=}, {attempt.retry_state.attempt_number}", - ) as ctx: + with ( + attempt, + log_context( + logging.INFO, + f"waiting for upload completion {state_url=}, {attempt.retry_state.attempt_number}", + ) as ctx, + ): response = await client.post(f"{state_url}") - data, error = await assert_status(response, status.HTTP_200_OK) + future, error = assert_status( + response, status.HTTP_200_OK, FileUploadCompleteFutureResponse + ) assert not error - assert data - future = FileUploadCompleteFutureResponse.model_validate(data) + assert future assert future.state == FileUploadCompleteState.OK assert future.e_tag is not None completion_etag = future.e_tag @@ -662,7 +714,7 @@ async def test_upload_real_file_with_emulated_storage_restart_after_completion_w ) # check the entry in db now has the correct file size, and the upload id is gone await assert_file_meta_data_in_db( - aiopg_engine, + sqlalchemy_async_engine, file_id=file_id, expected_entry_exists=True, expected_file_size=file_size, @@ -679,11 +731,17 @@ async def test_upload_real_file_with_emulated_storage_restart_after_completion_w assert s3_metadata.e_tag == completion_etag +@pytest.mark.parametrize( + "location_id", + [SimcoreS3DataManager.get_location_id()], + ids=[SimcoreS3DataManager.get_location_name()], + indirect=True, +) async def test_upload_of_single_presigned_link_lazily_update_database_on_get( - aiopg_engine: Engine, + sqlalchemy_async_engine: AsyncEngine, storage_s3_client: SimcoreS3API, storage_s3_bucket: S3BucketName, - client: TestClient, + client: httpx.AsyncClient, create_upload_file_link_v2: Callable[..., Awaitable[FileUploadSchema]], create_file_of_size: Callable[[ByteSize, str | None], Path], create_simcore_file_id: Callable[[ProjectID, NodeID, str], SimcoreS3FileID], @@ -693,7 +751,6 @@ async def test_upload_of_single_presigned_link_lazily_update_database_on_get( get_file_meta_data: Callable[..., Awaitable[FileMetaDataGet]], s3_client: S3Client, ): - assert client.app file_size = TypeAdapter(ByteSize).validate_python("500Mib") file_name = faker.file_name() # create a file @@ -701,7 +758,7 @@ async def test_upload_of_single_presigned_link_lazily_update_database_on_get( simcore_file_id = create_simcore_file_id(project_id, node_id, file_name) # get an S3 upload link file_upload_link = await create_upload_file_link_v2( - simcore_file_id, link_type="s3", file_size=file_size + simcore_file_id, link_type="S3", file_size=file_size ) assert file_upload_link # let's use the storage s3 internal client to upload @@ -723,11 +780,17 @@ async def test_upload_of_single_presigned_link_lazily_update_database_on_get( assert received_fmd.entity_tag == upload_e_tag +@pytest.mark.parametrize( + "location_id", + [SimcoreS3DataManager.get_location_id()], + ids=[SimcoreS3DataManager.get_location_name()], + indirect=True, +) async def test_upload_real_file_with_s3_client( - aiopg_engine: Engine, + sqlalchemy_async_engine: AsyncEngine, storage_s3_client: SimcoreS3API, storage_s3_bucket: S3BucketName, - client: TestClient, + client: httpx.AsyncClient, 
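The completion flow above polls the state endpoint with tenacity's `AsyncRetrying`, now wrapped in the parenthesized multi-context `with` form, and raises `AssertionError` (or `ValueError` in the s3-client variant) to trigger another round. A compact, self-contained sketch of the same pattern with the polling target simulated:

```python
import asyncio

from tenacity.asyncio import AsyncRetrying
from tenacity.retry import retry_if_exception_type
from tenacity.stop import stop_after_delay
from tenacity.wait import wait_fixed


async def poll_until_ok(check) -> str:
    """Retry `check` until it reports OK, as the tests do (sketch)."""
    async for attempt in AsyncRetrying(
        reraise=True,
        wait=wait_fixed(0.1),  # the tests wait 1s between attempts
        stop=stop_after_delay(60),
        retry=retry_if_exception_type(AssertionError),
    ):
        with attempt:
            state, e_tag = await check()
            # a failing assert marks the attempt failed and retries
            assert state == "OK"
            assert e_tag is not None
            return e_tag
    raise RuntimeError("unreachable")  # pragma: no cover


async def _demo() -> None:
    calls = 0

    async def check() -> tuple[str, str]:
        nonlocal calls
        calls += 1
        return ("OK" if calls >= 3 else "PENDING"), "etag-123"

    assert await poll_until_ok(check) == "etag-123"


asyncio.run(_demo())
```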
create_upload_file_link_v2: Callable[..., Awaitable[FileUploadSchema]], create_file_of_size: Callable[[ByteSize, str | None], Path], create_simcore_file_id: Callable[[ProjectID, NodeID, str], SimcoreS3FileID], @@ -736,7 +799,6 @@ async def test_upload_real_file_with_s3_client( faker: Faker, s3_client: S3Client, ): - assert client.app file_size = TypeAdapter(ByteSize).validate_python("500Mib") file_name = faker.file_name() # create a file @@ -744,7 +806,7 @@ async def test_upload_real_file_with_s3_client( simcore_file_id = create_simcore_file_id(project_id, node_id, file_name) # get an S3 upload link file_upload_link = await create_upload_file_link_v2( - simcore_file_id, link_type="s3", file_size=file_size + simcore_file_id, link_type="S3", file_size=file_size ) # let's use the storage s3 internal client to upload with file.open("rb") as fp: @@ -766,10 +828,11 @@ async def test_upload_real_file_with_s3_client( with log_context(logging.INFO, f"completing upload of {file=}"): response = await client.post(f"{complete_url}", json={"parts": []}) response.raise_for_status() - data, error = await assert_status(response, status.HTTP_202_ACCEPTED) + file_upload_complete_response, error = assert_status( + response, status.HTTP_202_ACCEPTED, FileUploadCompleteResponse + ) assert not error - assert data - file_upload_complete_response = FileUploadCompleteResponse.model_validate(data) + assert file_upload_complete_response state_url = URL(f"{file_upload_complete_response.links.state}").relative() completion_etag = None async for attempt in AsyncRetrying( @@ -778,18 +841,22 @@ async def test_upload_real_file_with_s3_client( stop=stop_after_delay(60), retry=retry_if_exception_type(ValueError), ): - with attempt, log_context( - logging.INFO, - f"waiting for upload completion {state_url=}, {attempt.retry_state.attempt_number}", - ) as ctx: + with ( + attempt, + log_context( + logging.INFO, + f"waiting for upload completion {state_url=}, {attempt.retry_state.attempt_number}", + ) as ctx, + ): response = await client.post(f"{state_url}") response.raise_for_status() - data, error = await assert_status(response, status.HTTP_200_OK) + future, error = assert_status( + response, status.HTTP_200_OK, FileUploadCompleteFutureResponse + ) assert not error - assert data - future = FileUploadCompleteFutureResponse.model_validate(data) + assert future if future.state != FileUploadCompleteState.OK: - msg = f"{data=}" + msg = f"{future=}" raise ValueError(msg) assert future.state == FileUploadCompleteState.OK assert future.e_tag is not None @@ -801,7 +868,7 @@ async def test_upload_real_file_with_s3_client( # check the entry in db now has the correct file size, and the upload id is gone await assert_file_meta_data_in_db( - aiopg_engine, + sqlalchemy_async_engine, file_id=simcore_file_id, expected_entry_exists=True, expected_file_size=file_size, @@ -818,6 +885,12 @@ async def test_upload_real_file_with_s3_client( assert s3_metadata.e_tag == completion_etag +@pytest.mark.parametrize( + "location_id", + [SimcoreS3DataManager.get_location_id()], + ids=[SimcoreS3DataManager.get_location_name()], + indirect=True, +) @pytest.mark.parametrize( "file_size", [ @@ -827,8 +900,8 @@ async def test_upload_real_file_with_s3_client( ids=byte_size_ids, ) async def test_upload_twice_and_fail_second_time_shall_keep_first_version( - aiopg_engine: Engine, - client: TestClient, + sqlalchemy_async_engine: AsyncEngine, + client: httpx.AsyncClient, storage_s3_client: SimcoreS3API, storage_s3_bucket: S3BucketName, with_versioning_enabled: None, @@ 
-840,18 +913,17 @@ async def test_upload_twice_and_fail_second_time_shall_keep_first_version( user_id: UserID, location_id: LocationID, ): - assert client.app # 1. upload a valid file file_name = faker.file_name() _, uploaded_file_id = await upload_file(file_size, file_name) # 2. create an upload link for the second file upload_link = await create_upload_file_link_v2( - uploaded_file_id, link_type="presigned", file_size=file_size + uploaded_file_id, link_type="PRESIGNED", file_size=file_size ) # we shall have an entry in the db, waiting for upload await assert_file_meta_data_in_db( - aiopg_engine, + sqlalchemy_async_engine, file_id=uploaded_file_id, expected_entry_exists=True, expected_file_size=-1, @@ -863,7 +935,7 @@ async def test_upload_twice_and_fail_second_time_shall_keep_first_version( # 3. upload part of the file to simulate a network issue in the upload new_file = create_file_of_size(file_size, file_name) with pytest.raises(RuntimeError): - async with ClientSession() as session: + async with httpx.AsyncClient() as session: await upload_file_part( session, new_file, @@ -878,11 +950,11 @@ async def test_upload_twice_and_fail_second_time_shall_keep_first_version( # 4. abort file upload abort_url = URL(f"{upload_link.links.abort_upload}").relative() response = await client.post(f"{abort_url}") - await assert_status(response, status.HTTP_204_NO_CONTENT) + assert_status(response, status.HTTP_204_NO_CONTENT, None) # we should have the original file still in now... await assert_file_meta_data_in_db( - aiopg_engine, + sqlalchemy_async_engine, file_id=uploaded_file_id, expected_entry_exists=True, expected_file_size=file_size, @@ -903,7 +975,7 @@ def file_size() -> ByteSize: async def _assert_file_downloaded( - faker: Faker, tmp_path: Path, link: HttpUrl, uploaded_file: Path + faker: Faker, tmp_path: Path, link: AnyUrl, uploaded_file: Path ): dest_file = tmp_path / faker.file_name() async with ClientSession() as session: @@ -917,16 +989,16 @@ async def _assert_file_downloaded( async def test_download_file_no_file_was_uploaded( - client: TestClient, - location_id: int, + initialized_app: FastAPI, + client: httpx.AsyncClient, + location_id: LocationID, project_id: ProjectID, node_id: NodeID, user_id: UserID, storage_s3_client: SimcoreS3API, storage_s3_bucket: S3BucketName, + fake_datcore_tokens: tuple[str, str], ): - assert client.app - missing_file = TypeAdapter(SimcoreS3FileID).validate_python( f"{project_id}/{node_id}/missing.file" ) @@ -936,33 +1008,39 @@ async def test_download_file_no_file_was_uploaded( ) is False ) + download_url = url_from_operation_id( + client, + initialized_app, + "download_file", + location_id=f"{location_id}", + file_id=missing_file, + ).with_query(user_id=user_id) - download_url = ( - client.app.router["download_file"] - .url_for( - location_id=f"{location_id}", - file_id=urllib.parse.quote(missing_file, safe=""), - ) - .with_query(user_id=user_id) - ) response = await client.get(f"{download_url}") - data, error = await assert_status(response, status.HTTP_404_NOT_FOUND) + data, error = assert_status(response, status.HTTP_404_NOT_FOUND, None) assert data is None - assert missing_file in error["message"] + assert len(error["errors"]) == 1 + assert missing_file in error["errors"][0] +@pytest.mark.parametrize( + "location_id", + [SimcoreS3DataManager.get_location_id()], + ids=[SimcoreS3DataManager.get_location_name()], + indirect=True, +) async def test_download_file_1_to_1_with_file_meta_data( - client: TestClient, + initialized_app: FastAPI, + client: 
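One hunk above swaps aiohttp's `ClientSession` for `httpx.AsyncClient` as the session handed to `upload_file_part`, which implies the helper only relies on the session's `put`. A sketch of a single part upload written against httpx, assuming S3-style presigned PUT URLs that answer with an `ETag` header:

```python
import httpx


async def upload_part(
    session: httpx.AsyncClient, presigned_url: str, payload: bytes
) -> str:
    """PUT one part to a presigned link and return its ETag (sketch)."""
    response = await session.put(presigned_url, content=payload)
    response.raise_for_status()
    # presigned S3 uploads report the part's entity tag in a header
    return response.headers["ETag"]
```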
httpx.AsyncClient, file_size: ByteSize, upload_file: Callable[[ByteSize, str], Awaitable[tuple[Path, SimcoreS3FileID]]], - location_id: int, + location_id: LocationID, user_id: UserID, storage_s3_client: SimcoreS3API, storage_s3_bucket: S3BucketName, tmp_path: Path, faker: Faker, ): - assert client.app # 2. file_meta_data entry corresponds to a file # upload a single file as a file_meta_data entry and check link uploaded_file, uploaded_file_uuid = await upload_file( @@ -975,46 +1053,50 @@ async def test_download_file_1_to_1_with_file_meta_data( is True ) - download_url = ( - client.app.router["download_file"] - .url_for( - location_id=f"{location_id}", - file_id=urllib.parse.quote(uploaded_file_uuid, safe=""), - ) - .with_query(user_id=user_id) - ) + download_url = url_from_operation_id( + client, + initialized_app, + "download_file", + location_id=f"{location_id}", + file_id=uploaded_file_uuid, + ).with_query(user_id=user_id) response = await client.get(f"{download_url}") - data, error = await assert_status(response, status.HTTP_200_OK) + data, error = assert_status(response, status.HTTP_200_OK, FileDownloadResponse) assert not error assert data - assert "link" in data - assert TypeAdapter(AnyHttpUrl).validate_python(data["link"]) await _assert_file_downloaded( - faker, tmp_path, link=data["link"], uploaded_file=uploaded_file + faker, tmp_path, link=data.link, uploaded_file=uploaded_file ) +@pytest.mark.parametrize( + "location_id", + [SimcoreS3DataManager.get_location_id()], + ids=[SimcoreS3DataManager.get_location_name()], + indirect=True, +) async def test_download_file_from_inside_a_directory( - client: TestClient, + initialized_app: FastAPI, + client: httpx.AsyncClient, file_size: ByteSize, - location_id: int, + location_id: LocationID, user_id: UserID, - create_empty_directory: Callable[..., Awaitable[FileUploadSchema]], + project_id: ProjectID, + node_id: NodeID, + create_empty_directory: Callable[ + [str, ProjectID, NodeID], Awaitable[SimcoreS3FileID] + ], create_file_of_size: Callable[[ByteSize, str | None], Path], storage_s3_client: SimcoreS3API, storage_s3_bucket: S3BucketName, tmp_path: Path, faker: Faker, ): - assert client.app # 3. 
file_meta_data entry corresponds to a directory # upload a file inside a directory and check the download link directory_name = "a-test-dir" - directory_file_upload = await create_empty_directory(directory_name) - - assert directory_file_upload.urls[0].path - dir_path_in_s3 = directory_file_upload.urls[0].path.strip("/") + dir_path_in_s3 = await create_empty_directory(directory_name, project_id, node_id) file_name = "meta_data_entry_is_dir.file" file_to_upload_in_dir = create_file_of_size(file_size, file_name) @@ -1036,67 +1118,79 @@ async def test_download_file_from_inside_a_directory( ) # finally check the download link - download_url = ( - client.app.router["download_file"] - .url_for( - location_id=f"{location_id}", - file_id=urllib.parse.quote(s3_file_id, safe=""), - ) - .with_query(user_id=user_id) - ) + download_url = url_from_operation_id( + client, + initialized_app, + "download_file", + location_id=f"{location_id}", + file_id=s3_file_id, + ).with_query(user_id=user_id) response = await client.get(f"{download_url}") - data, error = await assert_status(response, status.HTTP_200_OK) + file_download, error = assert_status( + response, status.HTTP_200_OK, FileDownloadResponse + ) assert not error - assert data - assert "link" in data - assert TypeAdapter(AnyHttpUrl).validate_python(data["link"]) + assert file_download + await _assert_file_downloaded( - faker, tmp_path, link=data["link"], uploaded_file=file_to_upload_in_dir + faker, tmp_path, link=file_download.link, uploaded_file=file_to_upload_in_dir ) +@pytest.mark.parametrize( + "location_id", + [SimcoreS3DataManager.get_location_id()], + ids=[SimcoreS3DataManager.get_location_name()], + indirect=True, +) async def test_download_file_the_file_is_missing_from_the_directory( - client: TestClient, - location_id: int, + initialized_app: FastAPI, + client: httpx.AsyncClient, + location_id: LocationID, user_id: UserID, - create_empty_directory: Callable[..., Awaitable[FileUploadSchema]], + project_id: ProjectID, + node_id: NodeID, + create_empty_directory: Callable[ + [str, ProjectID, NodeID], Awaitable[SimcoreS3FileID] + ], ): - assert client.app # file_meta_data entry corresponds to a directory but file is not present in directory directory_name = "a-second-test-dir" - directory_file_upload = await create_empty_directory(directory_name) - - assert directory_file_upload.urls[0].path - dir_path_in_s3 = directory_file_upload.urls[0].path.strip("/") + dir_path_in_s3 = await create_empty_directory(directory_name, project_id, node_id) missing_s3_file_id = TypeAdapter(SimcoreS3FileID).validate_python( f"{dir_path_in_s3}/missing_inside_dir.file" ) - download_url = ( - client.app.router["download_file"] - .url_for( - location_id=f"{location_id}", - file_id=urllib.parse.quote(missing_s3_file_id, safe=""), - ) - .with_query(user_id=user_id) - ) + download_url = url_from_operation_id( + client, + initialized_app, + "download_file", + location_id=f"{location_id}", + file_id=missing_s3_file_id, + ).with_query(user_id=user_id) + response = await client.get(f"{download_url}") - data, error = await assert_status(response, status.HTTP_404_NOT_FOUND) + data, error = assert_status(response, status.HTTP_404_NOT_FOUND, None) assert data is None - assert missing_s3_file_id in error["message"] + assert missing_s3_file_id in error["errors"][0] +@pytest.mark.parametrize( + "location_id", + [SimcoreS3DataManager.get_location_id()], + ids=[SimcoreS3DataManager.get_location_name()], + indirect=True, +) async def test_download_file_access_rights( - client: 
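Download responses are now parsed into a typed `FileDownloadResponse` (whose `link` is an `AnyUrl`) instead of poking at `data["link"]`. For completeness, a sketch of the follow-up download-and-compare step performed by `_assert_file_downloaded`, assuming the link is a plain HTTP(S) URL:

```python
import filecmp
from pathlib import Path

import httpx


async def assert_file_downloaded(
    tmp_path: Path, link: str, uploaded_file: Path
) -> None:
    dest_file = tmp_path / "downloaded.bin"
    async with httpx.AsyncClient() as session:
        response = await session.get(link)  # an AnyUrl can be passed as str(link)
        response.raise_for_status()
        dest_file.write_bytes(response.content)
    # byte-for-byte comparison against the originally uploaded file
    assert filecmp.cmp(uploaded_file, dest_file, shallow=False)
```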
TestClient, - location_id: int, + initialized_app: FastAPI, + client: httpx.AsyncClient, + location_id: LocationID, user_id: UserID, storage_s3_client: SimcoreS3API, storage_s3_bucket: S3BucketName, faker: Faker, ): - assert client.app - # project_id does not exist missing_file = TypeAdapter(SimcoreS3FileID).validate_python( f"{faker.uuid4()}/{faker.uuid4()}/project_id_is_missing" @@ -1107,21 +1201,26 @@ async def test_download_file_access_rights( ) is False ) + download_url = url_from_operation_id( + client, + initialized_app, + "download_file", + location_id=f"{location_id}", + file_id=missing_file, + ).with_query(user_id=user_id) - download_url = ( - client.app.router["download_file"] - .url_for( - location_id=f"{location_id}", - file_id=urllib.parse.quote(missing_file, safe=""), - ) - .with_query(user_id=user_id) - ) response = await client.get(f"{download_url}") - data, error = await assert_status(response, status.HTTP_403_FORBIDDEN) + data, error = assert_status(response, status.HTTP_403_FORBIDDEN, None) assert data is None - assert "Insufficient access rights" in error["message"] + assert "Insufficient access rights" in error["errors"][0] +@pytest.mark.parametrize( + "location_id", + [SimcoreS3DataManager.get_location_id()], + ids=[SimcoreS3DataManager.get_location_name()], + indirect=True, +) @pytest.mark.parametrize( "file_size", [ @@ -1130,33 +1229,32 @@ async def test_download_file_access_rights( ids=byte_size_ids, ) async def test_delete_file( - aiopg_engine: Engine, + sqlalchemy_async_engine: AsyncEngine, storage_s3_client: SimcoreS3API, storage_s3_bucket: S3BucketName, - client: TestClient, + initialized_app: FastAPI, + client: httpx.AsyncClient, file_size: ByteSize, upload_file: Callable[[ByteSize, str], Awaitable[tuple[Path, SimcoreS3FileID]]], - location_id: int, + location_id: LocationID, user_id: UserID, faker: Faker, ): - assert client.app _, uploaded_file_uuid = await upload_file(file_size, faker.file_name()) - delete_url = ( - client.app.router["delete_file"] - .url_for( - location_id=f"{location_id}", - file_id=urllib.parse.quote(uploaded_file_uuid, safe=""), - ) - .with_query(user_id=user_id) - ) + delete_url = url_from_operation_id( + client, + initialized_app, + "delete_file", + location_id=f"{location_id}", + file_id=uploaded_file_uuid, + ).with_query(user_id=user_id) response = await client.delete(f"{delete_url}") - await assert_status(response, status.HTTP_204_NO_CONTENT) + assert_status(response, status.HTTP_204_NO_CONTENT, None) # check the entry in db is removed await assert_file_meta_data_in_db( - aiopg_engine, + sqlalchemy_async_engine, file_id=uploaded_file_uuid, expected_entry_exists=False, expected_file_size=None, @@ -1171,8 +1269,15 @@ async def test_delete_file( ) +@pytest.mark.parametrize( + "location_id", + [SimcoreS3DataManager.get_location_id()], + ids=[SimcoreS3DataManager.get_location_name()], + indirect=True, +) async def test_copy_as_soft_link( - client: TestClient, + initialized_app: FastAPI, + client: httpx.AsyncClient, user_id: UserID, project_id: ProjectID, node_id: NodeID, @@ -1180,101 +1285,105 @@ async def test_copy_as_soft_link( create_simcore_file_id: Callable[[ProjectID, NodeID, str], SimcoreS3FileID], faker: Faker, ): - assert client.app - # missing simcore_file_id returns 404 missing_file_uuid = create_simcore_file_id(project_id, node_id, faker.file_name()) invalid_link_id = create_simcore_file_id(uuid4(), uuid4(), faker.file_name()) - url = ( - client.app.router["copy_as_soft_link"] - .url_for( - 
file_id=urllib.parse.quote(missing_file_uuid, safe=""), - ) - .with_query(user_id=user_id) - ) + url = url_from_operation_id( + client, + initialized_app, + "copy_as_soft_link", + file_id=missing_file_uuid, + ).with_query(user_id=user_id) response = await client.post( f"{url}", json=jsonable_encoder(SoftCopyBody(link_id=invalid_link_id)) ) - await assert_status(response, status.HTTP_404_NOT_FOUND) + assert_status(response, status.HTTP_404_NOT_FOUND, None) # now let's try with whatever link id file, original_file_uuid = await upload_file( TypeAdapter(ByteSize).validate_python("10Mib"), faker.file_name() ) - url = ( - client.app.router["copy_as_soft_link"] - .url_for( - file_id=urllib.parse.quote(original_file_uuid, safe=""), - ) - .with_query(user_id=user_id) - ) + url = url_from_operation_id( + client, + initialized_app, + "copy_as_soft_link", + file_id=original_file_uuid, + ).with_query(user_id=user_id) + link_id = TypeAdapter(SimcoreS3FileID).validate_python( f"api/{node_id}/{faker.file_name()}" ) response = await client.post( f"{url}", json=jsonable_encoder(SoftCopyBody(link_id=link_id)) ) - data, error = await assert_status(response, status.HTTP_200_OK) + fmd, error = assert_status(response, status.HTTP_200_OK, FileMetaDataGet) assert not error - fmd = TypeAdapter(FileMetaDataGet).validate_python(data) + assert fmd assert fmd.file_id == link_id -async def __list_files( - client: TestClient, +async def _list_files( + initialized_app: FastAPI, + client: httpx.AsyncClient, user_id: UserID, location_id: LocationID, *, - path: str, expand_dirs: bool, ) -> list[FileMetaDataGet]: - assert client.app - get_url = ( - client.app.router["get_files_metadata"] - .url_for( - location_id=f"{location_id}", - file_id=urllib.parse.quote(path, safe=""), - ) - .with_query(user_id=user_id, expand_dirs=f"{expand_dirs}".lower()) - ) + get_url = url_from_operation_id( + client, + initialized_app, + "list_files_metadata", + location_id=f"{location_id}", + ).with_query(user_id=user_id, expand_dirs=f"{expand_dirs}".lower()) response = await client.get(f"{get_url}") - data, error = await assert_status(response, status.HTTP_200_OK) + fmds, error = assert_status(response, status.HTTP_200_OK, list[FileMetaDataGet]) assert not error - return TypeAdapter(list[FileMetaDataGet]).validate_python(data) + assert fmds is not None + return fmds async def _list_files_legacy( - client: TestClient, + initialized_app: FastAPI, + client: httpx.AsyncClient, user_id: UserID, location_id: LocationID, - directory_file_upload: FileUploadSchema, ) -> list[FileMetaDataGet]: - assert directory_file_upload.urls[0].path - directory_file_id = directory_file_upload.urls[0].path.strip("/") - return await __list_files( - client, user_id, location_id, path=directory_file_id, expand_dirs=True + return await _list_files( + initialized_app, + client, + user_id, + location_id, + expand_dirs=True, ) async def _list_files_and_directories( - client: TestClient, + initialized_app: FastAPI, + client: httpx.AsyncClient, user_id: UserID, location_id: LocationID, - directory_file_upload: FileUploadSchema, ) -> list[FileMetaDataGet]: - assert directory_file_upload.urls[0].path - directory_parent_path = Path(directory_file_upload.urls[0].path).parent - directory_file_id = f"{directory_parent_path}".strip("/") - return await __list_files( - client, user_id, location_id, path=directory_file_id, expand_dirs=False + return await _list_files( + initialized_app, + client, + user_id, + location_id, + expand_dirs=False, ) +@pytest.mark.parametrize( + "location_id", 
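In `_list_files` just above, `expand_dirs` is serialized as a lower-cased string because booleans do not survive a query string as-is; the encoding step in isolation:

```python
from yarl import URL


def files_url(base: URL, *, user_id: int, expand_dirs: bool) -> URL:
    # True -> "true", False -> "false": explicit encoding for query params
    return base.with_query(user_id=user_id, expand_dirs=f"{expand_dirs}".lower())


url = files_url(URL("/v0/locations/0/files/metadata"), user_id=1, expand_dirs=False)
assert url.query["expand_dirs"] == "false"
```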
+ [SimcoreS3DataManager.get_location_id()], + ids=[SimcoreS3DataManager.get_location_name()], + indirect=True, +) @pytest.mark.parametrize("link_type", LinkType) @pytest.mark.parametrize( "file_size", [ - ByteSize(-1), + ByteSize(0), TypeAdapter(ByteSize).validate_python("0"), TypeAdapter(ByteSize).validate_python("1TB"), ], @@ -1284,7 +1393,8 @@ async def test_is_directory_link_forces_link_type_and_size( node_id: NodeID, create_simcore_file_id: Callable[[ProjectID, NodeID, str], SimcoreS3FileID], create_upload_file_link_v2: Callable[..., Awaitable[FileUploadSchema]], - client: TestClient, + initialized_app: FastAPI, + client: httpx.AsyncClient, location_id: LocationID, user_id: UserID, link_type: LinkType, @@ -1294,7 +1404,7 @@ async def test_is_directory_link_forces_link_type_and_size( directory_file_id = create_simcore_file_id(project_id, node_id, DIR_NAME) directory_file_upload: FileUploadSchema = await create_upload_file_link_v2( directory_file_id, - link_type=link_type.value.lower(), + link_type=link_type.value, is_directory="true", file_size=file_size, ) @@ -1302,7 +1412,7 @@ async def test_is_directory_link_forces_link_type_and_size( assert len(directory_file_upload.urls) == 1 files_and_directories: list[FileMetaDataGet] = await _list_files_and_directories( - client, user_id, location_id, directory_file_upload + initialized_app, client, user_id, location_id ) assert len(files_and_directories) == 1 assert files_and_directories[0].is_directory is True @@ -1310,26 +1420,30 @@ async def test_is_directory_link_forces_link_type_and_size( assert files_and_directories[0].file_size == 0 +@pytest.mark.parametrize( + "location_id", + [SimcoreS3DataManager.get_location_id()], + ids=[SimcoreS3DataManager.get_location_name()], + indirect=True, +) async def test_ensure_expand_dirs_defaults_true( + initialized_app: FastAPI, mocker: MockerFixture, - client: TestClient, + client: httpx.AsyncClient, user_id: UserID, - location_id: int, + location_id: LocationID, ): mocked_object = mocker.patch( "simcore_service_storage.simcore_s3_dsm.SimcoreS3DataManager.list_files", autospec=True, ) - assert client.app - get_url = ( - client.app.router["get_files_metadata"] - .url_for( - location_id=f"{location_id}", - file_id=urllib.parse.quote("mocked_path", safe=""), - ) - .with_query(user_id=user_id) - ) + get_url = url_from_operation_id( + client, + initialized_app, + "list_files_metadata", + location_id=f"{location_id}", + ).with_query(user_id=user_id) await client.get(f"{get_url}") assert len(mocked_object.call_args_list) == 1 @@ -1338,189 +1452,215 @@ async def test_ensure_expand_dirs_defaults_true( assert call_args_list.kwargs["expand_dirs"] is True +@pytest.mark.parametrize( + "location_id", + [SimcoreS3DataManager.get_location_id()], + ids=[SimcoreS3DataManager.get_location_name()], + indirect=True, +) async def test_upload_file_is_directory_and_remove_content( - create_empty_directory: Callable[..., Awaitable[FileUploadSchema]], - populate_directory: Callable[..., Awaitable[None]], + initialized_app: FastAPI, + create_empty_directory: Callable[ + [str, ProjectID, NodeID], Awaitable[SimcoreS3FileID] + ], + populate_directory: Callable[ + [ByteSize, str, ProjectID, NodeID, int, int], + Awaitable[tuple[NodeID, dict[SimcoreS3FileID, FileIDDict]]], + ], delete_directory: Callable[..., Awaitable[None]], - client: TestClient, + client: httpx.AsyncClient, location_id: LocationID, user_id: UserID, - faker: Faker, + project_id: ProjectID, + node_id: NodeID, ): FILE_SIZE_IN_DIR = 
TypeAdapter(ByteSize).validate_python("1Mib") DIR_NAME = "some-dir" SUBDIR_COUNT = 4 - FILE_COUNT = 5 + FILE_COUNT = 20 # DIRECTORY CREATION (is empty) - directory_file_upload: FileUploadSchema = await create_empty_directory( - dir_name=DIR_NAME - ) + directory_in_s3 = await create_empty_directory(DIR_NAME, project_id, node_id) files_and_directories: list[FileMetaDataGet] = await _list_files_and_directories( - client, user_id, location_id, directory_file_upload + initialized_app, client, user_id, location_id ) assert len(files_and_directories) == 1 list_of_files: list[FileMetaDataGet] = await _list_files_legacy( - client, user_id, location_id, directory_file_upload + initialized_app, client, user_id, location_id ) assert len(list_of_files) == 0 # DIRECTORY WITH CONTENT await populate_directory( - file_size_in_dir=FILE_SIZE_IN_DIR, - dir_name=DIR_NAME, - subdir_count=SUBDIR_COUNT, - file_count=FILE_COUNT, + FILE_SIZE_IN_DIR, + DIR_NAME, + project_id, + node_id, + SUBDIR_COUNT, + FILE_COUNT, ) files_and_directories: list[FileMetaDataGet] = await _list_files_and_directories( - client, user_id, location_id, directory_file_upload + initialized_app, client, user_id, location_id ) assert len(files_and_directories) == 1 list_of_files: list[FileMetaDataGet] = await _list_files_legacy( - client, user_id, location_id, directory_file_upload + initialized_app, client, user_id, location_id ) - assert len(list_of_files) == SUBDIR_COUNT * FILE_COUNT + assert len(list_of_files) == FILE_COUNT # DELETE NOT EXISTING - assert client.app - - delete_url = ( - client.app.router["delete_file"] - .url_for( - location_id=f"{location_id}", - file_id=urllib.parse.quote( - "/".join(list_of_files[0].file_id.split("/")[:2]) + "/does_not_exist", - safe="", - ), - ) - .with_query(user_id=user_id) - ) + delete_url = url_from_operation_id( + client, + initialized_app, + "delete_file", + location_id=f"{location_id}", + file_id="/".join(list_of_files[0].file_id.split("/")[:2]) + "/does_not_exist", + ).with_query(user_id=user_id) response = await client.delete(f"{delete_url}") - _, error = await assert_status(response, status.HTTP_204_NO_CONTENT) + _, error = assert_status(response, status.HTTP_204_NO_CONTENT, None) assert error is None list_of_files: list[FileMetaDataGet] = await _list_files_legacy( - client, user_id, location_id, directory_file_upload + initialized_app, client, user_id, location_id ) - assert len(list_of_files) == SUBDIR_COUNT * FILE_COUNT + assert len(list_of_files) == FILE_COUNT # DELETE ONE FILE FROM THE DIRECTORY - assert client.app - delete_url = ( - client.app.router["delete_file"] - .url_for( - location_id=f"{location_id}", - file_id=urllib.parse.quote(list_of_files[0].file_id, safe=""), - ) - .with_query(user_id=user_id) - ) + delete_url = url_from_operation_id( + client, + initialized_app, + "delete_file", + location_id=f"{location_id}", + file_id=list_of_files[0].file_id, + ).with_query(user_id=user_id) response = await client.delete(f"{delete_url}") - _, error = await assert_status(response, status.HTTP_204_NO_CONTENT) + _, error = assert_status(response, status.HTTP_204_NO_CONTENT, None) assert error is None - list_of_files: list[FileMetaDataGet] = await _list_files_legacy( - client, user_id, location_id, directory_file_upload + list_of_files = await _list_files_legacy( + initialized_app, client, user_id, location_id ) - assert len(list_of_files) == SUBDIR_COUNT * FILE_COUNT - 1 + assert len(list_of_files) == FILE_COUNT - 1 # DIRECTORY REMOVAL - await 
delete_directory(directory_file_upload=directory_file_upload) + await delete_directory(directory_in_s3) - list_of_files: list[FileMetaDataGet] = await _list_files_legacy( - client, user_id, location_id, directory_file_upload + list_of_files = await _list_files_legacy( + initialized_app, client, user_id, location_id ) assert len(list_of_files) == 0 - files_and_directories: list[FileMetaDataGet] = await _list_files_and_directories( - client, user_id, location_id, directory_file_upload + files_and_directories = await _list_files_and_directories( + initialized_app, client, user_id, location_id ) assert len(files_and_directories) == 0 -@pytest.mark.parametrize("files_in_dir", [1002]) +@pytest.mark.parametrize( + "location_id", + [SimcoreS3DataManager.get_location_id()], + ids=[SimcoreS3DataManager.get_location_name()], + indirect=True, +) +@pytest.mark.parametrize("files_count", [1002]) async def test_listing_more_than_1000_objects_in_bucket( create_directory_with_files: Callable[ - ..., AbstractAsyncContextManager[FileUploadSchema] + [str, ByteSize, int, int, ProjectID, NodeID], + Awaitable[ + tuple[SimcoreS3FileID, tuple[NodeID, dict[SimcoreS3FileID, FileIDDict]]] + ], ], - client: TestClient, + initialized_app: FastAPI, + client: httpx.AsyncClient, location_id: LocationID, user_id: UserID, - files_in_dir: int, + project_id: ProjectID, + node_id: NodeID, + files_count: int, ): - async with create_directory_with_files( - dir_name="some-random", - file_size_in_dir=TypeAdapter(ByteSize).validate_python("1"), - subdir_count=1, - file_count=files_in_dir, - ) as directory_file_upload: - list_of_files: list[FileMetaDataGet] = await _list_files_legacy( - client, user_id, location_id, directory_file_upload - ) - # for now no more than 1000 objects will be returned - assert len(list_of_files) == 1000 + SUBDIR_COUNT = 1 + await create_directory_with_files( + "random-directory", + TypeAdapter(ByteSize).validate_python("1"), + SUBDIR_COUNT, + files_count, + project_id, + node_id, + ) + list_of_files = await _list_files_legacy( + initialized_app, client, user_id, location_id + ) + # for now no more than 1000 objects will be returned + assert len(list_of_files) == 1000 +@pytest.mark.parametrize( + "location_id", + [SimcoreS3DataManager.get_location_id()], + ids=[SimcoreS3DataManager.get_location_name()], + indirect=True, +) @pytest.mark.parametrize("uuid_filter", [True, False]) +@pytest.mark.parametrize( + "project_params", + [ + ProjectWithFilesParams( + num_nodes=1, + allowed_file_sizes=(TypeAdapter(ByteSize).validate_python("1b"),), + workspace_files_count=0, + ), + ], + ids=str, +) async def test_listing_with_project_id_filter( - client: TestClient, + initialized_app: FastAPI, + client: httpx.AsyncClient, location_id: LocationID, user_id: UserID, faker: Faker, random_project_with_files: Callable[ - [int, tuple[ByteSize, ...]], + [ProjectWithFilesParams], Awaitable[ - tuple[ - dict[str, Any], - dict[NodeID, dict[SimcoreS3FileID, dict[str, Path | str]]], - ] + tuple[dict[str, Any], dict[NodeID, dict[SimcoreS3FileID, FileIDDict]]] ], ], uuid_filter: bool, + project_params: ProjectWithFilesParams, ): - project, src_projects_list = await random_project_with_files( - num_nodes=1, - file_sizes=(ByteSize(1),), - file_checksums=(TypeAdapter(SHA256Str).validate_python(faker.sha256()),), - ) - _, _ = await random_project_with_files( - num_nodes=1, - file_sizes=(ByteSize(1),), - file_checksums=(TypeAdapter(SHA256Str).validate_python(faker.sha256()),), - ) + src_project, src_projects_list = await 
random_project_with_files(project_params) + _, _ = await random_project_with_files(project_params) assert len(src_projects_list.keys()) > 0 node_id = next(iter(src_projects_list.keys())) project_files_in_db = set(src_projects_list[node_id]) assert len(project_files_in_db) > 0 - project_id = project["uuid"] + project_id = src_project["uuid"] project_file_name = Path(choice(list(project_files_in_db))).name # noqa: S311 - assert client.app query = { "user_id": user_id, - "project_id": project_id, + "project_id": f"{project_id}", "uuid_filter": project_file_name if uuid_filter else None, } - url = ( - client.app.router["get_files_metadata"] - .url_for(location_id=f"{location_id}") - .with_query(**{k: v for k, v in query.items() if v is not None}) - ) + url = url_from_operation_id( + client, initialized_app, "list_files_metadata", location_id=f"{location_id}" + ).with_query(**{k: v for k, v in query.items() if v is not None}) response = await client.get(f"{url}") - data, _ = await assert_status(response, status.HTTP_200_OK) - - list_of_files = TypeAdapter(list[FileMetaDataGet]).validate_python(data) + list_of_files, _ = assert_status( + response, status.HTTP_200_OK, list[FileMetaDataGet] + ) + assert list_of_files if uuid_filter: assert len(list_of_files) == 1 diff --git a/services/storage/tests/unit/test_handlers_files_metadata.py b/services/storage/tests/unit/test_handlers_files_metadata.py index 9abd834d21a..dd8bd4a2728 100644 --- a/services/storage/tests/unit/test_handlers_files_metadata.py +++ b/services/storage/tests/unit/test_handlers_files_metadata.py @@ -2,23 +2,29 @@ # pylint: disable=redefined-outer-name # pylint: disable=unused-argument - -import urllib.parse from collections.abc import Awaitable, Callable from copy import deepcopy from pathlib import Path from random import choice from typing import Protocol +import httpx import pytest -from aiohttp.test_utils import TestClient from faker import Faker -from models_library.api_schemas_storage import FileMetaDataGet, SimcoreS3FileID +from fastapi import FastAPI +from models_library.api_schemas_storage.storage_schemas import ( + FileMetaDataGet, + SimcoreS3FileID, +) from models_library.projects import ProjectID +from models_library.projects_nodes_io import LocationID from models_library.users import UserID from pydantic import ByteSize, TypeAdapter -from pytest_simcore.helpers.assert_checks import assert_status +from pytest_simcore.helpers.fastapi import url_from_operation_id +from pytest_simcore.helpers.httpx_assert_checks import assert_status from servicelib.aiohttp import status +from simcore_service_storage.simcore_s3_dsm import SimcoreS3DataManager +from yarl import URL pytest_simcore_core_services_selection = ["postgres"] pytest_simcore_ops_services_selection = ["adminer"] @@ -32,34 +38,42 @@ async def __call__( read: bool, write: bool, delete: bool, - ) -> None: - ... + ) -> None: ... 
-async def test_get_files_metadata( +@pytest.mark.parametrize( + "location_id", + [SimcoreS3DataManager.get_location_id()], + ids=[SimcoreS3DataManager.get_location_name()], + indirect=True, +) +async def test_list_files_metadata( upload_file: Callable[[ByteSize, str], Awaitable[tuple[Path, SimcoreS3FileID]]], create_project_access_rights: CreateProjectAccessRightsCallable, - client: TestClient, + initialized_app: FastAPI, + client: httpx.AsyncClient, user_id: UserID, other_user_id: UserID, - location_id: int, + location_id: LocationID, project_id: ProjectID, faker: Faker, ): - assert client.app - url = ( - client.app.router["get_files_metadata"] - .url_for(location_id=f"{location_id}") + URL(f"{client.base_url}") + .with_path( + initialized_app.url_path_for("list_files_metadata", location_id=location_id) + ) .with_query(user_id=f"{user_id}") ) # this should return an empty list response = await client.get(f"{url}") - data, error = await assert_status(response, status.HTTP_200_OK) + + list_fmds, error = assert_status( + response, status.HTTP_200_OK, list[FileMetaDataGet] + ) + assert list_fmds == [] assert not error - list_fmds = TypeAdapter(list[FileMetaDataGet]).validate_python(data) - assert not list_fmds # now add some stuff there NUM_FILES = 10 @@ -71,9 +85,11 @@ async def test_get_files_metadata( # we should find these files now response = await client.get(f"{url}") - data, error = await assert_status(response, status.HTTP_200_OK) + list_fmds, error = assert_status( + response, status.HTTP_200_OK, list[FileMetaDataGet] + ) + assert list_fmds assert not error - list_fmds = TypeAdapter(list[FileMetaDataGet]).validate_python(data) assert len(list_fmds) == NUM_FILES # checks project_id filter! @@ -84,15 +100,18 @@ async def test_get_files_metadata( write=True, delete=True, ) + previous_data = deepcopy(list_fmds) response = await client.get( f"{url.update_query(project_id=str(project_id), user_id=other_user_id)}" ) - previous_data = deepcopy(data) - data, error = await assert_status(response, status.HTTP_200_OK) + + list_fmds, error = assert_status( + response, status.HTTP_200_OK, list[FileMetaDataGet] + ) + assert list_fmds assert not error - list_fmds = TypeAdapter(list[FileMetaDataGet]).validate_python(data) assert len(list_fmds) == (NUM_FILES) - assert previous_data == data + assert previous_data == list_fmds # create some more files but with a base common name NUM_FILES = 10 @@ -105,16 +124,20 @@ async def test_get_files_metadata( # we should find these files now response = await client.get(f"{url}") - data, error = await assert_status(response, status.HTTP_200_OK) + list_fmds, error = assert_status( + response, status.HTTP_200_OK, list[FileMetaDataGet] + ) + assert list_fmds assert not error - list_fmds = TypeAdapter(list[FileMetaDataGet]).validate_python(data) assert len(list_fmds) == (2 * NUM_FILES) # we can filter them now response = await client.get(f"{url.update_query(uuid_filter='common_name')}") - data, error = await assert_status(response, status.HTTP_200_OK) + list_fmds, error = assert_status( + response, status.HTTP_200_OK, list[FileMetaDataGet] + ) + assert list_fmds assert not error - list_fmds = TypeAdapter(list[FileMetaDataGet]).validate_python(data) assert len(list_fmds) == (NUM_FILES) @@ -122,72 +145,80 @@ async def test_get_files_metadata( reason="storage get_file_metadata must return a 200 with no payload as long as legacy services are around!!" 
) async def test_get_file_metadata_is_legacy_services_compatible( - client: TestClient, + initialized_app: FastAPI, + client: httpx.AsyncClient, user_id: UserID, - location_id: int, + location_id: LocationID, simcore_file_id: SimcoreS3FileID, ): - assert client.app - url = ( - client.app.router["get_file_metadata"] - .url_for( - location_id=f"{location_id}", - file_id=f"{urllib.parse.quote(simcore_file_id, safe='')}", + URL(f"{client.base_url}") + .with_path( + initialized_app.url_path_for( + "get_file_metadata", + location_id=location_id, + file_id=simcore_file_id, + ) ) .with_query(user_id=f"{user_id}") ) + # this should return a 404 (expected to fail while legacy services are around) response = await client.get(f"{url}") - await assert_status(response, status.HTTP_404_NOT_FOUND) + assert response.status_code == status.HTTP_404_NOT_FOUND +@pytest.mark.parametrize( + "location_id", + [SimcoreS3DataManager.get_location_id()], + ids=[SimcoreS3DataManager.get_location_name()], + indirect=True, +) async def test_get_file_metadata( upload_file: Callable[[ByteSize, str], Awaitable[tuple[Path, SimcoreS3FileID]]], - client: TestClient, + initialized_app: FastAPI, + client: httpx.AsyncClient, user_id: UserID, - location_id: int, + location_id: LocationID, project_id: ProjectID, simcore_file_id: SimcoreS3FileID, faker: Faker, ): - assert client.app + url = url_from_operation_id( + client, + initialized_app, + "get_file_metadata", + location_id=f"{location_id}", + file_id=simcore_file_id, + ).with_query(user_id=user_id) - url = ( - client.app.router["get_file_metadata"] - .url_for( - location_id=f"{location_id}", - file_id=f"{urllib.parse.quote(simcore_file_id, safe='')}", - ) - .with_query(user_id=f"{user_id}") - ) # this should return empty data response = await client.get(f"{url}") # await assert_status(response, status.HTTP_404_NOT_FOUND) # NOTE: This needs to be an OK response with empty data until ALL legacy services are gone, then it should be changed to 404!
see test above - assert response.status == status.HTTP_200_OK - assert await response.json() == {"data": {}, "error": "No result found"} + data, error = assert_status(response, status.HTTP_200_OK, dict) + assert error == "No result found" + assert data == {} # now add some stuff there NUM_FILES = 10 file_size = TypeAdapter(ByteSize).validate_python("15Mib") - files_owned_by_us = [] - for _ in range(NUM_FILES): - files_owned_by_us.append(await upload_file(file_size, faker.file_name())) - selected_file, selected_file_uuid = choice(files_owned_by_us) - url = ( - client.app.router["get_file_metadata"] - .url_for( - location_id=f"{location_id}", - file_id=f"{urllib.parse.quote(selected_file_uuid, safe='')}", - ) - .with_query(user_id=f"{user_id}") - ) + files_owned_by_us = [ + await upload_file(file_size, faker.file_name()) for _ in range(NUM_FILES) + ] + selected_file, selected_file_uuid = choice(files_owned_by_us) # noqa: S311 + url = url_from_operation_id( + client, + initialized_app, + "get_file_metadata", + location_id=f"{location_id}", + file_id=selected_file_uuid, + ).with_query(user_id=user_id) + response = await client.get(f"{url}") - data, error = await assert_status(response, status.HTTP_200_OK) + fmd, error = assert_status(response, status.HTTP_200_OK, FileMetaDataGet) assert not error - assert data - fmd = TypeAdapter(FileMetaDataGet).validate_python(data) + assert fmd assert fmd.file_id == selected_file_uuid assert fmd.file_size == selected_file.stat().st_size diff --git a/services/storage/tests/unit/test_handlers_health.py b/services/storage/tests/unit/test_handlers_health.py index 8705c4c8e36..640fbb376b2 100644 --- a/services/storage/tests/unit/test_handlers_health.py +++ b/services/storage/tests/unit/test_handlers_health.py @@ -4,50 +4,50 @@ # pylint: disable=protected-access +import httpx import simcore_service_storage._meta -from aiohttp.test_utils import TestClient -from models_library.api_schemas_storage import S3BucketName +from fastapi import FastAPI +from models_library.api_schemas_storage.storage_schemas import HealthCheck, S3BucketName from models_library.app_diagnostics import AppStatusCheck from moto.server import ThreadedMotoServer -from pytest_simcore.helpers.assert_checks import assert_status +from pytest_simcore.helpers.fastapi import url_from_operation_id +from pytest_simcore.helpers.httpx_assert_checks import assert_status from servicelib.aiohttp import status -from simcore_service_storage.handlers_health import HealthCheck from types_aiobotocore_s3 import S3Client pytest_simcore_core_services_selection = ["postgres"] pytest_simcore_ops_services_selection = ["adminer"] -async def test_health_check(client: TestClient): - assert client.app - url = client.app.router["health_check"].url_for() +async def test_health_check(initialized_app: FastAPI, client: httpx.AsyncClient): + url = url_from_operation_id(client, initialized_app, "get_health") response = await client.get(f"{url}") - data, error = await assert_status(response, status.HTTP_200_OK) - assert data + app_health, error = assert_status(response, status.HTTP_200_OK, HealthCheck) + assert app_health assert not error - app_health = HealthCheck.model_validate(data) assert app_health.name == simcore_service_storage._meta.PROJECT_NAME # noqa: SLF001 assert app_health.version == str( - simcore_service_storage._meta.VERSION - ) # noqa: SLF001 + simcore_service_storage._meta.VERSION # noqa: SLF001 + ) -async def test_health_status(client: TestClient): - assert client.app - url = 
client.app.router["get_status"].url_for() +async def test_health_status(initialized_app: FastAPI, client: httpx.AsyncClient): + url = url_from_operation_id(client, initialized_app, "get_status") response = await client.get(f"{url}") - data, error = await assert_status(response, status.HTTP_200_OK) - assert data + app_status_check, error = assert_status( + response, status.HTTP_200_OK, AppStatusCheck + ) + assert app_status_check assert not error - app_status_check = AppStatusCheck.model_validate(data) assert ( - app_status_check.app_name == simcore_service_storage._meta.PROJECT_NAME - ) # noqa: SLF001 + app_status_check.app_name + == simcore_service_storage._meta.PROJECT_NAME # noqa: SLF001 + ) assert app_status_check.version == str( - simcore_service_storage._meta.VERSION - ) # noqa: SLF001 + simcore_service_storage._meta.VERSION # noqa: SLF001 + ) assert len(app_status_check.services) == 2 assert "postgres" in app_status_check.services assert "healthy" in app_status_check.services["postgres"] @@ -58,55 +58,61 @@ async def test_health_status(client: TestClient): async def test_bad_health_status_if_bucket_missing( - client: TestClient, + initialized_app: FastAPI, + client: httpx.AsyncClient, storage_s3_bucket: S3BucketName, s3_client: S3Client, ): - assert client.app - url = client.app.router["get_status"].url_for() + url = url_from_operation_id(client, initialized_app, "get_status") response = await client.get(f"{url}") - data, error = await assert_status(response, status.HTTP_200_OK) - assert data + app_status_check, error = assert_status( + response, status.HTTP_200_OK, AppStatusCheck + ) + assert app_status_check assert not error - app_status_check = AppStatusCheck.model_validate(data) assert app_status_check.services["s3"]["healthy"] == "connected" # now delete the bucket await s3_client.delete_bucket(Bucket=storage_s3_bucket) # check again the health response = await client.get(f"{url}") - data, error = await assert_status(response, status.HTTP_200_OK) - assert data + app_status_check, error = assert_status( + response, status.HTTP_200_OK, AppStatusCheck + ) + assert app_status_check assert not error - app_status_check = AppStatusCheck.model_validate(data) assert app_status_check.services["s3"]["healthy"] == "no access to S3 bucket" async def test_bad_health_status_if_s3_server_missing( - client: TestClient, mocked_aws_server: ThreadedMotoServer + initialized_app: FastAPI, + client: httpx.AsyncClient, + mocked_aws_server: ThreadedMotoServer, ): - assert client.app - url = client.app.router["get_status"].url_for() + url = url_from_operation_id(client, initialized_app, "get_status") response = await client.get(f"{url}") - data, error = await assert_status(response, status.HTTP_200_OK) - assert data + app_status_check, error = assert_status( + response, status.HTTP_200_OK, AppStatusCheck + ) + assert app_status_check assert not error - app_status_check = AppStatusCheck.model_validate(data) assert app_status_check.services["s3"]["healthy"] == "connected" # now disable the s3 server mocked_aws_server.stop() # check again the health response = await client.get(f"{url}") - data, error = await assert_status(response, status.HTTP_200_OK) - assert data + app_status_check, error = assert_status( + response, status.HTTP_200_OK, AppStatusCheck + ) + assert app_status_check assert not error - app_status_check = AppStatusCheck.model_validate(data) assert app_status_check.services["s3"]["healthy"] == "failed" # start the server again mocked_aws_server.start() # should be good again response = await 
client.get(f"{url}") - data, error = await assert_status(response, status.HTTP_200_OK) - assert data + app_status_check, error = assert_status( + response, status.HTTP_200_OK, AppStatusCheck + ) + assert app_status_check assert not error - app_status_check = AppStatusCheck.model_validate(data) assert app_status_check.services["s3"]["healthy"] == "connected" diff --git a/services/storage/tests/unit/test_handlers_locations.py b/services/storage/tests/unit/test_handlers_locations.py index cc236499ee9..4aae75e69de 100644 --- a/services/storage/tests/unit/test_handlers_locations.py +++ b/services/storage/tests/unit/test_handlers_locations.py @@ -4,69 +4,56 @@ # pylint:disable=too-many-arguments # pylint:disable=no-name-in-module -from typing import Any -import pytest -from aiohttp.test_utils import TestClient +import httpx +from fastapi import FastAPI, status +from models_library.api_schemas_storage.storage_schemas import FileLocation from models_library.users import UserID -from pytest_simcore.helpers.assert_checks import assert_status -from servicelib.aiohttp import status -from tests.helpers.utils import has_datcore_tokens +from pytest_simcore.helpers.fastapi import url_from_operation_id +from pytest_simcore.helpers.httpx_assert_checks import assert_status +from simcore_service_storage.datcore_dsm import DatCoreDataManager +from simcore_service_storage.simcore_s3_dsm import SimcoreS3DataManager pytest_simcore_core_services_selection = ["postgres"] pytest_simcore_ops_services_selection = ["adminer"] -async def test_locations(client: TestClient, user_id: UserID): - resp = await client.get(f"/v0/locations?user_id={user_id}") - - payload = await resp.json() - assert resp.status == 200, str(payload) - - data, error = tuple(payload.get(k) for k in ("data", "error")) - - _locs = 2 if has_datcore_tokens() else 1 - assert len(data) == _locs - assert not error - - -@pytest.mark.parametrize( - "dry_run, fire_and_forget, expected_removed", - [ - (None, None, []), - (True, False, []), - (True, True, []), - (False, True, []), - (False, False, []), - ], -) -async def test_synchronise_meta_data_table( - client: TestClient, - location_id: int, +async def test_locations( + initialized_app: FastAPI, + client: httpx.AsyncClient, user_id: UserID, - dry_run: bool | None, - fire_and_forget: bool | None, - expected_removed: list, + fake_datcore_tokens: tuple[str, str], ): - assert client.app - query_params: dict[str, Any] = {"user_id": user_id} - if dry_run: - query_params["dry_run"] = f"{dry_run}" - if fire_and_forget: - query_params["fire_and_forget"] = f"{fire_and_forget}" - url = ( - client.app.router["synchronise_meta_data_table"] - .url_for(location_id=f"{location_id}") - .with_query(**query_params) + url = url_from_operation_id( + client, initialized_app, "list_storage_locations" + ).with_query(user_id=user_id) + response = await client.get(f"{url}") + data, _ = assert_status(response, status.HTTP_200_OK, list[FileLocation]) + assert data + assert len(data) == 2 + assert data[0] == FileLocation( + id=SimcoreS3DataManager.get_location_id(), + name=SimcoreS3DataManager.get_location_name(), ) - resp = await client.post( - f"{url}", + assert data[1] == FileLocation( + id=DatCoreDataManager.get_location_id(), + name=DatCoreDataManager.get_location_name(), ) - data, error = await assert_status(resp, status.HTTP_200_OK) - assert not error + + +async def test_locations_without_tokens( + initialized_app: FastAPI, + client: httpx.AsyncClient, + user_id: UserID, +): + url = url_from_operation_id( + client, 
initialized_app, "list_storage_locations" + ).with_query(user_id=user_id) + response = await client.get(f"{url}") + data, _ = assert_status(response, status.HTTP_200_OK, list[FileLocation]) assert data - assert data["dry_run"] == (False if dry_run is None else dry_run) - assert data["fire_and_forget"] == ( - False if fire_and_forget is None else fire_and_forget + assert len(data) == 1 + assert data[0] == FileLocation( + id=SimcoreS3DataManager.get_location_id(), + name=SimcoreS3DataManager.get_location_name(), ) - assert data["removed"] == expected_removed diff --git a/services/storage/tests/unit/test_handlers_paths.py b/services/storage/tests/unit/test_handlers_paths.py new file mode 100644 index 00000000000..31cb5c85061 --- /dev/null +++ b/services/storage/tests/unit/test_handlers_paths.py @@ -0,0 +1,750 @@ +# pylint:disable=no-name-in-module +# pylint:disable=protected-access +# pylint:disable=redefined-outer-name +# pylint:disable=too-many-arguments +# pylint:disable=too-many-positional-arguments +# pylint:disable=unused-argument +# pylint:disable=unused-variable + + +import random +from collections.abc import Awaitable, Callable +from pathlib import Path +from typing import Any, TypeAlias +from urllib.parse import quote + +import httpx +import pytest +import sqlalchemy as sa +from faker import Faker +from fastapi import FastAPI, status +from fastapi_pagination.cursor import CursorPage +from models_library.api_schemas_storage.storage_schemas import ( + PathMetaDataGet, + PathTotalSizeCreate, +) +from models_library.api_schemas_webserver.storage import MAX_NUMBER_OF_PATHS_PER_PAGE +from models_library.projects_nodes_io import LocationID, NodeID, SimcoreS3FileID +from models_library.users import UserID +from pydantic import ByteSize, TypeAdapter +from pytest_simcore.helpers.fastapi import url_from_operation_id +from pytest_simcore.helpers.httpx_assert_checks import assert_status +from pytest_simcore.helpers.storage_utils import FileIDDict, ProjectWithFilesParams +from simcore_postgres_database.models.projects import projects +from simcore_service_storage.simcore_s3_dsm import SimcoreS3DataManager +from sqlalchemy.ext.asyncio import AsyncEngine + +pytest_simcore_core_services_selection = ["postgres"] +pytest_simcore_ops_services_selection = ["adminer"] + +_IsFile: TypeAlias = bool + + +def _filter_and_group_paths_one_level_deeper( + paths: list[Path], prefix: Path +) -> list[tuple[Path, _IsFile]]: + relative_paths = (path for path in paths if path.is_relative_to(prefix)) + return sorted( + { + ( + (path, len(path.relative_to(prefix).parts) == 1) + if len(path.relative_to(prefix).parts) == 1 + else (prefix / path.relative_to(prefix).parts[0], False) + ) + for path in relative_paths + }, + key=lambda x: x[0], + ) + + +async def _assert_list_paths( + initialized_app: FastAPI, + client: httpx.AsyncClient, + location_id: LocationID, + user_id: UserID, + *, + file_filter: Path | None, + limit: int = 25, + expected_paths: list[tuple[Path, _IsFile]], + check_total: bool = True, +) -> CursorPage[PathMetaDataGet]: + offset = 0 + total_expected = len(expected_paths) + next_cursor = 0 # NOTE: this will initialize + total_received = 0 + while next_cursor is not None: + url = url_from_operation_id( + client, initialized_app, "list_paths", location_id=f"{location_id}" + ).with_query( + user_id=user_id, + size=limit, + ) + if next_cursor: + url = url.update_query(cursor=next_cursor) + + if file_filter is not None: + url = url.update_query(file_filter=f"{file_filter}") + response = await 
client.get(f"{url}") + + page_of_files, _ = assert_status( + response, + status.HTTP_200_OK, + CursorPage[PathMetaDataGet], + expect_envelope=False, + ) + assert page_of_files + assert len(page_of_files.items) == min(limit, total_expected - offset) + + for (expected_path, is_file), received_path in zip( + expected_paths[offset : offset + limit], page_of_files.items, strict=True + ): + assert received_path.path == expected_path + if is_file: + assert received_path.file_meta_data is not None + else: + assert received_path.file_meta_data is None + + if check_total: + assert page_of_files.total == total_expected + else: + assert page_of_files.total is None + next_cursor = page_of_files.next_page + total_received += len(page_of_files.items) + offset += limit + assert total_received == total_expected + assert page_of_files.next_page is None + return page_of_files + + +async def test_list_paths_root_folder_of_empty_returns_nothing( + initialized_app: FastAPI, + client: httpx.AsyncClient, + location_id: LocationID, + user_id: UserID, + fake_datcore_tokens: tuple[str, str], +): + await _assert_list_paths( + initialized_app, + client, + location_id, + user_id, + file_filter=None, + expected_paths=[], + ) + + +@pytest.mark.parametrize( + "location_id", + [SimcoreS3DataManager.get_location_id()], + ids=[SimcoreS3DataManager.get_location_name()], + indirect=True, +) +@pytest.mark.parametrize( + "project_params", + [ + ProjectWithFilesParams( + num_nodes=10, + allowed_file_sizes=(TypeAdapter(ByteSize).validate_python("1b"),), + workspace_files_count=10, + ) + ], + ids=str, +) +async def test_list_paths_pagination( + initialized_app: FastAPI, + client: httpx.AsyncClient, + location_id: LocationID, + user_id: UserID, + with_random_project_with_files: tuple[ + dict[str, Any], + dict[NodeID, dict[SimcoreS3FileID, FileIDDict]], + ], +): + project, list_of_files = with_random_project_with_files + num_nodes = len(list(project["workbench"])) + + # ls the nodes (DB-based) + file_filter = Path(project["uuid"]) + expected_paths = sorted( + ((file_filter / node_key, False) for node_key in project["workbench"]), + key=lambda x: x[0], + ) + await _assert_list_paths( + initialized_app, + client, + location_id, + user_id, + file_filter=file_filter, + expected_paths=expected_paths, + limit=int(num_nodes / 2 + 0.5), + ) + + # ls in the workspace (S3-based) + # ls in the workspace + selected_node_id = NodeID(random.choice(list(project["workbench"]))) # noqa: S311 + selected_node_s3_keys = [ + Path(s3_object_id) for s3_object_id in list_of_files[selected_node_id] + ] + workspace_file_filter = file_filter / f"{selected_node_id}" / "workspace" + expected_paths = _filter_and_group_paths_one_level_deeper( + selected_node_s3_keys, workspace_file_filter + ) + await _assert_list_paths( + initialized_app, + client, + location_id, + user_id, + file_filter=workspace_file_filter, + expected_paths=expected_paths, + limit=1, + check_total=False, + ) + # ls in until we get to some files + while selected_subfolders := [p for p in expected_paths if p[1] is False]: + selected_path_filter = random.choice(selected_subfolders) # noqa: S311 + expected_paths = _filter_and_group_paths_one_level_deeper( + selected_node_s3_keys, selected_path_filter[0] + ) + await _assert_list_paths( + initialized_app, + client, + location_id, + user_id, + file_filter=selected_path_filter[0], + expected_paths=expected_paths, + check_total=False, + ) + + +@pytest.mark.parametrize( + "location_id", + [SimcoreS3DataManager.get_location_id()], + 
ids=[SimcoreS3DataManager.get_location_name()], + indirect=True, +) +@pytest.mark.parametrize( + "project_params", + [ + ProjectWithFilesParams( + num_nodes=1, + allowed_file_sizes=(TypeAdapter(ByteSize).validate_python("0b"),), + workspace_files_count=MAX_NUMBER_OF_PATHS_PER_PAGE, + ) + ], + ids=str, +) +async def test_list_paths_pagination_large_page( + initialized_app: FastAPI, + client: httpx.AsyncClient, + location_id: LocationID, + user_id: UserID, + with_random_project_with_files: tuple[ + dict[str, Any], + dict[NodeID, dict[SimcoreS3FileID, FileIDDict]], + ], +): + project, list_of_files = with_random_project_with_files + selected_node_id = NodeID(random.choice(list(project["workbench"]))) # noqa: S311 + selected_node_s3_keys = [ + Path(s3_object_id) for s3_object_id in list_of_files[selected_node_id] + ] + workspace_file_filter = Path(project["uuid"]) / f"{selected_node_id}" / "workspace" + expected_paths = _filter_and_group_paths_one_level_deeper( + selected_node_s3_keys, workspace_file_filter + ) + await _assert_list_paths( + initialized_app, + client, + location_id, + user_id, + file_filter=workspace_file_filter, + expected_paths=expected_paths, + check_total=False, + limit=MAX_NUMBER_OF_PATHS_PER_PAGE, + ) + + +@pytest.mark.parametrize( + "location_id", + [SimcoreS3DataManager.get_location_id()], + ids=[SimcoreS3DataManager.get_location_name()], + indirect=True, +) +@pytest.mark.parametrize( + "project_params, num_projects", + [ + ( + ProjectWithFilesParams( + num_nodes=3, + allowed_file_sizes=(TypeAdapter(ByteSize).validate_python("1b"),), + workspace_files_count=10, + ), + 3, + ) + ], + ids=str, +) +async def test_list_paths( + initialized_app: FastAPI, + client: httpx.AsyncClient, + location_id: LocationID, + user_id: UserID, + random_project_with_files: Callable[ + [ProjectWithFilesParams], + Awaitable[ + tuple[dict[str, Any], dict[NodeID, dict[SimcoreS3FileID, FileIDDict]]] + ], + ], + project_params: ProjectWithFilesParams, + num_projects: int, +): + project_to_files_mapping = [ + await random_project_with_files(project_params) for _ in range(num_projects) + ] + project_to_files_mapping.sort(key=lambda x: x[0]["uuid"]) + + # ls root returns our projects + expected_paths = sorted( + ((Path(f"{prj_db['uuid']}"), False) for prj_db, _ in project_to_files_mapping), + key=lambda x: x[0], + ) + await _assert_list_paths( + initialized_app, + client, + location_id, + user_id, + file_filter=None, + expected_paths=expected_paths, + ) + + # ls with only some part of the path should return only the projects that match + selected_project, selected_project_files = random.choice( # noqa: S311 + project_to_files_mapping + ) + partial_file_filter = Path( + selected_project["uuid"][: len(selected_project["uuid"]) // 2] + ) + partial_expected_paths = [ + p for p in expected_paths if f"{p[0]}".startswith(f"{partial_file_filter}") + ] + + await _assert_list_paths( + initialized_app, + client, + location_id, + user_id, + file_filter=partial_file_filter, + expected_paths=partial_expected_paths, + ) + + # now we ls inside one of the projects, which returns its nodes + file_filter = Path(selected_project["uuid"]) + expected_paths = sorted( + ((file_filter / node_key, False) for node_key in selected_project["workbench"]), + key=lambda x: x[0], + ) + await _assert_list_paths( + initialized_app, + client, + location_id, + user_id, + file_filter=file_filter, + expected_paths=expected_paths, + ) + + # now we ls in one of the nodes + selected_node_id = NodeID( + 
random.choice(list(selected_project["workbench"])) # noqa: S311 + ) + selected_node_s3_keys = [ + Path(s3_object_id) for s3_object_id in selected_project_files[selected_node_id] + ] + file_filter = file_filter / f"{selected_node_id}" + expected_node_files = _filter_and_group_paths_one_level_deeper( + selected_node_s3_keys, + file_filter, + ) + await _assert_list_paths( + initialized_app, + client, + location_id, + user_id, + file_filter=file_filter, + expected_paths=expected_node_files, + ) + + # ls in the outputs will list 1 entry which is a folder + node_outputs_file_filter = file_filter / "outputs" + expected_paths = _filter_and_group_paths_one_level_deeper( + selected_node_s3_keys, node_outputs_file_filter + ) + await _assert_list_paths( + initialized_app, + client, + location_id, + user_id, + file_filter=node_outputs_file_filter, + expected_paths=expected_paths, + ) + + # ls in output_3 shall reveal the file + node_outputs_file_filter = file_filter / "outputs" / "output_3" + expected_paths = _filter_and_group_paths_one_level_deeper( + selected_node_s3_keys, node_outputs_file_filter + ) + await _assert_list_paths( + initialized_app, + client, + location_id, + user_id, + file_filter=node_outputs_file_filter, + expected_paths=expected_paths, + ) + + # ls in the workspace + workspace_file_filter = file_filter / "workspace" + expected_paths = _filter_and_group_paths_one_level_deeper( + selected_node_s3_keys, workspace_file_filter + ) + await _assert_list_paths( + initialized_app, + client, + location_id, + user_id, + file_filter=workspace_file_filter, + expected_paths=expected_paths, + check_total=False, + ) + # ls deeper until we get to some files + while selected_subfolders := [p for p in expected_paths if p[1] is False]: + selected_path_filter = random.choice(selected_subfolders) # noqa: S311 + expected_paths = _filter_and_group_paths_one_level_deeper( + selected_node_s3_keys, selected_path_filter[0] + ) + await _assert_list_paths( + initialized_app, + client, + location_id, + user_id, + file_filter=selected_path_filter[0], + expected_paths=expected_paths, + check_total=False, + ) + + +@pytest.mark.parametrize( + "location_id", + [SimcoreS3DataManager.get_location_id()], + ids=[SimcoreS3DataManager.get_location_name()], + indirect=True, +) +@pytest.mark.parametrize( + "project_params", + [ + ProjectWithFilesParams( + num_nodes=1, + allowed_file_sizes=(TypeAdapter(ByteSize).validate_python("0b"),), + workspace_files_count=10, + ) + ], + ids=str, +) +async def test_list_paths_with_display_name_containing_slashes( + initialized_app: FastAPI, + client: httpx.AsyncClient, + location_id: LocationID, + user_id: UserID, + with_random_project_with_files: tuple[ + dict[str, Any], + dict[NodeID, dict[SimcoreS3FileID, FileIDDict]], + ], + sqlalchemy_async_engine: AsyncEngine, +): + project, list_of_files = with_random_project_with_files + project_name_with_slashes = "soméà$èq¨thing with/ slas/h/es/" + node_name_with_non_ascii = "my node / is not ascii: éàèù" + # adjust project to contain "difficult" characters + async with sqlalchemy_async_engine.begin() as conn: + result = await conn.execute( + sa.update(projects) + .where(projects.c.uuid == project["uuid"]) + .values(name=project_name_with_slashes) + .returning(sa.literal_column(f"{projects.c.name}, {projects.c.workbench}")) + ) + row = result.one() + assert row.name == project_name_with_slashes + project_workbench = row.workbench + assert len(project_workbench) == 1 + node = next(iter(project_workbench.values())) + node["label"] = 
node_name_with_non_ascii + result = await conn.execute( + sa.update(projects) + .where(projects.c.uuid == project["uuid"]) + .values(workbench=project_workbench) + .returning(sa.literal_column(f"{projects.c.name}, {projects.c.workbench}")) + ) + row = result.one() + + # ls the root + file_filter = None + expected_paths = [(Path(project["uuid"]), False)] + + page_of_paths = await _assert_list_paths( + initialized_app, + client, + location_id, + user_id, + file_filter=file_filter, + expected_paths=expected_paths, + ) + + assert page_of_paths.items[0].display_path == Path( + quote(project_name_with_slashes, safe="") + ), "display path parts should be url encoded" + + # ls the nodes to ensure / is still there between project and node + file_filter = Path(project["uuid"]) + expected_paths = sorted( + ((file_filter / node_key, False) for node_key in project["workbench"]), + key=lambda x: x[0], + ) + assert len(expected_paths) == 1, "test configuration problem" + page_of_paths = await _assert_list_paths( + initialized_app, + client, + location_id, + user_id, + file_filter=file_filter, + expected_paths=expected_paths, + ) + assert page_of_paths.items[0].display_path == Path( + quote(project_name_with_slashes, safe="") + ) / quote( + node_name_with_non_ascii, safe="" + ), "display path parts should be url encoded" + + # ls in the node workspace + selected_node_id = NodeID(random.choice(list(project["workbench"]))) # noqa: S311 + selected_node_s3_keys = [ + Path(s3_object_id) for s3_object_id in list_of_files[selected_node_id] + ] + workspace_file_filter = file_filter / f"{selected_node_id}" / "workspace" + expected_paths = _filter_and_group_paths_one_level_deeper( + selected_node_s3_keys, workspace_file_filter + ) + await _assert_list_paths( + initialized_app, + client, + location_id, + user_id, + file_filter=workspace_file_filter, + expected_paths=expected_paths, + check_total=False, + ) + + # ls deeper until we get to some files + while selected_subfolders := [p for p in expected_paths if p[1] is False]: + selected_path_filter = random.choice(selected_subfolders) # noqa: S311 + expected_paths = _filter_and_group_paths_one_level_deeper( + selected_node_s3_keys, selected_path_filter[0] + ) + page_of_paths = await _assert_list_paths( + initialized_app, + client, + location_id, + user_id, + file_filter=selected_path_filter[0], + expected_paths=expected_paths, + check_total=False, + ) + + expected_display_path = "/".join( + [ + quote(project_name_with_slashes, safe=""), + quote(node_name_with_non_ascii, safe=""), + *(expected_paths[0][0].parts[2:]), + ], + ) + assert page_of_paths.items[0].display_path == Path( + expected_display_path + ), "display path parts should be url encoded" + + +async def _assert_compute_path_size( + initialized_app: FastAPI, + client: httpx.AsyncClient, + location_id: LocationID, + user_id: UserID, + *, + path: Path, + expected_total_size: int, +) -> ByteSize: + url = url_from_operation_id( + client, + initialized_app, + "compute_path_size", + location_id=f"{location_id}", + path=f"{path}", + ).with_query(user_id=user_id) + response = await client.post(f"{url}") + + received, _ = assert_status( + response, + status.HTTP_200_OK, + PathTotalSizeCreate, + ) + assert received + assert received.path == path + assert received.size == expected_total_size + return received.size + + +@pytest.mark.parametrize( + "location_id", + [SimcoreS3DataManager.get_location_id()], + ids=[SimcoreS3DataManager.get_location_name()], + indirect=True, +) +@pytest.mark.parametrize( + "project_params", + [ 
+ ProjectWithFilesParams( + num_nodes=5, + allowed_file_sizes=(TypeAdapter(ByteSize).validate_python("1b"),), + workspace_files_count=10, + ) + ], + ids=str, +) +async def test_path_compute_size( + initialized_app: FastAPI, + client: httpx.AsyncClient, + location_id: LocationID, + user_id: UserID, + with_random_project_with_files: tuple[ + dict[str, Any], + dict[NodeID, dict[SimcoreS3FileID, FileIDDict]], + ], + project_params: ProjectWithFilesParams, +): + assert ( + len(project_params.allowed_file_sizes) == 1 + ), "test preconditions are not filled! allowed file sizes should have only 1 option for this test" + project, list_of_files = with_random_project_with_files + + total_num_files = sum( + len(files_in_node) for files_in_node in list_of_files.values() + ) + + # get size of a full project + expected_total_size = project_params.allowed_file_sizes[0] * total_num_files + path = Path(project["uuid"]) + await _assert_compute_path_size( + initialized_app, + client, + location_id, + user_id, + path=path, + expected_total_size=expected_total_size, + ) + + # get size of one of the nodes + selected_node_id = NodeID(random.choice(list(project["workbench"]))) # noqa: S311 + path = Path(project["uuid"]) / f"{selected_node_id}" + selected_node_s3_keys = [ + Path(s3_object_id) for s3_object_id in list_of_files[selected_node_id] + ] + expected_total_size = project_params.allowed_file_sizes[0] * len( + selected_node_s3_keys + ) + await _assert_compute_path_size( + initialized_app, + client, + location_id, + user_id, + path=path, + expected_total_size=expected_total_size, + ) + + # get size of the outputs of one of the nodes + path = Path(project["uuid"]) / f"{selected_node_id}" / "outputs" + selected_node_s3_keys = [ + Path(s3_object_id) + for s3_object_id in list_of_files[selected_node_id] + if s3_object_id.startswith(f"{path}") + ] + expected_total_size = project_params.allowed_file_sizes[0] * len( + selected_node_s3_keys + ) + await _assert_compute_path_size( + initialized_app, + client, + location_id, + user_id, + path=path, + expected_total_size=expected_total_size, + ) + + # get size of workspace in one of the nodes (this is semi-cached in the DB) + path = Path(project["uuid"]) / f"{selected_node_id}" / "workspace" + selected_node_s3_keys = [ + Path(s3_object_id) + for s3_object_id in list_of_files[selected_node_id] + if s3_object_id.startswith(f"{path}") + ] + expected_total_size = project_params.allowed_file_sizes[0] * len( + selected_node_s3_keys + ) + workspace_total_size = await _assert_compute_path_size( + initialized_app, + client, + location_id, + user_id, + path=path, + expected_total_size=expected_total_size, + ) + + # get size of folders inside the workspace + folders_inside_workspace = [ + p[0] + for p in _filter_and_group_paths_one_level_deeper(selected_node_s3_keys, path) + if p[1] is False + ] + accumulated_subfolder_size = 0 + for workspace_subfolder in folders_inside_workspace: + selected_node_s3_keys = [ + Path(s3_object_id) + for s3_object_id in list_of_files[selected_node_id] + if s3_object_id.startswith(f"{workspace_subfolder}") + ] + expected_total_size = project_params.allowed_file_sizes[0] * len( + selected_node_s3_keys + ) + accumulated_subfolder_size += await _assert_compute_path_size( + initialized_app, + client, + location_id, + user_id, + path=workspace_subfolder, + expected_total_size=expected_total_size, + ) + + assert workspace_total_size == accumulated_subfolder_size + + +async def test_path_compute_size_inexistent_path( + initialized_app: FastAPI, + client: 
httpx.AsyncClient, + location_id: LocationID, + user_id: UserID, + faker: Faker, + fake_datcore_tokens: tuple[str, str], +): + await _assert_compute_path_size( + initialized_app, + client, + location_id, + user_id, + path=Path(faker.file_path(absolute=False)), + expected_total_size=0, + ) diff --git a/services/storage/tests/unit/test_handlers_simcore_s3.py b/services/storage/tests/unit/test_handlers_simcore_s3.py index bcda9331f2b..d3768fd09eb 100644 --- a/services/storage/tests/unit/test_handlers_simcore_s3.py +++ b/services/storage/tests/unit/test_handlers_simcore_s3.py @@ -13,35 +13,44 @@ from pathlib import Path from typing import Any, Literal +import httpx import pytest import sqlalchemy as sa -from aiohttp import ClientResponseError -from aiohttp.test_utils import TestClient -from aiopg.sa.engine import Engine from aws_library.s3 import SimcoreS3API from faker import Faker -from models_library.api_schemas_storage import FileMetaDataGet, FoldersBody +from fastapi import FastAPI +from models_library.api_schemas_storage.storage_schemas import ( + FileMetaDataGet, + FoldersBody, +) from models_library.basic_types import SHA256Str from models_library.projects import ProjectID from models_library.projects_nodes_io import NodeID, NodeIDStr, SimcoreS3FileID from models_library.users import UserID from models_library.utils.fastapi_encoders import jsonable_encoder from pydantic import ByteSize, TypeAdapter -from pytest_simcore.helpers.assert_checks import assert_status +from pytest_simcore.helpers.fastapi import url_from_operation_id +from pytest_simcore.helpers.httpx_assert_checks import assert_status from pytest_simcore.helpers.logging_tools import log_context +from pytest_simcore.helpers.storage_utils import ( + FileIDDict, + ProjectWithFilesParams, + get_updated_project, +) +from pytest_simcore.helpers.storage_utils_file_meta_data import ( + assert_file_meta_data_in_db, +) +from pytest_simcore.helpers.storage_utils_project import clone_project_data from pytest_simcore.helpers.typing_env import EnvVarsDict from servicelib.aiohttp import status -from servicelib.aiohttp.long_running_tasks.client import long_running_task_request +from servicelib.fastapi.long_running_tasks.client import long_running_task_request from settings_library.s3 import S3Settings from simcore_postgres_database.storage_models import file_meta_data from simcore_service_storage.models import SearchFilesQueryParams from simcore_service_storage.simcore_s3_dsm import SimcoreS3DataManager -from tests.helpers.utils_file_meta_data import assert_file_meta_data_in_db -from tests.helpers.utils_project import clone_project_data +from sqlalchemy.ext.asyncio import AsyncEngine from yarl import URL -from ..helpers.utils import get_updated_project - pytest_simcore_core_services_selection = ["postgres"] pytest_simcore_ops_services_selection = ["adminer", "minio"] @@ -71,38 +80,37 @@ async def _fake_download_to_file_or_raise(session, url, dest_path): ) -async def test_simcore_s3_access_returns_default(client: TestClient): - assert client.app - url = ( - client.app.router["get_or_create_temporary_s3_access"] - .url_for() - .with_query(user_id=1) - ) +async def test_simcore_s3_access_returns_default( + initialized_app: FastAPI, client: httpx.AsyncClient +): + url = url_from_operation_id( + client, initialized_app, "get_or_create_temporary_s3_access" + ).with_query(user_id=1) + response = await client.post(f"{url}") - data, error = await assert_status(response, status.HTTP_200_OK) + received_settings, error = assert_status(response, 
status.HTTP_200_OK, S3Settings) assert not error - assert data - received_settings = S3Settings.model_validate(data) assert received_settings async def _request_copy_folders( - client: TestClient, + initialized_app: FastAPI, + client: httpx.AsyncClient, user_id: UserID, source_project: dict[str, Any], dst_project: dict[str, Any], nodes_map: dict[NodeID, NodeID], ) -> dict[str, Any]: - assert client.app - url = client.make_url( - f"{(client.app.router['copy_folders_from_project'].url_for().with_query(user_id=user_id))}" - ) + url = url_from_operation_id( + client, initialized_app, "copy_folders_from_project" + ).with_query(user_id=user_id) + with log_context( logging.INFO, f"Copying folders from {source_project['uuid']} to {dst_project['uuid']}", ) as ctx: async for lr_task in long_running_task_request( - client.session, + client, url, json=jsonable_encoder( FoldersBody( @@ -118,13 +126,12 @@ async def _request_copy_folders( async def test_copy_folders_from_non_existing_project( - client: TestClient, + initialized_app: FastAPI, + client: httpx.AsyncClient, user_id: UserID, create_project: Callable[[], Awaitable[dict[str, Any]]], faker: Faker, ): - assert client.app - src_project = await create_project() incorrect_src_project = deepcopy(src_project) incorrect_src_project["uuid"] = faker.uuid4() @@ -132,36 +139,45 @@ async def test_copy_folders_from_non_existing_project( incorrect_dst_project = deepcopy(dst_project) incorrect_dst_project["uuid"] = faker.uuid4() - with pytest.raises( - ClientResponseError, match=f"{incorrect_src_project['uuid']} was not found" - ) as exc_info: + with pytest.raises(httpx.HTTPStatusError, match="404") as exc_info: await _request_copy_folders( + initialized_app, client, user_id, incorrect_src_project, dst_project, nodes_map={}, ) - assert exc_info.value.status == status.HTTP_404_NOT_FOUND + assert_status( + exc_info.value.response, + status.HTTP_404_NOT_FOUND, + None, + expected_msg=f"{incorrect_src_project['uuid']} was not found", + ) - with pytest.raises( - ClientResponseError, match=f"{incorrect_dst_project['uuid']} was not found" - ) as exc_info: + with pytest.raises(httpx.HTTPStatusError, match="404") as exc_info: await _request_copy_folders( + initialized_app, client, user_id, src_project, incorrect_dst_project, nodes_map={}, ) - assert exc_info.value.status == status.HTTP_404_NOT_FOUND + assert_status( + exc_info.value.response, + status.HTTP_404_NOT_FOUND, + None, + expected_msg=f"{incorrect_dst_project['uuid']} was not found", + ) async def test_copy_folders_from_empty_project( - client: TestClient, + initialized_app: FastAPI, + client: httpx.AsyncClient, user_id: UserID, create_project: Callable[[], Awaitable[dict[str, Any]]], - aiopg_engine: Engine, + sqlalchemy_async_engine: AsyncEngine, storage_s3_client: SimcoreS3API, ): # we will copy from src to dst @@ -169,6 +185,7 @@ async def test_copy_folders_from_empty_project( dst_project = await create_project() data = await _request_copy_folders( + initialized_app, client, user_id, src_project, @@ -177,7 +194,7 @@ async def test_copy_folders_from_empty_project( ) assert data == jsonable_encoder(dst_project) # check there is nothing in the dst project - async with aiopg_engine.acquire() as conn: + async with sqlalchemy_async_engine.connect() as conn: num_entries = await conn.scalar( sa.select(sa.func.count()) .select_from(file_meta_data) @@ -192,36 +209,51 @@ def short_dsm_cleaner_interval(monkeypatch: pytest.MonkeyPatch) -> int: return 1 +@pytest.mark.parametrize( + "location_id", + 
[SimcoreS3DataManager.get_location_id()], + ids=[SimcoreS3DataManager.get_location_name()], + indirect=True, +) +@pytest.mark.parametrize( + "project_params", + [ + ProjectWithFilesParams( + num_nodes=1, + allowed_file_sizes=(TypeAdapter(ByteSize).validate_python("210Mib"),), + allowed_file_checksums=( + TypeAdapter(SHA256Str).validate_python( + "0b3216d95ec5a36c120ba16c88911dcf5ff655925d0fbdbc74cf95baf86de6fc" + ), + ), + workspace_files_count=0, + ), + ], + ids=str, +) async def test_copy_folders_from_valid_project_with_one_large_file( + initialized_app: FastAPI, short_dsm_cleaner_interval: int, - client: TestClient, + client: httpx.AsyncClient, user_id: UserID, create_project: Callable[[], Awaitable[dict[str, Any]]], - aiopg_engine: Engine, + sqlalchemy_async_engine: AsyncEngine, random_project_with_files: Callable[ - [int, tuple[ByteSize], tuple[SHA256Str]], + [ProjectWithFilesParams], Awaitable[ - tuple[ - dict[str, Any], - dict[NodeID, dict[SimcoreS3FileID, dict[str, Path | str]]], - ] + tuple[dict[str, Any], dict[NodeID, dict[SimcoreS3FileID, FileIDDict]]] ], ], + project_params: ProjectWithFilesParams, ): # 1. create a src project with 1 large file - sha256_checksum: SHA256Str = TypeAdapter(SHA256Str).validate_python( - "0b3216d95ec5a36c120ba16c88911dcf5ff655925d0fbdbc74cf95baf86de6fc" - ) - src_project, src_projects_list = await random_project_with_files( - 1, - (TypeAdapter(ByteSize).validate_python("210Mib"),), - (sha256_checksum,), - ) + src_project, src_projects_list = await random_project_with_files(project_params) # 2. create a dst project without files dst_project, nodes_map = clone_project_data(src_project) dst_project = await create_project(**dst_project) # copy the project files data = await _request_copy_folders( + initialized_app, client, user_id, src_project, @@ -229,7 +261,7 @@ async def test_copy_folders_from_valid_project_with_one_large_file( nodes_map={NodeID(i): NodeID(j) for i, j in nodes_map.items()}, ) assert data == jsonable_encoder( - await get_updated_project(aiopg_engine, dst_project["uuid"]) + await get_updated_project(sqlalchemy_async_engine, dst_project["uuid"]) ) # check that file meta data was effectively copied for src_node_id in src_projects_list: @@ -243,10 +275,10 @@ async def test_copy_folders_from_valid_project_with_one_large_file( checksum: Any = src_file["sha256_checksum"] assert isinstance(checksum, str) await assert_file_meta_data_in_db( - aiopg_engine, + sqlalchemy_async_engine, file_id=TypeAdapter(SimcoreS3FileID).validate_python( f"{src_file_id}".replace( - src_project["uuid"], dst_project["uuid"] + f"{src_project['uuid']}", dst_project["uuid"] ).replace(f"{src_node_id}", f"{dst_node_id}") ), expected_entry_exists=True, @@ -259,30 +291,62 @@ async def test_copy_folders_from_valid_project_with_one_large_file( ) +@pytest.mark.parametrize( + "location_id", + [SimcoreS3DataManager.get_location_id()], + ids=[SimcoreS3DataManager.get_location_name()], + indirect=True, +) +@pytest.mark.parametrize( + "project_params", + [ + ProjectWithFilesParams( + num_nodes=12, + allowed_file_sizes=( + TypeAdapter(ByteSize).validate_python("7Mib"), + TypeAdapter(ByteSize).validate_python("110Mib"), + TypeAdapter(ByteSize).validate_python("1Mib"), + ), + allowed_file_checksums=( + TypeAdapter(SHA256Str).validate_python( + "311e2e130d83cfea9c3b7560699c221b0b7f9e5d58b02870bd52b695d8b4aabd" + ), + TypeAdapter(SHA256Str).validate_python( + "08e297db979d3c84f6b072c2a1e269e8aa04e82714ca7b295933a0c9c0f62b2e" + ), + TypeAdapter(SHA256Str).validate_python( + 
"488f3b57932803bbf644593bd46d95599b1d4da1d63bc020d7ebe6f1c255f7f3" + ), + ), + workspace_files_count=0, + ), + ], + ids=str, +) async def test_copy_folders_from_valid_project( short_dsm_cleaner_interval: int, - client: TestClient, + initialized_app: FastAPI, + client: httpx.AsyncClient, user_id: UserID, create_project: Callable[[], Awaitable[dict[str, Any]]], create_simcore_file_id: Callable[[ProjectID, NodeID, str], SimcoreS3FileID], - aiopg_engine: Engine, + sqlalchemy_async_engine: AsyncEngine, random_project_with_files: Callable[ - ..., + [ProjectWithFilesParams], Awaitable[ - tuple[ - dict[str, Any], - dict[NodeID, dict[SimcoreS3FileID, dict[str, Path | SHA256Str]]], - ] + tuple[dict[str, Any], dict[NodeID, dict[SimcoreS3FileID, FileIDDict]]] ], ], + project_params: ProjectWithFilesParams, ): # 1. create a src project with some files - src_project, src_projects_list = await random_project_with_files() + src_project, src_projects_list = await random_project_with_files(project_params) # 2. create a dst project without files dst_project, nodes_map = clone_project_data(src_project) dst_project = await create_project(**dst_project) # copy the project files data = await _request_copy_folders( + initialized_app, client, user_id, src_project, @@ -290,7 +354,7 @@ async def test_copy_folders_from_valid_project( nodes_map={NodeID(i): NodeID(j) for i, j in nodes_map.items()}, ) assert data == jsonable_encoder( - await get_updated_project(aiopg_engine, dst_project["uuid"]) + await get_updated_project(sqlalchemy_async_engine, dst_project["uuid"]) ) # check that file meta data was effectively copied @@ -305,10 +369,10 @@ async def test_copy_folders_from_valid_project( checksum: Any = src_file["sha256_checksum"] assert isinstance(checksum, str) await assert_file_meta_data_in_db( - aiopg_engine, + sqlalchemy_async_engine, file_id=TypeAdapter(SimcoreS3FileID).validate_python( f"{src_file_id}".replace( - src_project["uuid"], dst_project["uuid"] + f"{src_project['uuid']}", dst_project["uuid"] ).replace(f"{src_node_id}", f"{dst_node_id}") ), expected_entry_exists=True, @@ -324,7 +388,8 @@ async def test_copy_folders_from_valid_project( async def _create_and_delete_folders_from_project( user_id: UserID, project: dict[str, Any], - client: TestClient, + initialized_app: FastAPI, + client: httpx.AsyncClient, project_db_creator: Callable, check_list_files: bool, ) -> None: @@ -333,6 +398,7 @@ async def _create_and_delete_folders_from_project( # creating a copy data = await _request_copy_folders( + initialized_app, client, user_id, project, @@ -347,35 +413,38 @@ async def _create_and_delete_folders_from_project( project_id = data["uuid"] # list data to check all is here - assert client.app + if check_list_files: - url = ( - client.app.router["get_files_metadata"] - .url_for(location_id=f"{SimcoreS3DataManager.get_location_id()}") - .with_query(user_id=f"{user_id}", uuid_filter=f"{project_id}") - ) + url = url_from_operation_id( + client, + initialized_app, + "list_files_metadata", + location_id=f"{SimcoreS3DataManager.get_location_id()}", + ).with_query(user_id=f"{user_id}", uuid_filter=f"{project_id}") + resp = await client.get(f"{url}") - data, error = await assert_status(resp, status.HTTP_200_OK) + data, error = assert_status(resp, status.HTTP_200_OK, list[FileMetaDataGet]) assert not error # DELETING - url = ( - client.app.router["delete_folders_of_project"] - .url_for(folder_id=project_id) - .with_query(user_id=f"{user_id}") - ) + url = url_from_operation_id( + client, + initialized_app, + 
"delete_folders_of_project", + folder_id=project_id, + ).with_query(user_id=f"{user_id}") resp = await client.delete(f"{url}") - - await assert_status(resp, expected_status_code=status.HTTP_204_NO_CONTENT) + assert_status(resp, status.HTTP_204_NO_CONTENT, None) # list data is gone if check_list_files: - url = ( - client.app.router["get_files_metadata"] - .url_for(location_id=f"{SimcoreS3DataManager.get_location_id()}") - .with_query(user_id=f"{user_id}", uuid_filter=f"{project_id}") - ) + url = url_from_operation_id( + client, + initialized_app, + "list_files_metadata", + location_id=f"{SimcoreS3DataManager.get_location_id()}", + ).with_query(user_id=f"{user_id}", uuid_filter=f"{project_id}") resp = await client.get(f"{url}") - data, error = await assert_status(resp, status.HTTP_200_OK) + data, error = assert_status(resp, status.HTTP_200_OK, list[FileMetaDataGet]) assert not error assert not data @@ -386,47 +455,48 @@ def set_log_levels_for_noisy_libraries() -> None: logging.getLogger("werkzeug").setLevel(logging.WARNING) -@pytest.fixture -async def with_random_project_with_files( - random_project_with_files: Callable[ - ..., - Awaitable[ - tuple[ - dict[str, Any], - dict[NodeID, dict[SimcoreS3FileID, dict[str, Path | str]]], - ] - ], - ], -) -> tuple[dict[str, Any], dict[NodeID, dict[SimcoreS3FileID, dict[str, Path | str]]],]: - return await random_project_with_files( - file_sizes=( - TypeAdapter(ByteSize).validate_python("1Mib"), - TypeAdapter(ByteSize).validate_python("2Mib"), - TypeAdapter(ByteSize).validate_python("5Mib"), - ) - ) - - async def test_connect_to_external( set_log_levels_for_noisy_libraries: None, - client: TestClient, + initialized_app: FastAPI, + client: httpx.AsyncClient, user_id: UserID, project_id: ProjectID, ): - assert client.app - url = ( - client.app.router["get_files_metadata"] - .url_for(location_id=f"{SimcoreS3DataManager.get_location_id()}") - .with_query(user_id=f"{user_id}", uuid_filter=f"{project_id}") - ) + url = url_from_operation_id( + client, + initialized_app, + "list_files_metadata", + location_id=f"{SimcoreS3DataManager.get_location_id()}", + ).with_query(user_id=f"{user_id}", uuid_filter=f"{project_id}") resp = await client.get(f"{url}") - data, error = await assert_status(resp, status.HTTP_200_OK) + data, error = assert_status(resp, status.HTTP_200_OK, list[FileMetaDataGet]) print(data) +@pytest.mark.parametrize( + "location_id", + [SimcoreS3DataManager.get_location_id()], + ids=[SimcoreS3DataManager.get_location_name()], + indirect=True, +) +@pytest.mark.parametrize( + "project_params", + [ + ProjectWithFilesParams( + num_nodes=3, + allowed_file_sizes=( + TypeAdapter(ByteSize).validate_python("7Mib"), + TypeAdapter(ByteSize).validate_python("110Mib"), + TypeAdapter(ByteSize).validate_python("1Mib"), + ), + workspace_files_count=0, + ) + ], +) async def test_create_and_delete_folders_from_project( set_log_levels_for_noisy_libraries: None, - client: TestClient, + initialized_app: FastAPI, + client: httpx.AsyncClient, user_id: UserID, create_project: Callable[..., Awaitable[dict[str, Any]]], with_random_project_with_files: tuple[ @@ -437,15 +507,42 @@ async def test_create_and_delete_folders_from_project( ): project_in_db, _ = with_random_project_with_files await _create_and_delete_folders_from_project( - user_id, project_in_db, client, create_project, check_list_files=True + user_id, + project_in_db, + initialized_app, + client, + create_project, + check_list_files=True, ) +@pytest.mark.flaky(max_runs=3) +@pytest.mark.parametrize( + "location_id", 
+ [SimcoreS3DataManager.get_location_id()], + ids=[SimcoreS3DataManager.get_location_name()], + indirect=True, +) +@pytest.mark.parametrize( + "project_params", + [ + ProjectWithFilesParams( + num_nodes=3, + allowed_file_sizes=( + TypeAdapter(ByteSize).validate_python("7Mib"), + TypeAdapter(ByteSize).validate_python("110Mib"), + TypeAdapter(ByteSize).validate_python("1Mib"), + ), + workspace_files_count=0, + ) + ], +) @pytest.mark.parametrize("num_concurrent_calls", [50]) async def test_create_and_delete_folders_from_project_burst( set_log_levels_for_noisy_libraries: None, minio_s3_settings_envs: EnvVarsDict, - client: TestClient, + initialized_app: FastAPI, + client: httpx.AsyncClient, user_id: UserID, with_random_project_with_files: tuple[ dict[str, Any], @@ -460,7 +557,12 @@ async def test_create_and_delete_folders_from_project_burst( await asyncio.gather( *[ _create_and_delete_folders_from_project( - user_id, project_in_db, client, create_project, check_list_files=False + user_id, + project_in_db, + initialized_app, + client, + create_project, + check_list_files=False, ) for _ in range(num_concurrent_calls) ] @@ -504,34 +606,35 @@ async def search_files_query_params( return q +@pytest.mark.parametrize( + "location_id", + [SimcoreS3DataManager.get_location_id()], + ids=[SimcoreS3DataManager.get_location_name()], + indirect=True, +) @pytest.mark.parametrize("expected_number_of_user_files", [0, 1, 3]) @pytest.mark.parametrize("query_params_choice", ["default", "limited", "with_offset"]) async def test_search_files_request( - client: TestClient, + initialized_app: FastAPI, + client: httpx.AsyncClient, user_id: UserID, uploaded_file_ids: list[SimcoreS3FileID], query_params_choice: str, search_files_query_params: SearchFilesQueryParams, ): - assert client.app assert query_params_choice assert search_files_query_params.user_id == user_id - - url = ( - client.app.router["search_files"] - .url_for() - .with_query( - jsonable_encoder( - search_files_query_params, exclude_unset=True, exclude_none=True - ) + url = url_from_operation_id(client, initialized_app, "search_files").with_query( + jsonable_encoder( + search_files_query_params, exclude_unset=True, exclude_none=True ) ) + response = await client.post(f"{url}") - data, error = await assert_status(response, status.HTTP_200_OK) + found, error = assert_status(response, status.HTTP_200_OK, list[FileMetaDataGet]) assert not error - - found = TypeAdapter(list[FileMetaDataGet]).validate_python(data) + assert found is not None expected = uploaded_file_ids[ search_files_query_params.offset : search_files_query_params.offset @@ -540,11 +643,18 @@ async def test_search_files_request( assert [_.file_uuid for _ in found] == expected +@pytest.mark.parametrize( + "location_id", + [SimcoreS3DataManager.get_location_id()], + ids=[SimcoreS3DataManager.get_location_name()], + indirect=True, +) @pytest.mark.parametrize("search_startswith", [True, False]) @pytest.mark.parametrize("search_sha256_checksum", [True, False]) @pytest.mark.parametrize("kind", ["owned", "read", None]) async def test_search_files( - client: TestClient, + initialized_app: FastAPI, + client: httpx.AsyncClient, user_id: UserID, upload_file: Callable[..., Awaitable[tuple[Path, SimcoreS3FileID]]], faker: Faker, @@ -552,32 +662,28 @@ async def test_search_files( search_sha256_checksum: bool, kind: Literal["owned"], ): - assert client.app _file_name: str = faker.file_name() _sha256_checksum: SHA256Str = TypeAdapter(SHA256Str).validate_python(faker.sha256()) - url = ( - 
client.app.router["search_files"] - .url_for() - .with_query( - jsonable_encoder( - { - "user_id": user_id, - "kind": kind, - }, - exclude_none=True, - ) + url = url_from_operation_id(client, initialized_app, "search_files").with_query( + jsonable_encoder( + { + "user_id": user_id, + "kind": kind, + }, + exclude_none=True, ) ) response = await client.post(f"{url}") if kind != "owned": - await assert_status(response, status.HTTP_422_UNPROCESSABLE_ENTITY) + assert_status(response, status.HTTP_422_UNPROCESSABLE_ENTITY, None) return - data, error = await assert_status(response, status.HTTP_200_OK) + list_fmds, error = assert_status( + response, status.HTTP_200_OK, list[FileMetaDataGet] + ) assert not error - list_fmds = TypeAdapter(list[FileMetaDataGet]).validate_python(data) assert not list_fmds # let's upload some files now @@ -588,9 +694,11 @@ async def test_search_files( ) # search again should return something response = await client.post(f"{url}") - data, error = await assert_status(response, status.HTTP_200_OK) + list_fmds, error = assert_status( + response, status.HTTP_200_OK, list[FileMetaDataGet] + ) assert not error - list_fmds = TypeAdapter(list[FileMetaDataGet]).validate_python(data) + assert list_fmds assert len(list_fmds) == 1 assert list_fmds[0].file_id == file_id assert list_fmds[0].file_size == file.stat().st_size @@ -604,9 +712,11 @@ async def test_search_files( url.update_query(sha256_checksum=_sha256_checksum) response = await client.post(f"{url}") - data, error = await assert_status(response, status.HTTP_200_OK) + list_fmds, error = assert_status( + response, status.HTTP_200_OK, list[FileMetaDataGet] + ) assert not error - list_fmds = TypeAdapter(list[FileMetaDataGet]).validate_python(data) + assert list_fmds assert len(list_fmds) == 1 assert list_fmds[0].file_id == file_id assert list_fmds[0].file_size == file.stat().st_size @@ -624,7 +734,8 @@ async def test_search_files( if search_startswith or search_sha256_checksum: response = await client.post(f"{url}") - data, error = await assert_status(response, status.HTTP_200_OK) + list_fmds, error = assert_status( + response, status.HTTP_200_OK, list[FileMetaDataGet] + ) assert not error - list_fmds = TypeAdapter(list[FileMetaDataGet]).validate_python(data) assert not list_fmds diff --git a/services/storage/tests/unit/test_models.py b/services/storage/tests/unit/test_models.py index 250b037e5cf..da33d24ad50 100644 --- a/services/storage/tests/unit/test_models.py +++ b/services/storage/tests/unit/test_models.py @@ -1,7 +1,7 @@ import uuid import pytest -from models_library.api_schemas_storage import S3BucketName +from models_library.api_schemas_storage.storage_schemas import S3BucketName from models_library.projects import ProjectID from models_library.projects_nodes_io import NodeID, SimcoreS3FileID, StorageFileID from pydantic import TypeAdapter, ValidationError diff --git a/services/storage/tests/unit/test_resources.py b/services/storage/tests/unit/test_resources.py index d74fedcc437..29aa51a6f72 100644 --- a/services/storage/tests/unit/test_resources.py +++ b/services/storage/tests/unit/test_resources.py @@ -6,7 +6,7 @@ from pathlib import Path import pytest -from simcore_service_storage.resources import storage_resources +from simcore_service_storage.core.resources import storage_resources log = logging.getLogger(__name__) diff --git a/services/storage/tests/unit/test_rpc_handlers_paths.py b/services/storage/tests/unit/test_rpc_handlers_paths.py new file mode 100644 index 00000000000..ee6787b22a3 --- /dev/null +++ 
b/services/storage/tests/unit/test_rpc_handlers_paths.py @@ -0,0 +1,71 @@ +# pylint:disable=no-name-in-module +# pylint:disable=protected-access +# pylint:disable=redefined-outer-name +# pylint:disable=too-many-arguments +# pylint:disable=too-many-positional-arguments +# pylint:disable=unused-argument +# pylint:disable=unused-variable + + +from collections.abc import Awaitable, Callable +from pathlib import Path +from unittest import mock + +import pytest +from faker import Faker +from fastapi import FastAPI +from models_library.projects_nodes_io import LocationID +from models_library.users import UserID +from pytest_mock import MockerFixture +from servicelib.rabbitmq._client_rpc import RabbitMQRPCClient +from servicelib.rabbitmq.rpc_interfaces.storage.paths import compute_path_size +from simcore_service_storage.simcore_s3_dsm import SimcoreS3DataManager + +pytest_simcore_core_services_selection = ["postgres", "rabbit"] +pytest_simcore_ops_services_selection = ["adminer"] + + +@pytest.fixture +async def storage_rabbitmq_rpc_client( + rabbitmq_rpc_client: Callable[[str], Awaitable[RabbitMQRPCClient]], +) -> RabbitMQRPCClient: + rpc_client = await rabbitmq_rpc_client("pytest_storage_rpc_client") + assert rpc_client + return rpc_client + + +@pytest.fixture +async def mock_celery_send_task(mocker: MockerFixture, faker: Faker) -> mock.AsyncMock: + def mocked_send_task(*args, **kwargs): + return faker.uuid4() + + return mocker.patch( + "simcore_service_storage.modules.celery.client.CeleryTaskQueueClient.send_task", + side_effect=mocked_send_task, + ) + + +@pytest.mark.parametrize( + "location_id", + [SimcoreS3DataManager.get_location_id()], + ids=[SimcoreS3DataManager.get_location_name()], + indirect=True, +) +async def test_path_compute_size_calls_in_celery( + initialized_app: FastAPI, + storage_rabbitmq_rpc_client: RabbitMQRPCClient, + location_id: LocationID, + user_id: UserID, + faker: Faker, + mock_celery_send_task: mock.AsyncMock, +): + received, job_id_data = await compute_path_size( + storage_rabbitmq_rpc_client, + user_id=user_id, + product_name=faker.name(), + location_id=location_id, + path=Path(faker.file_path(absolute=False)), + ) + mock_celery_send_task.assert_called_once() + assert received + assert job_id_data diff --git a/services/storage/tests/unit/test_simcore_s3_dsm.py b/services/storage/tests/unit/test_simcore_s3_dsm.py index 41c69355025..fba91a14707 100644 --- a/services/storage/tests/unit/test_simcore_s3_dsm.py +++ b/services/storage/tests/unit/test_simcore_s3_dsm.py @@ -2,21 +2,21 @@ # pylint:disable=redefined-outer-name from collections.abc import Awaitable, Callable -from contextlib import AbstractAsyncContextManager from pathlib import Path import pytest -from aiopg.sa.engine import Engine from faker import Faker -from models_library.api_schemas_storage import FileUploadSchema from models_library.basic_types import SHA256Str -from models_library.projects_nodes_io import SimcoreS3FileID +from models_library.projects import ProjectID +from models_library.projects_nodes_io import NodeID, SimcoreS3FileID from models_library.users import UserID from pydantic import ByteSize, TypeAdapter -from simcore_service_storage import db_file_meta_data +from pytest_simcore.helpers.storage_utils import FileIDDict from simcore_service_storage.models import FileMetaData -from simcore_service_storage.s3 import get_s3_client +from simcore_service_storage.modules.db.file_meta_data import FileMetaDataRepository +from simcore_service_storage.modules.s3 import get_s3_client from 
simcore_service_storage.simcore_s3_dsm import SimcoreS3DataManager +from sqlalchemy.ext.asyncio import AsyncEngine pytest_simcore_core_services_selection = ["postgres"] pytest_simcore_ops_services_selection = ["adminer"] @@ -29,22 +29,32 @@ def file_size() -> ByteSize: @pytest.fixture def mock_copy_transfer_cb() -> Callable[..., None]: - def copy_transfer_cb(total_bytes_copied: int, *, file_name: str) -> None: - ... + def copy_transfer_cb(total_bytes_copied: int, *, file_name: str) -> None: ... return copy_transfer_cb +@pytest.mark.parametrize( + "location_id", + [SimcoreS3DataManager.get_location_id()], + ids=[SimcoreS3DataManager.get_location_name()], + indirect=True, +) async def test__copy_path_s3_s3( simcore_s3_dsm: SimcoreS3DataManager, create_directory_with_files: Callable[ - ..., AbstractAsyncContextManager[FileUploadSchema] + [str, ByteSize, int, int, ProjectID, NodeID], + Awaitable[ + tuple[SimcoreS3FileID, tuple[NodeID, dict[SimcoreS3FileID, FileIDDict]]] + ], ], upload_file: Callable[[ByteSize, str], Awaitable[tuple[Path, SimcoreS3FileID]]], file_size: ByteSize, user_id: UserID, + project_id: ProjectID, + node_id: NodeID, mock_copy_transfer_cb: Callable[..., None], - aiopg_engine: Engine, + sqlalchemy_async_engine: AsyncEngine, ): def _get_dest_file_id(src: SimcoreS3FileID) -> SimcoreS3FileID: return TypeAdapter(SimcoreS3FileID).validate_python( @@ -52,12 +62,13 @@ def _get_dest_file_id(src: SimcoreS3FileID) -> SimcoreS3FileID: ) async def _copy_s3_path(s3_file_id_to_copy: SimcoreS3FileID) -> None: - async with aiopg_engine.acquire() as conn: - exiting_fmd = await db_file_meta_data.get(conn, s3_file_id_to_copy) + existing_fmd = await FileMetaDataRepository.instance( + sqlalchemy_async_engine + ).get(file_id=s3_file_id_to_copy) await simcore_s3_dsm._copy_path_s3_s3( # noqa: SLF001 user_id=user_id, - src_fmd=exiting_fmd, + src_fmd=existing_fmd, dst_file_id=_get_dest_file_id(s3_file_id_to_copy), bytes_transfered_cb=mock_copy_transfer_cb, ) @@ -74,24 +85,23 @@ async def _count_files(s3_file_id: SimcoreS3FileID, expected_count: int) -> None # using directory - FILE_COUNT = 4 + FILE_COUNT = 20 SUBDIR_COUNT = 5 - async with create_directory_with_files( + s3_object, _ = await create_directory_with_files( dir_name="some-random", file_size_in_dir=file_size, subdir_count=SUBDIR_COUNT, file_count=FILE_COUNT, - ) as directory_file_upload: - assert len(directory_file_upload.urls) == 1 - assert directory_file_upload.urls[0].path - s3_object = directory_file_upload.urls[0].path.lstrip("/") + project_id=project_id, + node_id=node_id, + ) - s3_file_id_dir_src = TypeAdapter(SimcoreS3FileID).validate_python(s3_object) - s3_file_id_dir_dst = _get_dest_file_id(s3_file_id_dir_src) + s3_file_id_dir_src = TypeAdapter(SimcoreS3FileID).validate_python(s3_object) + s3_file_id_dir_dst = _get_dest_file_id(s3_file_id_dir_src) - await _count_files(s3_file_id_dir_dst, expected_count=0) - await _copy_s3_path(s3_file_id_dir_src) - await _count_files(s3_file_id_dir_dst, expected_count=FILE_COUNT * SUBDIR_COUNT) + await _count_files(s3_file_id_dir_dst, expected_count=0) + await _copy_s3_path(s3_file_id_dir_src) + await _count_files(s3_file_id_dir_dst, expected_count=FILE_COUNT) # using a single file @@ -99,6 +109,12 @@ async def _count_files(s3_file_id: SimcoreS3FileID, expected_count: int) -> None await _copy_s3_path(simcore_file_id) +@pytest.mark.parametrize( + "location_id", + [SimcoreS3DataManager.get_location_id()], + ids=[SimcoreS3DataManager.get_location_name()], + indirect=True, +) async def 
test_upload_and_search( simcore_s3_dsm: SimcoreS3DataManager, upload_file: Callable[..., Awaitable[tuple[Path, SimcoreS3FileID]]], diff --git a/services/storage/tests/unit/test_simcore_s3_dsm_utils.py b/services/storage/tests/unit/test_simcore_s3_dsm_utils.py index 01869537c08..74c79a8cf36 100644 --- a/services/storage/tests/unit/test_simcore_s3_dsm_utils.py +++ b/services/storage/tests/unit/test_simcore_s3_dsm_utils.py @@ -1,5 +1,5 @@ import pytest -from simcore_service_storage.simcore_s3_dsm_utils import compute_file_id_prefix +from simcore_service_storage.utils.simcore_s3_dsm_utils import compute_file_id_prefix @pytest.mark.parametrize( diff --git a/services/storage/tests/unit/test_utils.py b/services/storage/tests/unit/test_utils.py index 13b132ce045..3ee5d73a85a 100644 --- a/services/storage/tests/unit/test_utils.py +++ b/services/storage/tests/unit/test_utils.py @@ -10,10 +10,10 @@ from pathlib import Path from uuid import uuid4 +import httpx import pytest -from aiohttp import ClientSession from faker import Faker -from models_library.api_schemas_storage import UNDEFINED_SIZE_TYPE +from models_library.api_schemas_storage.storage_schemas import UNDEFINED_SIZE_TYPE from models_library.projects import ProjectID from models_library.projects_nodes_io import NodeID, SimcoreS3FileID from pydantic import ByteSize, HttpUrl, TypeAdapter @@ -21,7 +21,7 @@ from simcore_service_storage.constants import S3_UNDEFINED_OR_EXTERNAL_MULTIPART_ID from simcore_service_storage.models import ETag, FileMetaData, S3BucketName, UploadID from simcore_service_storage.simcore_s3_dsm import SimcoreS3DataManager -from simcore_service_storage.utils import ( +from simcore_service_storage.utils.utils import ( MAX_CHUNK_SIZE, download_to_file_or_raise, is_file_entry_valid, @@ -33,7 +33,7 @@ async def test_download_files(tmp_path: Path, httpbin_base_url: HttpUrl): destination = tmp_path / "data" expected_size = MAX_CHUNK_SIZE * 3 + 1000 - async with ClientSession() as session: + async with httpx.AsyncClient() as session: total_size = await download_to_file_or_raise( session, f"{httpbin_base_url}/bytes/{expected_size}", destination ) diff --git a/services/storage/tests/unit/test_utils_handlers.py b/services/storage/tests/unit/test_utils_handlers.py index cc220ceb3e2..c91b34cb9a8 100644 --- a/services/storage/tests/unit/test_utils_handlers.py +++ b/services/storage/tests/unit/test_utils_handlers.py @@ -1,63 +1,187 @@ +# pylint: disable=protected-access # pylint: disable=redefined-outer-name +# pylint: disable=too-many-arguments # pylint: disable=unused-argument # pylint: disable=unused-variable +from collections.abc import AsyncIterator + +import httpx import pytest -from aiohttp import web -from aiohttp.typedefs import Handler -from aws_library.s3 import S3KeyNotFoundError -from pydantic import BaseModel, ValidationError -from pytest_mock import MockerFixture -from servicelib.aiohttp.aiopg_utils import DBAPIError -from simcore_service_storage.db_access_layer import InvalidFileIdentifierError -from simcore_service_storage.exceptions import ( +from asyncpg import PostgresError +from aws_library.s3._errors import S3AccessError, S3KeyNotFoundError +from fastapi import FastAPI, HTTPException, status +from fastapi.exceptions import RequestValidationError +from httpx import AsyncClient +from pydantic import ValidationError +from pytest_simcore.helpers.httpx_assert_checks import assert_status +from simcore_service_storage.exceptions.errors import ( FileAccessRightError, FileMetaDataNotFoundError, + LinkAlreadyExistsError, 
ProjectAccessRightError, ProjectNotFoundError, ) -from simcore_service_storage.utils_handlers import dsm_exception_handler - - -@pytest.fixture() -async def raising_handler( - mocker: MockerFixture, handler_exception: type[Exception] -) -> Handler: - mock = mocker.patch("aiohttp.typedefs.Handler", autospec=True) - mock.side_effect = handler_exception - return mock +from simcore_service_storage.exceptions.handlers import set_exception_handlers +from simcore_service_storage.modules.datcore_adapter.datcore_adapter_exceptions import ( + DatcoreAdapterTimeoutError, +) +from simcore_service_storage.modules.db.access_layer import InvalidFileIdentifierError @pytest.fixture -def mock_request(mocker: MockerFixture) -> web.Request: - return mocker.patch("aiohttp.web.Request", autospec=True) +def initialized_app() -> FastAPI: + app = FastAPI() + set_exception_handlers(app) + return app -class FakeErrorModel(BaseModel): - dummy: int = 1 +@pytest.fixture +async def client(initialized_app: FastAPI) -> AsyncIterator[AsyncClient]: + async with AsyncClient( + transport=httpx.ASGITransport(app=initialized_app), + base_url="http://test", + headers={"Content-Type": "application/json"}, + ) as client: + yield client @pytest.mark.parametrize( - "handler_exception, expected_web_response", + "exception, status_code", [ - (InvalidFileIdentifierError(identifier="x"), web.HTTPUnprocessableEntity), - (FileMetaDataNotFoundError(file_id="x"), web.HTTPNotFound), - (S3KeyNotFoundError(key="x", bucket="x"), web.HTTPNotFound), - (ProjectNotFoundError(project_id="x"), web.HTTPNotFound), - (FileAccessRightError(file_id="x", access_right="x"), web.HTTPForbidden), - (ProjectAccessRightError(project_id="x", access_right="x"), web.HTTPForbidden), ( - ValidationError.from_exception_data(title="test", line_errors=[]), - web.HTTPUnprocessableEntity, + InvalidFileIdentifierError( + identifier="pytest file identifier", details="pytest details" + ), + status.HTTP_422_UNPROCESSABLE_ENTITY, + ), + ( + FileMetaDataNotFoundError(file_id="pytest file ID"), + status.HTTP_404_NOT_FOUND, + ), + ( + S3KeyNotFoundError(key="pytest key", bucket="pytest bucket"), + status.HTTP_404_NOT_FOUND, + ), + ( + ProjectNotFoundError(project_id="pytest project ID"), + status.HTTP_404_NOT_FOUND, + ), + ( + FileAccessRightError( + access_right="pytest access rights", file_id="pytest file ID" + ), + status.HTTP_403_FORBIDDEN, + ), + ( + ProjectAccessRightError( + access_right="pytest access rights", project_id="pytest project ID" + ), + status.HTTP_403_FORBIDDEN, + ), + ( + LinkAlreadyExistsError(file_id="pytest file ID"), + status.HTTP_422_UNPROCESSABLE_ENTITY, + ), + ( + PostgresError("pytest postgres error"), + status.HTTP_503_SERVICE_UNAVAILABLE, + ), + ( + S3AccessError(), + status.HTTP_503_SERVICE_UNAVAILABLE, + ), + ( + DatcoreAdapterTimeoutError(msg="pytest datcore adapter timeout"), + status.HTTP_504_GATEWAY_TIMEOUT, + ), + ( + NotImplementedError("pytest not implemented error"), + status.HTTP_501_NOT_IMPLEMENTED, ), - (DBAPIError, web.HTTPServiceUnavailable), ], + ids=str, ) -async def test_dsm_exception_handler( - mock_request: web.Request, - raising_handler: Handler, - expected_web_response: type[web.HTTPClientError], +async def test_exception_handlers( + initialized_app: FastAPI, + client: AsyncClient, + exception: Exception, + status_code: int, +): + @initialized_app.get("/test") + async def test_endpoint(): + raise exception + + response = await client.get("/test") + assert_status(response, status_code, None, expected_msg=f"{exception}") + + 
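
The parametrized cases above pin each storage domain error to one HTTP status code, and `set_exception_handlers(app)` is what installs that mapping on the FastAPI app under test. As a rough sketch of how such a registration can look (assumptions: the status map below is only the subset exercised above, and the enveloped `{"error": {"message": ...}}` payload shape is illustrative, not necessarily what this PR's `set_exception_handlers` emits):

from fastapi import FastAPI, Request, status
from fastapi.responses import JSONResponse

from simcore_service_storage.exceptions.errors import (
    FileAccessRightError,
    FileMetaDataNotFoundError,
)


def set_exception_handlers_sketch(app: FastAPI) -> None:
    # map domain errors to the status codes the tests above expect (subset only)
    status_map: dict[type[Exception], int] = {
        FileMetaDataNotFoundError: status.HTTP_404_NOT_FOUND,
        FileAccessRightError: status.HTTP_403_FORBIDDEN,
        NotImplementedError: status.HTTP_501_NOT_IMPLEMENTED,
    }

    def _make_handler(status_code: int):
        async def _handler(_request: Request, exc: Exception) -> JSONResponse:
            # assumed envelope shape: the tests only assert on status code + message text
            return JSONResponse(
                status_code=status_code,
                content={"error": {"message": f"{exc}"}},
            )

        return _handler

    for exc_cls, code in status_map.items():
        app.add_exception_handler(exc_cls, _make_handler(code))

Registered this way, the ASGITransport-backed `client` fixture above receives, e.g., a 404 whose message is the stringified `FileMetaDataNotFoundError`, which is exactly what `assert_status` verifies.
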
+async def test_generic_http_exception_handler( + initialized_app: FastAPI, client: AsyncClient +): + @initialized_app.get("/test") + async def test_endpoint(): + raise HTTPException(status_code=status.HTTP_410_GONE) + + response = await client.get("/test") + assert_status(response, status.HTTP_410_GONE, None, expected_msg="Gone") + + +async def test_request_validation_error_handler( + initialized_app: FastAPI, client: AsyncClient ): - with pytest.raises(expected_web_response): - await dsm_exception_handler(mock_request, raising_handler) + _error_msg = "pytest request validation error" + + @initialized_app.get("/test") + async def test_endpoint(): + raise RequestValidationError(errors=[_error_msg]) + + response = await client.get("/test") + assert_status( + response, + status.HTTP_422_UNPROCESSABLE_ENTITY, + None, + expected_msg=_error_msg, + ) + assert response.status_code == status.HTTP_422_UNPROCESSABLE_ENTITY + + +async def test_validation_error_handler(initialized_app: FastAPI, client: AsyncClient): + _error_msg = "pytest request validation error" + + @initialized_app.get("/test") + async def test_endpoint(): + raise ValidationError.from_exception_data( + _error_msg, + line_errors=[], + ) + + response = await client.get("/test") + assert_status( + response, + status.HTTP_500_INTERNAL_SERVER_ERROR, + None, + expected_msg=f"0 validation errors for {_error_msg}", + ) + + +@pytest.mark.xfail( + reason="Generic exception handler is not working as expected as shown in https://github.com/ITISFoundation/osparc-simcore/blob/5732a12e07e63d5ce55010ede9b9ab543bb9b278/packages/service-library/tests/fastapi/test_exceptions_utils.py" +) +async def test_generic_exception_handler(initialized_app: FastAPI, client: AsyncClient): + _error_msg = "Generic pytest exception" + + @initialized_app.get("/test") + async def test_endpoint(): + raise Exception( # pylint: disable=broad-exception-raised # noqa: TRY002 + _error_msg + ) + + response = await client.get("/test") + assert_status( + response, + status.HTTP_500_INTERNAL_SERVER_ERROR, + None, + expected_msg=_error_msg, + ) diff --git a/services/web/server/VERSION b/services/web/server/VERSION index c5d4cee36a1..bf54d53ec26 100644 --- a/services/web/server/VERSION +++ b/services/web/server/VERSION @@ -1 +1 @@ -0.51.0 +0.61.4 diff --git a/services/web/server/docker/boot.sh b/services/web/server/docker/boot.sh index 08a812f7bb0..6b42600e91a 100755 --- a/services/web/server/docker/boot.sh +++ b/services/web/server/docker/boot.sh @@ -19,7 +19,7 @@ if [ "${SC_BUILD_TARGET}" = "development" ]; then command -v python | sed 's/^/ /' cd services/web/server - uv pip --quiet --no-cache-dir sync requirements/dev.txt + uv pip --quiet sync requirements/dev.txt cd - echo "$INFO" "PIP :" uv pip list @@ -31,7 +31,7 @@ fi if [ "${SC_BOOT_MODE}" = "debug" ]; then # NOTE: production does NOT pre-installs debugpy - uv pip install --no-cache-dir debugpy + uv pip install debugpy fi APP_LOG_LEVEL=${WEBSERVER_LOGLEVEL:-${LOG_LEVEL:-${LOGLEVEL:-INFO}}} diff --git a/services/web/server/requirements/_base.in b/services/web/server/requirements/_base.in index caf883fc166..aecaa5793bb 100644 --- a/services/web/server/requirements/_base.in +++ b/services/web/server/requirements/_base.in @@ -32,10 +32,11 @@ aiosmtplib # email asyncpg # db captcha cryptography # security +deepdiff[optimize] # diffs data-structures faker # Only used in dev-mode for proof-of-concepts gunicorn[setproctitle] +httpx jinja_app_loader # email -json2html jsondiff msgpack openpyxl # excel diff --git 
a/services/web/server/requirements/_base.txt b/services/web/server/requirements/_base.txt index f35f5ab6212..7cf4d029ed9 100644 --- a/services/web/server/requirements/_base.txt +++ b/services/web/server/requirements/_base.txt @@ -88,6 +88,7 @@ anyio==4.3.0 # via # fast-depends # faststream + # httpx appdirs==1.4.4 # via pint arrow==1.2.3 @@ -145,6 +146,8 @@ certifi==2023.7.22 # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/simcore-sdk/requirements/../../../requirements/constraints.txt # -c requirements/../../../../requirements/constraints.txt + # httpcore + # httpx # requests cffi==1.17.1 # via cryptography @@ -186,6 +189,8 @@ cryptography==41.0.7 # -c requirements/../../../../requirements/constraints.txt # -r requirements/_base.in # aiohttp-session +deepdiff==8.1.1 + # via -r requirements/_base.in deprecated==1.2.14 # via # opentelemetry-api @@ -225,10 +230,46 @@ grpcio==1.66.0 # via opentelemetry-exporter-otlp-proto-grpc gunicorn==23.0.0 # via -r requirements/_base.in +h11==0.14.0 + # via httpcore +httpcore==1.0.7 + # via httpx +httpx==0.28.1 + # via + # -c requirements/../../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/simcore-sdk/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../requirements/constraints.txt + # -r requirements/_base.in idna==3.3 # via # anyio # email-validator + # httpx # requests # yarl importlib-metadata==8.0.0 @@ -267,8 +308,6 @@ jinja2==3.1.2 # -c requirements/../../../../requirements/constraints.txt # aiohttp-jinja2 # swagger-ui-py -json2html==1.3.0 - # via -r requirements/_base.in jsondiff==2.0.0 # via -r requirements/_base.in jsonschema==3.2.0 @@ -416,6 +455,8 @@ opentelemetry-util-http==0.48b0 # opentelemetry-instrumentation-aiohttp-client # opentelemetry-instrumentation-aiohttp-server # opentelemetry-instrumentation-requests +orderly-set==5.2.3 + # via deepdiff orjson==3.10.0 # via # -c requirements/../../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -465,6 +506,7 @@ orjson==3.10.0 # -r requirements/../../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/_base.in + # deepdiff packaging==24.1 # via # -r requirements/../../../../packages/simcore-sdk/requirements/_base.in @@ -499,6 +541,8 @@ pycountry==23.12.11 # 
via -r requirements/_base.in pycparser==2.21 # via cffi +pycryptodome==3.21.0 + # via stream-zip pydantic==2.10.2 # via # -c requirements/../../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -585,6 +629,34 @@ pydantic-extra-types==2.9.0 # -r requirements/../../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in pydantic-settings==2.5.2 # via + # -c requirements/../../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/simcore-sdk/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../requirements/constraints.txt # -r requirements/../../../../packages/models-library/requirements/_base.in # -r requirements/../../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in @@ -742,6 +814,10 @@ sqlalchemy==1.4.47 # -r requirements/../../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/_base.in # aiopg # alembic +stream-zip==0.0.83 + # via + # -r requirements/../../../../packages/service-library/requirements/_base.in + # -r requirements/../../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in swagger-ui-py==23.9.23 # via -r requirements/_base.in tenacity==8.5.0 diff --git a/services/web/server/requirements/_test.in b/services/web/server/requirements/_test.in index 22ed8e423bc..368a9d03f7d 100644 --- a/services/web/server/requirements/_test.in +++ b/services/web/server/requirements/_test.in @@ -15,6 +15,8 @@ click coverage docker Faker +fastapi[standard] +fastapi-pagination flaky hypothesis jsonref @@ -38,6 +40,7 @@ pytest-sugar pytest-xdist python-dotenv redis +respx sqlalchemy[mypy] # adds Mypy / Pep-484 Support for ORM Mappings SEE https://docs.sqlalchemy.org/en/20/orm/extensions/mypy.html tenacity types-aiofiles diff --git a/services/web/server/requirements/_test.txt b/services/web/server/requirements/_test.txt index 6beb543d3f8..256b7662b20 100644 --- a/services/web/server/requirements/_test.txt +++ b/services/web/server/requirements/_test.txt @@ -14,6 +14,16 @@ alembic==1.8.1 # via # -c requirements/_base.txt # -r requirements/_test.in +annotated-types==0.7.0 + # via + # -c requirements/_base.txt + # pydantic +anyio==4.3.0 + # via + # -c requirements/_base.txt + # httpx + # starlette + # watchfiles async-timeout==4.0.3 # via # -c requirements/_base.txt @@ -31,10 +41,13 @@ attrs==21.4.0 # hypothesis # jsonschema # pytest-docker + # 
referencing certifi==2023.7.22 # via # -c requirements/../../../../requirements/constraints.txt # -c requirements/_base.txt + # httpcore + # httpx # requests charset-normalizer==2.0.12 # via @@ -45,18 +58,34 @@ click==8.1.3 # via # -c requirements/_base.txt # -r requirements/_test.in -coverage==7.6.10 + # typer + # uvicorn +coverage==7.6.12 # via # -r requirements/_test.in # pytest-cov +dnspython==2.2.1 + # via + # -c requirements/_base.txt + # email-validator docker==7.1.0 # via -r requirements/_test.in +email-validator==2.2.0 + # via + # -c requirements/_base.txt + # fastapi execnet==2.1.1 # via pytest-xdist faker==19.6.1 # via # -c requirements/_base.txt # -r requirements/_test.in +fastapi==0.115.6 + # via -r requirements/_test.in +fastapi-cli==0.0.5 + # via fastapi +fastapi-pagination==0.12.34 + # via -r requirements/_test.in flaky==3.8.1 # via -r requirements/_test.in frozenlist==1.4.1 @@ -68,6 +97,23 @@ greenlet==2.0.2 # via # -c requirements/_base.txt # sqlalchemy +h11==0.14.0 + # via + # -c requirements/_base.txt + # httpcore + # uvicorn +httpcore==1.0.7 + # via + # -c requirements/_base.txt + # httpx +httptools==0.6.4 + # via uvicorn +httpx==0.28.1 + # via + # -c requirements/../../../../requirements/constraints.txt + # -c requirements/_base.txt + # fastapi + # respx hypothesis==6.91.0 # via -r requirements/_test.in icdiff==2.0.7 @@ -75,10 +121,18 @@ icdiff==2.0.7 idna==3.3 # via # -c requirements/_base.txt + # anyio + # email-validator + # httpx # requests # yarl iniconfig==2.0.0 # via pytest +jinja2==3.1.2 + # via + # -c requirements/../../../../requirements/constraints.txt + # -c requirements/_base.txt + # fastapi jsonref==1.1.0 # via -r requirements/_test.in jsonschema==3.2.0 @@ -92,16 +146,25 @@ mako==1.2.2 # -c requirements/../../../../requirements/constraints.txt # -c requirements/_base.txt # alembic +markdown-it-py==3.0.0 + # via + # -c requirements/_base.txt + # rich markupsafe==2.1.1 # via # -c requirements/_base.txt + # jinja2 # mako +mdurl==0.1.2 + # via + # -c requirements/_base.txt + # markdown-it-py multidict==6.1.0 # via # -c requirements/_base.txt # aiohttp # yarl -mypy==1.14.1 +mypy==1.15.0 # via sqlalchemy mypy-extensions==1.0.0 # via mypy @@ -121,11 +184,26 @@ pprintpp==0.4.0 # via pytest-icdiff py-cpuinfo==9.0.0 # via pytest-benchmark +pydantic==2.10.2 + # via + # -c requirements/../../../../requirements/constraints.txt + # -c requirements/_base.txt + # fastapi + # fastapi-pagination +pydantic-core==2.27.1 + # via + # -c requirements/_base.txt + # pydantic +pygments==2.15.1 + # via + # -c requirements/_base.txt + # rich pyrsistent==0.18.1 # via # -c requirements/_base.txt # jsonschema -pytest==8.3.4 + # referencing +pytest==8.3.5 # via # -r requirements/_test.in # pytest-aiohttp @@ -149,7 +227,7 @@ pytest-benchmark==5.1.0 # via -r requirements/_test.in pytest-cov==6.0.0 # via -r requirements/_test.in -pytest-docker==3.1.1 +pytest-docker==3.2.0 # via -r requirements/_test.in pytest-icdiff==0.9 # via -r requirements/_test.in @@ -171,30 +249,52 @@ python-dotenv==1.0.1 # via # -c requirements/_base.txt # -r requirements/_test.in + # uvicorn +python-multipart==0.0.20 + # via fastapi pyyaml==6.0.1 # via # -c requirements/../../../../requirements/constraints.txt # -c requirements/_base.txt # openapi-spec-validator + # uvicorn redis==5.2.1 # via # -c requirements/../../../../requirements/constraints.txt # -c requirements/_base.txt # -r requirements/_test.in +referencing==0.8.11 + # via + # -c requirements/../../../../requirements/constraints.txt + # types-jsonschema 
requests==2.32.2 # via # -c requirements/_base.txt # docker +respx==0.22.0 + # via -r requirements/_test.in +rich==13.4.2 + # via + # -c requirements/_base.txt + # typer setuptools==69.1.1 # via # -c requirements/_base.txt # jsonschema # openapi-spec-validator +shellingham==1.5.4 + # via + # -c requirements/_base.txt + # typer six==1.16.0 # via # -c requirements/_base.txt # jsonschema # python-dateutil +sniffio==1.3.1 + # via + # -c requirements/_base.txt + # anyio sortedcontainers==2.4.0 # via hypothesis sqlalchemy==1.4.47 @@ -205,15 +305,23 @@ sqlalchemy==1.4.47 # alembic sqlalchemy2-stubs==0.0.2a38 # via sqlalchemy +starlette==0.41.3 + # via + # -c requirements/../../../../requirements/constraints.txt + # fastapi tenacity==8.5.0 # via # -c requirements/_base.txt # -r requirements/_test.in termcolor==2.5.0 # via pytest-sugar +typer==0.12.3 + # via + # -c requirements/_base.txt + # fastapi-cli types-aiofiles==24.1.0.20241221 # via -r requirements/_test.in -types-jsonschema==4.17.0.10 +types-jsonschema==4.23.0.20241208 # via -r requirements/_test.in types-openpyxl==3.1.5.20241225 # via -r requirements/_test.in @@ -225,17 +333,33 @@ typing-extensions==4.12.2 # via # -c requirements/_base.txt # asyncpg-stubs + # fastapi + # fastapi-pagination # mypy + # pydantic + # pydantic-core # sqlalchemy2-stubs + # typer urllib3==2.2.3 # via # -c requirements/../../../../requirements/constraints.txt # -c requirements/_base.txt # docker # requests -websockets==14.2 - # via -r requirements/_test.in +uvicorn==0.34.0 + # via + # fastapi + # fastapi-cli +uvloop==0.21.0 + # via uvicorn +watchfiles==1.0.4 + # via uvicorn +websockets==15.0 + # via + # -r requirements/_test.in + # uvicorn yarl==1.9.4 # via # -c requirements/_base.txt # aiohttp + # referencing diff --git a/services/web/server/requirements/_tools.txt b/services/web/server/requirements/_tools.txt index 44d0a30fe00..731ed3fc72f 100644 --- a/services/web/server/requirements/_tools.txt +++ b/services/web/server/requirements/_tools.txt @@ -1,6 +1,6 @@ astroid==3.3.8 # via pylint -black==24.10.0 +black==25.1.0 # via -r requirements/../../../../requirements/devenv.txt build==1.2.2.post1 # via pip-tools @@ -20,17 +20,17 @@ distlib==0.3.9 # via virtualenv filelock==3.17.0 # via virtualenv -identify==2.6.6 +identify==2.6.8 # via pre-commit inotify==0.2.10 # via -r requirements/_tools.in -isort==5.13.2 +isort==6.0.1 # via # -r requirements/../../../../requirements/devenv.txt # pylint mccabe==0.7.0 # via pylint -mypy==1.14.1 +mypy==1.15.0 # via # -c requirements/_test.txt # -r requirements/../../../../requirements/devenv.txt @@ -51,7 +51,7 @@ packaging==24.1 # build pathspec==0.12.1 # via black -pip==25.0 +pip==25.0.1 # via pip-tools pip-tools==7.4.1 # via -r requirements/../../../../requirements/devenv.txt @@ -62,7 +62,7 @@ platformdirs==4.3.6 # virtualenv pre-commit==4.1.0 # via -r requirements/../../../../requirements/devenv.txt -pylint==3.3.3 +pylint==3.3.4 # via -r requirements/../../../../requirements/devenv.txt pyproject-hooks==1.2.0 # via @@ -74,7 +74,7 @@ pyyaml==6.0.1 # -c requirements/_base.txt # -c requirements/_test.txt # pre-commit -ruff==0.9.3 +ruff==0.9.9 # via -r requirements/../../../../requirements/devenv.txt setuptools==69.1.1 # via @@ -90,7 +90,7 @@ typing-extensions==4.12.2 # -c requirements/_base.txt # -c requirements/_test.txt # mypy -virtualenv==20.29.1 +virtualenv==20.29.2 # via pre-commit wheel==0.45.1 # via pip-tools diff --git a/services/web/server/setup.cfg b/services/web/server/setup.cfg index 65736bec36e..ccbfa6b24c9 100644 --- 
a/services/web/server/setup.cfg +++ b/services/web/server/setup.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 0.51.0 +current_version = 0.61.4 commit = True message = services/webserver api version: {current_version} → {new_version} tag = False diff --git a/services/web/server/src/simcore_service_webserver/activity/settings.py b/services/web/server/src/simcore_service_webserver/activity/settings.py index bfa727ccb35..f84eede661a 100644 --- a/services/web/server/src/simcore_service_webserver/activity/settings.py +++ b/services/web/server/src/simcore_service_webserver/activity/settings.py @@ -1,7 +1,7 @@ from aiohttp import web from settings_library.prometheus import PrometheusSettings -from .._constants import APP_SETTINGS_KEY +from ..constants import APP_SETTINGS_KEY def get_plugin_settings(app: web.Application) -> PrometheusSettings: diff --git a/services/web/server/src/simcore_service_webserver/announcements/_handlers.py b/services/web/server/src/simcore_service_webserver/announcements/_handlers.py index 596d31a43c2..ca925a39e14 100644 --- a/services/web/server/src/simcore_service_webserver/announcements/_handlers.py +++ b/services/web/server/src/simcore_service_webserver/announcements/_handlers.py @@ -3,7 +3,7 @@ from aiohttp import web from .._meta import api_version_prefix -from ..products.api import get_product_name +from ..products import products_web from ..utils_aiohttp import envelope_json_response from . import _api from ._models import Announcement @@ -14,7 +14,7 @@ @routes.get(f"/{api_version_prefix}/announcements", name="list_announcements") async def list_announcements(request: web.Request) -> web.Response: """Returns non-expired announcements for current product""" - product_name = get_product_name(request) + product_name = products_web.get_product_name(request) announcements: list[Announcement] = await _api.list_announcements( request.app, product_name=product_name ) diff --git a/services/web/server/src/simcore_service_webserver/announcements/plugin.py b/services/web/server/src/simcore_service_webserver/announcements/plugin.py index fd3b3f79b43..88a39940cbf 100644 --- a/services/web/server/src/simcore_service_webserver/announcements/plugin.py +++ b/services/web/server/src/simcore_service_webserver/announcements/plugin.py @@ -7,7 +7,7 @@ from aiohttp import web from servicelib.aiohttp.application_setup import ModuleCategory, app_module_setup -from .._constants import APP_SETTINGS_KEY +from ..constants import APP_SETTINGS_KEY from ..products.plugin import setup_products from ..redis import setup_redis from . 
import _handlers diff --git a/services/web/server/src/simcore_service_webserver/api/v0/openapi.yaml b/services/web/server/src/simcore_service_webserver/api/v0/openapi.yaml index ea389a6869f..6facdd9ddf1 100644 --- a/services/web/server/src/simcore_service_webserver/api/v0/openapi.yaml +++ b/services/web/server/src/simcore_service_webserver/api/v0/openapi.yaml @@ -2,7 +2,7 @@ openapi: 3.1.0 info: title: simcore-service-webserver description: Main service with an interface (http-API & websockets) to the web front-end - version: 0.51.0 + version: 0.61.4 servers: - url: '' description: webserver @@ -253,9 +253,8 @@ paths: post: tags: - auth - summary: Reset Password - description: a non logged-in user requests a password reset - operationId: auth_reset_password + summary: Initiate Reset Password + operationId: initiate_reset_password requestBody: content: application/json: @@ -279,9 +278,8 @@ paths: post: tags: - auth - summary: Reset Password Allowed - description: changes password using a token code without being logged in - operationId: auth_reset_password_allowed + summary: Complete Reset Password + operationId: complete_reset_password parameters: - name: code in: path @@ -303,7 +301,7 @@ paths: schema: $ref: '#/components/schemas/Envelope_Log_' '401': - description: unauthorized reset due to invalid token code + description: Invalid token code content: application/json: schema: @@ -1107,13 +1105,15 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/Envelope_GetCreditPrice_' + $ref: '#/components/schemas/Envelope_CreditPriceGet_' /v0/products/{product_name}: get: tags: - products - po summary: Get Product + description: 'NOTE: `/products/current` is used to define current product w/o + naming it' operationId: get_product parameters: - name: product_name @@ -1133,43 +1133,20 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/Envelope_GetProduct_' - /v0/products/{product_name}/templates/{template_id}: - put: + $ref: '#/components/schemas/Envelope_ProductGet_' + /v0/products/current/ui: + get: tags: - products - - po - summary: Update Product Template - operationId: update_product_template - parameters: - - name: product_name - in: path - required: true - schema: - anyOf: - - type: string - minLength: 1 - maxLength: 100 - - const: current - type: string - title: Product Name - - name: template_id - in: path - required: true - schema: - type: string - minLength: 1 - maxLength: 100 - title: Template Id - requestBody: - required: true - content: - application/json: - schema: - $ref: '#/components/schemas/UpdateProductTemplate' + summary: Get Current Product Ui + operationId: get_current_product_ui responses: - '204': + '200': description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/Envelope_ProductUIGet_' /v0/invitation:generate: post: tags: @@ -1181,7 +1158,7 @@ content: application/json: schema: - $ref: '#/components/schemas/GenerateInvitation' + $ref: '#/components/schemas/InvitationGenerate' required: true responses: '200': @@ -1203,22 +1180,6 @@ application/json: schema: $ref: '#/components/schemas/Envelope_MyProfileGet_' - put: - tags: - - users - summary: Replace My Profile - description: Use PATCH instead - operationId: replace_my_profile - requestBody: - content: - application/json: - schema: - $ref: '#/components/schemas/MyProfilePatch' - required: true - responses: - '204': - description: Successful Response - deprecated: true patch: tags: - users @@ -2048,7 +2009,7 @@ 
content: application/json: schema: - $ref: '#/components/schemas/Page_CatalogServiceGet_' + $ref: '#/components/schemas/Page_CatalogLatestServiceGet_' /v0/catalog/services/{service_key}/{service_version}: get: tags: @@ -2367,7 +2328,8 @@ paths: tags: - catalog - pricing-plans - summary: Retrieve default pricing plan for provided service + summary: Get Service Pricing Plan + description: Retrieve default pricing plan for provided service operationId: get_service_pricing_plan parameters: - name: service_key @@ -2947,7 +2909,8 @@ paths: tags: - folders - workspaces - summary: Move folder to the workspace + summary: Move Folder To Workspace + description: Move folder to the workspace operationId: move_folder_to_workspace parameters: - name: folder_id @@ -3000,7 +2963,8 @@ paths: tags: - long-running-tasks summary: List Tasks - operationId: list_tasks + description: Lists all long running tasks + operationId: get_async_jobs responses: '200': description: Successful Response @@ -3008,12 +2972,37 @@ paths: application/json: schema: $ref: '#/components/schemas/Envelope_list_TaskGet__' + '404': + description: Not Found + content: + application/json: + schema: + $ref: '#/components/schemas/EnvelopedError' + '403': + description: Forbidden + content: + application/json: + schema: + $ref: '#/components/schemas/EnvelopedError' + '410': + description: Gone + content: + application/json: + schema: + $ref: '#/components/schemas/EnvelopedError' + '500': + description: Internal Server Error + content: + application/json: + schema: + $ref: '#/components/schemas/EnvelopedError' /v0/tasks/{task_id}: get: tags: - long-running-tasks summary: Get Task Status - operationId: get_task_status + description: Retrieves the status of a task + operationId: get_async_job_status parameters: - name: task_id in: path @@ -3028,11 +3017,36 @@ paths: application/json: schema: $ref: '#/components/schemas/Envelope_TaskStatus_' + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/EnvelopedError' + description: Not Found + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/EnvelopedError' + description: Forbidden + '410': + content: + application/json: + schema: + $ref: '#/components/schemas/EnvelopedError' + description: Gone + '500': + content: + application/json: + schema: + $ref: '#/components/schemas/EnvelopedError' + description: Internal Server Error delete: tags: - long-running-tasks summary: Cancel And Delete Task - operationId: cancel_and_delete_task + description: Cancels and deletes a task + operationId: abort_async_job parameters: - name: task_id in: path @@ -3043,12 +3057,37 @@ paths: responses: '204': description: Successful Response + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/EnvelopedError' + description: Not Found + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/EnvelopedError' + description: Forbidden + '410': + content: + application/json: + schema: + $ref: '#/components/schemas/EnvelopedError' + description: Gone + '500': + content: + application/json: + schema: + $ref: '#/components/schemas/EnvelopedError' + description: Internal Server Error /v0/tasks/{task_id}/result: get: tags: - long-running-tasks summary: Get Task Result - operationId: get_task_result + description: Retrieves the result of a task + operationId: get_async_job_result parameters: - name: task_id in: path @@ -3062,6 +3101,30 @@ paths: content: application/json: schema: {} + '404': + content: + application/json: + schema: + 
$ref: '#/components/schemas/EnvelopedError' + description: Not Found + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/EnvelopedError' + description: Forbidden + '410': + content: + application/json: + schema: + $ref: '#/components/schemas/EnvelopedError' + description: Gone + '500': + content: + application/json: + schema: + $ref: '#/components/schemas/EnvelopedError' + description: Internal Server Error /v0/catalog/licensed-items: get: tags: @@ -3124,52 +3187,6 @@ paths: schema: $ref: '#/components/schemas/EnvelopedError' description: Bad Request - /v0/catalog/licensed-items/{licensed_item_id}: - get: - tags: - - licenses - - catalog - summary: Get Licensed Item - operationId: get_licensed_item - parameters: - - name: licensed_item_id - in: path - required: true - schema: - type: string - format: uuid - title: Licensed Item Id - responses: - '200': - description: Successful Response - content: - application/json: - schema: - $ref: '#/components/schemas/Envelope_LicensedItemRestGet_' - '404': - content: - application/json: - schema: - $ref: '#/components/schemas/EnvelopedError' - description: Not Found - '403': - content: - application/json: - schema: - $ref: '#/components/schemas/EnvelopedError' - description: Forbidden - '402': - content: - application/json: - schema: - $ref: '#/components/schemas/EnvelopedError' - description: Payment Required - '400': - content: - application/json: - schema: - $ref: '#/components/schemas/EnvelopedError' - description: Bad Request /v0/catalog/licensed-items/{licensed_item_id}:purchase: post: tags: @@ -3192,8 +3209,12 @@ paths: schema: $ref: '#/components/schemas/LicensedItemsBodyParams' responses: - '204': + '200': description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/LicensedItemPurchaseGet' '404': content: application/json: @@ -3448,98 +3469,6 @@ paths: schema: $ref: '#/components/schemas/EnvelopedError' description: Bad Request - /v0/projects/{project_uuid}/checkpoint/{ref_id}/iterations: - get: - tags: - - projects - - metamodeling - summary: List Project Iterations - operationId: list_project_iterations - parameters: - - name: project_uuid - in: path - required: true - schema: - type: string - format: uuid - title: Project Uuid - - name: ref_id - in: path - required: true - schema: - type: integer - title: Ref Id - - name: limit - in: query - required: false - schema: - type: integer - minimum: 1 - exclusiveMaximum: true - default: 20 - title: Limit - maximum: 50 - - name: offset - in: query - required: false - schema: - type: integer - minimum: 0 - default: 0 - title: Offset - responses: - '200': - description: Successful Response - content: - application/json: - schema: - $ref: '#/components/schemas/Page_ProjectIterationItem_' - /v0/projects/{project_uuid}/checkpoint/{ref_id}/iterations/-/results: - get: - tags: - - projects - - metamodeling - summary: List Project Iterations Results - operationId: list_project_iterations_results - parameters: - - name: project_uuid - in: path - required: true - schema: - type: string - format: uuid - title: Project Uuid - - name: ref_id - in: path - required: true - schema: - type: integer - title: Ref Id - - name: limit - in: query - required: false - schema: - type: integer - minimum: 1 - exclusiveMaximum: true - default: 20 - title: Limit - maximum: 50 - - name: offset - in: query - required: false - schema: - type: integer - minimum: 0 - default: 0 - title: Offset - responses: - '200': - description: Successful Response - 
content: - application/json: - schema: - $ref: '#/components/schemas/Page_ProjectIterationResultItem_' /v0/services: get: tags: @@ -3694,7 +3623,10 @@ paths: post: tags: - projects - summary: Creates a new project or copies an existing one + summary: Create Project + description: 'Creates a new project or copies an existing one. NOTE: implemented + as a long running task, i.e. requires polling `status_href` (HTTP_200_OK) + to get status and `result_href` (HTTP_201_CREATED) to get created project' operationId: create_project parameters: - name: x_simcore_user_agent @@ -3764,17 +3696,59 @@ paths: - $ref: '#/components/schemas/ProjectCopyOverride' title: ' Body' responses: - '201': + '202': description: Successful Response content: application/json: schema: $ref: '#/components/schemas/Envelope_TaskGet_' - get: - tags: - - projects - summary: List Projects - operationId: list_projects + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/EnvelopedError' + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/EnvelopedError' + description: Not Found + '409': + content: + application/json: + schema: + $ref: '#/components/schemas/EnvelopedError' + description: Conflict + '503': + content: + application/json: + schema: + $ref: '#/components/schemas/EnvelopedError' + description: Service Unavailable + '422': + content: + application/json: + schema: + $ref: '#/components/schemas/EnvelopedError' + description: Unprocessable Entity + '400': + content: + application/json: + schema: + $ref: '#/components/schemas/EnvelopedError' + description: Bad Request + '402': + content: + application/json: + schema: + $ref: '#/components/schemas/EnvelopedError' + description: Payment Required + get: + tags: + - projects + summary: List Projects + operationId: list_projects parameters: - name: type in: query @@ -3857,6 +3831,48 @@ paths: application/json: schema: $ref: '#/components/schemas/Page_ProjectListItem_' + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/EnvelopedError' + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/EnvelopedError' + description: Not Found + '409': + content: + application/json: + schema: + $ref: '#/components/schemas/EnvelopedError' + description: Conflict + '503': + content: + application/json: + schema: + $ref: '#/components/schemas/EnvelopedError' + description: Service Unavailable + '422': + content: + application/json: + schema: + $ref: '#/components/schemas/EnvelopedError' + description: Unprocessable Entity + '400': + content: + application/json: + schema: + $ref: '#/components/schemas/EnvelopedError' + description: Bad Request + '402': + content: + application/json: + schema: + $ref: '#/components/schemas/EnvelopedError' + description: Payment Required /v0/projects/active: get: tags: @@ -3877,6 +3893,48 @@ paths: application/json: schema: $ref: '#/components/schemas/Envelope_ProjectGet_' + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/EnvelopedError' + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/EnvelopedError' + description: Not Found + '409': + content: + application/json: + schema: + $ref: '#/components/schemas/EnvelopedError' + description: Conflict + '503': + content: + application/json: + schema: + $ref: '#/components/schemas/EnvelopedError' + description: Service Unavailable + '422': + content: + application/json: + schema: + 
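The note on `create_project` above describes the long-running pattern used throughout this spec: the POST answers 202 Accepted with an enveloped TaskGet, whose `status_href` is polled until the task reports done and whose `result_href` then yields the created project (201). A minimal client sketch of that flow; the base URL, the authenticated session, the request body and the `done` flag read from the task-status payload are assumptions for illustration, not part of this spec:

    import time
    from urllib.parse import urljoin

    import requests

    BASE_URL = "http://localhost:9081"  # assumed deployment
    session = requests.Session()        # assumed to be already logged in

    def unwrap(resp: requests.Response) -> dict:
        # every response body is enveloped as {"data": ..., "error": ...}
        resp.raise_for_status()
        return resp.json()["data"]

    # 202 Accepted: the enveloped data is a TaskGet carrying the hrefs
    task = unwrap(session.post(f"{BASE_URL}/v0/projects", json={"name": "demo"}))

    # poll the task status until it reports done (urljoin tolerates relative hrefs)
    while not unwrap(session.get(urljoin(BASE_URL, task["status_href"]))).get("done"):
        time.sleep(1.0)

    project = unwrap(session.get(urljoin(BASE_URL, task["result_href"])))  # HTTP 201
    print(project["uuid"])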
$ref: '#/components/schemas/EnvelopedError' + description: Unprocessable Entity + '400': + content: + application/json: + schema: + $ref: '#/components/schemas/EnvelopedError' + description: Bad Request + '402': + content: + application/json: + schema: + $ref: '#/components/schemas/EnvelopedError' + description: Payment Required /v0/projects/{project_id}: get: tags: @@ -3898,6 +3956,48 @@ paths: application/json: schema: $ref: '#/components/schemas/Envelope_ProjectGet_' + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/EnvelopedError' + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/EnvelopedError' + description: Not Found + '409': + content: + application/json: + schema: + $ref: '#/components/schemas/EnvelopedError' + description: Conflict + '503': + content: + application/json: + schema: + $ref: '#/components/schemas/EnvelopedError' + description: Service Unavailable + '422': + content: + application/json: + schema: + $ref: '#/components/schemas/EnvelopedError' + description: Unprocessable Entity + '400': + content: + application/json: + schema: + $ref: '#/components/schemas/EnvelopedError' + description: Bad Request + '402': + content: + application/json: + schema: + $ref: '#/components/schemas/EnvelopedError' + description: Payment Required patch: tags: - projects @@ -3920,6 +4020,48 @@ paths: responses: '204': description: Successful Response + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/EnvelopedError' + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/EnvelopedError' + description: Not Found + '409': + content: + application/json: + schema: + $ref: '#/components/schemas/EnvelopedError' + description: Conflict + '503': + content: + application/json: + schema: + $ref: '#/components/schemas/EnvelopedError' + description: Service Unavailable + '422': + content: + application/json: + schema: + $ref: '#/components/schemas/EnvelopedError' + description: Unprocessable Entity + '400': + content: + application/json: + schema: + $ref: '#/components/schemas/EnvelopedError' + description: Bad Request + '402': + content: + application/json: + schema: + $ref: '#/components/schemas/EnvelopedError' + description: Payment Required delete: tags: - projects @@ -3936,6 +4078,48 @@ paths: responses: '204': description: Successful Response + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/EnvelopedError' + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/EnvelopedError' + description: Not Found + '409': + content: + application/json: + schema: + $ref: '#/components/schemas/EnvelopedError' + description: Conflict + '503': + content: + application/json: + schema: + $ref: '#/components/schemas/EnvelopedError' + description: Service Unavailable + '422': + content: + application/json: + schema: + $ref: '#/components/schemas/EnvelopedError' + description: Unprocessable Entity + '400': + content: + application/json: + schema: + $ref: '#/components/schemas/EnvelopedError' + description: Bad Request + '402': + content: + application/json: + schema: + $ref: '#/components/schemas/EnvelopedError' + description: Payment Required /v0/projects/{project_id}:clone: post: tags: @@ -3957,11 +4141,53 @@ paths: application/json: schema: $ref: '#/components/schemas/Envelope_TaskGet_' - /v0/projects:search: - get: - tags: - - projects - summary: List Projects Full Search + 
'403': + content: + application/json: + schema: + $ref: '#/components/schemas/EnvelopedError' + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/EnvelopedError' + description: Not Found + '409': + content: + application/json: + schema: + $ref: '#/components/schemas/EnvelopedError' + description: Conflict + '503': + content: + application/json: + schema: + $ref: '#/components/schemas/EnvelopedError' + description: Service Unavailable + '422': + content: + application/json: + schema: + $ref: '#/components/schemas/EnvelopedError' + description: Unprocessable Entity + '400': + content: + application/json: + schema: + $ref: '#/components/schemas/EnvelopedError' + description: Bad Request + '402': + content: + application/json: + schema: + $ref: '#/components/schemas/EnvelopedError' + description: Payment Required + /v0/projects:search: + get: + tags: + - projects + summary: List Projects Full Search operationId: list_projects_full_search parameters: - name: filters @@ -4020,6 +4246,48 @@ paths: application/json: schema: $ref: '#/components/schemas/Page_ProjectListItem_' + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/EnvelopedError' + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/EnvelopedError' + description: Not Found + '409': + content: + application/json: + schema: + $ref: '#/components/schemas/EnvelopedError' + description: Conflict + '503': + content: + application/json: + schema: + $ref: '#/components/schemas/EnvelopedError' + description: Service Unavailable + '422': + content: + application/json: + schema: + $ref: '#/components/schemas/EnvelopedError' + description: Unprocessable Entity + '400': + content: + application/json: + schema: + $ref: '#/components/schemas/EnvelopedError' + description: Bad Request + '402': + content: + application/json: + schema: + $ref: '#/components/schemas/EnvelopedError' + description: Payment Required /v0/projects/{project_id}/inactivity: get: tags: @@ -4041,12 +4309,55 @@ paths: application/json: schema: $ref: '#/components/schemas/Envelope_GetProjectInactivityResponse_' + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/EnvelopedError' + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/EnvelopedError' + description: Not Found + '409': + content: + application/json: + schema: + $ref: '#/components/schemas/EnvelopedError' + description: Conflict + '503': + content: + application/json: + schema: + $ref: '#/components/schemas/EnvelopedError' + description: Service Unavailable + '422': + content: + application/json: + schema: + $ref: '#/components/schemas/EnvelopedError' + description: Unprocessable Entity + '400': + content: + application/json: + schema: + $ref: '#/components/schemas/EnvelopedError' + description: Bad Request + '402': + content: + application/json: + schema: + $ref: '#/components/schemas/EnvelopedError' + description: Payment Required /v0/projects/{project_uuid}/comments: post: tags: - projects - comments - summary: Create a new comment for a specific project. The request body should + summary: Create Project Comment + description: Create a new comment for a specific project. The request body should contain the comment contents and user information. operationId: create_project_comment parameters: @@ -4074,7 +4385,8 @@ paths: tags: - projects - comments - summary: Retrieve all comments for a specific project. 
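Almost every route in this revision now declares its 4xx/5xx bodies as `EnvelopedError`, which makes a single failure handler possible on the client side. A sketch of such a helper, assuming the `{"data": ..., "error": ...}` envelope and that the error object carries the `message` and `status` fields of ErrorGet (both appear later in this file); everything else here is made up for illustration:

    import requests

    class ApiError(Exception):
        """Raised when the webserver answers with an EnvelopedError body."""

        def __init__(self, status: int, message: str) -> None:
            super().__init__(f"[{status}] {message}")
            self.status = status

    def unwrap(resp: requests.Response):
        body = resp.json() if resp.content else {}
        if not resp.ok:
            error = body.get("error") or {}
            raise ApiError(
                error.get("status", resp.status_code),
                error.get("message", resp.reason),
            )
        return body.get("data")

Because 400, 402, 403, 404, 409, 422 and 503 all share the same schema here, the helper needs no per-status casing.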
+ summary: List Project Comments + description: Retrieve all comments for a specific project. operationId: list_project_comments parameters: - name: project_uuid @@ -4111,7 +4423,8 @@ paths: tags: - projects - comments - summary: Update the contents of a specific comment for a project. The request + summary: Update Project Comment + description: Update the contents of a specific comment for a project. The request body should contain the updated comment contents. operationId: update_project_comment parameters: @@ -4147,7 +4460,8 @@ paths: tags: - projects - comments - summary: Delete a specific comment associated with a project. + summary: Delete Project Comment + description: Delete a specific comment associated with a project. operationId: delete_project_comment parameters: - name: project_uuid @@ -4172,7 +4486,8 @@ paths: tags: - projects - comments - summary: Retrieve a specific comment by its ID within a project. + summary: Get Project Comment + description: Retrieve a specific comment by its ID within a project. operationId: get_project_comment parameters: - name: project_uuid @@ -4202,7 +4517,8 @@ paths: tags: - projects - folders - summary: Move project to the folder + summary: Replace Project Folder + description: Move project to the folder operationId: replace_project_folder parameters: - name: project_id @@ -4693,12 +5009,35 @@ paths: application/json: schema: $ref: '#/components/schemas/Envelope_dict_Annotated_str__StringConstraints___ImageResources__' + /v0/projects/{project_id}/nodes/-/services: + get: + tags: + - projects + - nodes + summary: Get Project Services + operationId: get_project_services + parameters: + - name: project_id + in: path + required: true + schema: + type: string + format: uuid + title: Project Id + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/Envelope_ProjectNodeServicesGet_' /v0/projects/{project_id}/nodes/-/services:access: get: tags: - projects - nodes - summary: Check whether provided group has access to the project services + summary: Get Project Services Access For Gid + description: Check whether provided group has access to the project services operationId: get_project_services_access_for_gid parameters: - name: project_id @@ -4728,7 +5067,8 @@ paths: tags: - projects - nodes - summary: Lists all previews in the node's project + summary: List Project Nodes Previews + description: Lists all previews in the node's project operationId: list_project_nodes_previews parameters: - name: project_id @@ -4750,7 +5090,8 @@ paths: tags: - projects - nodes - summary: Gets a give node's preview + summary: Get Project Node Preview + description: Gets a given node's preview operationId: get_project_node_preview parameters: - name: project_id @@ -4780,7 +5121,8 @@ get: tags: - projects - summary: Get currently connected pricing unit to the project node. + summary: Get Project Node Pricing Unit + description: Get currently connected pricing unit to the project node. 
operationId: get_project_node_pricing_unit parameters: - name: project_id @@ -4808,8 +5150,9 @@ paths: put: tags: - projects - summary: Connect pricing unit to the project node (Project node can have only - one pricing unit) + summary: Connect Pricing Unit To Project Node + description: Connect pricing unit to the project node (Project node can have + only one pricing unit) operationId: connect_pricing_unit_to_project_node parameters: - name: project_id @@ -5107,7 +5450,8 @@ paths: get: tags: - projects - summary: Get current connected wallet to the project. + summary: Get Project Wallet + description: Get the wallet currently connected to the project. operationId: get_project_wallet parameters: - name: project_id @@ -5128,7 +5472,8 @@ paths: put: tags: - projects - summary: Connect wallet to the project (Project can have only one wallet) + summary: Connect Wallet To Project + description: Connect wallet to the project (Project can have only one wallet) operationId: connect_wallet_to_project parameters: - name: project_id @@ -5193,7 +5538,8 @@ paths: tags: - projects - workspaces - summary: Move project to the workspace + summary: Move Project To Workspace + description: Move project to the workspace operationId: move_project_to_workspace parameters: - name: project_id @@ -5236,8 +5582,9 @@ paths: get: tags: - usage - summary: Retrieve finished and currently running user services (user and product - are taken from context, optionally wallet_id parameter might be provided). + summary: List Resource Usage Services + description: Retrieve finished and currently running user services (user and + product are taken from context, optionally wallet_id parameter might be provided). operationId: list_resource_usage_services parameters: - name: order_by @@ -5294,7 +5641,8 @@ paths: get: tags: - usage - summary: Used credits based on aggregate by type, currently supported `services`. + summary: List Osparc Credits Aggregated Usages + description: Used credits aggregated by type; currently only `services` is supported. (user and product are taken from context, optionally wallet_id parameter might be provided). operationId: list_osparc_credits_aggregated_usages @@ -5340,7 +5688,8 @@ paths: get: tags: - usage - summary: Redirects to download CSV link. CSV obtains finished and currently + summary: Export Resource Usage Services + description: Redirects to a CSV download link. The CSV contains finished and currently running user services (user and product are taken from context, optionally wallet_id parameter might be provided). 
operationId: export_resource_usage_services @@ -5730,9 +6079,9 @@ paths: get: tags: - storage - summary: Get available storage locations - description: Returns the list of available storage locations - operationId: get_storage_locations + summary: List Storage Locations + description: Get available storage locations + operationId: list_storage_locations responses: '200': description: Successful Response @@ -5740,17 +6089,16 @@ paths: application/json: schema: items: - $ref: '#/components/schemas/DatasetMetaData' + $ref: '#/components/schemas/FileLocation' type: array - title: Response Get Storage Locations - /v0/storage/locations/{location_id}:sync: - post: + title: Response List Storage Locations + /v0/storage/locations/{location_id}/paths: + get: tags: - storage - summary: Manually triggers the synchronisation of the file meta data table in - the database - description: Returns an object containing added, changed and removed paths - operationId: synchronise_meta_data_table + summary: List Storage Paths + description: Lists the files/directories in WorkingDirectory + operationId: list_storage_paths parameters: - name: location_id in: path @@ -5758,34 +6106,75 @@ paths: schema: type: integer title: Location Id - - name: dry_run + - name: size in: query required: false schema: - type: boolean - default: false - title: Dry Run - - name: fire_and_forget + type: integer + minimum: 1 + exclusiveMaximum: true + default: 50 + title: Size + maximum: 1000 + - name: cursor in: query required: false schema: - type: boolean - default: false - title: Fire And Forget + anyOf: + - type: string + - type: 'null' + title: Cursor + - name: fileFilter + in: query + required: false + schema: + anyOf: + - type: string + format: path + - type: 'null' + title: Filefilter responses: '200': description: Successful Response content: application/json: schema: - $ref: '#/components/schemas/Envelope_TableSynchronisation_' + $ref: '#/components/schemas/CursorPage___T_Customized_PathMetaDataGet_' + /v0/storage/locations/{location_id}/paths/{path}:size: + post: + tags: + - storage + summary: Compute Path Size + description: Compute the size of a path + operationId: compute_path_size + parameters: + - name: location_id + in: path + required: true + schema: + type: integer + title: Location Id + - name: path + in: path + required: true + schema: + type: string + format: path + title: Path + responses: + '202': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/Envelope_TaskGet_' /v0/storage/locations/{location_id}/datasets: get: tags: - storage - summary: Get datasets metadata - description: returns all the top level datasets a user has access to - operationId: get_datasets_metadata + summary: List Datasets Metadata + description: Get datasets metadata + operationId: list_datasets_metadata parameters: - name: location_id in: path @@ -5804,9 +6193,8 @@ get: tags: - storage - summary: Get datasets metadata - description: returns all the file meta data a user has access to (uuid_filter - may be used) + summary: Get Files Metadata + description: Get files metadata operationId: get_files_metadata parameters: - name: location_id @@ -5844,9 +6232,9 @@ paths: get: tags: - storage - summary: Get Files Metadata - description: returns all the file meta data inside dataset with dataset_id - operationId: get_files_metadata_dataset + summary: List Dataset Files Metadata + description: Get files metadata inside the dataset + operationId: list_dataset_files_metadata parameters: - name: 
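Unlike the limit/offset listings elsewhere in this file, `list_storage_paths` is cursor-paginated: a `size` of up to 1000 plus an opaque `cursor`, answered by the `CursorPage[PathMetaDataGet]` defined further down, whose `next_page` holds the cursor for the follow-up request. A client sketch under the same assumptions as the earlier snippets (note the page comes back bare, not enveloped):

    import requests

    BASE_URL = "http://localhost:9081"  # assumed
    session = requests.Session()        # assumed to be already logged in

    def iter_storage_paths(location_id: int, file_filter: str | None = None):
        """Yield every PathMetaDataGet item, following next_page until exhausted."""
        cursor = None
        while True:
            params = {"size": 100}
            if cursor:
                params["cursor"] = cursor
            if file_filter:
                params["fileFilter"] = file_filter
            resp = session.get(
                f"{BASE_URL}/v0/storage/locations/{location_id}/paths",
                params=params,
            )
            resp.raise_for_status()
            page = resp.json()
            yield from page["items"]
            cursor = page.get("next_page")
            if cursor is None:
                return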
location_id in: path @@ -5883,8 +6271,7 @@ paths: tags: - storage summary: Get File Metadata - description: returns the file meta data of file_id if user_id has the rights - to + description: Get File Metadata operationId: get_file_metadata parameters: - name: location_id @@ -5913,8 +6300,8 @@ paths: get: tags: - storage - summary: Returns download link for requested file - description: creates a download file link if user has the rights to + summary: Download File + description: Returns download link for requested file operationId: download_file parameters: - name: location_id @@ -5945,9 +6332,8 @@ paths: put: tags: - storage - summary: Returns upload link - description: creates one or more upload file links if user has the rights to, - expects the client to complete/abort upload + summary: Upload File + description: Returns upload link operationId: upload_file parameters: - name: location_id @@ -5999,8 +6385,8 @@ paths: delete: tags: - storage - summary: Deletes File - description: deletes file if user has the rights to + summary: Delete File + description: Deletes File operationId: delete_file parameters: - name: location_id @@ -6080,8 +6466,8 @@ paths: post: tags: - storage - summary: Check for upload completion - description: Returns state of upload completion + summary: Is Completed Upload File + description: Check for upload completion operationId: is_completed_upload_file parameters: - name: location_id @@ -6109,80 +6495,123 @@ paths: application/json: schema: $ref: '#/components/schemas/Envelope_FileUploadCompleteFutureResponse_' - /v0/trash: - delete: - tags: - - trash - summary: Empty Trash - operationId: empty_trash - responses: - '204': - description: Successful Response - /v0/projects/{project_id}:trash: + /v0/storage/locations/{location_id}/export-data: post: tags: - - trash - - projects - summary: Trash Project - operationId: trash_project + - storage + summary: Export Data + description: Export data + operationId: export_data parameters: - - name: project_id + - name: location_id in: path required: true schema: - type: string - format: uuid - title: Project Id - - name: force - in: query - required: false - schema: - type: boolean - default: false - title: Force + type: integer + title: Location Id + requestBody: + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/DataExportPost' responses: - '204': + '200': description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/Envelope_TaskGet_' '404': - description: Not such a project - '409': - description: Project is in use and cannot be trashed - '503': - description: Trash service error - /v0/projects/{project_id}:untrash: + content: + application/json: + schema: + $ref: '#/components/schemas/EnvelopedError' + description: Not Found + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/EnvelopedError' + description: Forbidden + '410': + content: + application/json: + schema: + $ref: '#/components/schemas/EnvelopedError' + description: Gone + '500': + content: + application/json: + schema: + $ref: '#/components/schemas/EnvelopedError' + description: Internal Server Error + /v0/trash:empty: post: tags: - trash - - projects - summary: Untrash Project - operationId: untrash_project - parameters: - - name: project_id - in: path - required: true - schema: - type: string - format: uuid - title: Project Id + summary: Empty Trash + operationId: empty_trash responses: '204': description: Successful Response - 
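The new `export_data` route is another long-running job: the request body is a `DataExportPost` (a list of `paths`) and the 200 response is an enveloped `TaskGet` that can be tracked through the `/v0/tasks` routes above. A sketch with made-up identifiers; only the route, the body shape and the envelope come from this spec:

    import requests

    BASE_URL = "http://localhost:9081"  # assumed
    session = requests.Session()        # assumed to be already logged in

    payload = {  # DataExportPost: "<project_uuid>/<node_uuid>/<relative path>"
        "paths": [
            "2fe4a353-a236-4c25-b1f0-1e69d4b7a8b3/"
            "7b2e4a11-55a7-4f9e-9f0a-3c1d2e4f5a6b/outputs/result.zip"
        ]
    }
    resp = session.post(f"{BASE_URL}/v0/storage/locations/0/export-data", json=payload)
    resp.raise_for_status()
    task = resp.json()["data"]  # TaskGet: poll status_href exactly as for create_project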
/v0/folders/{folder_id}:trash: + '403': + description: Forbidden + content: + application/json: + schema: + $ref: '#/components/schemas/EnvelopedError' + '404': + description: Not Found + content: + application/json: + schema: + $ref: '#/components/schemas/EnvelopedError' + '409': + description: Conflict + content: + application/json: + schema: + $ref: '#/components/schemas/EnvelopedError' + '503': + description: Service Unavailable + content: + application/json: + schema: + $ref: '#/components/schemas/EnvelopedError' + '422': + description: Unprocessable Entity + content: + application/json: + schema: + $ref: '#/components/schemas/EnvelopedError' + '400': + description: Bad Request + content: + application/json: + schema: + $ref: '#/components/schemas/EnvelopedError' + '402': + description: Payment Required + content: + application/json: + schema: + $ref: '#/components/schemas/EnvelopedError' + /v0/projects/{project_id}:trash: post: tags: - trash - - folders - summary: Trash Folder - operationId: trash_folder + - projects + summary: Trash Project + operationId: trash_project parameters: - - name: folder_id + - name: project_id in: path required: true schema: - type: integer - exclusiveMinimum: true - title: Folder Id - minimum: 0 + type: string + format: uuid + title: Project Id - name: force in: query required: false @@ -6193,47 +6622,123 @@ paths: responses: '204': description: Successful Response + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/EnvelopedError' + description: Forbidden '404': - description: Not such a folder + description: Not such a project + content: + application/json: + schema: + $ref: '#/components/schemas/EnvelopedError' '409': - description: One or more projects in the folder are in use and cannot be - trashed + description: Project is in use and cannot be trashed + content: + application/json: + schema: + $ref: '#/components/schemas/EnvelopedError' '503': description: Trash service error - /v0/folders/{folder_id}:untrash: + content: + application/json: + schema: + $ref: '#/components/schemas/EnvelopedError' + '422': + content: + application/json: + schema: + $ref: '#/components/schemas/EnvelopedError' + description: Unprocessable Entity + '400': + content: + application/json: + schema: + $ref: '#/components/schemas/EnvelopedError' + description: Bad Request + '402': + content: + application/json: + schema: + $ref: '#/components/schemas/EnvelopedError' + description: Payment Required + /v0/projects/{project_id}:untrash: post: tags: - trash - - folders - summary: Untrash Folder - operationId: untrash_folder + - projects + summary: Untrash Project + operationId: untrash_project parameters: - - name: folder_id + - name: project_id in: path required: true schema: - type: integer - exclusiveMinimum: true - title: Folder Id - minimum: 0 + type: string + format: uuid + title: Project Id responses: '204': description: Successful Response - /v0/workspaces/{workspace_id}:trash: + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/EnvelopedError' + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/EnvelopedError' + description: Not Found + '409': + content: + application/json: + schema: + $ref: '#/components/schemas/EnvelopedError' + description: Conflict + '503': + content: + application/json: + schema: + $ref: '#/components/schemas/EnvelopedError' + description: Service Unavailable + '422': + content: + application/json: + schema: + $ref: 
'#/components/schemas/EnvelopedError' + description: Unprocessable Entity + '400': + content: + application/json: + schema: + $ref: '#/components/schemas/EnvelopedError' + description: Bad Request + '402': + content: + application/json: + schema: + $ref: '#/components/schemas/EnvelopedError' + description: Payment Required + /v0/folders/{folder_id}:trash: post: tags: - trash - - workspaces - summary: Trash Workspace - operationId: trash_workspace + - folders + summary: Trash Folder + operationId: trash_folder parameters: - - name: workspace_id + - name: folder_id in: path required: true schema: type: integer exclusiveMinimum: true - title: Workspace Id + title: Folder Id minimum: 0 - name: force in: query @@ -6245,253 +6750,216 @@ paths: responses: '204': description: Successful Response + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/EnvelopedError' + description: Forbidden '404': - description: Not such a workspace + description: Not such a folder '409': - description: One or more projects in the workspace are in use and cannot - be trashed + description: One or more projects in the folder are in use and cannot be + trashed '503': description: Trash service error - /v0/workspaces/{workspace_id}:untrash: + '422': + content: + application/json: + schema: + $ref: '#/components/schemas/EnvelopedError' + description: Unprocessable Entity + '400': + content: + application/json: + schema: + $ref: '#/components/schemas/EnvelopedError' + description: Bad Request + '402': + content: + application/json: + schema: + $ref: '#/components/schemas/EnvelopedError' + description: Payment Required + /v0/folders/{folder_id}:untrash: post: tags: - trash - - workspaces - summary: Untrash Workspace - operationId: untrash_workspace + - folders + summary: Untrash Folder + operationId: untrash_folder parameters: - - name: workspace_id + - name: folder_id in: path required: true schema: type: integer exclusiveMinimum: true - title: Workspace Id + title: Folder Id minimum: 0 responses: '204': description: Successful Response - /v0/repos/projects: - get: - tags: - - repository - summary: List Repos - operationId: list_repos - parameters: - - name: limit - in: query - required: false - schema: - type: integer - minimum: 1 - exclusiveMaximum: true - default: 20 - title: Limit - maximum: 50 - - name: offset - in: query - required: false - schema: - type: integer - minimum: 0 - default: 0 - title: Offset - responses: - '200': - description: Successful Response + '403': content: application/json: schema: - $ref: '#/components/schemas/Page_RepoApiModel_' - /v0/repos/projects/{project_uuid}/checkpoints: - get: - tags: - - repository - summary: List Checkpoints - operationId: list_checkpoints - parameters: - - name: project_uuid - in: path - required: true - schema: - type: string - format: uuid - title: Project Uuid - - name: limit - in: query - required: false - schema: - type: integer - minimum: 1 - exclusiveMaximum: true - default: 20 - title: Limit - maximum: 50 - - name: offset - in: query - required: false - schema: - type: integer - minimum: 0 - default: 0 - title: Offset - responses: - '200': - description: Successful Response + $ref: '#/components/schemas/EnvelopedError' + description: Forbidden + '404': content: application/json: schema: - $ref: '#/components/schemas/Page_CheckpointApiModel_' - post: - tags: - - repository - summary: Create Checkpoint - operationId: create_checkpoint - parameters: - - name: project_uuid - in: path - required: true - schema: - type: string - format: 
uuid - title: Project Uuid - requestBody: - required: true - content: - application/json: - schema: - $ref: '#/components/schemas/CheckpointNew' - responses: - '200': - description: Successful Response + $ref: '#/components/schemas/EnvelopedError' + description: Not Found + '409': + content: + application/json: + schema: + $ref: '#/components/schemas/EnvelopedError' + description: Conflict + '503': + content: + application/json: + schema: + $ref: '#/components/schemas/EnvelopedError' + description: Service Unavailable + '422': + content: + application/json: + schema: + $ref: '#/components/schemas/EnvelopedError' + description: Unprocessable Entity + '400': + content: + application/json: + schema: + $ref: '#/components/schemas/EnvelopedError' + description: Bad Request + '402': content: application/json: schema: - $ref: '#/components/schemas/Envelope_CheckpointApiModel_' - /v0/repos/projects/{project_uuid}/checkpoints/{ref_id}: - get: + $ref: '#/components/schemas/EnvelopedError' + description: Payment Required + /v0/workspaces/{workspace_id}:trash: + post: tags: - - repository - summary: Get Checkpoint - operationId: get_checkpoint + - trash + - workspaces + summary: Trash Workspace + operationId: trash_workspace parameters: - - name: ref_id + - name: workspace_id in: path required: true schema: - anyOf: - - type: integer - - type: string - - const: HEAD - type: string - title: Ref Id - - name: project_uuid - in: path - required: true + type: integer + exclusiveMinimum: true + title: Workspace Id + minimum: 0 + - name: force + in: query + required: false schema: - type: string - format: uuid - title: Project Uuid + type: boolean + default: false + title: Force responses: - '200': + '204': description: Successful Response + '403': content: application/json: schema: - $ref: '#/components/schemas/Envelope_CheckpointApiModel_' - patch: - tags: - - repository - summary: Update Checkpoint - description: Update Checkpoint Annotations - operationId: update_checkpoint - parameters: - - name: ref_id - in: path - required: true - schema: - anyOf: - - type: integer - - type: string - title: Ref Id - - name: project_uuid - in: path - required: true - schema: - type: string - format: uuid - title: Project Uuid - requestBody: - required: true - content: - application/json: - schema: - $ref: '#/components/schemas/CheckpointAnnotations' - responses: - '200': - description: Successful Response + $ref: '#/components/schemas/EnvelopedError' + description: Forbidden + '404': + description: Not such a workspace + '409': + description: One or more projects in the workspace are in use and cannot + be trashed + '503': + description: Trash service error + '422': content: application/json: schema: - $ref: '#/components/schemas/Envelope_CheckpointApiModel_' - /v0/repos/projects/{project_uuid}/checkpoints/{ref_id}/workbench/view: - get: - tags: - - repository - summary: View Project Workbench - operationId: view_project_workbench - parameters: - - name: ref_id - in: path - required: true - schema: - anyOf: - - type: integer - - type: string - title: Ref Id - - name: project_uuid - in: path - required: true - schema: - type: string - format: uuid - title: Project Uuid - responses: - '200': - description: Successful Response + $ref: '#/components/schemas/EnvelopedError' + description: Unprocessable Entity + '400': + content: + application/json: + schema: + $ref: '#/components/schemas/EnvelopedError' + description: Bad Request + '402': content: application/json: schema: - $ref: 
'#/components/schemas/Envelope_WorkbenchViewApiModel_' - /v0/repos/projects/{project_uuid}/checkpoints/{ref_id}:checkout: + $ref: '#/components/schemas/EnvelopedError' + description: Payment Required + /v0/workspaces/{workspace_id}:untrash: post: tags: - - repository - summary: Checkout - operationId: checkout + - trash + - workspaces + summary: Untrash Workspace + operationId: untrash_workspace parameters: - - name: ref_id - in: path - required: true - schema: - anyOf: - - type: integer - - type: string - title: Ref Id - - name: project_uuid + - name: workspace_id in: path required: true schema: - type: string - format: uuid - title: Project Uuid + type: integer + exclusiveMinimum: true + title: Workspace Id + minimum: 0 responses: - '200': + '204': description: Successful Response + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/EnvelopedError' + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/EnvelopedError' + description: Not Found + '409': + content: + application/json: + schema: + $ref: '#/components/schemas/EnvelopedError' + description: Conflict + '503': + content: + application/json: + schema: + $ref: '#/components/schemas/EnvelopedError' + description: Service Unavailable + '422': + content: + application/json: + schema: + $ref: '#/components/schemas/EnvelopedError' + description: Unprocessable Entity + '400': + content: + application/json: + schema: + $ref: '#/components/schemas/EnvelopedError' + description: Bad Request + '402': content: application/json: schema: - $ref: '#/components/schemas/Envelope_CheckpointApiModel_' + $ref: '#/components/schemas/EnvelopedError' + description: Payment Required /v0/workspaces: post: tags: @@ -7021,8 +7489,8 @@ paths: get: tags: - maintenance - summary: Front end runtime configuration - description: Returns app and products configs + summary: Get Config + description: Front end runtime configuration operationId: get_config responses: '200': @@ -7048,7 +7516,8 @@ paths: get: tags: - maintenance - summary: checks status of self and connected services + summary: Get App Status + description: checks status of self and connected services operationId: get_app_status responses: '200': @@ -7175,7 +7644,7 @@ components: - stats - limits title: Activity - Annotation: + AnnotationUI: properties: type: type: string @@ -7198,7 +7667,7 @@ components: - type - color - attributes - title: Annotation + title: AnnotationUI Announcement: properties: id: @@ -7432,6 +7901,225 @@ components: - default - items title: BootOption + CatalogLatestServiceGet: + properties: + key: + type: string + pattern: ^simcore/services/((comp|dynamic|frontend))/([a-z0-9][a-z0-9_.-]*/)*([a-z0-9-_]+[a-z0-9])$ + title: Key + version: + type: string + pattern: ^(0|[1-9]\d*)(\.(0|[1-9]\d*)){2}(-(0|[1-9]\d*|\d*[-a-zA-Z][-\da-zA-Z]*)(\.(0|[1-9]\d*|\d*[-a-zA-Z][-\da-zA-Z]*))*)?(\+[-\da-zA-Z]+(\.[-\da-zA-Z-]+)*)?$ + title: Version + name: + type: string + title: Name + thumbnail: + anyOf: + - type: string + - type: 'null' + title: Thumbnail + icon: + anyOf: + - type: string + - type: 'null' + title: Icon + description: + type: string + title: Description + descriptionUi: + type: boolean + title: Descriptionui + default: false + versionDisplay: + anyOf: + - type: string + - type: 'null' + title: Versiondisplay + type: + $ref: '#/components/schemas/ServiceType' + contact: + anyOf: + - type: string + format: email + - type: 'null' + title: Contact + authors: + items: + $ref: '#/components/schemas/Author' + 
type: array + minItems: 1 + title: Authors + owner: + anyOf: + - type: string + format: email + - type: 'null' + title: Owner + description: None when the owner email cannot be found in the database + inputs: + type: object + title: Inputs + outputs: + type: object + title: Outputs + bootOptions: + anyOf: + - type: object + - type: 'null' + title: Bootoptions + minVisibleInputs: + anyOf: + - type: integer + minimum: 0 + - type: 'null' + title: Minvisibleinputs + accessRights: + anyOf: + - additionalProperties: + $ref: '#/components/schemas/ServiceGroupAccessRightsV2' + type: object + - type: 'null' + title: Accessrights + classifiers: + anyOf: + - items: + type: string + type: array + - type: 'null' + title: Classifiers + quality: + type: object + title: Quality + release: + $ref: '#/components/schemas/ServiceRelease' + description: release information of current (latest) service + type: object + required: + - key + - version + - name + - description + - type + - contact + - authors + - owner + - inputs + - outputs + - accessRights + - release + title: CatalogLatestServiceGet + example: + accessRights: + '1': + execute: true + write: false + authors: + - affiliation: ACME + email: author@acme.com + name: Author Bar + classifiers: [] + contact: contact@acme.com + description: A service which awaits for time to pass, two times. + description_ui: true + icon: https://cdn-icons-png.flaticon.com/512/25/25231.png + inputs: + input_1: + description: Pick a file containing only one integer + displayOrder: 1 + fileToKeyMap: + single_number.txt: input_1 + keyId: input_1 + label: File with int number + type: data:text/plain + input_2: + contentSchema: + minimum: 0 + title: Sleep interval + type: integer + x_unit: second + defaultValue: 2 + description: Choose an amount of time to sleep in range [0:] + displayOrder: 2 + keyId: input_2 + label: Sleep interval + type: ref_contentSchema + unitLong: second + unitShort: s + input_3: + defaultValue: false + description: If set to true will cause service to fail after it sleeps + displayOrder: 3 + keyId: input_3 + label: Fail after sleep + type: boolean + input_4: + contentSchema: + title: Distance to bed + type: integer + x_unit: meter + defaultValue: 0 + description: It will first walk the distance to bed + displayOrder: 4 + keyId: input_4 + label: Distance to bed + type: ref_contentSchema + unitLong: meter + unitShort: m + input_5: + contentSchema: + minimum: 0 + title: Dream of the night + type: integer + x_unit: byte + defaultValue: 0 + description: Defines the size of the dream that will be generated [0:] + displayOrder: 5 + keyId: input_5 + label: Dream (or nightmare) of the night + type: ref_contentSchema + unitLong: byte + unitShort: B + key: simcore/services/comp/itis/sleeper + name: sleeper + outputs: + output_1: + description: Integer is generated in range [1-9] + displayOrder: 1 + fileToKeyMap: + single_number.txt: output_1 + keyId: output_1 + label: File containing one random integer + type: data:text/plain + output_2: + contentSchema: + title: Random sleep interval + type: integer + x_unit: second + description: Interval is generated in range [1-9] + displayOrder: 2 + keyId: output_2 + label: Random sleep interval + type: ref_contentSchema + unitLong: second + unitShort: s + output_3: + description: Contains some random data representing a dream + displayOrder: 3 + fileToKeyMap: + dream.txt: output_3 + keyId: output_3 + label: Dream output + type: data:text/plain + owner: owner@acme.com + quality: {} + release: + released: 
'2025-07-20T15:00:00' + version: 2.2.1 + version_display: Summer Release + type: computational + version: 2.2.1 + version_display: 2 Xtreme CatalogServiceGet: properties: key: @@ -7450,6 +8138,11 @@ components: - type: string - type: 'null' title: Thumbnail + icon: + anyOf: + - type: string + - type: 'null' + title: Icon description: type: string title: Description @@ -7516,11 +8209,9 @@ components: type: array - type: 'null' title: Classifiers - default: [] quality: type: object title: Quality - default: {} history: items: $ref: '#/components/schemas/ServiceRelease' @@ -7557,7 +8248,7 @@ components: description: A service which awaits for time to pass, two times. description_ui: true history: - - released: '2024-07-20T15:00:00' + - released: '2024-07-21T15:00:00' version: 2.2.1 version_display: Summer Release - compatibility: @@ -7576,10 +8267,11 @@ components: released: '2024-01-20T18:49:17' version: 0.9.1 versionDisplay: Matterhorn - - retired: '2024-07-20T15:00:00' + - retired: '2024-07-20T16:00:00' version: 0.9.0 - version: 0.8.0 - version: 0.1.0 + icon: https://cdn-icons-png.flaticon.com/512/25/25231.png inputs: input0: contentSchema: @@ -7624,6 +8316,14 @@ components: format: uri - type: 'null' title: Thumbnail + icon: + anyOf: + - type: string + maxLength: 2083 + minLength: 1 + format: uri + - type: 'null' + title: Icon description: anyOf: - type: string @@ -7688,79 +8388,6 @@ components: - new - confirm title: ChangePasswordBody - CheckpointAnnotations: - properties: - tag: - anyOf: - - type: string - - type: 'null' - title: Tag - message: - anyOf: - - type: string - - type: 'null' - title: Message - type: object - title: CheckpointAnnotations - CheckpointApiModel: - properties: - id: - type: integer - exclusiveMinimum: true - title: Id - minimum: 0 - checksum: - type: string - pattern: ^[a-fA-F0-9]{40}$ - title: Checksum - created_at: - type: string - format: date-time - title: Created At - tags: - items: - type: string - type: array - title: Tags - message: - anyOf: - - type: string - - type: 'null' - title: Message - parents_ids: - anyOf: - - items: - type: integer - exclusiveMinimum: true - minimum: 0 - type: array - - type: 'null' - title: Parents Ids - url: - type: string - title: Url - type: object - required: - - id - - checksum - - created_at - - tags - - url - title: CheckpointApiModel - CheckpointNew: - properties: - tag: - type: string - title: Tag - message: - anyOf: - - type: string - - type: 'null' - title: Message - type: object - required: - - tag - title: CheckpointNew CodePageParams: properties: message: @@ -7951,7 +8578,10 @@ components: type: string title: Unitname unitExtraInfo: - $ref: '#/components/schemas/UnitExtraInfo-Input' + anyOf: + - $ref: '#/components/schemas/UnitExtraInfoTier-Input' + - $ref: '#/components/schemas/UnitExtraInfoLicense' + title: Unitextrainfo default: type: boolean title: Default @@ -8014,6 +8644,83 @@ components: required: - priceDollars title: CreateWalletPayment + CreditPriceGet: + properties: + productName: + type: string + title: Productname + usdPerCredit: + anyOf: + - type: number + minimum: 0.0 + - type: 'null' + title: Usdpercredit + description: Price of a credit in USD. 
If None, then this product's price + is UNDEFINED + minPaymentAmountUsd: + anyOf: + - type: integer + minimum: 0 + - type: 'null' + title: Minpaymentamountusd + description: Minimum amount (included) in USD that can be paid for this + productCan be None if this product's price is UNDEFINED + type: object + required: + - productName + - usdPerCredit + - minPaymentAmountUsd + title: CreditPriceGet + CreditTransactionStatus: + type: string + enum: + - PENDING + - BILLED + - IN_DEBT + - NOT_BILLED + - REQUIRES_MANUAL_REVIEW + title: CreditTransactionStatus + CursorPage___T_Customized_PathMetaDataGet_: + properties: + items: + items: + $ref: '#/components/schemas/PathMetaDataGet' + type: array + title: Items + total: + anyOf: + - type: integer + - type: 'null' + title: Total + description: Total items + current_page: + anyOf: + - type: string + - type: 'null' + title: Current Page + description: Cursor to refetch the current page + current_page_backwards: + anyOf: + - type: string + - type: 'null' + title: Current Page Backwards + description: Cursor to refetch the current page starting from the last item + previous_page: + anyOf: + - type: string + - type: 'null' + title: Previous Page + description: Cursor for the previous page + next_page: + anyOf: + - type: string + - type: 'null' + title: Next Page + description: Cursor for the next page + type: object + required: + - items + title: CursorPage[~_T]Customized[PathMetaDataGet] DatCoreFileLink: properties: store: @@ -8053,6 +8760,21 @@ components: - dataset title: DatCoreFileLink description: I/O port type to hold a link to a file in DATCORE storage + DataExportPost: + properties: + paths: + items: + anyOf: + - type: string + pattern: ^(api|([0-9a-fA-F]{8}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{12}))\/([0-9a-fA-F]{8}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{12})\/(.+)$ + - type: string + pattern: ^N:package:[0-9a-fA-F]{8}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{12}$ + type: array + title: Paths + type: object + required: + - paths + title: DataExportPost DatasetMetaData: properties: dataset_id: @@ -8193,11 +8915,11 @@ components: title: Error type: object title: Envelope[CatalogServiceGet] - Envelope_CheckpointApiModel_: + Envelope_ComputationGet_: properties: data: anyOf: - - $ref: '#/components/schemas/CheckpointApiModel' + - $ref: '#/components/schemas/ComputationGet' - type: 'null' error: anyOf: @@ -8205,12 +8927,12 @@ components: - type: 'null' title: Error type: object - title: Envelope[CheckpointApiModel] - Envelope_ComputationGet_: + title: Envelope[ComputationGet] + Envelope_CreditPriceGet_: properties: data: anyOf: - - $ref: '#/components/schemas/ComputationGet' + - $ref: '#/components/schemas/CreditPriceGet' - type: 'null' error: anyOf: @@ -8218,7 +8940,7 @@ components: - type: 'null' title: Error type: object - title: Envelope[ComputationGet] + title: Envelope[CreditPriceGet] Envelope_FileMetaDataGet_: properties: data: @@ -8297,32 +9019,6 @@ components: title: Error type: object title: Envelope[FolderGet] - Envelope_GetCreditPrice_: - properties: - data: - anyOf: - - $ref: '#/components/schemas/GetCreditPrice' - - type: 'null' - error: - anyOf: - - {} - - type: 'null' - title: Error - type: object - title: Envelope[GetCreditPrice] - Envelope_GetProduct_: - properties: - data: - anyOf: - - $ref: '#/components/schemas/GetProduct' - - type: 'null' - error: - anyOf: - - {} - - type: 'null' - title: Error - type: object - title: Envelope[GetProduct] 
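`DataExportPost.paths` admits exactly two shapes, both given as patterns in the schema above: a simcore location path (`api` or a project UUID, then a node UUID, then a relative path) or a DATCORE package identifier (`N:package:<uuid>`). Clients can validate before posting; the patterns below are copied verbatim from the schema, while the helper around them is illustrative:

    import re

    SIMCORE_PATH = re.compile(
        r"^(api|([0-9a-fA-F]{8}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{4}-?"
        r"[0-9a-fA-F]{12}))\/([0-9a-fA-F]{8}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{4}-?"
        r"[0-9a-fA-F]{4}-?[0-9a-fA-F]{12})\/(.+)$"
    )
    DATCORE_PACKAGE = re.compile(
        r"^N:package:[0-9a-fA-F]{8}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{4}-?"
        r"[0-9a-fA-F]{4}-?[0-9a-fA-F]{12}$"
    )

    def is_exportable_path(path: str) -> bool:
        """True if 'path' matches one of the two patterns accepted by DataExportPost."""
        return bool(SIMCORE_PATH.match(path) or DATCORE_PACKAGE.match(path))

    assert is_exportable_path("N:package:12345678-1234-1234-1234-123456789abc")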
Envelope_GetProjectInactivityResponse_: properties: data: @@ -8427,19 +9123,6 @@ components: title: Error type: object title: Envelope[LicensedItemPurchaseGet] - Envelope_LicensedItemRestGet_: - properties: - data: - anyOf: - - $ref: '#/components/schemas/LicensedItemRestGet' - - type: 'null' - error: - anyOf: - - {} - - type: 'null' - title: Error - type: object - title: Envelope[LicensedItemRestGet] Envelope_Log_: properties: data: @@ -8635,6 +9318,32 @@ components: title: Error type: object title: Envelope[PricingUnitGet] + Envelope_ProductGet_: + properties: + data: + anyOf: + - $ref: '#/components/schemas/ProductGet' + - type: 'null' + error: + anyOf: + - {} + - type: 'null' + title: Error + type: object + title: Envelope[ProductGet] + Envelope_ProductUIGet_: + properties: + data: + anyOf: + - $ref: '#/components/schemas/ProductUIGet' + - type: 'null' + error: + anyOf: + - {} + - type: 'null' + title: Error + type: object + title: Envelope[ProductUIGet] Envelope_ProjectGet_: properties: data: @@ -8674,6 +9383,19 @@ components: title: Error type: object title: Envelope[ProjectMetadataGet] + Envelope_ProjectNodeServicesGet_: + properties: + data: + anyOf: + - $ref: '#/components/schemas/ProjectNodeServicesGet' + - type: 'null' + error: + anyOf: + - {} + - type: 'null' + title: Error + type: object + title: Envelope[ProjectNodeServicesGet] Envelope_ProjectState_: properties: data: @@ -8739,24 +9461,11 @@ components: title: Error type: object title: Envelope[ServiceInputGet] - Envelope_ServicePricingPlanGet_: - properties: - data: - anyOf: - - $ref: '#/components/schemas/ServicePricingPlanGet' - - type: 'null' - error: - anyOf: - - {} - - type: 'null' - title: Error - type: object - title: Envelope[ServicePricingPlanGet] - Envelope_StatusDiagnosticsGet_: + Envelope_ServicePricingPlanGet_: properties: data: anyOf: - - $ref: '#/components/schemas/StatusDiagnosticsGet' + - $ref: '#/components/schemas/ServicePricingPlanGet' - type: 'null' error: anyOf: @@ -8764,12 +9473,12 @@ components: - type: 'null' title: Error type: object - title: Envelope[StatusDiagnosticsGet] - Envelope_TableSynchronisation_: + title: Envelope[ServicePricingPlanGet] + Envelope_StatusDiagnosticsGet_: properties: data: anyOf: - - $ref: '#/components/schemas/TableSynchronisation' + - $ref: '#/components/schemas/StatusDiagnosticsGet' - type: 'null' error: anyOf: @@ -8777,7 +9486,7 @@ components: - type: 'null' title: Error type: object - title: Envelope[TableSynchronisation] + title: Envelope[StatusDiagnosticsGet] Envelope_TagGet_: properties: data: @@ -8941,19 +9650,6 @@ components: title: Error type: object title: Envelope[WalletPaymentInitiated] - Envelope_WorkbenchViewApiModel_: - properties: - data: - anyOf: - - $ref: '#/components/schemas/WorkbenchViewApiModel' - - type: 'null' - error: - anyOf: - - {} - - type: 'null' - title: Error - type: object - title: Envelope[WorkbenchViewApiModel] Envelope_WorkspaceGet_: properties: data: @@ -9645,8 +10341,6 @@ components: status: type: integer title: Status - default: 400 - deprecated: true errors: items: $ref: '#/components/schemas/ErrorItemType' @@ -9664,6 +10358,7 @@ components: type: object required: - message + - status title: ErrorGet ErrorItemType: properties: @@ -9690,47 +10385,73 @@ components: - resource - field title: ErrorItemType - ExtractedResults: + ExecutableAccessRights: properties: - progress: - type: object - title: Progress - description: Progress in each computational node - labels: - type: object - title: Labels - description: Maps captured node with a 
label - values: - type: object - title: Values - description: Captured outputs per node + write: + type: boolean + title: Write + description: can change executable settings + execute: + type: boolean + title: Execute + description: can run executable + additionalProperties: false type: object required: - - progress - - labels - - values - title: ExtractedResults - example: - labels: - 0f1e38c9-dcb7-443c-a745-91b97ac28ccc: Integer iterator - 2d0ce8b9-c9c3-43ce-ad2f-ad493898de37: Probe Sensor - Integer - 445b44d1-59b3-425c-ac48-7c13e0f2ea5b: Probe Sensor - Integer_2 - d76fca06-f050-4790-88a8-0aac10c87b39: Boolean Parameter - progress: - 4c08265a-427b-4ac3-9eab-1d11c822ada4: 0 - e33c6880-1b1d-4419-82d7-270197738aa9: 100 - values: - 0f1e38c9-dcb7-443c-a745-91b97ac28ccc: - out_1: 1 - out_2: - - 3 - - 4 - 2d0ce8b9-c9c3-43ce-ad2f-ad493898de37: - in_1: 7 - 445b44d1-59b3-425c-ac48-7c13e0f2ea5b: - in_1: 1 - d76fca06-f050-4790-88a8-0aac10c87b39: - out_1: true + - write + - execute + title: ExecutableAccessRights + FeaturesDict: + properties: + age: + type: string + title: Age + date: + type: string + format: date + title: Date + ethnicity: + type: string + title: Ethnicity + functionality: + type: string + title: Functionality + height: + type: string + title: Height + name: + type: string + title: Name + sex: + type: string + title: Sex + species: + type: string + title: Species + version: + type: string + title: Version + weight: + type: string + title: Weight + type: object + required: + - date + title: FeaturesDict + FileLocation: + properties: + name: + type: string + title: Name + id: + type: integer + title: Id + additionalProperties: false + type: object + required: + - name + - id + title: FileLocation FileMetaData: properties: file_uuid: @@ -9963,6 +10684,7 @@ components: title: Urls links: $ref: '#/components/schemas/FileUploadLinks' + additionalProperties: false type: object required: - chunk_size @@ -10079,151 +10801,6 @@ components: required: - name title: FolderReplaceBodyParams - GenerateInvitation: - properties: - guest: - type: string - format: email - title: Guest - trialAccountDays: - anyOf: - - type: integer - exclusiveMinimum: true - minimum: 0 - - type: 'null' - title: Trialaccountdays - extraCreditsInUsd: - anyOf: - - type: integer - exclusiveMaximum: true - minimum: 0 - maximum: 500 - - type: 'null' - title: Extracreditsinusd - type: object - required: - - guest - title: GenerateInvitation - GetCreditPrice: - properties: - productName: - type: string - title: Productname - usdPerCredit: - anyOf: - - type: number - minimum: 0.0 - - type: 'null' - title: Usdpercredit - description: Price of a credit in USD. 
If None, then this product's price - is UNDEFINED - minPaymentAmountUsd: - anyOf: - - type: integer - minimum: 0 - - type: 'null' - title: Minpaymentamountusd - description: Minimum amount (included) in USD that can be paid for this - productCan be None if this product's price is UNDEFINED - type: object - required: - - productName - - usdPerCredit - - minPaymentAmountUsd - title: GetCreditPrice - GetProduct: - properties: - name: - type: string - title: Name - displayName: - type: string - title: Displayname - shortName: - anyOf: - - type: string - - type: 'null' - title: Shortname - description: Short display name for SMS - vendor: - anyOf: - - type: object - - type: 'null' - title: Vendor - description: vendor attributes - issues: - anyOf: - - items: - type: object - type: array - - type: 'null' - title: Issues - description: Reference to issues tracker - manuals: - anyOf: - - items: - type: object - type: array - - type: 'null' - title: Manuals - description: List of manuals - support: - anyOf: - - items: - type: object - type: array - - type: 'null' - title: Support - description: List of support resources - loginSettings: - type: object - title: Loginsettings - maxOpenStudiesPerUser: - anyOf: - - type: integer - exclusiveMinimum: true - minimum: 0 - - type: 'null' - title: Maxopenstudiesperuser - isPaymentEnabled: - type: boolean - title: Ispaymentenabled - creditsPerUsd: - anyOf: - - type: string - - type: 'null' - title: Creditsperusd - templates: - items: - $ref: '#/components/schemas/GetProductTemplate' - type: array - title: Templates - description: List of templates available to this product for communications - (e.g. emails, sms, etc) - type: object - required: - - name - - displayName - - loginSettings - - maxOpenStudiesPerUser - - isPaymentEnabled - - creditsPerUsd - title: GetProduct - GetProductTemplate: - properties: - id: - type: string - maxLength: 100 - minLength: 1 - title: Id - content: - type: string - title: Content - type: object - required: - - id - - content - title: GetProductTemplate GetProjectInactivityResponse: properties: is_inactive: @@ -10408,10 +10985,13 @@ components: title: Id description: the user's id userName: - type: string - maxLength: 100 - minLength: 1 + anyOf: + - type: string + maxLength: 100 + minLength: 1 + - type: 'null' title: Username + description: None if private gid: anyOf: - type: integer @@ -10426,19 +11006,19 @@ components: format: email - type: 'null' title: Login - description: the user's email, if privacy settings allows + description: the user's email or None if private first_name: anyOf: - type: string - type: 'null' title: First Name - description: If privacy settings allows + description: None if private last_name: anyOf: - type: string - type: 'null' title: Last Name - description: If privacy settings allows + description: None if private gravatar_id: anyOf: - type: string @@ -10453,8 +11033,6 @@ components: description: If group is standard, these are these are the access rights of the user to it.None if primary group. 
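A note on the GroupUserGet schema here: identity fields hidden by the user's privacy settings are now returned as None, and userName is no longer a required key. A minimal Pydantic sketch of this "None if private" convention (the model name is illustrative; field names and aliases come from the schema):

from pydantic import BaseModel, EmailStr, Field

class GroupUserSketch(BaseModel):
    user_name: str | None = Field(default=None, alias="userName", description="None if private")
    login: EmailStr | None = Field(default=None, description="the user's email or None if private")
    first_name: str | None = Field(default=None, description="None if private")
    last_name: str | None = Field(default=None, description="None if private")

# GroupUserSketch.model_validate({"userName": None, "login": None}) validates:
# hidden fields arrive as explicit nulls rather than being dropped.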
type: object - required: - - userName title: GroupUserGet example: accessRights: @@ -10566,6 +11144,31 @@ components: required: - invitation title: InvitationCheck + InvitationGenerate: + properties: + guest: + type: string + format: email + title: Guest + trialAccountDays: + anyOf: + - type: integer + exclusiveMinimum: true + minimum: 0 + - type: 'null' + title: Trialaccountdays + extraCreditsInUsd: + anyOf: + - type: integer + exclusiveMaximum: true + minimum: 0 + maximum: 500 + - type: 'null' + title: Extracreditsinusd + type: object + required: + - guest + title: InvitationGenerate InvitationGenerated: properties: productName: @@ -10632,6 +11235,13 @@ components: type: string format: uuid title: Licenseditemid + key: + type: string + title: Key + version: + type: string + pattern: ^\d+\.\d+\.\d+$ + title: Version walletId: type: integer exclusiveMinimum: true @@ -10678,6 +11288,8 @@ components: - licensedItemPurchaseId - productName - licensedItemId + - key + - version - walletId - pricingUnitCostId - pricingUnitCost @@ -10695,19 +11307,46 @@ components: type: string format: uuid title: Licenseditemid + key: + type: string + title: Key + version: + type: string + pattern: ^\d+\.\d+\.\d+$ + title: Version displayName: type: string title: Displayname licensedResourceType: $ref: '#/components/schemas/LicensedResourceType' - licensedResourceData: - type: object - title: Licensedresourcedata + licensedResources: + items: + $ref: '#/components/schemas/_ItisVipResourceRestData' + type: array + title: Licensedresources pricingPlanId: type: integer exclusiveMinimum: true title: Pricingplanid minimum: 0 + categoryId: + type: string + maxLength: 100 + minLength: 1 + title: Categoryid + categoryDisplay: + type: string + title: Categorydisplay + categoryIcon: + anyOf: + - type: string + - type: 'null' + title: Categoryicon + termsOfUseUrl: + anyOf: + - type: string + - type: 'null' + title: Termsofuseurl createdAt: type: string format: date-time @@ -10719,10 +11358,14 @@ components: type: object required: - licensedItemId + - key + - version - displayName - licensedResourceType - - licensedResourceData + - licensedResources - pricingPlanId + - categoryId + - categoryDisplay - createdAt - modifiedAt title: LicensedItemRestGet @@ -10891,7 +11534,7 @@ components: additionalProperties: false type: object title: LogoutBody - Marker: + MarkerUI: properties: color: type: string @@ -10901,7 +11544,7 @@ components: type: object required: - color - title: Marker + title: MarkerUI MyGroupsGet: properties: me: @@ -11061,7 +11704,7 @@ components: anyOf: - type: string maxLength: 100 - minLength: 1 + minLength: 4 - type: 'null' title: Username privacy: @@ -11075,6 +11718,9 @@ components: last_name: Crespo MyProfilePrivacyGet: properties: + hideUsername: + type: boolean + title: Hideusername hideFullname: type: boolean title: Hidefullname @@ -11083,11 +11729,17 @@ components: title: Hideemail type: object required: + - hideUsername - hideFullname - hideEmail title: MyProfilePrivacyGet MyProfilePrivacyPatch: properties: + hideUsername: + anyOf: + - type: boolean + - type: 'null' + title: Hideusername hideFullname: anyOf: - type: boolean @@ -11670,6 +12322,32 @@ components: - thumbnail_url - file_url title: NodeScreenshot + NodeServiceGet: + properties: + key: + type: string + pattern: ^simcore/services/((comp|dynamic|frontend))/([a-z0-9][a-z0-9_.-]*/)*([a-z0-9-_]+[a-z0-9])$ + title: Key + release: + $ref: '#/components/schemas/ServiceRelease' + owner: + anyOf: + - type: integer + exclusiveMinimum: true + minimum: 
0 + - type: 'null' + title: Owner + description: Service owner primary group id or None if ownership still not + defined + myAccessRights: + $ref: '#/components/schemas/ExecutableAccessRights' + type: object + required: + - key + - release + - owner + - myAccessRights + title: NodeServiceGet NodeState: properties: modified: @@ -11816,25 +12494,7 @@ components: - total - count title: PageMetaInfoLimitOffset - Page_CatalogServiceGet_: - properties: - _meta: - $ref: '#/components/schemas/PageMetaInfoLimitOffset' - _links: - $ref: '#/components/schemas/PageLinks' - data: - items: - $ref: '#/components/schemas/CatalogServiceGet' - type: array - title: Data - additionalProperties: false - type: object - required: - - _meta - - _links - - data - title: Page[CatalogServiceGet] - Page_CheckpointApiModel_: + Page_CatalogLatestServiceGet_: properties: _meta: $ref: '#/components/schemas/PageMetaInfoLimitOffset' @@ -11842,7 +12502,7 @@ components: $ref: '#/components/schemas/PageLinks' data: items: - $ref: '#/components/schemas/CheckpointApiModel' + $ref: '#/components/schemas/CatalogLatestServiceGet' type: array title: Data additionalProperties: false @@ -11851,7 +12511,7 @@ components: - _meta - _links - data - title: Page[CheckpointApiModel] + title: Page[CatalogLatestServiceGet] Page_LicensedItemPurchaseGet_: properties: _meta: @@ -11960,42 +12620,6 @@ components: - _links - data title: Page[PricingPlanGet] - Page_ProjectIterationItem_: - properties: - _meta: - $ref: '#/components/schemas/PageMetaInfoLimitOffset' - _links: - $ref: '#/components/schemas/PageLinks' - data: - items: - $ref: '#/components/schemas/ProjectIterationItem' - type: array - title: Data - additionalProperties: false - type: object - required: - - _meta - - _links - - data - title: Page[ProjectIterationItem] - Page_ProjectIterationResultItem_: - properties: - _meta: - $ref: '#/components/schemas/PageMetaInfoLimitOffset' - _links: - $ref: '#/components/schemas/PageLinks' - data: - items: - $ref: '#/components/schemas/ProjectIterationResultItem' - type: array - title: Data - additionalProperties: false - type: object - required: - - _meta - - _links - - data - title: Page[ProjectIterationResultItem] Page_ProjectListItem_: properties: _meta: @@ -12014,24 +12638,6 @@ components: - _links - data title: Page[ProjectListItem] - Page_RepoApiModel_: - properties: - _meta: - $ref: '#/components/schemas/PageMetaInfoLimitOffset' - _links: - $ref: '#/components/schemas/PageLinks' - data: - items: - $ref: '#/components/schemas/RepoApiModel' - type: array - title: Data - additionalProperties: false - type: object - required: - - _meta - - _links - - data - title: Page[RepoApiModel] Page_ServiceRunGet_: properties: _meta: @@ -12050,22 +12656,6 @@ components: - _links - data title: Page[ServiceRunGet] - ParentMetaProjectRef: - properties: - project_id: - type: string - format: uuid - title: Project Id - ref_id: - type: integer - exclusiveMinimum: true - title: Ref Id - minimum: 0 - type: object - required: - - project_id - - ref_id - title: ParentMetaProjectRef PatchRequestBody: properties: value: @@ -12074,6 +12664,30 @@ components: required: - value title: PatchRequestBody + PathMetaDataGet: + properties: + path: + type: string + format: path + title: Path + description: the path to the current path + display_path: + type: string + format: path + title: Display Path + description: the path to display with UUID replaced (URL Encoded by parts + as names may contain '/') + file_meta_data: + anyOf: + - $ref: '#/components/schemas/FileMetaDataGet' 
+ - type: 'null' + description: if filled, this is the file meta data of the s3 object + additionalProperties: false + type: object + required: + - path + - display_path + title: PathMetaDataGet PaymentMethodGet: properties: idr: @@ -12510,7 +13124,10 @@ components: type: string title: Unitname unitExtraInfo: - $ref: '#/components/schemas/UnitExtraInfo-Output' + anyOf: + - $ref: '#/components/schemas/UnitExtraInfoTier-Output' + - $ref: '#/components/schemas/UnitExtraInfoLicense' + title: Unitextrainfo currentCostPerUnit: type: string title: Currentcostperunit @@ -12554,7 +13171,10 @@ components: type: string title: Unitname unitExtraInfo: - $ref: '#/components/schemas/UnitExtraInfo-Output' + anyOf: + - $ref: '#/components/schemas/UnitExtraInfoTier-Output' + - $ref: '#/components/schemas/UnitExtraInfoLicense' + title: Unitextrainfo currentCostPerUnit: type: string title: Currentcostperunit @@ -12569,6 +13189,113 @@ components: - currentCostPerUnit - default title: PricingUnitGet + ProductGet: + properties: + name: + type: string + title: Name + displayName: + type: string + title: Displayname + shortName: + anyOf: + - type: string + - type: 'null' + title: Shortname + description: Short display name for SMS + vendor: + anyOf: + - type: object + - type: 'null' + title: Vendor + description: vendor attributes + issues: + anyOf: + - items: + type: object + type: array + - type: 'null' + title: Issues + description: Reference to issues tracker + manuals: + anyOf: + - items: + type: object + type: array + - type: 'null' + title: Manuals + description: List of manuals + support: + anyOf: + - items: + type: object + type: array + - type: 'null' + title: Support + description: List of support resources + loginSettings: + type: object + title: Loginsettings + maxOpenStudiesPerUser: + anyOf: + - type: integer + exclusiveMinimum: true + minimum: 0 + - type: 'null' + title: Maxopenstudiesperuser + isPaymentEnabled: + type: boolean + title: Ispaymentenabled + creditsPerUsd: + anyOf: + - type: string + - type: 'null' + title: Creditsperusd + templates: + items: + $ref: '#/components/schemas/ProductTemplateGet' + type: array + title: Templates + description: List of templates available to this product for communications + (e.g. 
emails, sms, etc) + type: object + required: + - name + - displayName + - loginSettings + - maxOpenStudiesPerUser + - isPaymentEnabled + - creditsPerUsd + title: ProductGet + ProductTemplateGet: + properties: + id: + type: string + maxLength: 100 + minLength: 1 + title: Id + content: + type: string + title: Content + type: object + required: + - id + - content + title: ProductTemplateGet + ProductUIGet: + properties: + productName: + type: string + title: Productname + ui: + type: object + title: Ui + description: Front-end owned ui product configuration + type: object + required: + - productName + - ui + title: ProductUIGet ProjectCopyOverride: properties: name: @@ -12683,14 +13410,6 @@ components: - type: string const: '' title: Thumbnail - creationDate: - type: string - pattern: \d{4}-(12|11|10|0?[1-9])-(31|30|[0-2]?\d)T(2[0-3]|1\d|0?[0-9])(:(\d|[0-5]\d)){2}(\.\d{3})?Z - title: Creationdate - lastChangeDate: - type: string - pattern: \d{4}-(12|11|10|0?[1-9])-(31|30|[0-2]?\d)T(2[0-3]|1\d|0?[0-9])(:(\d|[0-5]\d)){2}(\.\d{3})?Z - title: Lastchangedate workbench: type: object title: Workbench @@ -12706,6 +13425,32 @@ components: minLength: 1 type: object title: Accessrights + creationDate: + type: string + pattern: \d{4}-(12|11|10|0?[1-9])-(31|30|[0-2]?\d)T(2[0-3]|1\d|0?[0-9])(:(\d|[0-5]\d)){2}(\.\d{3})?Z + title: Creationdate + lastChangeDate: + type: string + pattern: \d{4}-(12|11|10|0?[1-9])-(31|30|[0-2]?\d)T(2[0-3]|1\d|0?[0-9])(:(\d|[0-5]\d)){2}(\.\d{3})?Z + title: Lastchangedate + state: + anyOf: + - $ref: '#/components/schemas/ProjectState' + - type: 'null' + trashedAt: + anyOf: + - type: string + format: date-time + - type: 'null' + title: Trashedat + trashedBy: + anyOf: + - type: integer + exclusiveMinimum: true + minimum: 0 + - type: 'null' + title: Trashedby + description: The primary gid of the user who trashed tags: items: type: integer @@ -12716,21 +13461,17 @@ components: type: string type: array title: Classifiers - default: [] - state: - anyOf: - - $ref: '#/components/schemas/ProjectState' - - type: 'null' + default: [] + quality: + type: object + title: Quality + default: {} ui: anyOf: - $ref: '#/components/schemas/EmptyModel' - $ref: '#/components/schemas/StudyUI-Output' - type: 'null' title: Ui - quality: - type: object - title: Quality - default: {} dev: anyOf: - type: object @@ -12754,37 +13495,23 @@ components: minimum: 0 - type: 'null' title: Folderid - trashedAt: - anyOf: - - type: string - format: date-time - - type: 'null' - title: Trashedat - trashedBy: - anyOf: - - type: integer - exclusiveMinimum: true - minimum: 0 - - type: 'null' - title: Trashedby - description: The primary gid of the user who trashed type: object required: - uuid - name - description - thumbnail - - creationDate - - lastChangeDate - workbench - prjOwner - accessRights + - creationDate + - lastChangeDate + - trashedAt + - trashedBy - tags - dev - workspaceId - folderId - - trashedAt - - trashedBy title: ProjectGet ProjectGroupGet: properties: @@ -12855,73 +13582,6 @@ components: - key - value title: ProjectInputUpdate - ProjectIterationItem: - properties: - name: - type: string - title: Name - description: Iteration's resource API name - parent: - $ref: '#/components/schemas/ParentMetaProjectRef' - description: Reference to the the meta-project that created this iteration - iteration_index: - type: integer - exclusiveMinimum: true - title: Iteration Index - minimum: 0 - workcopy_project_id: - type: string - format: uuid - title: Workcopy Project Id - description: ID to this iteration's working 
copy.A working copy is a real - project where this iteration is run - workcopy_project_url: - type: string - title: Workcopy Project Url - description: reference to a working copy project - type: object - required: - - name - - parent - - iteration_index - - workcopy_project_id - - workcopy_project_url - title: ProjectIterationItem - ProjectIterationResultItem: - properties: - name: - type: string - title: Name - description: Iteration's resource API name - parent: - $ref: '#/components/schemas/ParentMetaProjectRef' - description: Reference to the the meta-project that created this iteration - iteration_index: - type: integer - exclusiveMinimum: true - title: Iteration Index - minimum: 0 - workcopy_project_id: - type: string - format: uuid - title: Workcopy Project Id - description: ID to this iteration's working copy.A working copy is a real - project where this iteration is run - workcopy_project_url: - type: string - title: Workcopy Project Url - description: reference to a working copy project - results: - $ref: '#/components/schemas/ExtractedResults' - type: object - required: - - name - - parent - - iteration_index - - workcopy_project_id - - workcopy_project_url - - results - title: ProjectIterationResultItem ProjectListItem: properties: uuid: @@ -12940,14 +13600,6 @@ components: - type: string const: '' title: Thumbnail - creationDate: - type: string - pattern: \d{4}-(12|11|10|0?[1-9])-(31|30|[0-2]?\d)T(2[0-3]|1\d|0?[0-9])(:(\d|[0-5]\d)){2}(\.\d{3})?Z - title: Creationdate - lastChangeDate: - type: string - pattern: \d{4}-(12|11|10|0?[1-9])-(31|30|[0-2]?\d)T(2[0-3]|1\d|0?[0-9])(:(\d|[0-5]\d)){2}(\.\d{3})?Z - title: Lastchangedate workbench: type: object title: Workbench @@ -12963,6 +13615,32 @@ components: minLength: 1 type: object title: Accessrights + creationDate: + type: string + pattern: \d{4}-(12|11|10|0?[1-9])-(31|30|[0-2]?\d)T(2[0-3]|1\d|0?[0-9])(:(\d|[0-5]\d)){2}(\.\d{3})?Z + title: Creationdate + lastChangeDate: + type: string + pattern: \d{4}-(12|11|10|0?[1-9])-(31|30|[0-2]?\d)T(2[0-3]|1\d|0?[0-9])(:(\d|[0-5]\d)){2}(\.\d{3})?Z + title: Lastchangedate + state: + anyOf: + - $ref: '#/components/schemas/ProjectState' + - type: 'null' + trashedAt: + anyOf: + - type: string + format: date-time + - type: 'null' + title: Trashedat + trashedBy: + anyOf: + - type: integer + exclusiveMinimum: true + minimum: 0 + - type: 'null' + title: Trashedby + description: The primary gid of the user who trashed tags: items: type: integer @@ -12974,20 +13652,16 @@ components: type: array title: Classifiers default: [] - state: - anyOf: - - $ref: '#/components/schemas/ProjectState' - - type: 'null' + quality: + type: object + title: Quality + default: {} ui: anyOf: - $ref: '#/components/schemas/EmptyModel' - $ref: '#/components/schemas/StudyUI-Output' - type: 'null' title: Ui - quality: - type: object - title: Quality - default: {} dev: anyOf: - type: object @@ -13011,37 +13685,23 @@ components: minimum: 0 - type: 'null' title: Folderid - trashedAt: - anyOf: - - type: string - format: date-time - - type: 'null' - title: Trashedat - trashedBy: - anyOf: - - type: integer - exclusiveMinimum: true - minimum: 0 - - type: 'null' - title: Trashedby - description: The primary gid of the user who trashed type: object required: - uuid - name - description - thumbnail - - creationDate - - lastChangeDate - workbench - prjOwner - accessRights + - creationDate + - lastChangeDate + - trashedAt + - trashedBy - tags - dev - workspaceId - folderId - - trashedAt - - trashedBy title: ProjectListItem ProjectLocked: 
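In both ProjectGet and ProjectListItem, trashedAt and trashedBy move into the required list while staying nullable: the key must always be present in the payload, but its value may be null. A minimal Pydantic sketch of that required-but-nullable convention (model name illustrative; fields from the schema):

from datetime import datetime
from pydantic import BaseModel, ConfigDict, Field

class ProjectTrashSketch(BaseModel):
    model_config = ConfigDict(populate_by_name=True)

    # no default: the field is required, yet None is an accepted value
    trashed_at: datetime | None = Field(alias="trashedAt")
    trashed_by: int | None = Field(
        alias="trashedBy", description="primary gid of the user who trashed"
    )

# ProjectTrashSketch(trashed_at=None, trashed_by=None) validates;
# omitting either field raises a ValidationError.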
properties: @@ -13123,6 +13783,22 @@ components: required: - custom title: ProjectMetadataUpdate + ProjectNodeServicesGet: + properties: + projectUuid: + type: string + format: uuid + title: Projectuuid + services: + items: + $ref: '#/components/schemas/NodeServiceGet' + type: array + title: Services + type: object + required: + - projectUuid + - services + title: ProjectNodeServicesGet ProjectOutputGet: properties: key: @@ -13426,20 +14102,6 @@ components: - topUpAmountInUsd - monthlyLimitInUsd title: ReplaceWalletAutoRecharge - RepoApiModel: - properties: - project_uuid: - type: string - format: uuid - title: Project Uuid - url: - type: string - title: Url - type: object - required: - - project_uuid - - url - title: RepoApiModel ResearchResource: properties: rrid: @@ -13483,6 +14145,7 @@ components: properties: email: type: string + format: email title: Email additionalProperties: false type: object @@ -13989,9 +14652,9 @@ components: format: date-time - type: 'null' title: Retired - description: 'whether this service is planned to be retired. If None, the + description: whether this service is planned to be retired. If None, the service is still active. If now list[ApiKey]: - async with transaction_context(get_asyncpg_engine(app), connection) as conn: + async with pass_or_acquire_connection(get_asyncpg_engine(app), connection) as conn: stmt = sa.select(api_keys.c.id, api_keys.c.display_name).where( (api_keys.c.user_id == user_id) & (api_keys.c.product_name == product_name) ) @@ -136,7 +139,7 @@ async def get_api_key( user_id: UserID, product_name: ProductName, ) -> ApiKey | None: - async with transaction_context(get_asyncpg_engine(app), connection) as conn: + async with pass_or_acquire_connection(get_asyncpg_engine(app), connection) as conn: stmt = sa.select(api_keys).where( ( api_keys.c.id == int(api_key_id) @@ -145,8 +148,8 @@ async def get_api_key( & (api_keys.c.product_name == product_name) ) - result = await conn.stream(stmt) - row = await result.first() + result = await conn.execute(stmt) + row = result.one_or_none() return ( ApiKey( @@ -180,41 +183,7 @@ async def delete_api_key( await conn.execute(stmt) -async def delete_by_name( - app: web.Application, - connection: AsyncConnection | None = None, - *, - display_name: str, - user_id: UserID, - product_name: ProductName, -) -> None: - async with transaction_context(get_asyncpg_engine(app), connection) as conn: - stmt = api_keys.delete().where( - (api_keys.c.user_id == user_id) - & (api_keys.c.display_name == display_name) - & (api_keys.c.product_name == product_name) - ) - await conn.execute(stmt) - - -async def delete_by_key( - app: web.Application, - connection: AsyncConnection | None = None, - *, - api_key: str, - user_id: UserID, - product_name: ProductName, -) -> None: - async with transaction_context(get_asyncpg_engine(app), connection) as conn: - stmt = api_keys.delete().where( - (api_keys.c.user_id == user_id) - & (api_keys.c.api_key == api_key) - & (api_keys.c.product_name == product_name) - ) - await conn.execute(stmt) - - -async def prune_expired( +async def delete_expired_api_keys( app: web.Application, connection: AsyncConnection | None = None ) -> list[str]: async with transaction_context(get_asyncpg_engine(app), connection) as conn: @@ -226,6 +195,6 @@ async def prune_expired( ) .returning(api_keys.c.display_name) ) - result = await conn.stream(stmt) - rows = [row async for row in result] + result = await conn.execute(stmt) + rows = result.fetchall() return [r.display_name for r in rows] diff --git 
a/services/web/server/src/simcore_service_webserver/api_keys/_service.py b/services/web/server/src/simcore_service_webserver/api_keys/_service.py index 4d7cdcb43dc..d5648e43060 100644 --- a/services/web/server/src/simcore_service_webserver/api_keys/_service.py +++ b/services/web/server/src/simcore_service_webserver/api_keys/_service.py @@ -9,8 +9,8 @@ from servicelib.utils_secrets import generate_token_secret_key from . import _repository -from ._models import ApiKey from .errors import ApiKeyNotFoundError +from .models import ApiKey _PUNCTUATION_REGEX = re.compile( pattern="[" + re.escape(string.punctuation.replace("_", "")) + "]" @@ -32,8 +32,8 @@ async def create_api_key( *, user_id: UserID, product_name: ProductName, - display_name=str, - expiration=dt.timedelta, + display_name: str, + expiration: dt.timedelta | None, ) -> ApiKey: api_key, api_secret = _generate_api_key_and_secret(display_name) @@ -119,5 +119,5 @@ async def delete_api_key( async def prune_expired_api_keys(app: web.Application) -> list[str]: - names: list[str] = await _repository.prune_expired(app) + names: list[str] = await _repository.delete_expired_api_keys(app) return names diff --git a/services/web/server/src/simcore_service_webserver/api_keys/api.py b/services/web/server/src/simcore_service_webserver/api_keys/api_keys_service.py similarity index 100% rename from services/web/server/src/simcore_service_webserver/api_keys/api.py rename to services/web/server/src/simcore_service_webserver/api_keys/api_keys_service.py diff --git a/services/web/server/src/simcore_service_webserver/api_keys/_models.py b/services/web/server/src/simcore_service_webserver/api_keys/models.py similarity index 100% rename from services/web/server/src/simcore_service_webserver/api_keys/_models.py rename to services/web/server/src/simcore_service_webserver/api_keys/models.py diff --git a/services/web/server/src/simcore_service_webserver/api_keys/plugin.py b/services/web/server/src/simcore_service_webserver/api_keys/plugin.py index 9c8cc742c23..11aeabf1e33 100644 --- a/services/web/server/src/simcore_service_webserver/api_keys/plugin.py +++ b/services/web/server/src/simcore_service_webserver/api_keys/plugin.py @@ -3,12 +3,12 @@ from aiohttp import web from servicelib.aiohttp.application_setup import ModuleCategory, app_module_setup -from .._constants import APP_SETTINGS_KEY +from ..constants import APP_SETTINGS_KEY from ..db.plugin import setup_db from ..products.plugin import setup_products from ..rabbitmq import setup_rabbitmq from ..rest.plugin import setup_rest -from . import _rest, _rpc +from . 
import _controller_rest, _controller_rpc _logger = logging.getLogger(__name__) @@ -26,9 +26,9 @@ def setup_api_keys(app: web.Application): # http api setup_rest(app) - app.router.add_routes(_rest.routes) + app.router.add_routes(_controller_rest.routes) # rpc api setup_rabbitmq(app) if app[APP_SETTINGS_KEY].WEBSERVER_RABBITMQ: - app.on_startup.append(_rpc.register_rpc_routes_on_startup) + app.on_startup.append(_controller_rpc.register_rpc_routes_on_startup) diff --git a/services/web/server/src/simcore_service_webserver/application.py b/services/web/server/src/simcore_service_webserver/application.py index e8825167f66..073ebb0c08b 100644 --- a/services/web/server/src/simcore_service_webserver/application.py +++ b/services/web/server/src/simcore_service_webserver/application.py @@ -1,6 +1,4 @@ -""" Main application - -""" +"""Main application""" import logging from pprint import pformat @@ -8,6 +6,7 @@ from aiohttp import web from servicelib.aiohttp.application import create_safe_application +from simcore_service_webserver.tasks.plugin import setup_tasks from ._meta import WELCOME_DB_LISTENER_MSG, WELCOME_GC_MSG, WELCOME_MSG, info from .activity.plugin import setup_activity @@ -29,7 +28,6 @@ from .licenses.plugin import setup_licenses from .login.plugin import setup_login from .long_running_tasks import setup_long_running_tasks -from .meta_modeling.plugin import setup_meta_modeling from .notifications.plugin import setup_notifications from .payments.plugin import setup_payments from .products.plugin import setup_products @@ -51,7 +49,6 @@ from .tracing import setup_app_tracing from .trash.plugin import setup_trash from .users.plugin import setup_users -from .version_control.plugin import setup_version_control from .wallets.plugin import setup_wallets from .workspaces.plugin import setup_workspaces @@ -123,6 +120,7 @@ def create_application() -> web.Application: setup_director_v2(app) setup_dynamic_scheduler(app) setup_storage(app) + setup_tasks(app) setup_catalog(app) # resource management @@ -137,9 +135,6 @@ def create_application() -> web.Application: # projects setup_projects(app) - # project add-ons - setup_version_control(app) - setup_meta_modeling(app) # licenses setup_licenses(app) diff --git a/services/web/server/src/simcore_service_webserver/application_settings.py b/services/web/server/src/simcore_service_webserver/application_settings.py index fcf8080123c..d1916698b39 100644 --- a/services/web/server/src/simcore_service_webserver/application_settings.py +++ b/services/web/server/src/simcore_service_webserver/application_settings.py @@ -11,7 +11,6 @@ AliasChoices, AnyHttpUrl, TypeAdapter, - ValidationInfo, field_validator, model_validator, ) @@ -27,15 +26,16 @@ from settings_library.utils_logging import MixinLoggingSettings from settings_library.utils_service import DEFAULT_AIOHTTP_PORT -from ._constants import APP_SETTINGS_KEY from ._meta import API_VERSION, API_VTAG, APP_NAME from .catalog.settings import CatalogSettings +from .constants import APP_SETTINGS_KEY from .diagnostics.settings import DiagnosticsSettings from .director_v2.settings import DirectorV2Settings from .dynamic_scheduler.settings import DynamicSchedulerSettings from .exporter.settings import ExporterSettings from .garbage_collector.settings import GarbageCollectorSettings from .invitations.settings import InvitationsSettings +from .licenses.settings import LicensesSettings from .login.settings import LoginSettings from .payments.settings import PaymentsSettings from .projects.settings import ProjectsSettings @@ 
-53,6 +53,12 @@ _logger = logging.getLogger(__name__) +# NOTE: to mark a plugin as a DEV-FEATURE annotated it with +# `Field(json_schema_extra={_X_DEV_FEATURE_FLAG: True})` +# This will force it to be disabled when WEBSERVER_DEV_FEATURES_ENABLED=False +_X_DEV_FEATURE_FLAG: Final[str] = "x-dev-feature" + + class ApplicationSettings(BaseApplicationSettings, MixinLoggingSettings): # CODE STATICS --------------------------------------------------------- API_VERSION: str = API_VERSION @@ -69,22 +75,20 @@ class ApplicationSettings(BaseApplicationSettings, MixinLoggingSettings): SIMCORE_VCS_RELEASE_TAG: Annotated[ str | None, Field( - default=None, description="Name of the tag that marks this release, or None if undefined", examples=["ResistanceIsFutile10"], ), - ] + ] = None SIMCORE_VCS_RELEASE_URL: Annotated[ AnyHttpUrl | None, Field( - default=None, description="URL to release notes", examples=[ "https://github.com/ITISFoundation/osparc-simcore/releases/tag/staging_ResistanceIsFutile10" ], ), - ] + ] = None SWARM_STACK_NAME: Annotated[ str | None, @@ -94,13 +98,12 @@ class ApplicationSettings(BaseApplicationSettings, MixinLoggingSettings): WEBSERVER_DEV_FEATURES_ENABLED: Annotated[ bool, Field( - default=False, description="Enables development features. WARNING: make sure it is disabled in production .env file!", ), - ] + ] = False WEBSERVER_CREDIT_COMPUTATION_ENABLED: Annotated[ - bool, Field(default=False, description="Enables credit computation features.") - ] + bool, Field(description="Enables credit computation features.") + ] = False WEBSERVER_LOGLEVEL: Annotated[ LogLevel, @@ -115,13 +118,12 @@ class ApplicationSettings(BaseApplicationSettings, MixinLoggingSettings): WEBSERVER_LOG_FORMAT_LOCAL_DEV_ENABLED: Annotated[ bool, Field( - default=False, validation_alias=AliasChoices( "WEBSERVER_LOG_FORMAT_LOCAL_DEV_ENABLED", "LOG_FORMAT_LOCAL_DEV_ENABLED" ), description="Enables local development log format. WARNING: make sure it is disabled if you want to have structured logs!", ), - ] + ] = False WEBSERVER_LOG_FILTER_MAPPING: Annotated[ dict[LoggerName, list[MessageSubstring]], @@ -231,6 +233,14 @@ class ApplicationSettings(BaseApplicationSettings, MixinLoggingSettings): ), ] + WEBSERVER_LICENSES: Annotated[ + LicensesSettings | None | bool, + Field( + json_schema_extra={"auto_default_from_env": True}, + # NOTE: `bool` is to keep backwards compatibility + ), + ] + WEBSERVER_LOGIN: Annotated[ LoginSettings | None, Field( @@ -349,8 +359,6 @@ class ApplicationSettings(BaseApplicationSettings, MixinLoggingSettings): WEBSERVER_DB_LISTENER: bool = True WEBSERVER_FOLDERS: bool = True WEBSERVER_GROUPS: bool = True - WEBSERVER_LICENSES: bool = False - WEBSERVER_META_MODELING: bool = True WEBSERVER_NOTIFICATIONS: bool = True WEBSERVER_PRODUCTS: bool = True WEBSERVER_PROFILING: bool = False @@ -358,18 +366,16 @@ class ApplicationSettings(BaseApplicationSettings, MixinLoggingSettings): WEBSERVER_REMOTE_DEBUG: bool = True WEBSERVER_SOCKETIO: bool = True WEBSERVER_TAGS: bool = True - WEBSERVER_VERSION_CONTROL: bool = True WEBSERVER_WALLETS: bool = True WEBSERVER_WORKSPACES: bool = True WEBSERVER_SECURITY: Annotated[ bool, Field( - default=True, description="This is a place-holder for future settings." 
"Currently this is a system plugin and cannot be disabled", ), - ] + ] = True @model_validator(mode="before") @classmethod @@ -389,32 +395,35 @@ def _build_vcs_release_url_if_unset(cls, values): return values - @field_validator( - # List of plugins under-development (keep up-to-date) - # TODO: consider mark as dev-feature in field extras of Config attr. - # Then they can be automtically advertised - "WEBSERVER_META_MODELING", - "WEBSERVER_VERSION_CONTROL", - mode="before", - ) + @model_validator(mode="before") @classmethod - def _enable_only_if_dev_features_allowed(cls, v, info: ValidationInfo): - """Ensures that plugins 'under development' get programatically - disabled if WEBSERVER_DEV_FEATURES_ENABLED=False - """ - if info.data["WEBSERVER_DEV_FEATURES_ENABLED"]: - return v - if v: - _logger.warning( - "%s still under development and will be disabled.", info.field_name - ) + def _enable_only_if_dev_features_allowed(cls, data: Any) -> Any: + """Force disables plugins marked 'under development' when WEBSERVER_DEV_FEATURES_ENABLED=False""" - return ( - None - if info.field_name and is_nullable(dict(cls.model_fields)[info.field_name]) - else False + dev_features_allowed = TypeAdapter(bool).validate_python( + data.get("WEBSERVER_DEV_FEATURES_ENABLED", False) ) + if dev_features_allowed: + return data + + for field_name, field in cls.model_fields.items(): + json_schema = field.json_schema_extra or {} + if callable(field.json_schema_extra): + json_schema = {} + field.json_schema_extra(json_schema) + + assert isinstance(json_schema, dict) # nosec + if json_schema.get(_X_DEV_FEATURE_FLAG): + _logger.warning( + "'%s' is still under development and will be forcibly disabled [WEBSERVER_DEV_FEATURES_ENABLED=%s].", + field_name, + dev_features_allowed, + ) + data[field_name] = None if is_nullable(field) else False + + return data + @field_validator("WEBSERVER_LOGLEVEL") @classmethod def _valid_log_level(cls, value): @@ -453,12 +462,15 @@ def _get_disabled_public_plugins(self) -> list[str]: "WEBSERVER_EXPORTER", "WEBSERVER_FOLDERS", "WEBSERVER_LICENSES", - "WEBSERVER_META_MODELING", "WEBSERVER_PAYMENTS", "WEBSERVER_SCICRUNCH", - "WEBSERVER_VERSION_CONTROL", } - return [_ for _ in public_plugin_candidates if not self.is_enabled(_)] + return [_ for _ in public_plugin_candidates if not self.is_enabled(_)] + [ + # NOTE: Permanently retired in https://github.com/ITISFoundation/osparc-simcore/pull/7182 + # Kept here to disable front-end + "WEBSERVER_META_MODELING", + "WEBSERVER_VERSION_CONTROL", + ] def _export_by_alias(self, **kwargs) -> dict[str, Any]: # This is a small helper to assist export functions since aliases are no longer used by @@ -520,7 +532,6 @@ def to_client_statics(self) -> dict[str, Any]: "SIMCORE_VCS_RELEASE_TAG": True, "SIMCORE_VCS_RELEASE_URL": True, "SWARM_STACK_NAME": True, - "WEBSERVER_LICENSES": True, "WEBSERVER_LOGIN": { "LOGIN_ACCOUNT_DELETION_RETENTION_DAYS", "LOGIN_2FA_REQUIRED", diff --git a/services/web/server/src/simcore_service_webserver/application_settings_utils.py b/services/web/server/src/simcore_service_webserver/application_settings_utils.py index ca4e27143f2..4adf8936f94 100644 --- a/services/web/server/src/simcore_service_webserver/application_settings_utils.py +++ b/services/web/server/src/simcore_service_webserver/application_settings_utils.py @@ -14,8 +14,8 @@ from pydantic.types import SecretStr from servicelib.aiohttp.typing_extension import Handler -from ._constants import MSG_UNDER_DEVELOPMENT from .application_settings import ApplicationSettings, 
get_application_settings +from .constants import MSG_UNDER_DEVELOPMENT _logger = logging.getLogger(__name__) @@ -164,7 +164,6 @@ def convert_to_app_config(app_settings: ApplicationSettings) -> AppConfigDict: "director-v2": {"enabled": app_settings.is_enabled("WEBSERVER_DIRECTOR_V2")}, "exporter": {"enabled": app_settings.WEBSERVER_EXPORTER is not None}, "groups": {"enabled": app_settings.WEBSERVER_GROUPS}, - "meta_modeling": {"enabled": app_settings.WEBSERVER_META_MODELING}, "products": {"enabled": app_settings.WEBSERVER_PRODUCTS}, "publications": {"enabled": app_settings.WEBSERVER_PUBLICATIONS}, "remote_debug": {"enabled": app_settings.WEBSERVER_REMOTE_DEBUG}, @@ -180,7 +179,6 @@ def convert_to_app_config(app_settings: ApplicationSettings) -> AppConfigDict: }, "tags": {"enabled": app_settings.WEBSERVER_TAGS}, "users": {"enabled": app_settings.WEBSERVER_USERS is not None}, - "version_control": {"enabled": app_settings.WEBSERVER_VERSION_CONTROL}, "wallets": {"enabled": app_settings.WEBSERVER_WALLETS}, "folders": {"enabled": app_settings.WEBSERVER_FOLDERS}, "workspaces": {"enabled": app_settings.WEBSERVER_WORKSPACES}, @@ -313,7 +311,6 @@ def _set_if_disabled(field_name, section): for settings_name in ( "WEBSERVER_GARBAGE_COLLECTOR", "WEBSERVER_GROUPS", - "WEBSERVER_META_MODELING", "WEBSERVER_PRODUCTS", "WEBSERVER_PROJECTS", "WEBSERVER_PUBLICATIONS", @@ -324,7 +321,6 @@ def _set_if_disabled(field_name, section): "WEBSERVER_STUDIES_DISPATCHER", "WEBSERVER_TAGS", "WEBSERVER_USERS", - "WEBSERVER_VERSION_CONTROL", "WEBSERVER_WALLETS", "WEBSERVER_FOLDERS", ): diff --git a/services/web/server/src/simcore_service_webserver/catalog/client.py b/services/web/server/src/simcore_service_webserver/catalog/_catalog_rest_client_service.py similarity index 85% rename from services/web/server/src/simcore_service_webserver/catalog/client.py rename to services/web/server/src/simcore_service_webserver/catalog/_catalog_rest_client_service.py index 386ae811da0..127cc1d53e5 100644 --- a/services/web/server/src/simcore_service_webserver/catalog/client.py +++ b/services/web/server/src/simcore_service_webserver/catalog/_catalog_rest_client_service.py @@ -1,7 +1,5 @@ -""" Requests to catalog service API +"""Requests to catalog service API""" -""" -import asyncio import logging import urllib.parse from collections.abc import Iterator @@ -23,12 +21,11 @@ from servicelib.aiohttp import status from servicelib.aiohttp.client_session import get_client_session from servicelib.rest_constants import X_PRODUCT_NAME_HEADER -from settings_library.catalog import CatalogSettings from yarl import URL from .._meta import api_version_prefix from ._constants import MSG_CATALOG_SERVICE_NOT_FOUND, MSG_CATALOG_SERVICE_UNAVAILABLE -from .settings import get_plugin_settings +from .settings import CatalogSettings, get_plugin_settings _logger = logging.getLogger(__name__) @@ -47,7 +44,7 @@ def _handle_client_exceptions(app: web.Application) -> Iterator[ClientSession]: reason=MSG_CATALOG_SERVICE_UNAVAILABLE ) from err - except (asyncio.TimeoutError, ClientConnectionError) as err: + except (TimeoutError, ClientConnectionError) as err: _logger.debug("Request to catalog service failed: %s", err) raise web.HTTPServiceUnavailable( reason=MSG_CATALOG_SERVICE_UNAVAILABLE @@ -169,27 +166,3 @@ async def get_service_access_rights( resp.raise_for_status() body = await resp.json() return ServiceAccessRightsGet.model_validate(body) - - -async def update_service( - app: web.Application, - user_id: UserID, - service_key: str, - service_version: str, - 
product_name: str, - update_data: dict[str, Any], -) -> dict[str, Any]: - settings: CatalogSettings = get_plugin_settings(app) - - url = URL( - f"{settings.api_base_url}/services/{urllib.parse.quote_plus(service_key)}/{service_version}", - encoded=True, - ).with_query({"user_id": user_id}) - - with _handle_client_exceptions(app) as session: - async with session.patch( - url, headers={X_PRODUCT_NAME_HEADER: product_name}, json=update_data - ) as resp: - resp.raise_for_status() - body: dict[str, Any] = await resp.json() - return body diff --git a/services/web/server/src/simcore_service_webserver/catalog/_handlers.py b/services/web/server/src/simcore_service_webserver/catalog/_controller_rest.py similarity index 74% rename from services/web/server/src/simcore_service_webserver/catalog/_handlers.py rename to services/web/server/src/simcore_service_webserver/catalog/_controller_rest.py index cdc617c5db3..644edb14d9a 100644 --- a/services/web/server/src/simcore_service_webserver/catalog/_handlers.py +++ b/services/web/server/src/simcore_service_webserver/catalog/_controller_rest.py @@ -1,4 +1,4 @@ -""" rest api handlers +"""rest api handlers - Take into account that part of the API is also needed in the public API so logic should live in the catalog service in his final version @@ -7,26 +7,22 @@ import asyncio import logging -import urllib.parse from typing import Final from aiohttp import web from aiohttp.web import Request, RouteTableDef from models_library.api_schemas_webserver.catalog import ( + CatalogLatestServiceGet, CatalogServiceGet, CatalogServiceUpdate, - ServiceInputKey, - ServiceOutputKey, ) from models_library.api_schemas_webserver.resource_usage import PricingPlanGet -from models_library.rest_pagination import Page, PageQueryParameters +from models_library.rest_pagination import Page from models_library.rest_pagination_utils import paginate_data -from models_library.services import ServiceKey, ServiceVersion from models_library.services_resources import ( ServiceResourcesDict, ServiceResourcesDictHelpers, ) -from pydantic import BaseModel, ConfigDict, Field, field_validator from servicelib.aiohttp.requests_validation import ( parse_request_body_as, parse_request_path_parameters_as, @@ -39,9 +35,21 @@ from ..resource_usage.service import get_default_service_pricing_plan from ..security.decorators import permission_required from ..utils_aiohttp import envelope_json_response -from . import _api, _handlers_errors, client -from ._api import CatalogRequestContext -from .exceptions import DefaultPricingUnitForServiceNotFoundError +from . 
import _catalog_rest_client_service, _service +from ._controller_rest_exceptions import ( + DefaultPricingUnitForServiceNotFoundError, + handle_plugin_requests_exceptions, +) +from ._controller_rest_schemas import ( + CatalogRequestContext, + FromServiceOutputQueryParams, + ListServiceParams, + ServiceInputsPathParams, + ServiceOutputsPathParams, + ServicePathParams, + ServiceTagPathParams, + ToServiceInputsQueryParams, +) _logger = logging.getLogger(__name__) @@ -51,54 +59,32 @@ routes = RouteTableDef() -class ServicePathParams(BaseModel): - service_key: ServiceKey - service_version: ServiceVersion - model_config = ConfigDict( - populate_by_name=True, - extra="forbid", - ) - - @field_validator("service_key", mode="before") - @classmethod - def ensure_unquoted(cls, v): - # NOTE: this is needed as in pytest mode, the aiohttp server does not seem to unquote automatically - if v is not None: - return urllib.parse.unquote(v) - return v - - -class ListServiceParams(PageQueryParameters): - ... - - @routes.get( f"{VTAG}/catalog/services/-/latest", name="list_services_latest", ) @login_required @permission_required("services.catalog.*") -@_handlers_errors.reraise_catalog_exceptions_as_http_errors +@handle_plugin_requests_exceptions async def list_services_latest(request: Request): request_ctx = CatalogRequestContext.create(request) query_params: ListServiceParams = parse_request_query_parameters_as( ListServiceParams, request ) - page_items, page_meta = await _api.list_latest_services( + page_items, page_meta = await _service.list_latest_services( request.app, user_id=request_ctx.user_id, product_name=request_ctx.product_name, unit_registry=request_ctx.unit_registry, - page_params=PageQueryParameters.model_construct( - offset=query_params.offset, limit=query_params.limit - ), + offset=query_params.offset, + limit=query_params.limit, ) assert page_meta.limit == query_params.limit # nosec assert page_meta.offset == query_params.offset # nosec - page = Page[CatalogServiceGet].model_validate( + page = Page[CatalogLatestServiceGet].model_validate( paginate_data( chunk=page_items, request_url=request.url, @@ -116,7 +102,7 @@ async def list_services_latest(request: Request): ) @login_required @permission_required("services.catalog.*") -@_handlers_errors.reraise_catalog_exceptions_as_http_errors +@handle_plugin_requests_exceptions async def get_service(request: Request): request_ctx = CatalogRequestContext.create(request) path_params = parse_request_path_parameters_as(ServicePathParams, request) @@ -124,7 +110,7 @@ async def get_service(request: Request): assert request_ctx # nosec assert path_params # nosec - service = await _api.get_service_v2( + service = await _service.get_service_v2( request.app, user_id=request_ctx.user_id, product_name=request_ctx.product_name, @@ -142,7 +128,7 @@ async def get_service(request: Request): ) @login_required @permission_required("services.catalog.*") -@_handlers_errors.reraise_catalog_exceptions_as_http_errors +@handle_plugin_requests_exceptions async def update_service(request: Request): request_ctx = CatalogRequestContext.create(request) path_params = parse_request_path_parameters_as(ServicePathParams, request) @@ -154,7 +140,7 @@ async def update_service(request: Request): assert path_params # nosec assert update # nosec - updated = await _api.update_service_v2( + updated = await _service.update_service_v2( request.app, user_id=request_ctx.user_id, product_name=request_ctx.product_name, @@ -178,7 +164,7 @@ async def list_service_inputs(request: Request): 
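    # Rough sketch of the handler pattern used throughout this controller:
    # CatalogRequestContext.create(request) resolves user_id/product_name, the
    # parse_request_*_as helpers validate path/query params against a pydantic
    # model, and list endpoints re-wrap the service-layer result via
    # paginate_data into a Page[...] model. Keyword names beyond chunk and
    # request_url are assumed from the surrounding calls:
    #
    #   page = Page[CatalogLatestServiceGet].model_validate(
    #       paginate_data(
    #           chunk=page_items,
    #           request_url=request.url,
    #           total=page_meta.total,
    #           limit=page_meta.limit,
    #           offset=page_meta.offset,
    #       )
    #   )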
path_params = parse_request_path_parameters_as(ServicePathParams, request) # Evaluate and return validated model - response_model = await _api.list_service_inputs( + response_model = await _service.list_service_inputs( path_params.service_key, path_params.service_version, ctx ) @@ -188,10 +174,6 @@ async def list_service_inputs(request: Request): ) -class _ServiceInputsPathParams(ServicePathParams): - input_key: ServiceInputKey - - @routes.get( f"{VTAG}/catalog/services/{{service_key}}/{{service_version}}/inputs/{{input_key}}", name="get_service_input", @@ -200,10 +182,10 @@ class _ServiceInputsPathParams(ServicePathParams): @permission_required("services.catalog.*") async def get_service_input(request: Request): ctx = CatalogRequestContext.create(request) - path_params = parse_request_path_parameters_as(_ServiceInputsPathParams, request) + path_params = parse_request_path_parameters_as(ServiceInputsPathParams, request) # Evaluate and return validated model - response_model = await _api.get_service_input( + response_model = await _service.get_service_input( path_params.service_key, path_params.service_version, path_params.input_key, @@ -216,12 +198,6 @@ async def get_service_input(request: Request): ) -class _FromServiceOutputParams(BaseModel): - from_service_key: ServiceKey = Field(..., alias="fromService") - from_service_version: ServiceVersion = Field(..., alias="fromVersion") - from_output_key: ServiceOutputKey = Field(..., alias="fromOutput") - - @routes.get( f"{VTAG}/catalog/services/{{service_key}}/{{service_version}}/inputs:match", name="get_compatible_inputs_given_source_output", @@ -231,12 +207,12 @@ class _FromServiceOutputParams(BaseModel): async def get_compatible_inputs_given_source_output(request: Request): ctx = CatalogRequestContext.create(request) path_params = parse_request_path_parameters_as(ServicePathParams, request) - query_params: _FromServiceOutputParams = parse_request_query_parameters_as( - _FromServiceOutputParams, request + query_params: FromServiceOutputQueryParams = parse_request_query_parameters_as( + FromServiceOutputQueryParams, request ) # Evaluate and return validated model - data = await _api.get_compatible_inputs_given_source_output( + data = await _service.get_compatible_inputs_given_source_output( path_params.service_key, path_params.service_version, query_params.from_service_key, @@ -261,7 +237,7 @@ async def list_service_outputs(request: Request): path_params = parse_request_path_parameters_as(ServicePathParams, request) # Evaluate and return validated model - response_model = await _api.list_service_outputs( + response_model = await _service.list_service_outputs( path_params.service_key, path_params.service_version, ctx ) @@ -271,10 +247,6 @@ async def list_service_outputs(request: Request): ) -class _ServiceOutputsPathParams(ServicePathParams): - output_key: ServiceOutputKey - - @routes.get( f"{VTAG}/catalog/services/{{service_key}}/{{service_version}}/outputs/{{output_key}}", name="get_service_output", @@ -283,10 +255,10 @@ class _ServiceOutputsPathParams(ServicePathParams): @permission_required("services.catalog.*") async def get_service_output(request: Request): ctx = CatalogRequestContext.create(request) - path_params = parse_request_path_parameters_as(_ServiceOutputsPathParams, request) + path_params = parse_request_path_parameters_as(ServiceOutputsPathParams, request) # Evaluate and return validated model - response_model = await _api.get_service_output( + response_model = await _service.get_service_output( path_params.service_key, 
path_params.service_version, path_params.output_key, @@ -299,12 +271,6 @@ async def get_service_output(request: Request): ) -class _ToServiceInputsParams(BaseModel): - to_service_key: ServiceKey = Field(..., alias="toService") - to_service_version: ServiceVersion = Field(..., alias="toVersion") - to_input_key: ServiceInputKey = Field(..., alias="toInput") - - @routes.get( f"{VTAG}/catalog/services/{{service_key}}/{{service_version}}/outputs:match", name="get_compatible_outputs_given_target_input", @@ -319,11 +285,11 @@ async def get_compatible_outputs_given_target_input(request: Request): """ ctx = CatalogRequestContext.create(request) path_params = parse_request_path_parameters_as(ServicePathParams, request) - query_params: _ToServiceInputsParams = parse_request_query_parameters_as( - _ToServiceInputsParams, request + query_params: ToServiceInputsQueryParams = parse_request_query_parameters_as( + ToServiceInputsQueryParams, request ) - data = await _api.get_compatible_outputs_given_target_input( + data = await _service.get_compatible_outputs_given_target_input( path_params.service_key, path_params.service_version, query_params.to_service_key, @@ -351,11 +317,13 @@ async def get_service_resources(request: Request): """ ctx = CatalogRequestContext.create(request) path_params = parse_request_path_parameters_as(ServicePathParams, request) - service_resources: ServiceResourcesDict = await client.get_service_resources( - request.app, - user_id=ctx.user_id, - service_key=path_params.service_key, - service_version=path_params.service_version, + service_resources: ServiceResourcesDict = ( + await _catalog_rest_client_service.get_service_resources( + request.app, + user_id=ctx.user_id, + service_key=path_params.service_key, + service_version=path_params.service_version, + ) ) data = ServiceResourcesDictHelpers.create_jsonable(service_resources) @@ -370,7 +338,7 @@ async def get_service_resources(request: Request): ) @login_required @permission_required("services.catalog.*") -@_handlers_errors.reraise_catalog_exceptions_as_http_errors +@handle_plugin_requests_exceptions async def get_service_pricing_plan(request: Request): ctx = CatalogRequestContext.create(request) path_params = parse_request_path_parameters_as(ServicePathParams, request) @@ -390,3 +358,43 @@ async def get_service_pricing_plan(request: Request): return envelope_json_response( PricingPlanGet.model_validate(pricing_plan.model_dump()) ) + + +@routes.get( + f"/{API_VTAG}/catalog/services/{{service_key}}/{{service_version}}/tags", + name="list_service_tags", +) +@login_required +@permission_required("service.tag.*") +async def list_service_tags(request: web.Request): + path_params = parse_request_path_parameters_as(ServicePathParams, request) + assert path_params # nosec + raise NotImplementedError + + +@routes.post( + f"/{API_VTAG}/catalog/services/{{service_key}}/{{service_version}}/tags/{{tag_id}}:add", + name="add_service_tag", +) +@login_required +@permission_required("service.tag.*") +async def add_service_tag(request: web.Request): + path_params = parse_request_path_parameters_as(ServiceTagPathParams, request) + assert path_params # nosec + + # responds with parent's resource to get the current state (as with patch/update) + raise NotImplementedError + + +@routes.post( + f"/{API_VTAG}/catalog/services/{{service_key}}/{{service_version}}/tags/{{tag_id}}:remove", + name="remove_service_tag", +) +@login_required +@permission_required("service.tag.*") +async def remove_service_tag(request: web.Request): + path_params = 
parse_request_path_parameters_as(ServiceTagPathParams, request) + assert path_params # nosec + + # responds with parent's resource to get the current state (as with patch/update) + raise NotImplementedError diff --git a/services/web/server/src/simcore_service_webserver/catalog/_controller_rest_exceptions.py b/services/web/server/src/simcore_service_webserver/catalog/_controller_rest_exceptions.py new file mode 100644 index 00000000000..134ea554da5 --- /dev/null +++ b/services/web/server/src/simcore_service_webserver/catalog/_controller_rest_exceptions.py @@ -0,0 +1,49 @@ +"""Defines the different exceptions that may arise in the catalog subpackage""" + +from servicelib.aiohttp import status +from servicelib.rabbitmq.rpc_interfaces.catalog.errors import ( + CatalogForbiddenError, + CatalogItemNotFoundError, +) + +from ..exception_handling import ( + ExceptionToHttpErrorMap, + HttpErrorInfo, + exception_handling_decorator, + to_exceptions_handlers_map, +) +from ..resource_usage.errors import DefaultPricingPlanNotFoundError +from .errors import DefaultPricingUnitForServiceNotFoundError + +# mypy: disable-error-code=truthy-function +assert CatalogForbiddenError # nosec +assert CatalogItemNotFoundError # nosec + + +_TO_HTTP_ERROR_MAP: ExceptionToHttpErrorMap = { + CatalogItemNotFoundError: HttpErrorInfo( + status.HTTP_404_NOT_FOUND, + "Catalog item not found", + ), + DefaultPricingPlanNotFoundError: HttpErrorInfo( + status.HTTP_404_NOT_FOUND, + "Default pricing plan not found", + ), + DefaultPricingUnitForServiceNotFoundError: HttpErrorInfo( + status.HTTP_404_NOT_FOUND, "Default pricing unit not found" + ), + CatalogForbiddenError: HttpErrorInfo( + status.HTTP_403_FORBIDDEN, "Forbidden catalog access" + ), +} + +handle_plugin_requests_exceptions = exception_handling_decorator( + to_exceptions_handlers_map(_TO_HTTP_ERROR_MAP) +) + + +__all__: tuple[str, ...] 
= ( + "CatalogForbiddenError", + "CatalogItemNotFoundError", + "DefaultPricingUnitForServiceNotFoundError", +) diff --git a/services/web/server/src/simcore_service_webserver/catalog/_controller_rest_schemas.py b/services/web/server/src/simcore_service_webserver/catalog/_controller_rest_schemas.py new file mode 100644 index 00000000000..83c8dbe9fa4 --- /dev/null +++ b/services/web/server/src/simcore_service_webserver/catalog/_controller_rest_schemas.py @@ -0,0 +1,215 @@ +import logging +import urllib.parse +from collections.abc import Callable +from dataclasses import dataclass +from typing import Annotated, Any, Final + +from aiocache import cached # type: ignore[import-untyped] +from aiohttp import web +from aiohttp.web import Request +from models_library.api_schemas_webserver.catalog import ( + ServiceInputGet, + ServiceInputKey, + ServiceOutputGet, + ServiceOutputKey, +) +from models_library.basic_types import IdInt +from models_library.rest_pagination import PageQueryParameters +from models_library.services import BaseServiceIOModel, ServiceKey, ServiceVersion +from models_library.users import UserID +from pint import PintError, Quantity, UnitRegistry +from pydantic import ( + BaseModel, + ConfigDict, + Field, + field_validator, +) +from servicelib.aiohttp.requests_validation import handle_validation_as_http_error + +from ..constants import RQ_PRODUCT_KEY, RQT_USERID_KEY + +_logger = logging.getLogger(__name__) + + +def get_unit_name(port: BaseServiceIOModel) -> str | None: + unit: str | None = port.unit + if port.property_type == "ref_contentSchema": + assert port.content_schema is not None # nosec + # NOTE: content schema might not be resolved (i.e. has $ref!! ) + unit = port.content_schema.get("x_unit", unit) + if unit: + # WARNING: has a special format for prefix. tmp direct replace here + unit = unit.replace("-", "") + elif port.content_schema.get("type") in ("object", "array", None): + # these objects might have unit in its fields + raise NotImplementedError + return unit + + +@dataclass +class UnitHtmlFormat: + short: str + long: str + + +def get_html_formatted_unit( + port: BaseServiceIOModel, ureg: UnitRegistry +) -> UnitHtmlFormat | None: + try: + unit_name = get_unit_name(port) + if unit_name is None: + return None + + q: Quantity = ureg.Quantity(unit_name) + return UnitHtmlFormat(short=f"{q.units:~H}", long=f"{q.units:H}") + except (PintError, NotImplementedError): + return None + + +# +# Transforms from catalog api models -> webserver api models +# +# Uses aiocache (async) instead of cachetools (sync) in order to handle concurrency better +# SEE https://github.com/ITISFoundation/osparc-simcore/pull/6169 +# +_SECOND = 1 # in seconds +_MINUTE = 60 * _SECOND +_CACHE_TTL: Final = 1 * _MINUTE + + +def _hash_inputs(_f: Callable[..., Any], *_args, **kw): + assert not _args # nosec + service: dict[str, Any] = kw["service"] + return f"ServiceInputGetFactory_{service['key']}_{service['version']}_{kw['input_key']}" + + +class ServiceInputGetFactory: + @staticmethod + @cached( + ttl=_CACHE_TTL, + key_builder=_hash_inputs, + ) + async def from_catalog_service_api_model( + *, + service: dict[str, Any], + input_key: ServiceInputKey, + ureg: UnitRegistry | None = None, + ) -> ServiceInputGet: + data = service["inputs"][input_key] + port = ServiceInputGet(key_id=input_key, **data) # validated! 
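        # Rough sketch of the aiocache pattern applied here: @cached memoizes
        # the async factory, ttl sets expiry, and key_builder receives the
        # wrapped function plus its args/kwargs and returns the cache key
        # string (as _hash_inputs above does). Standalone example under the
        # same assumptions; all names are illustrative:
        #
        #   from aiocache import cached
        #
        #   def _demo_key(fn, *args, **kw):
        #       svc = kw["service"]
        #       return f"demo_{svc['key']}_{svc['version']}"
        #
        #   @cached(ttl=60, key_builder=_demo_key)
        #   async def transform(*, service: dict) -> dict:
        #       return {"key": service["key"]}  # stands in for an expensive transform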
+ unit_html: UnitHtmlFormat | None + + if ureg and (unit_html := get_html_formatted_unit(port, ureg)): + # we know data is ok since it was validated above + return ServiceInputGet.model_construct( + key_id=input_key, + unit_long=unit_html.long, + unit_short=unit_html.short, + **data, + ) + return port + + +def _hash_outputs(_f: Callable[..., Any], *_args, **kw): + assert not _args # nosec + service: dict[str, Any] = kw["service"] + return f"ServiceOutputGetFactory_{service['key']}/{service['version']}/{kw['output_key']}" + + +class ServiceOutputGetFactory: + @staticmethod + @cached( + ttl=_CACHE_TTL, + key_builder=_hash_outputs, + ) + async def from_catalog_service_api_model( + *, + service: dict[str, Any], + output_key: ServiceOutputKey, + ureg: UnitRegistry | None = None, + ) -> ServiceOutputGet: + data = service["outputs"][output_key] + # NOTE: prunes invalid field that might have remained in database + data.pop("defaultValue", None) + + # NOTE: this call must be validated if port property type is "ref_contentSchema" + port = ServiceOutputGet(key_id=output_key, **data) + + unit_html: UnitHtmlFormat | None + if ureg and (unit_html := get_html_formatted_unit(port, ureg)): + # we know data is ok since it was validated above + return ServiceOutputGet.model_construct( + key_id=output_key, + unit_long=unit_html.long, + unit_short=unit_html.short, + **data, + ) + + return port + + +class CatalogRequestContext(BaseModel): + app: web.Application + user_id: UserID + product_name: str + unit_registry: UnitRegistry + model_config = ConfigDict(arbitrary_types_allowed=True) + + @classmethod + def create(cls, request: Request) -> "CatalogRequestContext": + with handle_validation_as_http_error( + error_msg_template="Invalid request", + resource_name=request.rel_url.path, + use_error_v1=True, + ): + assert request.app # nosec + return cls( + app=request.app, + user_id=request[RQT_USERID_KEY], + product_name=request[RQ_PRODUCT_KEY], + unit_registry=request.app[UnitRegistry.__name__], + ) + + +class ServicePathParams(BaseModel): + service_key: ServiceKey + service_version: ServiceVersion + model_config = ConfigDict( + populate_by_name=True, + extra="forbid", + ) + + @field_validator("service_key", mode="before") + @classmethod + def _ensure_unquoted(cls, v): + # NOTE: this is needed as in pytest mode, the aiohttp server does not seem to unquote automatically + if v is not None: + return urllib.parse.unquote(v) + return v + + +class ListServiceParams(PageQueryParameters): ... 
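The `ServiceInputGetFactory`/`ServiceOutputGetFactory` added above cache the per-port model transformation with aiocache, deriving the cache key exclusively from keyword arguments; that is what the `assert not _args` guard in `_hash_inputs`/`_hash_outputs` enforces. A minimal, self-contained sketch of the same pattern follows; `fetch_port` and its arguments are hypothetical stand-ins for illustration, not part of this PR:

import asyncio
from collections.abc import Callable
from typing import Any

from aiocache import cached


def _hash_call(_f: Callable[..., Any], *_args, **kw) -> str:
    # keyword-only convention: positional arguments would bypass the key
    # and let distinct calls collide, so they are rejected outright
    assert not _args  # nosec
    return f"demo_{kw['key']}_{kw['version']}"


@cached(ttl=60, key_builder=_hash_call)
async def fetch_port(*, key: str, version: str) -> dict[str, str]:
    # stands in for the expensive, validated model construction above
    return {"key": key, "version": version}


async def _demo() -> None:
    first = await fetch_port(key="simcore/services/comp/x", version="1.0.0")
    second = await fetch_port(key="simcore/services/comp/x", version="1.0.0")
    assert first == second  # same key, so the cached entry is returned


if __name__ == "__main__":
    asyncio.run(_demo())

The keyword-only convention is also why the factories expose `from_catalog_service_api_model(*, service, input_key, ureg)` behind a bare `*`: every argument has to reach the key builder by name.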
+ + +class ServiceTagPathParams(ServicePathParams): + tag_id: IdInt + + +class ServiceInputsPathParams(ServicePathParams): + input_key: ServiceInputKey + + +class FromServiceOutputQueryParams(BaseModel): + from_service_key: Annotated[ServiceKey, Field(alias="fromService")] + from_service_version: Annotated[ServiceVersion, Field(alias="fromVersion")] + from_output_key: Annotated[ServiceOutputKey, Field(alias="fromOutput")] + + +class ServiceOutputsPathParams(ServicePathParams): + output_key: ServiceOutputKey + + +class ToServiceInputsQueryParams(BaseModel): + to_service_key: Annotated[ServiceKey, Field(alias="toService")] + to_service_version: Annotated[ServiceVersion, Field(alias="toVersion")] + to_input_key: Annotated[ServiceInputKey, Field(alias="toInput")] diff --git a/services/web/server/src/simcore_service_webserver/catalog/_handlers_errors.py b/services/web/server/src/simcore_service_webserver/catalog/_handlers_errors.py deleted file mode 100644 index 4a278cc95dc..00000000000 --- a/services/web/server/src/simcore_service_webserver/catalog/_handlers_errors.py +++ /dev/null @@ -1,31 +0,0 @@ -import functools - -from aiohttp import web -from servicelib.aiohttp.typing_extension import Handler - -from ..resource_usage.errors import DefaultPricingPlanNotFoundError -from .exceptions import ( - CatalogForbiddenError, - CatalogItemNotFoundError, - DefaultPricingUnitForServiceNotFoundError, -) - - -def reraise_catalog_exceptions_as_http_errors(handler: Handler): - @functools.wraps(handler) - async def _wrapper(request: web.Request) -> web.StreamResponse: - try: - - return await handler(request) - - except ( - CatalogItemNotFoundError, - DefaultPricingPlanNotFoundError, - DefaultPricingUnitForServiceNotFoundError, - ) as exc: - raise web.HTTPNotFound(reason=f"{exc}") from exc - - except CatalogForbiddenError as exc: - raise web.HTTPForbidden(reason=f"{exc}") from exc - - return _wrapper diff --git a/services/web/server/src/simcore_service_webserver/catalog/_models.py b/services/web/server/src/simcore_service_webserver/catalog/_models.py index af137ba11d8..b589e629a9f 100644 --- a/services/web/server/src/simcore_service_webserver/catalog/_models.py +++ b/services/web/server/src/simcore_service_webserver/catalog/_models.py @@ -1,133 +1 @@ -import logging -from collections.abc import Callable -from dataclasses import dataclass -from typing import Any, Final - -from aiocache import cached # type: ignore[import-untyped] -from models_library.api_schemas_webserver.catalog import ( - ServiceInputGet, - ServiceInputKey, - ServiceOutputGet, - ServiceOutputKey, -) -from models_library.services import BaseServiceIOModel -from pint import PintError, Quantity, UnitRegistry - -_logger = logging.getLogger(__name__) - - -def get_unit_name(port: BaseServiceIOModel) -> str | None: - unit: str | None = port.unit - if port.property_type == "ref_contentSchema": - assert port.content_schema is not None # nosec - # NOTE: content schema might not be resolved (i.e. has $ref!! ) - unit = port.content_schema.get("x_unit", unit) - if unit: - # WARNING: has a special format for prefix. 
tmp direct replace here - unit = unit.replace("-", "") - elif port.content_schema.get("type") in ("object", "array", None): - # these objects might have unit in its fields - raise NotImplementedError - return unit - - -@dataclass -class UnitHtmlFormat: - short: str - long: str - - -def get_html_formatted_unit( - port: BaseServiceIOModel, ureg: UnitRegistry -) -> UnitHtmlFormat | None: - try: - unit_name = get_unit_name(port) - if unit_name is None: - return None - - q: Quantity = ureg.Quantity(unit_name) - return UnitHtmlFormat(short=f"{q.units:~H}", long=f"{q.units:H}") - except (PintError, NotImplementedError): - return None - - -# -# Transforms from catalog api models -> webserver api models -# -# Uses aiocache (async) instead of cachetools (sync) in order to handle concurrency better -# SEE https://github.com/ITISFoundation/osparc-simcore/pull/6169 -# -_SECOND = 1 # in seconds -_MINUTE = 60 * _SECOND -_CACHE_TTL: Final = 1 * _MINUTE - - -def _hash_inputs(_f: Callable[..., Any], *_args, **kw): - assert not _args # nosec - service: dict[str, Any] = kw["service"] - return f"ServiceInputGetFactory_{service['key']}_{service['version']}_{kw['input_key']}" - - -class ServiceInputGetFactory: - @staticmethod - @cached( - ttl=_CACHE_TTL, - key_builder=_hash_inputs, - ) - async def from_catalog_service_api_model( - *, - service: dict[str, Any], - input_key: ServiceInputKey, - ureg: UnitRegistry | None = None, - ) -> ServiceInputGet: - data = service["inputs"][input_key] - port = ServiceInputGet(key_id=input_key, **data) # validated! - unit_html: UnitHtmlFormat | None - - if ureg and (unit_html := get_html_formatted_unit(port, ureg)): - # we know data is ok since it was validated above - return ServiceInputGet.model_construct( - key_id=input_key, - unit_long=unit_html.long, - unit_short=unit_html.short, - **data, - ) - return port - - -def _hash_outputs(_f: Callable[..., Any], *_args, **kw): - assert not _args # nosec - service: dict[str, Any] = kw["service"] - return f"ServiceOutputGetFactory_{service['key']}/{service['version']}/{kw['output_key']}" - - -class ServiceOutputGetFactory: - @staticmethod - @cached( - ttl=_CACHE_TTL, - key_builder=_hash_outputs, - ) - async def from_catalog_service_api_model( - *, - service: dict[str, Any], - output_key: ServiceOutputKey, - ureg: UnitRegistry | None = None, - ) -> ServiceOutputGet: - data = service["outputs"][output_key] - # NOTE: prunes invalid field that might have remained in database - data.pop("defaultValue", None) - - # NOTE: this call must be validated if port property type is "ref_contentSchema" - port = ServiceOutputGet(key_id=output_key, **data) - - unit_html: UnitHtmlFormat | None - if ureg and (unit_html := get_html_formatted_unit(port, ureg)): - # we know data is ok since it was validated above - return ServiceOutputGet.model_construct( - key_id=output_key, - unit_long=unit_html.long, - unit_short=unit_html.short, - **data, - ) - - return port +# NOTE: missing. 
@bisgaard-itis will follow up here diff --git a/services/web/server/src/simcore_service_webserver/catalog/_api.py b/services/web/server/src/simcore_service_webserver/catalog/_service.py similarity index 71% rename from services/web/server/src/simcore_service_webserver/catalog/_api.py rename to services/web/server/src/simcore_service_webserver/catalog/_service.py index f2fc9be73a9..daa83c1e5d1 100644 --- a/services/web/server/src/simcore_service_webserver/catalog/_api.py +++ b/services/web/server/src/simcore_service_webserver/catalog/_service.py @@ -1,11 +1,9 @@ import logging -import warnings from collections.abc import Iterator from typing import Any, cast from aiohttp import web -from aiohttp.web import Request -from models_library.api_schemas_catalog.services import ServiceUpdateV2 +from models_library.api_schemas_catalog.services import MyServiceGet, ServiceUpdateV2 from models_library.api_schemas_webserver.catalog import ( ServiceInputGet, ServiceInputKey, @@ -13,7 +11,11 @@ ServiceOutputKey, ) from models_library.products import ProductName -from models_library.rest_pagination import PageMetaInfoLimitOffset, PageQueryParameters +from models_library.rest_pagination import ( + PageLimitInt, + PageMetaInfoLimitOffset, + PageOffsetInt, +) from models_library.services import ( ServiceInput, ServiceKey, @@ -23,43 +25,23 @@ from models_library.users import UserID from models_library.utils.fastapi_encoders import jsonable_encoder from pint import UnitRegistry -from pydantic import BaseModel, ConfigDict -from servicelib.aiohttp.requests_validation import handle_validation_as_http_error +from servicelib.rabbitmq._errors import RPCServerError from servicelib.rabbitmq.rpc_interfaces.catalog import services as catalog_rpc +from servicelib.rabbitmq.rpc_interfaces.catalog.errors import CatalogNotAvailableError from servicelib.rest_constants import RESPONSE_MODEL_POLICY -from .._constants import RQ_PRODUCT_KEY, RQT_USERID_KEY from ..rabbitmq import get_rabbitmq_rpc_client -from . import client -from ._api_units import can_connect, replace_service_input_outputs -from ._models import ServiceInputGetFactory, ServiceOutputGetFactory +from . 
import _catalog_rest_client_service +from ._controller_rest_schemas import ( + CatalogRequestContext, + ServiceInputGetFactory, + ServiceOutputGetFactory, +) +from ._units_service import can_connect, replace_service_input_outputs _logger = logging.getLogger(__name__) -class CatalogRequestContext(BaseModel): - app: web.Application - user_id: UserID - product_name: str - unit_registry: UnitRegistry - model_config = ConfigDict(arbitrary_types_allowed=True) - - @classmethod - def create(cls, request: Request) -> "CatalogRequestContext": - with handle_validation_as_http_error( - error_msg_template="Invalid request", - resource_name=request.rel_url.path, - use_error_v1=True, - ): - assert request.app # nosec - return cls( - app=request.app, - user_id=request[RQT_USERID_KEY], - product_name=request[RQ_PRODUCT_KEY], - unit_registry=request.app[UnitRegistry.__name__], - ) - - async def _safe_replace_service_input_outputs( service: dict[str, Any], unit_registry: UnitRegistry ): @@ -83,16 +65,14 @@ async def _safe_replace_service_input_outputs( ) -# IMPLEMENTATION -------------------------------------------------------------------------------- - - async def list_latest_services( app: web.Application, *, user_id: UserID, product_name: ProductName, unit_registry: UnitRegistry, - page_params: PageQueryParameters, + limit: PageLimitInt, + offset: PageOffsetInt, ) -> tuple[list, PageMetaInfoLimitOffset]: # NOTE: will replace list_services @@ -100,8 +80,8 @@ async def list_latest_services( get_rabbitmq_rpc_client(app), product_name=product_name, user_id=user_id, - limit=page_params.limit, - offset=page_params.offset, + limit=limit, + offset=offset, ) page_data = jsonable_encoder(page.data, exclude_unset=True) @@ -111,6 +91,28 @@ async def list_latest_services( return page_data, page.meta +async def batch_get_my_services( + app: web.Application, + *, + user_id: UserID, + product_name: ProductName, + services_ids: list[tuple[ServiceKey, ServiceVersion]], +) -> list[MyServiceGet]: + try: + + return await catalog_rpc.batch_get_my_services( + get_rabbitmq_rpc_client(app), + user_id=user_id, + product_name=product_name, + ids=services_ids, + ) + except RPCServerError as err: + raise CatalogNotAvailableError( + user_id=user_id, + product_name=product_name, + ) from err + + async def get_service_v2( app: web.Application, *, @@ -167,75 +169,10 @@ async def update_service_v2( return data -async def list_services( - app: web.Application, - *, - user_id: UserID, - product_name: str, - unit_registry: UnitRegistry, -): - services = await client.get_services_for_user_in_product( - app, user_id, product_name, only_key_versions=False - ) - for service in services: - await _safe_replace_service_input_outputs(service, unit_registry) - - return services - - -async def get_service( - service_key: ServiceKey, service_version: ServiceVersion, ctx: CatalogRequestContext -) -> dict[str, Any]: - - warnings.warn( - "`get_service` is deprecated, use `get_service_v2` instead", - DeprecationWarning, - stacklevel=1, - ) - - service = await client.get_service( - ctx.app, ctx.user_id, service_key, service_version, ctx.product_name - ) - await replace_service_input_outputs( - service, - unit_registry=ctx.unit_registry, - **RESPONSE_MODEL_POLICY, - ) - return service - - -async def update_service( - service_key: ServiceKey, - service_version: ServiceVersion, - update_data: dict[str, Any], - ctx: CatalogRequestContext, -): - warnings.warn( - "`update_service_v2` is deprecated, use `update_service_v2` instead", - DeprecationWarning, - 
stacklevel=1, - ) - - service = await client.update_service( - ctx.app, - ctx.user_id, - service_key, - service_version, - ctx.product_name, - update_data, - ) - await replace_service_input_outputs( - service, - unit_registry=ctx.unit_registry, - **RESPONSE_MODEL_POLICY, - ) - return service - - async def list_service_inputs( service_key: ServiceKey, service_version: ServiceVersion, ctx: CatalogRequestContext ) -> list[ServiceInputGet]: - service = await client.get_service( + service = await _catalog_rest_client_service.get_service( ctx.app, ctx.user_id, service_key, service_version, ctx.product_name ) return [ @@ -252,7 +189,7 @@ async def get_service_input( input_key: ServiceInputKey, ctx: CatalogRequestContext, ) -> ServiceInputGet: - service = await client.get_service( + service = await _catalog_rest_client_service.get_service( ctx.app, ctx.user_id, service_key, service_version, ctx.product_name ) service_input: ServiceInputGet = ( @@ -294,7 +231,7 @@ async def get_compatible_inputs_given_source_output( def iter_service_inputs() -> Iterator[tuple[ServiceInputKey, ServiceInput]]: for service_input in service_inputs: yield service_input.key_id, ServiceInput.model_construct( - **service_input.model_dump(include=ServiceInput.model_fields.keys()) # type: ignore[arg-type] + **service_input.model_dump(include=ServiceInput.model_fields.keys()) # type: ignore[arg-type] ) # check @@ -311,7 +248,7 @@ async def list_service_outputs( service_version: ServiceVersion, ctx: CatalogRequestContext, ) -> list[ServiceOutputGet]: - service = await client.get_service( + service = await _catalog_rest_client_service.get_service( ctx.app, ctx.user_id, service_key, service_version, ctx.product_name ) return [ @@ -328,7 +265,7 @@ async def get_service_output( output_key: ServiceOutputKey, ctx: CatalogRequestContext, ) -> ServiceOutputGet: - service = await client.get_service( + service = await _catalog_rest_client_service.get_service( ctx.app, ctx.user_id, service_key, service_version, ctx.product_name ) return cast( # mypy -> aiocache is not typed. 
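`batch_get_my_services` above shields its callers from the RPC transport: a low-level `RPCServerError` raised by the RabbitMQ client is re-raised as the domain-level `CatalogNotAvailableError`, so REST handlers only ever see catalog-shaped failures. A self-contained sketch of that translation pattern, with both error classes stubbed locally for illustration (the real ones live in `servicelib`):

from collections.abc import Awaitable, Callable


class RPCServerError(Exception):
    """local stand-in for servicelib.rabbitmq._errors.RPCServerError"""


class CatalogNotAvailableError(Exception):
    """local stand-in for the servicelib catalog error; the real one carries context kwargs"""

    def __init__(self, **ctx) -> None:
        super().__init__(f"catalog currently unavailable: {ctx}")
        self.ctx = ctx


async def safe_batch_get(
    rpc_call: Callable[..., Awaitable[list]], *, user_id: int, product_name: str
) -> list:
    try:
        # the transport call can fail for broker or timeout reasons that
        # have nothing to do with catalog domain logic
        return await rpc_call(user_id=user_id, product_name=product_name)
    except RPCServerError as err:
        # translate to a domain error; `from err` preserves the causal
        # chain for server-side logging
        raise CatalogNotAvailableError(
            user_id=user_id, product_name=product_name
        ) from err

Combined with the `_TO_HTTP_ERROR_MAP` introduced earlier in this diff, domain errors of this kind can then be mapped to stable HTTP status codes in one place instead of in every handler.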
@@ -361,7 +298,7 @@ def iter_service_outputs() -> Iterator[tuple[ServiceOutputKey, ServiceOutput]]: to_service_key, to_service_version, to_input_key, ctx ) to_input: ServiceInput = ServiceInput.model_construct( - **service_input.model_dump(include=ServiceInput.model_fields.keys()) # type: ignore[arg-type] + **service_input.model_dump(include=ServiceInput.model_fields.keys()) # type: ignore[arg-type] ) # check diff --git a/services/web/server/src/simcore_service_webserver/catalog/_tags_handlers.py b/services/web/server/src/simcore_service_webserver/catalog/_tags_handlers.py deleted file mode 100644 index dc75617f497..00000000000 --- a/services/web/server/src/simcore_service_webserver/catalog/_tags_handlers.py +++ /dev/null @@ -1,59 +0,0 @@ -import logging - -from aiohttp import web -from models_library.basic_types import IdInt -from servicelib.aiohttp.requests_validation import parse_request_path_parameters_as - -from .._meta import API_VTAG -from ..login.decorators import login_required -from ..security.decorators import permission_required -from ._handlers import ServicePathParams - -_logger = logging.getLogger(__name__) - - -class ServiceTagPathParams(ServicePathParams): - tag_id: IdInt - - -routes = web.RouteTableDef() - - -@routes.get( - f"/{API_VTAG}/catalog/services/{{service_key}}/{{service_version}}/tags", - name="list_service_tags", -) -@login_required -@permission_required("service.tag.*") -async def list_service_tags(request: web.Request): - path_params = parse_request_path_parameters_as(ServicePathParams, request) - assert path_params # nosec - raise NotImplementedError - - -@routes.post( - f"/{API_VTAG}/catalog/services/{{service_key}}/{{service_version}}/tags/{{tag_id}}:add", - name="add_service_tag", -) -@login_required -@permission_required("service.tag.*") -async def add_service_tag(request: web.Request): - path_params = parse_request_path_parameters_as(ServiceTagPathParams, request) - assert path_params # nosec - - # responds with parent's resource to get the current state (as with patch/update) - raise NotImplementedError - - -@routes.post( - f"/{API_VTAG}/catalog/services/{{service_key}}/{{service_version}}/tags/{{tag_id}}:remove", - name="remove_service_tag", -) -@login_required -@permission_required("service.tag.*") -async def remove_service_tag(request: web.Request): - path_params = parse_request_path_parameters_as(ServiceTagPathParams, request) - assert path_params # nosec - - # responds with parent's resource to get the current state (as with patch/update) - raise NotImplementedError diff --git a/services/web/server/src/simcore_service_webserver/catalog/_api_units.py b/services/web/server/src/simcore_service_webserver/catalog/_units_service.py similarity index 97% rename from services/web/server/src/simcore_service_webserver/catalog/_api_units.py rename to services/web/server/src/simcore_service_webserver/catalog/_units_service.py index 65e435f6886..0a0ddb64103 100644 --- a/services/web/server/src/simcore_service_webserver/catalog/_api_units.py +++ b/services/web/server/src/simcore_service_webserver/catalog/_units_service.py @@ -3,7 +3,11 @@ from models_library.services import BaseServiceIOModel, ServiceInput, ServiceOutput from pint import PintError, UnitRegistry -from ._models import ServiceInputGetFactory, ServiceOutputGetFactory, get_unit_name +from ._controller_rest_schemas import ( + ServiceInputGetFactory, + ServiceOutputGetFactory, + get_unit_name, +) def _get_type_name(port: BaseServiceIOModel) -> str: diff --git 
a/services/web/server/src/simcore_service_webserver/catalog/catalog_service.py b/services/web/server/src/simcore_service_webserver/catalog/catalog_service.py new file mode 100644 index 00000000000..d6e42b376dd --- /dev/null +++ b/services/web/server/src/simcore_service_webserver/catalog/catalog_service.py @@ -0,0 +1,20 @@ +from ._catalog_rest_client_service import ( + get_service, + get_service_access_rights, + get_service_resources, + get_services_for_user_in_product, + is_catalog_service_responsive, + to_backend_service, +) +from ._service import batch_get_my_services + +__all__: tuple[str, ...] = ( + "batch_get_my_services", + "get_service", + "get_service_access_rights", + "get_service_resources", + "get_services_for_user_in_product", + "is_catalog_service_responsive", + "to_backend_service", +) +# nopycln: file diff --git a/services/web/server/src/simcore_service_webserver/catalog/exceptions.py b/services/web/server/src/simcore_service_webserver/catalog/errors.py similarity index 65% rename from services/web/server/src/simcore_service_webserver/catalog/exceptions.py rename to services/web/server/src/simcore_service_webserver/catalog/errors.py index 11f3794661b..23625d772b3 100644 --- a/services/web/server/src/simcore_service_webserver/catalog/exceptions.py +++ b/services/web/server/src/simcore_service_webserver/catalog/errors.py @@ -1,10 +1,5 @@ """Defines the different exceptions that may arise in the catalog subpackage""" -from servicelib.rabbitmq.rpc_interfaces.catalog.errors import ( - CatalogForbiddenError, - CatalogItemNotFoundError, -) - from ..errors import WebServerBaseError @@ -28,15 +23,3 @@ def __init__(self, *, service_key: str, service_version: str, **ctxs): super().__init__(**ctxs) self.service_key = service_key self.service_version = service_version - - -# mypy: disable-error-code=truthy-function -assert CatalogForbiddenError # nosec -assert CatalogItemNotFoundError # nosec - - -__all__: tuple[str, ...] = ( - "CatalogForbiddenError", - "CatalogItemNotFoundError", - "DefaultPricingUnitForServiceNotFoundError", -) diff --git a/services/web/server/src/simcore_service_webserver/catalog/plugin.py b/services/web/server/src/simcore_service_webserver/catalog/plugin.py index 2af8da917f0..b8a5bbce743 100644 --- a/services/web/server/src/simcore_service_webserver/catalog/plugin.py +++ b/services/web/server/src/simcore_service_webserver/catalog/plugin.py @@ -1,6 +1,4 @@ -""" Subsystem to communicate with catalog service - -""" +"""Subsystem to communicate with catalog service""" import logging @@ -8,7 +6,7 @@ from pint import UnitRegistry from servicelib.aiohttp.application_setup import ModuleCategory, app_module_setup -from . import _handlers, _tags_handlers +from . 
import _controller_rest _logger = logging.getLogger(__name__) @@ -24,11 +22,10 @@ def setup_catalog(app: web.Application): # ensures routes are names that corresponds to function names assert all( # nosec route_def.kwargs["name"] == route_def.handler.__name__ # type: ignore[attr-defined] # route_def is a RouteDef not an Abstract - for route_def in _handlers.routes + for route_def in _controller_rest.routes ) - app.add_routes(_handlers.routes) - app.add_routes(_tags_handlers.routes) + app.add_routes(_controller_rest.routes) # prepares units registry app[UnitRegistry.__name__] = UnitRegistry() diff --git a/services/web/server/src/simcore_service_webserver/catalog/settings.py b/services/web/server/src/simcore_service_webserver/catalog/settings.py index 0687cbcb56e..6e7768b03cc 100644 --- a/services/web/server/src/simcore_service_webserver/catalog/settings.py +++ b/services/web/server/src/simcore_service_webserver/catalog/settings.py @@ -7,7 +7,7 @@ from aiohttp import web from settings_library.catalog import CatalogSettings -from .._constants import APP_SETTINGS_KEY +from ..constants import APP_SETTINGS_KEY def get_plugin_settings(app: web.Application) -> CatalogSettings: diff --git a/services/web/server/src/simcore_service_webserver/_constants.py b/services/web/server/src/simcore_service_webserver/constants.py similarity index 56% rename from services/web/server/src/simcore_service_webserver/_constants.py rename to services/web/server/src/simcore_service_webserver/constants.py index 6590592afaf..6c0dae060da 100644 --- a/services/web/server/src/simcore_service_webserver/_constants.py +++ b/services/web/server/src/simcore_service_webserver/constants.py @@ -1,5 +1,4 @@ # pylint:disable=unused-import -# nopycln: file from typing import Final @@ -14,24 +13,52 @@ # Application storage keys APP_PRODUCTS_KEY: Final[str] = f"{__name__ }.APP_PRODUCTS_KEY" -# Request storage keys -RQ_PRODUCT_KEY: Final[str] = f"{__name__}.RQ_PRODUCT_KEY" +# Public config per product returned in /config +APP_PUBLIC_CONFIG_PER_PRODUCT: Final[str] = f"{__name__}.APP_PUBLIC_CONFIG_PER_PRODUCT" + +FRONTEND_APPS_AVAILABLE = frozenset( + # These are the apps built right now by static-webserver/client + { + "osparc", + "s4l", + "s4lacad", + "s4ldesktop", + "s4ldesktopacad", + "s4lengine", + "s4llite", + "tiplite", + "tis", + } +) +FRONTEND_APP_DEFAULT = "osparc" + +assert FRONTEND_APP_DEFAULT in FRONTEND_APPS_AVAILABLE # nosec + # main index route name = front-end INDEX_RESOURCE_NAME: Final[str] = "get_cached_frontend_index" -# Public config per product returned in /config -APP_PUBLIC_CONFIG_PER_PRODUCT: Final[str] = f"{__name__}.APP_PUBLIC_CONFIG_PER_PRODUCT" +MSG_UNDER_DEVELOPMENT: Final[str] = ( + "Under development. Use WEBSERVER_DEV_FEATURES_ENABLED=1 to enable current implementation" +) + -MSG_UNDER_DEVELOPMENT: Final[ - str -] = "Under development. Use WEBSERVER_DEV_FEATURES_ENABLED=1 to enable current implementation" +# Request storage keys +RQ_PRODUCT_KEY: Final[str] = f"{__name__}.RQ_PRODUCT_KEY" +MSG_TRY_AGAIN_OR_SUPPORT: Final[str] = ( + "Please try again shortly. If the issue persists, contact support." +) + __all__: tuple[str, ...] 
= ( "APP_AIOPG_ENGINE_KEY", "APP_CONFIG_KEY", "APP_FIRE_AND_FORGET_TASKS_KEY", "APP_SETTINGS_KEY", + "FRONTEND_APPS_AVAILABLE", + "FRONTEND_APP_DEFAULT", "RQT_USERID_KEY", ) + +# nopycln: file diff --git a/services/web/server/src/simcore_service_webserver/db/_aiopg.py b/services/web/server/src/simcore_service_webserver/db/_aiopg.py index 4a45a0a00fb..9d9feea1f80 100644 --- a/services/web/server/src/simcore_service_webserver/db/_aiopg.py +++ b/services/web/server/src/simcore_service_webserver/db/_aiopg.py @@ -15,7 +15,7 @@ from servicelib.aiohttp.application_keys import APP_AIOPG_ENGINE_KEY from servicelib.logging_utils import log_context from servicelib.retry_policies import PostgresRetryPolicyUponInitialization -from simcore_postgres_database.errors import DBAPIError +from simcore_postgres_database.aiopg_errors import DBAPIError from simcore_postgres_database.utils_aiopg import ( DBMigrationError, close_engine, diff --git a/services/web/server/src/simcore_service_webserver/db/base_repository.py b/services/web/server/src/simcore_service_webserver/db/base_repository.py index f7c207fb1b0..fc735e97254 100644 --- a/services/web/server/src/simcore_service_webserver/db/base_repository.py +++ b/services/web/server/src/simcore_service_webserver/db/base_repository.py @@ -1,33 +1,26 @@ +from dataclasses import dataclass +from typing import Self + from aiohttp import web -from aiopg.sa.engine import Engine from models_library.users import UserID +from sqlalchemy.ext.asyncio import AsyncEngine -from .._constants import RQT_USERID_KEY -from . import _aiopg +from ..constants import RQT_USERID_KEY +from . import _asyncpg +@dataclass(frozen=True) class BaseRepository: - def __init__(self, engine: Engine, user_id: UserID | None = None): - self._engine = engine - self._user_id = user_id - - assert isinstance(self._engine, Engine) # nosec + engine: AsyncEngine + user_id: UserID | None = None @classmethod - def create_from_request(cls, request: web.Request): + def create_from_request(cls, request: web.Request) -> Self: return cls( - engine=_aiopg.get_database_engine(request.app), + engine=_asyncpg.get_async_engine(request.app), user_id=request.get(RQT_USERID_KEY), ) @classmethod - def create_from_app(cls, app: web.Application): - return cls(engine=_aiopg.get_database_engine(app), user_id=None) - - @property - def engine(self) -> Engine: - return self._engine - - @property - def user_id(self) -> int | None: - return self._user_id + def create_from_app(cls, app: web.Application) -> Self: + return cls(engine=_asyncpg.get_async_engine(app)) diff --git a/services/web/server/src/simcore_service_webserver/db/settings.py b/services/web/server/src/simcore_service_webserver/db/settings.py index 6ba62e8b4d4..b30787bd952 100644 --- a/services/web/server/src/simcore_service_webserver/db/settings.py +++ b/services/web/server/src/simcore_service_webserver/db/settings.py @@ -1,7 +1,7 @@ from aiohttp.web import Application from settings_library.postgres import PostgresSettings -from .._constants import APP_SETTINGS_KEY +from ..constants import APP_SETTINGS_KEY def get_plugin_settings(app: Application) -> PostgresSettings: diff --git a/services/web/server/src/simcore_service_webserver/db_listener/_db_comp_tasks_listening_task.py b/services/web/server/src/simcore_service_webserver/db_listener/_db_comp_tasks_listening_task.py index 2777fe57e49..f6f3853eaf7 100644 --- a/services/web/server/src/simcore_service_webserver/db_listener/_db_comp_tasks_listening_task.py +++ 
b/services/web/server/src/simcore_service_webserver/db_listener/_db_comp_tasks_listening_task.py @@ -2,6 +2,7 @@ First a procedure is registered in postgres that gets triggered whenever the outputs of a record in comp_task table is changed. """ + import asyncio import json import logging @@ -22,7 +23,7 @@ from sqlalchemy.sql import select from ..db.plugin import get_database_engine -from ..projects import exceptions, projects_service +from ..projects import _projects_service, exceptions from ..projects.nodes_utils import update_node_outputs from ._utils import convert_state_from_db @@ -47,12 +48,14 @@ async def _update_project_state( new_state: RunningState, node_errors: list[ErrorDict] | None, ) -> None: - project = await projects_service.update_project_node_state( + project = await _projects_service.update_project_node_state( app, user_id, project_uuid, node_uuid, new_state ) - await projects_service.notify_project_node_update(app, project, node_uuid, node_errors) - await projects_service.notify_project_state_update(app, project) + await _projects_service.notify_project_node_update( + app, project, node_uuid, node_errors + ) + await _projects_service.notify_project_state_update(app, project) @dataclass(frozen=True) diff --git a/services/web/server/src/simcore_service_webserver/db_listener/plugin.py b/services/web/server/src/simcore_service_webserver/db_listener/plugin.py index a4fda5b69bd..f047491d3a4 100644 --- a/services/web/server/src/simcore_service_webserver/db_listener/plugin.py +++ b/services/web/server/src/simcore_service_webserver/db_listener/plugin.py @@ -1,14 +1,15 @@ """ - computation module is the main entry-point for computational backend +computation module is the main entry-point for computational backend """ + import logging from aiohttp import web from servicelib.aiohttp.application_setup import ModuleCategory, app_module_setup from ..db.plugin import setup_db -from ..projects.db import setup_projects_db +from ..projects._projects_repository_legacy import setup_projects_db from ..rabbitmq import setup_rabbitmq from ..socketio.plugin import setup_socketio from ._db_comp_tasks_listening_task import create_comp_tasks_listening_task diff --git a/services/web/server/src/simcore_service_webserver/diagnostics/_handlers.py b/services/web/server/src/simcore_service_webserver/diagnostics/_handlers.py index bed2f77f7f2..a25b1442d65 100644 --- a/services/web/server/src/simcore_service_webserver/diagnostics/_handlers.py +++ b/services/web/server/src/simcore_service_webserver/diagnostics/_handlers.py @@ -1,6 +1,4 @@ -""" Handler functions and routing for diagnostics - -""" +"""Handler functions and routing for diagnostics""" import asyncio import logging @@ -15,13 +13,13 @@ from servicelib.utils import logged_gather from .._meta import API_VERSION, APP_NAME, api_version_prefix -from ..catalog.client import is_catalog_service_responsive +from ..catalog import catalog_service from ..db import plugin -from ..director_v2 import api as director_v2_api +from ..director_v2 import director_v2_service from ..login.decorators import login_required from ..resource_usage._client import is_resource_usage_tracking_service_responsive from ..security.decorators import permission_required -from ..storage import api as storage_api +from ..storage import api as storage_service from ..utils import TaskInfoDict, get_task_info, get_tracemalloc_info from ..utils_aiohttp import envelope_json_response @@ -120,18 +118,18 @@ async def _check_pg(): async def _check_storage(): check.services["storage"] = { - 
"healthy": await storage_api.is_healthy(request.app), + "healthy": await storage_service.is_healthy(request.app), "status_url": _get_url_for("get_service_status", service_name="storage"), } async def _check_director2(): check.services["director_v2"] = { - "healthy": await director_v2_api.is_healthy(request.app) + "healthy": await director_v2_service.is_healthy(request.app) } async def _check_catalog(): check.services["catalog"] = { - "healthy": await is_catalog_service_responsive(request.app) + "healthy": await catalog_service.is_catalog_service_responsive(request.app) } async def _check_resource_usage_tracker(): @@ -160,7 +158,7 @@ async def get_service_status(request: web.Request): if service_name == "storage": with suppress(ClientError): - status = await storage_api.get_app_status(request.app) + status = await storage_service.get_app_status(request.app) return envelope_json_response(status) raise web.HTTPNotFound diff --git a/services/web/server/src/simcore_service_webserver/diagnostics/plugin.py b/services/web/server/src/simcore_service_webserver/diagnostics/plugin.py index a9bcb1306b8..8c843699bd5 100644 --- a/services/web/server/src/simcore_service_webserver/diagnostics/plugin.py +++ b/services/web/server/src/simcore_service_webserver/diagnostics/plugin.py @@ -6,8 +6,8 @@ from servicelib.aiohttp import monitor_slow_callbacks from servicelib.aiohttp.application_setup import ModuleCategory, app_module_setup from servicelib.aiohttp.profiler_middleware import profiling_middleware -from simcore_service_webserver.application_settings import get_application_settings +from ..application_settings import get_application_settings from ..rest.healthcheck import HealthCheck from ..rest.plugin import setup_rest from . import _handlers diff --git a/services/web/server/src/simcore_service_webserver/director_v2/_abc.py b/services/web/server/src/simcore_service_webserver/director_v2/_abc.py index 636e36e3f86..864a7ac7faa 100644 --- a/services/web/server/src/simcore_service_webserver/director_v2/_abc.py +++ b/services/web/server/src/simcore_service_webserver/director_v2/_abc.py @@ -1,9 +1,10 @@ from abc import ABC, abstractmethod +from typing import TypeAlias from aiohttp import web from models_library.projects import ProjectID -CommitID = int +CommitID: TypeAlias = int _APP_PROJECT_RUN_POLICY_KEY = f"{__name__}.ProjectRunPolicy" diff --git a/services/web/server/src/simcore_service_webserver/director_v2/_api_utils.py b/services/web/server/src/simcore_service_webserver/director_v2/_api_utils.py index 74bc8e8ee14..7c8328c9929 100644 --- a/services/web/server/src/simcore_service_webserver/director_v2/_api_utils.py +++ b/services/web/server/src/simcore_service_webserver/director_v2/_api_utils.py @@ -5,11 +5,11 @@ from pydantic import TypeAdapter from ..application_settings import get_application_settings -from ..products.api import Product +from ..products.models import Product from ..projects import api as projects_api from ..users import preferences_api as user_preferences_api from ..users.exceptions import UserDefaultWalletNotFoundError -from ..wallets import api as wallets_api +from ..wallets import api as wallets_service async def get_wallet_info( @@ -49,7 +49,7 @@ async def get_wallet_info( project_wallet_id = project_wallet.wallet_id # Check whether user has access to the wallet - wallet = await wallets_api.get_wallet_with_available_credits_by_user_and_wallet( + wallet = await wallets_service.get_wallet_with_available_credits_by_user_and_wallet( app, user_id=user_id, wallet_id=project_wallet_id, 
diff --git a/services/web/server/src/simcore_service_webserver/director_v2/_core_computations.py b/services/web/server/src/simcore_service_webserver/director_v2/_core_computations.py index 7785f7936d2..93fe967bea9 100644 --- a/services/web/server/src/simcore_service_webserver/director_v2/_core_computations.py +++ b/services/web/server/src/simcore_service_webserver/director_v2/_core_computations.py @@ -21,7 +21,7 @@ from servicelib.aiohttp import status from servicelib.logging_utils import log_decorator -from ..products.api import get_product +from ..products import products_service from ._api_utils import get_wallet_info from ._core_base import DataType, request_director_v2 from .exceptions import ComputationNotFoundError, DirectorServiceError @@ -107,7 +107,7 @@ async def create_or_update_pipeline( "product_name": product_name, "wallet_info": await get_wallet_info( app, - product=get_product(app, product_name), + product=products_service.get_product(app, product_name), user_id=user_id, project_id=project_id, product_name=product_name, diff --git a/services/web/server/src/simcore_service_webserver/director_v2/_core_utils.py b/services/web/server/src/simcore_service_webserver/director_v2/_core_utils.py index b106f2c6728..1e342c21752 100644 --- a/services/web/server/src/simcore_service_webserver/director_v2/_core_utils.py +++ b/services/web/server/src/simcore_service_webserver/director_v2/_core_utils.py @@ -11,7 +11,7 @@ from aiohttp import ClientTimeout, web from models_library.projects import ProjectID -from ._abc import AbstractProjectRunPolicy +from ._abc import AbstractProjectRunPolicy, CommitID from .settings import DirectorV2Settings, get_client_session, get_plugin_settings log = logging.getLogger(__name__) @@ -54,7 +54,7 @@ async def get_or_create_runnable_projects( self, request: web.Request, project_uuid: ProjectID, - ) -> tuple[list[ProjectID], list[int]]: + ) -> tuple[list[ProjectID], list[CommitID]]: """ Returns ids and refid of projects that can run If project_uuid is a std-project, then it returns itself diff --git a/services/web/server/src/simcore_service_webserver/director_v2/_handlers.py b/services/web/server/src/simcore_service_webserver/director_v2/_handlers.py index bbd07f5c654..32c7fc8b132 100644 --- a/services/web/server/src/simcore_service_webserver/director_v2/_handlers.py +++ b/services/web/server/src/simcore_service_webserver/director_v2/_handlers.py @@ -24,13 +24,12 @@ from ..db.plugin import get_database_engine from ..login.decorators import login_required from ..models import RequestContext -from ..products import api as products_api +from ..products import products_web from ..security.decorators import permission_required from ..users.exceptions import UserDefaultWalletNotFoundError from ..utils_aiohttp import envelope_json_response -from ..version_control.models import CommitID from ..wallets.errors import WalletNotEnoughCreditsError -from ._abc import get_project_run_policy +from ._abc import CommitID, get_project_run_policy from ._api_utils import get_wallet_info from ._core_computations import ComputationsApi from .exceptions import DirectorServiceError @@ -89,7 +88,7 @@ async def start_computation(request: web.Request) -> web.Response: ) # Get wallet information - product = products_api.get_current_product(request) + product = products_web.get_current_product(request) wallet_info = await get_wallet_info( request.app, product=product, diff --git a/services/web/server/src/simcore_service_webserver/director_v2/api.py 
b/services/web/server/src/simcore_service_webserver/director_v2/director_v2_service.py similarity index 87% rename from services/web/server/src/simcore_service_webserver/director_v2/api.py rename to services/web/server/src/simcore_service_webserver/director_v2/director_v2_service.py index 2ecbb1446fd..4a1a26a8a20 100644 --- a/services/web/server/src/simcore_service_webserver/director_v2/api.py +++ b/services/web/server/src/simcore_service_webserver/director_v2/director_v2_service.py @@ -1,8 +1,3 @@ -""" plugin API - -PLEASE avoid importing from any other module to access this plugin's functionality -""" - from ._abc import ( AbstractProjectRunPolicy, get_project_run_policy, @@ -22,9 +17,9 @@ # director-v2 module internal API __all__: tuple[str, ...] = ( "AbstractProjectRunPolicy", + "DirectorServiceError", "create_or_update_pipeline", "delete_pipeline", - "DirectorServiceError", "get_batch_tasks_outputs", "get_computation_task", "get_project_run_policy", diff --git a/services/web/server/src/simcore_service_webserver/director_v2/settings.py b/services/web/server/src/simcore_service_webserver/director_v2/settings.py index 31fc096a5dd..79429dbd696 100644 --- a/services/web/server/src/simcore_service_webserver/director_v2/settings.py +++ b/services/web/server/src/simcore_service_webserver/director_v2/settings.py @@ -13,7 +13,7 @@ from settings_library.utils_service import DEFAULT_FASTAPI_PORT, MixinServiceSettings from yarl import URL -from .._constants import APP_SETTINGS_KEY +from ..constants import APP_SETTINGS_KEY _MINUTE = 60 _HOUR = 60 * _MINUTE diff --git a/services/web/server/src/simcore_service_webserver/dynamic_scheduler/api.py b/services/web/server/src/simcore_service_webserver/dynamic_scheduler/api.py index 5773052010b..83a90e286c2 100644 --- a/services/web/server/src/simcore_service_webserver/dynamic_scheduler/api.py +++ b/services/web/server/src/simcore_service_webserver/dynamic_scheduler/api.py @@ -17,7 +17,6 @@ NodeGetIdle, NodeGetUnknown, ) -from models_library.basic_types import IDStr from models_library.progress_bar import ProgressReport from models_library.projects import ProjectID from models_library.projects_nodes_io import NodeID @@ -132,7 +131,7 @@ async def stop_dynamic_services_in_project( user_id, project_id, ), - description=IDStr("stopping services"), + description="stopping services", ) ) @@ -147,7 +146,7 @@ async def stop_dynamic_services_in_project( save_state=save_state, ), progress=progress_bar.sub_progress( - 1, description=IDStr(f"{service.node_uuid}") + 1, description=f"{service.node_uuid}" ), ) for service in running_dynamic_services diff --git a/services/web/server/src/simcore_service_webserver/dynamic_scheduler/plugin.py b/services/web/server/src/simcore_service_webserver/dynamic_scheduler/plugin.py index 2dec18abcd8..905026d97be 100644 --- a/services/web/server/src/simcore_service_webserver/dynamic_scheduler/plugin.py +++ b/services/web/server/src/simcore_service_webserver/dynamic_scheduler/plugin.py @@ -6,9 +6,9 @@ from aiohttp import web from servicelib.aiohttp.application_setup import ModuleCategory, app_module_setup -from simcore_service_webserver.rabbitmq import setup_rabbitmq -from .._constants import APP_SETTINGS_KEY +from ..constants import APP_SETTINGS_KEY +from ..rabbitmq import setup_rabbitmq _logger = logging.getLogger(__name__) diff --git a/services/web/server/src/simcore_service_webserver/dynamic_scheduler/settings.py b/services/web/server/src/simcore_service_webserver/dynamic_scheduler/settings.py index 5f33995a89e..b2f1cec26f5 100644 --- 
a/services/web/server/src/simcore_service_webserver/dynamic_scheduler/settings.py +++ b/services/web/server/src/simcore_service_webserver/dynamic_scheduler/settings.py @@ -5,7 +5,7 @@ from settings_library.base import BaseCustomSettings from settings_library.utils_service import MixinServiceSettings -from .._constants import APP_SETTINGS_KEY +from ..constants import APP_SETTINGS_KEY class DynamicSchedulerSettings(BaseCustomSettings, MixinServiceSettings): diff --git a/services/web/server/src/simcore_service_webserver/email/_handlers.py b/services/web/server/src/simcore_service_webserver/email/_handlers.py index 84126852347..6b195dc54e8 100644 --- a/services/web/server/src/simcore_service_webserver/email/_handlers.py +++ b/services/web/server/src/simcore_service_webserver/email/_handlers.py @@ -8,7 +8,8 @@ from .._meta import API_VTAG from ..login.decorators import login_required -from ..products.api import Product, get_current_product, get_product_template_path +from ..products import products_web +from ..products.models import Product from ..security.decorators import permission_required from ..utils import get_traceback_string from ..utils_aiohttp import envelope_json_response @@ -30,7 +31,6 @@ class TestEmail(BaseModel): "change_email_email.jinja2", "new_2fa_code.jinja2", "registration_email.jinja2", - "reset_password_email_failed.jinja2", "reset_password_email.jinja2", "service_submission.jinja2", ] = "registration_email.jinja2" @@ -72,9 +72,9 @@ async def test_email(request: web.Request): body = await parse_request_body_as(TestEmail, request) - product: Product = get_current_product(request) + product: Product = products_web.get_current_product(request) - template_path = await get_product_template_path( + template_path = await products_web.get_product_template_path( request, filename=body.template_name ) diff --git a/services/web/server/src/simcore_service_webserver/email/settings.py b/services/web/server/src/simcore_service_webserver/email/settings.py index 4657998f7c8..bd952059261 100644 --- a/services/web/server/src/simcore_service_webserver/email/settings.py +++ b/services/web/server/src/simcore_service_webserver/email/settings.py @@ -1,7 +1,7 @@ from aiohttp import web from settings_library.email import SMTPSettings -from .._constants import APP_SETTINGS_KEY +from ..constants import APP_SETTINGS_KEY def get_plugin_settings(app: web.Application) -> SMTPSettings: diff --git a/services/web/server/src/simcore_service_webserver/exception_handling/_factory.py b/services/web/server/src/simcore_service_webserver/exception_handling/_factory.py index baae399f76b..29abf093cd0 100644 --- a/services/web/server/src/simcore_service_webserver/exception_handling/_factory.py +++ b/services/web/server/src/simcore_service_webserver/exception_handling/_factory.py @@ -80,7 +80,7 @@ async def _exception_handler( _DefaultDict(getattr(exception, "__dict__", {})) ) - error = ErrorGet.model_construct(message=user_msg) + error = ErrorGet.model_construct(message=user_msg, status=status_code) if is_5xx_server_error(status_code): oec = create_error_code(exception) @@ -90,14 +90,19 @@ async def _exception_handler( error=exception, error_code=oec, error_context={ + # NOTE: context is also used to substitute tokens in the error message + # e.g. 
"support error is {error_code}" "request": request, "request.remote": f"{request.remote}", "request.method": f"{request.method}", "request.path": f"{request.path}", + "error_code": oec, }, ) ) - error = ErrorGet.model_construct(message=user_msg, support_id=oec) + error = ErrorGet.model_construct( + message=user_msg, support_id=oec, status=status_code + ) return create_error_response(error, status_code=status_code) diff --git a/services/web/server/src/simcore_service_webserver/exporter/_formatter/_sds.py b/services/web/server/src/simcore_service_webserver/exporter/_formatter/_sds.py index 62f02f2b1d1..806e33b5df5 100644 --- a/services/web/server/src/simcore_service_webserver/exporter/_formatter/_sds.py +++ b/services/web/server/src/simcore_service_webserver/exporter/_formatter/_sds.py @@ -7,10 +7,10 @@ from aiohttp import web from servicelib.pools import non_blocking_process_pool_executor -from ...catalog.client import get_service +from ...catalog import catalog_service +from ...projects._projects_service import get_project_for_user from ...projects.exceptions import BaseProjectError from ...projects.models import ProjectDict -from ...projects.projects_service import get_project_for_user from ...scicrunch.db import ResearchResourceRepository from ..exceptions import SDSException from .template_json import write_template_json @@ -183,7 +183,7 @@ async def create_sds_directory( service_version = entry["version"] label = entry["label"] - service_data = await get_service( + service_data = await catalog_service.get_service( app=app, user_id=user_id, service_key=service_key, diff --git a/services/web/server/src/simcore_service_webserver/exporter/_handlers.py b/services/web/server/src/simcore_service_webserver/exporter/_handlers.py index d0e0d975f6c..db7466c8e73 100644 --- a/services/web/server/src/simcore_service_webserver/exporter/_handlers.py +++ b/services/web/server/src/simcore_service_webserver/exporter/_handlers.py @@ -12,10 +12,10 @@ from servicelib.redis import with_project_locked from servicelib.request_keys import RQT_USERID_KEY -from .._constants import RQ_PRODUCT_KEY from .._meta import API_VTAG +from ..constants import RQ_PRODUCT_KEY from ..login.decorators import login_required -from ..projects.projects_service import create_user_notification_cb +from ..projects._projects_service import create_user_notification_cb from ..redis import get_redis_lock_manager_client_sdk from ..security.decorators import permission_required from ..users.api import get_user_fullname diff --git a/services/web/server/src/simcore_service_webserver/folders/_common/models.py b/services/web/server/src/simcore_service_webserver/folders/_common/models.py index 551c531d74c..b48588d4d59 100644 --- a/services/web/server/src/simcore_service_webserver/folders/_common/models.py +++ b/services/web/server/src/simcore_service_webserver/folders/_common/models.py @@ -20,7 +20,7 @@ from models_library.workspaces import WorkspaceID from pydantic import BaseModel, BeforeValidator, ConfigDict, Field -from ..._constants import RQ_PRODUCT_KEY, RQT_USERID_KEY +from ...constants import RQ_PRODUCT_KEY, RQT_USERID_KEY _logger = logging.getLogger(__name__) diff --git a/services/web/server/src/simcore_service_webserver/folders/_folders_repository.py b/services/web/server/src/simcore_service_webserver/folders/_folders_repository.py index 2fd6cb85404..f57a1c6df84 100644 --- a/services/web/server/src/simcore_service_webserver/folders/_folders_repository.py +++ 
b/services/web/server/src/simcore_service_webserver/folders/_folders_repository.py @@ -1,16 +1,11 @@ -""" Database API - - - Adds a layer to the postgres API with a focus on the projects comments - -""" - import logging +from collections.abc import Callable from datetime import datetime -from typing import Final, cast +from typing import cast import sqlalchemy as sa from aiohttp import web -from common_library.exclude import UnSet, as_dict_exclude_unset +from common_library.exclude import UnSet, as_dict_exclude_unset, is_set from models_library.folders import ( FolderDB, FolderID, @@ -37,18 +32,16 @@ from simcore_postgres_database.utils_workspaces_sql import ( create_my_workspace_access_rights_subquery, ) -from sqlalchemy import func +from sqlalchemy import sql from sqlalchemy.ext.asyncio import AsyncConnection from sqlalchemy.orm import aliased -from sqlalchemy.sql import ColumnElement, CompoundSelect, Select, asc, desc, select +from sqlalchemy.sql import ColumnElement, CompoundSelect, Select from ..db.plugin import get_asyncpg_engine from .errors import FolderAccessForbiddenError, FolderNotFoundError _logger = logging.getLogger(__name__) -_unset: Final = UnSet() - _FOLDER_DB_MODEL_COLS = get_columns_from_db_model(folders_v2, FolderDB) @@ -78,8 +71,8 @@ async def create( user_id=user_id, workspace_id=workspace_id, created_by_gid=created_by_gid, - created=func.now(), - modified=func.now(), + created=sql.func.now(), + modified=sql.func.now(), ) .returning(*_FOLDER_DB_MODEL_COLS) ) @@ -98,9 +91,9 @@ def _create_private_workspace_query( WorkspaceScope.ALL, ) return ( - select( + sql.select( *_FOLDER_DB_MODEL_COLS, - func.json_build_object( + sql.func.json_build_object( "read", sa.text("true"), "write", @@ -135,7 +128,7 @@ def _create_shared_workspace_query( ) shared_workspace_query = ( - select( + sql.select( *_FOLDER_DB_MODEL_COLS, workspace_access_rights_subquery.c.my_access_rights, ) @@ -163,6 +156,14 @@ def _create_shared_workspace_query( return shared_workspace_query +def _to_expression(order_by: OrderBy): + direction_func: Callable = { + OrderDirection.ASC: sql.asc, + OrderDirection.DESC: sql.desc, + }[order_by.direction] + return direction_func(folders_v2.columns[order_by.field]) + + async def list_( # pylint: disable=too-many-arguments,too-many-branches app: web.Application, connection: AsyncConnection | None = None, @@ -240,16 +241,12 @@ async def list_( # pylint: disable=too-many-arguments,too-many-branches raise ValueError(msg) # Select total count from base_query - count_query = select(func.count()).select_from(combined_query.subquery()) + count_query = sql.select(sql.func.count()).select_from(combined_query.subquery()) # Ordering and pagination - if order_by.direction == OrderDirection.ASC: - list_query = combined_query.order_by(asc(getattr(folders_v2.c, order_by.field))) - else: - list_query = combined_query.order_by( - desc(getattr(folders_v2.c, order_by.field)) - ) - list_query = list_query.offset(offset).limit(limit) + list_query = ( + combined_query.order_by(_to_expression(order_by)).offset(offset).limit(limit) + ) async with pass_or_acquire_connection(get_asyncpg_engine(app), connection) as conn: total_count = await conn.scalar(count_query) @@ -261,8 +258,57 @@ async def list_( # pylint: disable=too-many-arguments,too-many-branches return cast(int, total_count), folders +async def list_trashed_folders( + app: web.Application, + connection: AsyncConnection | None = None, + *, + # filter + trashed_explicitly: bool | UnSet = UnSet.VALUE, + trashed_before: datetime | UnSet = 
UnSet.VALUE, + # pagination + offset: NonNegativeInt, + limit: int, + # order + order_by: OrderBy, +) -> tuple[int, list[FolderDB]]: + """ + NOTE: this is app-wide i.e. no product, user or workspace filtered + TODO: check with MD about workspaces + """ + base_query = sql.select(*_FOLDER_DB_MODEL_COLS).where( + folders_v2.c.trashed.is_not(None) + ) + + if is_set(trashed_explicitly): + assert isinstance(trashed_explicitly, bool) # nosec + base_query = base_query.where( + folders_v2.c.trashed_explicitly.is_(trashed_explicitly) + ) + + if is_set(trashed_before): + assert isinstance(trashed_before, datetime) # nosec + base_query = base_query.where(folders_v2.c.trashed < trashed_before) + + # Select total count from base_query + count_query = sql.select(sql.func.count()).select_from(base_query.subquery()) + + # Ordering and pagination + list_query = ( + base_query.order_by(_to_expression(order_by)).offset(offset).limit(limit) + ) + + async with pass_or_acquire_connection(get_asyncpg_engine(app), connection) as conn: + total_count = await conn.scalar(count_query) + + result = await conn.stream(list_query) + folders: list[FolderDB] = [FolderDB.model_validate(row) async for row in result] + return cast(int, total_count), folders + + def _create_base_select_query(folder_id: FolderID, product_name: ProductName) -> Select: - return select(*_FOLDER_DB_MODEL_COLS,).where( + return sql.select( + *_FOLDER_DB_MODEL_COLS, + ).where( (folders_v2.c.product_name == product_name) & (folders_v2.c.folder_id == folder_id) ) @@ -349,7 +395,7 @@ async def update( ) query = ( - (folders_v2.update().values(modified=func.now(), **updated)) + (folders_v2.update().values(modified=sql.func.now(), **updated)) .where(folders_v2.c.product_name == product_name) .returning(*_FOLDER_DB_MODEL_COLS) ) @@ -378,7 +424,7 @@ async def delete_recursively( ) -> None: async with transaction_context(get_asyncpg_engine(app), connection) as conn: # Step 1: Define the base case for the recursive CTE - base_query = select( + base_query = sql.select( folders_v2.c.folder_id, folders_v2.c.parent_folder_id ).where( (folders_v2.c.folder_id == folder_id) # <-- specified folder id @@ -388,7 +434,7 @@ async def delete_recursively( # Step 2: Define the recursive case folder_alias = aliased(folders_v2) - recursive_query = select( + recursive_query = sql.select( folder_alias.c.folder_id, folder_alias.c.parent_folder_id ).select_from( folder_alias.join( @@ -401,7 +447,7 @@ async def delete_recursively( folder_hierarchy_cte = folder_hierarchy_cte.union_all(recursive_query) # Step 4: Execute the query to get all descendants - final_query = select(folder_hierarchy_cte) + final_query = sql.select(folder_hierarchy_cte) result = await conn.stream(final_query) # list of tuples [(folder_id, parent_folder_id), ...] ex. 
[(1, None), (2, 1)] rows = [row async for row in result] @@ -436,7 +482,7 @@ async def get_projects_recursively_only_if_user_is_owner( async with pass_or_acquire_connection(get_asyncpg_engine(app), connection) as conn: # Step 1: Define the base case for the recursive CTE - base_query = select( + base_query = sql.select( folders_v2.c.folder_id, folders_v2.c.parent_folder_id ).where( (folders_v2.c.folder_id == folder_id) # <-- specified folder id @@ -446,7 +492,7 @@ async def get_projects_recursively_only_if_user_is_owner( # Step 2: Define the recursive case folder_alias = aliased(folders_v2) - recursive_query = select( + recursive_query = sql.select( folder_alias.c.folder_id, folder_alias.c.parent_folder_id ).select_from( folder_alias.join( @@ -459,13 +505,13 @@ async def get_projects_recursively_only_if_user_is_owner( folder_hierarchy_cte = folder_hierarchy_cte.union_all(recursive_query) # Step 4: Execute the query to get all descendants - final_query = select(folder_hierarchy_cte) + final_query = sql.select(folder_hierarchy_cte) result = await conn.stream(final_query) # list of tuples [(folder_id, parent_folder_id), ...] ex. [(1, None), (2, 1)] folder_ids = [item[0] async for item in result] query = ( - select(projects_to_folders.c.project_uuid) + sql.select(projects_to_folders.c.project_uuid) .join(projects) .where( (projects_to_folders.c.folder_id.in_(folder_ids)) @@ -494,7 +540,7 @@ async def get_all_folders_and_projects_ids_recursively( async with pass_or_acquire_connection(get_asyncpg_engine(app), connection) as conn: # Step 1: Define the base case for the recursive CTE - base_query = select( + base_query = sql.select( folders_v2.c.folder_id, folders_v2.c.parent_folder_id ).where( (folders_v2.c.folder_id == folder_id) # <-- specified folder id @@ -504,7 +550,7 @@ async def get_all_folders_and_projects_ids_recursively( # Step 2: Define the recursive case folder_alias = aliased(folders_v2) - recursive_query = select( + recursive_query = sql.select( folder_alias.c.folder_id, folder_alias.c.parent_folder_id ).select_from( folder_alias.join( @@ -517,12 +563,12 @@ async def get_all_folders_and_projects_ids_recursively( folder_hierarchy_cte = folder_hierarchy_cte.union_all(recursive_query) # Step 4: Execute the query to get all descendants - final_query = select(folder_hierarchy_cte) + final_query = sql.select(folder_hierarchy_cte) result = await conn.stream(final_query) # list of tuples [(folder_id, parent_folder_id), ...] ex. 
[(1, None), (2, 1)] folder_ids = [item.folder_id async for item in result] - query = select(projects_to_folders.c.project_uuid).where( + query = sql.select(projects_to_folders.c.project_uuid).where( (projects_to_folders.c.folder_id.in_(folder_ids)) & (projects_to_folders.c.user_id == private_workspace_user_id_or_none) ) @@ -543,7 +589,7 @@ async def get_folders_recursively( async with pass_or_acquire_connection(get_asyncpg_engine(app), connection) as conn: # Step 1: Define the base case for the recursive CTE - base_query = select( + base_query = sql.select( folders_v2.c.folder_id, folders_v2.c.parent_folder_id ).where( (folders_v2.c.folder_id == folder_id) # <-- specified folder id @@ -553,7 +599,7 @@ async def get_folders_recursively( # Step 2: Define the recursive case folder_alias = aliased(folders_v2) - recursive_query = select( + recursive_query = sql.select( folder_alias.c.folder_id, folder_alias.c.parent_folder_id ).select_from( folder_alias.join( @@ -566,13 +612,16 @@ async def get_folders_recursively( folder_hierarchy_cte = folder_hierarchy_cte.union_all(recursive_query) # Step 4: Execute the query to get all descendants - final_query = select(folder_hierarchy_cte) + final_query = sql.select(folder_hierarchy_cte) result = await conn.stream(final_query) return cast(list[FolderID], [row.folder_id async for row in result]) def _select_trashed_by_primary_gid_query(): - return sa.select(users.c.primary_gid.label("trashed_by_primary_gid")).select_from( + return sa.sql.select( + folders_v2.c.folder_id, + users.c.primary_gid.label("trashed_by_primary_gid"), + ).select_from( folders_v2.outerjoin(users, folders_v2.c.trashed_by == users.c.id), ) @@ -589,7 +638,7 @@ async def get_trashed_by_primary_gid( async with pass_or_acquire_connection(get_asyncpg_engine(app), connection) as conn: result = await conn.execute(query) - row = result.first() + row = result.one_or_none() return row.trashed_by_primary_gid if row else None @@ -617,4 +666,6 @@ async def batch_get_trashed_by_primary_gid( async with pass_or_acquire_connection(get_asyncpg_engine(app), connection) as conn: result = await conn.stream(query) - return [row.trashed_by_primary_gid async for row in result] + rows = {row.folder_id: row.trashed_by_primary_gid async for row in result} + + return [rows.get(folder_id) for folder_id in folders_ids] diff --git a/services/web/server/src/simcore_service_webserver/folders/_folders_service.py b/services/web/server/src/simcore_service_webserver/folders/_folders_service.py index 26e50075959..ffe542dccf7 100644 --- a/services/web/server/src/simcore_service_webserver/folders/_folders_service.py +++ b/services/web/server/src/simcore_service_webserver/folders/_folders_service.py @@ -15,7 +15,7 @@ from servicelib.common_headers import UNDEFINED_DEFAULT_SIMCORE_USER_AGENT_VALUE from servicelib.utils import fire_and_forget_task -from ..projects.projects_service import submit_delete_project_task +from ..projects._projects_service import submit_delete_project_task from ..users.api import get_user from ..workspaces.api import check_user_workspace_access from ..workspaces.errors import ( @@ -352,14 +352,14 @@ async def delete_folder( # 1. 
Delete folder content # 1.1 Delete all child projects that I am an owner - project_id_list: list[ - ProjectID - ] = await _folders_repository.get_projects_recursively_only_if_user_is_owner( - app, - folder_id=folder_id, - private_workspace_user_id_or_none=user_id if workspace_is_private else None, - user_id=user_id, - product_name=product_name, + project_id_list: list[ProjectID] = ( + await _folders_repository.get_projects_recursively_only_if_user_is_owner( + app, + folder_id=folder_id, + private_workspace_user_id_or_none=user_id if workspace_is_private else None, + user_id=user_id, + product_name=product_name, + ) ) # fire and forget task for project deletion diff --git a/services/web/server/src/simcore_service_webserver/folders/_trash_rest.py b/services/web/server/src/simcore_service_webserver/folders/_trash_rest.py index 0e035012adb..6540a43eef9 100644 --- a/services/web/server/src/simcore_service_webserver/folders/_trash_rest.py +++ b/services/web/server/src/simcore_service_webserver/folders/_trash_rest.py @@ -9,7 +9,7 @@ from .._meta import API_VTAG as VTAG from ..login.decorators import get_user_id, login_required -from ..products.api import get_product_name +from ..products import products_web from ..security.decorators import permission_required from . import _trash_service from ._common.exceptions_handlers import handle_plugin_requests_exceptions @@ -27,7 +27,7 @@ @handle_plugin_requests_exceptions async def trash_folder(request: web.Request): user_id = get_user_id(request) - product_name = get_product_name(request) + product_name = products_web.get_product_name(request) path_params = parse_request_path_parameters_as(FoldersPathParams, request) query_params: FolderTrashQueryParams = parse_request_query_parameters_as( FolderTrashQueryParams, request @@ -50,7 +50,7 @@ async def trash_folder(request: web.Request): @handle_plugin_requests_exceptions async def untrash_folder(request: web.Request): user_id = get_user_id(request) - product_name = get_product_name(request) + product_name = products_web.get_product_name(request) path_params = parse_request_path_parameters_as(FoldersPathParams, request) await _trash_service.untrash_folder( diff --git a/services/web/server/src/simcore_service_webserver/folders/_trash_service.py b/services/web/server/src/simcore_service_webserver/folders/_trash_service.py index ba1c9f74920..f5003ee3dba 100644 --- a/services/web/server/src/simcore_service_webserver/folders/_trash_service.py +++ b/services/web/server/src/simcore_service_webserver/folders/_trash_service.py @@ -3,9 +3,14 @@ import arrow from aiohttp import web -from models_library.folders import FolderID +from common_library.pagination_tools import iter_pagination_params +from models_library.access_rights import AccessRights +from models_library.basic_types import IDStr +from models_library.folders import FolderDB, FolderID from models_library.products import ProductName from models_library.projects import ProjectID +from models_library.rest_ordering import OrderBy, OrderDirection +from models_library.rest_pagination import MAXIMUM_NUMBER_OF_ITEMS_PER_PAGE from models_library.users import UserID from simcore_postgres_database.utils_repos import transaction_context from sqlalchemy.ext.asyncio import AsyncConnection @@ -13,7 +18,8 @@ from ..db.plugin import get_asyncpg_engine from ..projects._trash_service import trash_project, untrash_project from ..workspaces.api import check_user_workspace_access -from . import _folders_repository +from . 
import _folders_repository, _folders_service +from .errors import FolderBatchDeleteError, FolderNotTrashedError _logger = logging.getLogger(__name__) @@ -186,3 +192,135 @@ async def untrash_folder( await untrash_project( app, product_name=product_name, user_id=user_id, project_id=project_id ) + + +def _can_delete( + folder_db: FolderDB, + my_access_rights: AccessRights, + user_id: UserID, + until_equal_datetime: datetime | None, +) -> bool: + return bool( + folder_db.trashed + and (until_equal_datetime is None or folder_db.trashed < until_equal_datetime) + and my_access_rights.delete + and folder_db.trashed_by == user_id + and folder_db.trashed_explicitly + ) + + +async def list_explicitly_trashed_folders( + app: web.Application, + *, + product_name: ProductName, + user_id: UserID, + until_equal_datetime: datetime | None = None, +) -> list[FolderID]: + trashed_folder_ids: list[FolderID] = [] + + for page_params in iter_pagination_params(limit=MAXIMUM_NUMBER_OF_ITEMS_PER_PAGE): + ( + folders, + page_params.total_number_of_items, + ) = await _folders_service.list_folders_full_depth( + app, + user_id=user_id, + product_name=product_name, + text=None, + trashed=True, # NOTE: lists only explicitly trashed! + offset=page_params.offset, + limit=page_params.limit, + order_by=OrderBy(field=IDStr("trashed"), direction=OrderDirection.ASC), + ) + + # NOTE: Applying POST-FILTERING + trashed_folder_ids.extend( + [ + f.folder_db.folder_id + for f in folders + if _can_delete( + f.folder_db, + my_access_rights=f.my_access_rights, + user_id=user_id, + until_equal_datetime=until_equal_datetime, + ) + ] + ) + return trashed_folder_ids + + +async def delete_trashed_folder( + app: web.Application, + *, + product_name: ProductName, + user_id: UserID, + folder_id: FolderID, + until_equal_datetime: datetime | None = None, +) -> None: + + folder = await _folders_service.get_folder( + app, user_id=user_id, folder_id=folder_id, product_name=product_name + ) + + if not _can_delete( + folder.folder_db, + folder.my_access_rights, + user_id=user_id, + until_equal_datetime=until_equal_datetime, + ): + raise FolderNotTrashedError( + folder_id=folder_id, + user_id=user_id, + reason="Cannot delete trashed folder since it does not meet the current criteria", + ) + + # NOTE: this function deletes folder AND its content recursively!
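Both loops in this module lean on `iter_pagination_params` from `common_library.pagination_tools`: the helper yields page parameters, and the caller must assign `page_params.total_number_of_items` on every iteration so the helper knows when to stop. A minimal sketch of that contract, with a stubbed fetch function, is shown below; it is a simplified stand-in, not the actual common_library implementation:

```python
import asyncio
from dataclasses import dataclass


@dataclass
class _PageParams:
    # the three fields the service loops rely on
    limit: int
    offset: int = 0
    total_number_of_items: int | None = None


def iter_pagination_params(limit: int):
    """Yields page params until the caller-reported total is exhausted."""
    page = _PageParams(limit=limit)
    while True:
        yield page
        assert page.total_number_of_items is not None  # caller must set it
        page.offset += limit
        if page.offset >= page.total_number_of_items:
            break


async def _fetch_page(offset: int, limit: int) -> tuple[list[int], int]:
    # hypothetical stand-in for e.g. list_folders_full_depth
    data = list(range(25))
    return data[offset : offset + limit], len(data)


async def main() -> None:
    collected: list[int] = []
    for page_params in iter_pagination_params(limit=10):
        items, page_params.total_number_of_items = await _fetch_page(
            page_params.offset, page_params.limit
        )
        # post-filtering (e.g. _can_delete) would be applied to `items` here
        collected.extend(items)
    assert collected == list(range(25))


asyncio.run(main())
```

The point of the sketch is the stop condition: if the caller never reports the total, the loop cannot terminate, which is why the service code assigns it directly from each repository call.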
+ await _folders_service.delete_folder( + app, user_id=user_id, folder_id=folder_id, product_name=product_name + ) + + +async def batch_delete_trashed_folders_as_admin( + app: web.Application, + trashed_before: datetime, + *, + product_name: ProductName, + fail_fast: bool, +) -> None: + """ + Raises: + FolderBatchDeleteError: if one or more deletions fail and fail_fast=False + Exception: any other exception during delete_recursively + """ + errors: list[tuple[FolderID, Exception]] = [] + + for page_params in iter_pagination_params(limit=MAXIMUM_NUMBER_OF_ITEMS_PER_PAGE): + ( + page_params.total_number_of_items, + expired_trashed_folders, + ) = await _folders_repository.list_trashed_folders( + app, + trashed_explicitly=True, + trashed_before=trashed_before, + offset=page_params.offset, + limit=page_params.limit, + order_by=OrderBy(field=IDStr("trashed"), direction=OrderDirection.ASC), + ) + + # BATCH delete + for folder in expired_trashed_folders: + try: + await _folders_repository.delete_recursively( + app, folder_id=folder.folder_id, product_name=product_name + ) + # NOTE: projects in folders are NOT deleted + + except Exception as err: # pylint: disable=broad-exception-caught + if fail_fast: + raise + errors.append((folder.folder_id, err)) + + if errors: + raise FolderBatchDeleteError( + errors=errors, trashed_before=trashed_before, product_name=product_name + ) diff --git a/services/web/server/src/simcore_service_webserver/folders/_workspaces_repository.py b/services/web/server/src/simcore_service_webserver/folders/_workspaces_repository.py index bac38edb7ca..9535ec3fd7c 100644 --- a/services/web/server/src/simcore_service_webserver/folders/_workspaces_repository.py +++ b/services/web/server/src/simcore_service_webserver/folders/_workspaces_repository.py @@ -8,10 +8,10 @@ from simcore_postgres_database.utils_repos import transaction_context from ..db.plugin import get_asyncpg_engine -from ..projects import _folders_db as project_to_folders_db -from ..projects import _groups_db as project_groups_db -from ..projects import _projects_db as projects_db -from ..projects._access_rights_api import check_user_project_permission +from ..projects import _folders_repository as projects_folders_repository +from ..projects import _groups_repository as projects_groups_repository +from ..projects import _projects_repository as _projects_repository +from ..projects._access_rights_service import check_user_project_permission from ..users.api import get_user from ..workspaces.api import check_user_workspace_access from . import _folders_repository @@ -78,7 +78,7 @@ async def move_folder_into_workspace( async with transaction_context(get_asyncpg_engine(app)) as conn: # 4. Update workspace ID on the project resource for project_id in project_ids: - await projects_db.patch_project( + await _projects_repository.patch_project( app=app, connection=conn, project_uuid=project_id, @@ -106,7 +106,7 @@ # 7. Remove all records of project to folders that are not in the folders that we are moving # (ex. If we are moving from private workspace, the same project can be in different folders for different users) - await project_to_folders_db.delete_all_project_to_folder_by_project_ids_not_in_folder_ids( + await projects_folders_repository.delete_all_project_to_folder_by_project_ids_not_in_folder_ids( app, connection=conn, project_id_or_ids=set(project_ids), ) # 8.
Update the user id field for the remaining folders - await project_to_folders_db.update_project_to_folder( + await projects_folders_repository.update_project_to_folder( app, connection=conn, folders_id_or_ids=set(folder_ids), @@ -124,10 +124,10 @@ async def move_folder_into_workspace( # 9. Remove all project permissions, leave only the user who moved the project user = await get_user(app, user_id=user_id) for project_id in project_ids: - await project_groups_db.delete_all_project_groups( + await projects_groups_repository.delete_all_project_groups( app, connection=conn, project_id=project_id ) - await project_groups_db.update_or_insert_project_group( + await projects_groups_repository.update_or_insert_project_group( app, connection=conn, project_id=project_id, diff --git a/services/web/server/src/simcore_service_webserver/folders/errors.py b/services/web/server/src/simcore_service_webserver/folders/errors.py index 70f788e4c89..e8f2e346868 100644 --- a/services/web/server/src/simcore_service_webserver/folders/errors.py +++ b/services/web/server/src/simcore_service_webserver/folders/errors.py @@ -19,3 +19,17 @@ class FolderAccessForbiddenError(FoldersValueError): class FolderGroupNotFoundError(FoldersValueError): msg_template = "Folder group not found. {reason}" + + +class FoldersRuntimeError(WebServerBaseError, RuntimeError): + ... + + +class FolderNotTrashedError(FoldersRuntimeError): + msg_template = ( + "Cannot delete folder {folder_id} since it was not trashed first: {reason}" + ) + + +class FolderBatchDeleteError(FoldersRuntimeError): + msg_template = "One or more folders could not be deleted: {errors}" diff --git a/services/web/server/src/simcore_service_webserver/folders/folders_trash_service.py b/services/web/server/src/simcore_service_webserver/folders/folders_trash_service.py new file mode 100644 index 00000000000..505f72f257f --- /dev/null +++ b/services/web/server/src/simcore_service_webserver/folders/folders_trash_service.py @@ -0,0 +1,13 @@ +from ._trash_service import ( + batch_delete_trashed_folders_as_admin, + delete_trashed_folder, + list_explicitly_trashed_folders, +) + +__all__: tuple[str, ...] 
= ( + "batch_delete_trashed_folders_as_admin", + "delete_trashed_folder", + "list_explicitly_trashed_folders", +) + +# nopycln: file diff --git a/services/web/server/src/simcore_service_webserver/garbage_collector/_core_disconnected.py b/services/web/server/src/simcore_service_webserver/garbage_collector/_core_disconnected.py index 68a7c6b55bf..fb5b149cb16 100644 --- a/services/web/server/src/simcore_service_webserver/garbage_collector/_core_disconnected.py +++ b/services/web/server/src/simcore_service_webserver/garbage_collector/_core_disconnected.py @@ -5,8 +5,8 @@ from servicelib.common_headers import UNDEFINED_DEFAULT_SIMCORE_USER_AGENT_VALUE from servicelib.utils import logged_gather +from ..projects._projects_service import remove_project_dynamic_services from ..projects.exceptions import ProjectLockError, ProjectNotFoundError -from ..projects.projects_service import remove_project_dynamic_services from ..redis import get_redis_lock_manager_client from ..resource_manager.registry import ( RedisResourceRegistry, diff --git a/services/web/server/src/simcore_service_webserver/garbage_collector/_core_guests.py b/services/web/server/src/simcore_service_webserver/garbage_collector/_core_guests.py index f89278ead78..20f3bb45e8b 100644 --- a/services/web/server/src/simcore_service_webserver/garbage_collector/_core_guests.py +++ b/services/web/server/src/simcore_service_webserver/garbage_collector/_core_guests.py @@ -8,12 +8,15 @@ from models_library.users import UserID, UserNameID from redis.asyncio import Redis from servicelib.common_headers import UNDEFINED_DEFAULT_SIMCORE_USER_AGENT_VALUE -from simcore_postgres_database.errors import DatabaseError +from simcore_postgres_database.aiopg_errors import DatabaseError from simcore_postgres_database.models.users import UserRole -from ..projects.db import ProjectDBAPI +from ..projects._projects_repository_legacy import ProjectDBAPI +from ..projects._projects_service import ( + get_project_for_user, + submit_delete_project_task, +) from ..projects.exceptions import ProjectDeleteError, ProjectNotFoundError -from ..projects.projects_service import get_project_for_user, submit_delete_project_task from ..redis import get_redis_lock_manager_client from ..resource_manager.registry import RedisResourceRegistry from ..users import exceptions diff --git a/services/web/server/src/simcore_service_webserver/garbage_collector/_core_orphans.py b/services/web/server/src/simcore_service_webserver/garbage_collector/_core_orphans.py index bcb8af72dfa..2bdc3552e40 100644 --- a/services/web/server/src/simcore_service_webserver/garbage_collector/_core_orphans.py +++ b/services/web/server/src/simcore_service_webserver/garbage_collector/_core_orphans.py @@ -2,6 +2,7 @@ from typing import Final from aiohttp import web +from common_library.users_enums import UserRole from models_library.api_schemas_directorv2.dynamic_services import DynamicServiceGet from models_library.api_schemas_dynamic_scheduler.dynamic_services import ( DynamicServiceStop, @@ -9,16 +10,16 @@ from models_library.projects import ProjectID from models_library.projects_nodes_io import NodeID from servicelib.common_headers import UNDEFINED_DEFAULT_SIMCORE_USER_AGENT_VALUE +from servicelib.logging_errors import create_troubleshotting_log_kwargs from servicelib.logging_utils import log_catch, log_context -from servicelib.utils import logged_gather -from simcore_postgres_database.models.users import UserRole +from servicelib.utils import limited_as_completed, logged_gather -from ..dynamic_scheduler import 
api as dynamic_scheduler_api -from ..projects.api import has_user_project_access_rights -from ..projects.projects_service import ( +from ..dynamic_scheduler import api as dynamic_scheduler_service +from ..projects._projects_service import ( is_node_id_present_in_any_project_workbench, list_node_ids_in_project, ) +from ..projects.api import has_user_project_access_rights from ..resource_manager.registry import RedisResourceRegistry from ..users.api import get_user_role from ..users.exceptions import UserNotFoundError @@ -54,7 +55,7 @@ async def _remove_service( logging.INFO, msg=f"removing {(service.node_uuid, service.host)} with {save_service_state=}", ): - await dynamic_scheduler_api.stop_dynamic_service( + await dynamic_scheduler_service.stop_dynamic_service( app, dynamic_service_stop=DynamicServiceStop( user_id=service.user_id, @@ -89,7 +90,7 @@ async def remove_orphaned_services( # in between and the GC would remove services that actually should be running. with log_catch(_logger, reraise=False): - running_services = await dynamic_scheduler_api.list_dynamic_services(app) + running_services = await dynamic_scheduler_service.list_dynamic_services(app) if not running_services: # nothing to do return @@ -102,16 +103,41 @@ async def remove_orphaned_services( } known_opened_project_ids = await _list_opened_project_ids(registry) - potentially_running_service_ids: list[ - set[NodeID] | BaseException - ] = await logged_gather( - *(list_node_ids_in_project(app, _) for _ in known_opened_project_ids), - log=_logger, - max_concurrency=_MAX_CONCURRENT_CALLS, - reraise=False, - ) + + # NOTE: Always skip orphan removal when `list_node_ids_in_project` raises an error. + # Why? If a service is running but the nodes from the corresponding project cannot be listed, + # the service would be considered orphaned and closed.
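The NOTE above is the crux of this change: with the previous `logged_gather(..., reraise=False)`, a failed `list_node_ids_in_project` call was silently dropped from the allowed set, so running services could be misclassified as orphans and stopped. Below is a self-contained sketch of that fail-safe policy, using plain `asyncio.gather` plus a semaphore instead of servicelib's `limited_as_completed`; all names are hypothetical stand-ins:

```python
import asyncio

_MAX_CONCURRENT_CALLS = 2


async def _list_node_ids(project_id: int) -> set[int]:
    # hypothetical stand-in for list_node_ids_in_project
    if project_id == 2:
        msg = f"cannot list nodes of project {project_id}"
        raise RuntimeError(msg)
    return {project_id * 10, project_id * 10 + 1}


async def collect_known_node_ids(project_ids: list[int]) -> set[int] | None:
    """Union of node ids of all opened projects, or None if any listing fails."""
    semaphore = asyncio.Semaphore(_MAX_CONCURRENT_CALLS)  # bounded concurrency

    async def _bounded(pid: int) -> set[int]:
        async with semaphore:
            return await _list_node_ids(pid)

    results = await asyncio.gather(
        *(_bounded(pid) for pid in project_ids), return_exceptions=True
    )
    known: set[int] = set()
    for result in results:
        if isinstance(result, BaseException):
            # fail safe: a failed listing must NOT shrink the allowed set,
            # otherwise running services would be stopped as "orphans"
            return None  # caller skips orphan removal for this cycle
        known |= result
    return known


print(asyncio.run(collect_known_node_ids([1, 3])))  # e.g. {10, 11, 30, 31}
print(asyncio.run(collect_known_node_ids([1, 2])))  # None -> skip removal
```

Returning `None` instead of an empty set keeps "no information" distinguishable from "no nodes", which is exactly why the new code returns early instead of collecting partial results.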
+ potentially_running_service_ids: list[set[NodeID]] = [] + async for project_nodes_future in limited_as_completed( + ( + list_node_ids_in_project(app, project_id) + for project_id in known_opened_project_ids + ), + limit=_MAX_CONCURRENT_CALLS, + ): + try: + project_nodes = await project_nodes_future + potentially_running_service_ids.append(project_nodes) + except BaseException as e: # pylint:disable=broad-exception-caught + _logger.warning( + create_troubleshotting_log_kwargs( + ( + "Skipping orphan services removal, call to " + "`list_node_ids_in_project` raised" + ), + error=e, + error_context={ + "running_services": running_services, + "running_services_by_id": running_services_by_id, + "known_opened_project_ids": known_opened_project_ids, + }, + ), + exc_info=True, + ) + return + potentially_running_service_ids_set: set[NodeID] = set().union( - *(_ for _ in potentially_running_service_ids if isinstance(_, set)) + *(node_ids for node_ids in potentially_running_service_ids) ) _logger.debug( "Allowed service UUIDs from known opened projects: %s", diff --git a/services/web/server/src/simcore_service_webserver/garbage_collector/_core_utils.py b/services/web/server/src/simcore_service_webserver/garbage_collector/_core_utils.py index 6a85dc83539..67106abddcc 100644 --- a/services/web/server/src/simcore_service_webserver/garbage_collector/_core_utils.py +++ b/services/web/server/src/simcore_service_webserver/garbage_collector/_core_utils.py @@ -5,14 +5,17 @@ from models_library.groups import Group, GroupID, GroupType from models_library.projects import ProjectID from models_library.users import UserID -from simcore_postgres_database.errors import DatabaseError +from simcore_postgres_database.aiopg_errors import DatabaseError from ..groups.api import get_group_from_gid +from ..projects._projects_repository_legacy import ( + APP_PROJECT_DBAPI, + ProjectAccessRights, +) from ..projects.api import ( create_project_group_without_checking_permissions, delete_project_group_without_checking_permissions, ) -from ..projects.db import APP_PROJECT_DBAPI, ProjectAccessRights from ..projects.exceptions import ProjectNotFoundError from ..users.api import get_user, get_user_id_from_gid, get_users_in_group from ..users.exceptions import UserNotFoundError diff --git a/services/web/server/src/simcore_service_webserver/garbage_collector/_tasks_api_keys.py b/services/web/server/src/simcore_service_webserver/garbage_collector/_tasks_api_keys.py index b58974ffd96..b992d25b387 100644 --- a/services/web/server/src/simcore_service_webserver/garbage_collector/_tasks_api_keys.py +++ b/services/web/server/src/simcore_service_webserver/garbage_collector/_tasks_api_keys.py @@ -1,7 +1,8 @@ """ - Scheduled task that periodically runs prune in the garbage collector service +Scheduled task that periodically runs prune in the garbage collector service """ + import asyncio import logging from collections.abc import AsyncIterator, Callable @@ -11,7 +12,7 @@ from tenacity.before_sleep import before_sleep_log from tenacity.wait import wait_exponential -from ..api_keys.api import prune_expired_api_keys +from ..api_keys import api_keys_service logger = logging.getLogger(__name__) @@ -31,7 +32,7 @@ async def _run_task(app: web.Application): It is resilient, i.e.
if update goes wrong, it waits a bit and retries """ - if deleted := await prune_expired_api_keys(app): + if deleted := await api_keys_service.prune_expired_api_keys(app): # broadcast force logout of user_id for api_key in deleted: logger.info("API-key %s expired and was removed", f"{api_key=}") diff --git a/services/web/server/src/simcore_service_webserver/garbage_collector/_tasks_trash.py b/services/web/server/src/simcore_service_webserver/garbage_collector/_tasks_trash.py index 47d5e7212f2..46df72c0a70 100644 --- a/services/web/server/src/simcore_service_webserver/garbage_collector/_tasks_trash.py +++ b/services/web/server/src/simcore_service_webserver/garbage_collector/_tasks_trash.py @@ -8,11 +8,12 @@ from collections.abc import AsyncIterator, Callable from aiohttp import web +from servicelib.logging_utils import log_context from tenacity import retry from tenacity.before_sleep import before_sleep_log from tenacity.wait import wait_exponential -from ..trash._service import prune_trash +from ..trash import trash_service _logger = logging.getLogger(__name__) @@ -28,11 +29,8 @@ before_sleep=before_sleep_log(_logger, logging.WARNING), ) async def _run_task(app: web.Application): - if deleted := await prune_trash(app): - for name in deleted: - _logger.info("Trash item %s expired and was deleted", f"{name}") - else: - _logger.info("No trash items expired") + with log_context(_logger, logging.INFO, "Deleting expired trashed items"): + await trash_service.safe_delete_expired_trash_as_admin(app) async def _run_periodically(app: web.Application, wait_interval_s: float): diff --git a/services/web/server/src/simcore_service_webserver/garbage_collector/plugin.py b/services/web/server/src/simcore_service_webserver/garbage_collector/plugin.py index 3e76c6c947c..3c42457ece5 100644 --- a/services/web/server/src/simcore_service_webserver/garbage_collector/plugin.py +++ b/services/web/server/src/simcore_service_webserver/garbage_collector/plugin.py @@ -3,17 +3,13 @@ from aiohttp import web from servicelib.aiohttp.application_setup import ModuleCategory, app_module_setup from servicelib.logging_utils import set_parent_module_log_level -from simcore_service_webserver.garbage_collector._tasks_trash import ( - create_background_task_to_prune_trash, -) from ..application_settings import get_application_settings from ..login.plugin import setup_login_storage -from ..projects.db import setup_projects_db +from ..products.plugin import setup_products +from ..projects._projects_repository_legacy import setup_projects_db from ..socketio.plugin import setup_socketio -from ._tasks_api_keys import create_background_task_to_prune_api_keys -from ._tasks_core import run_background_task -from ._tasks_users import create_background_task_for_trial_accounts +from . 
import _tasks_api_keys, _tasks_core, _tasks_trash, _tasks_users from .settings import get_plugin_settings _logger = logging.getLogger(__name__) @@ -26,6 +22,9 @@ logger=_logger, ) def setup_garbage_collector(app: web.Application) -> None: + # for trashing + setup_products(app) + # - project-api needs access to db setup_projects_db(app) # - project needs access to socketio via notify_project_state_update @@ -35,7 +34,7 @@ def setup_garbage_collector(app: web.Application) -> None: settings = get_plugin_settings(app) - app.cleanup_ctx.append(run_background_task) + app.cleanup_ctx.append(_tasks_core.run_background_task) set_parent_module_log_level( _logger.name, min(logging.INFO, get_application_settings(app).log_level) @@ -48,10 +47,17 @@ def setup_garbage_collector(app: web.Application) -> None: # If more tasks of this nature are needed, we should setup some sort of registration mechanism # with a interface such that plugins can pass tasks to the GC plugin to handle them interval_s = settings.GARBAGE_COLLECTOR_EXPIRED_USERS_CHECK_INTERVAL_S - app.cleanup_ctx.append(create_background_task_for_trial_accounts(interval_s)) + app.cleanup_ctx.append( + _tasks_users.create_background_task_for_trial_accounts(interval_s) + ) # SEE https://github.com/ITISFoundation/osparc-issues/issues/705 wait_period_s = settings.GARBAGE_COLLECTOR_PRUNE_APIKEYS_INTERVAL_S - app.cleanup_ctx.append(create_background_task_to_prune_api_keys(wait_period_s)) + app.cleanup_ctx.append( + _tasks_api_keys.create_background_task_to_prune_api_keys(wait_period_s) + ) - app.cleanup_ctx.append(create_background_task_to_prune_trash(wait_period_s)) + # SEE https://github.com/ITISFoundation/osparc-issues#468 + app.cleanup_ctx.append( + _tasks_trash.create_background_task_to_prune_trash(wait_period_s) + ) diff --git a/services/web/server/src/simcore_service_webserver/groups/_common/schemas.py b/services/web/server/src/simcore_service_webserver/groups/_common/schemas.py index 18ab7cba5ff..dc173d59496 100644 --- a/services/web/server/src/simcore_service_webserver/groups/_common/schemas.py +++ b/services/web/server/src/simcore_service_webserver/groups/_common/schemas.py @@ -5,7 +5,7 @@ from models_library.users import UserID from pydantic import Field -from ..._constants import RQ_PRODUCT_KEY, RQT_USERID_KEY +from ...constants import RQ_PRODUCT_KEY, RQT_USERID_KEY class GroupsRequestContext(RequestParameters): diff --git a/services/web/server/src/simcore_service_webserver/groups/_groups_repository.py b/services/web/server/src/simcore_service_webserver/groups/_groups_repository.py index 0d8b24b83fe..83740fce392 100644 --- a/services/web/server/src/simcore_service_webserver/groups/_groups_repository.py +++ b/services/web/server/src/simcore_service_webserver/groups/_groups_repository.py @@ -18,9 +18,9 @@ StandardGroupUpdate, ) from models_library.users import UserID -from simcore_postgres_database.errors import UniqueViolation +from simcore_postgres_database.aiopg_errors import UniqueViolation from simcore_postgres_database.models.users import users -from simcore_postgres_database.utils_products import execute_get_or_create_product_group +from simcore_postgres_database.utils_products import get_or_create_product_group from simcore_postgres_database.utils_repos import ( pass_or_acquire_connection, transaction_context, @@ -118,7 +118,7 @@ async def _get_group_and_access_rights_or_raise( .select_from(groups.join(user_to_groups, user_to_groups.c.gid == groups.c.gid)) .where((user_to_groups.c.uid == caller_id) & (user_to_groups.c.gid == 
group_id)) ) - row = result.first() + row = result.one_or_none() if not row: raise GroupNotFoundError(gid=group_id) @@ -173,7 +173,6 @@ async def get_all_user_groups_with_read_access( *, user_id: UserID, ) -> GroupsByTypeTuple: - """ Returns the user primary group, standard groups and the all group """ @@ -370,15 +369,14 @@ async def update_standard_group( # NOTE: update does not include access-rights access_rights = AccessRightsDict(**row.access_rights) # type: ignore[typeddict-item] - result = await conn.stream( + result = await conn.execute( # pylint: disable=no-value-for-parameter groups.update() .values(**values) .where((groups.c.gid == group_id) & (groups.c.type == GroupType.STANDARD)) .returning(*_GROUP_COLUMNS) ) - row = await result.fetchone() - assert row # nosec + row = result.one() group = _row_to_model(row) return group, access_rights @@ -442,8 +440,7 @@ async def get_user_from_email( def _group_user_cols(caller_id: UserID): return ( users.c.id, - users.c.name, - *visible_user_profile_cols(caller_id), + *visible_user_profile_cols(caller_id, username_label="name"), users.c.primary_gid, ) @@ -758,7 +755,7 @@ async def auto_add_user_to_product_group( product_name: str, ) -> GroupID: async with transaction_context(get_asyncpg_engine(app), connection) as conn: - product_group_id: GroupID = await execute_get_or_create_product_group( + product_group_id: GroupID = await get_or_create_product_group( conn, product_name ) diff --git a/services/web/server/src/simcore_service_webserver/groups/_groups_rest.py b/services/web/server/src/simcore_service_webserver/groups/_groups_rest.py index 5456776cfe6..18032f8ea37 100644 --- a/services/web/server/src/simcore_service_webserver/groups/_groups_rest.py +++ b/services/web/server/src/simcore_service_webserver/groups/_groups_rest.py @@ -19,7 +19,8 @@ from .._meta import API_VTAG from ..login.decorators import login_required -from ..products.api import Product, get_current_product +from ..products import products_web +from ..products.models import Product from ..security.decorators import permission_required from ..utils_aiohttp import envelope_json_response from . import _groups_service @@ -45,7 +46,7 @@ async def list_groups(request: web.Request): """ List all groups (organizations, primary, everyone and products) I belong to """ - product: Product = get_current_product(request) + product: Product = products_web.get_current_product(request) req_ctx = GroupsRequestContext.model_validate(request) groups_by_type = await _groups_service.list_user_groups_with_read_access( diff --git a/services/web/server/src/simcore_service_webserver/groups/plugin.py b/services/web/server/src/simcore_service_webserver/groups/plugin.py index 4b240bee190..e8e56413671 100644 --- a/services/web/server/src/simcore_service_webserver/groups/plugin.py +++ b/services/web/server/src/simcore_service_webserver/groups/plugin.py @@ -3,7 +3,7 @@ from aiohttp import web from servicelib.aiohttp.application_setup import ModuleCategory, app_module_setup -from .._constants import APP_SETTINGS_KEY +from ..constants import APP_SETTINGS_KEY from ..products.plugin import setup_products from . 
import _classifiers_rest, _groups_rest diff --git a/services/web/server/src/simcore_service_webserver/invitations/_client.py b/services/web/server/src/simcore_service_webserver/invitations/_client.py index b7abdcf10ee..84417a759ea 100644 --- a/services/web/server/src/simcore_service_webserver/invitations/_client.py +++ b/services/web/server/src/simcore_service_webserver/invitations/_client.py @@ -16,7 +16,7 @@ from servicelib.aiohttp import status from yarl import URL -from .._constants import APP_SETTINGS_KEY +from ..constants import APP_SETTINGS_KEY from .errors import ( InvalidInvitationError, InvitationsError, diff --git a/services/web/server/src/simcore_service_webserver/products/_invitations_handlers.py b/services/web/server/src/simcore_service_webserver/invitations/_rest.py similarity index 89% rename from services/web/server/src/simcore_service_webserver/products/_invitations_handlers.py rename to services/web/server/src/simcore_service_webserver/invitations/_rest.py index a7cbd01dee1..ec0b8cbb1c0 100644 --- a/services/web/server/src/simcore_service_webserver/products/_invitations_handlers.py +++ b/services/web/server/src/simcore_service_webserver/invitations/_rest.py @@ -2,8 +2,8 @@ from aiohttp import web from models_library.api_schemas_invitations.invitations import ApiInvitationInputs -from models_library.api_schemas_webserver.product import ( - GenerateInvitation, +from models_library.api_schemas_webserver.products import ( + InvitationGenerate, InvitationGenerated, ) from models_library.rest_base import RequestParameters @@ -11,15 +11,15 @@ from pydantic import Field from servicelib.aiohttp.requests_validation import parse_request_body_as from servicelib.request_keys import RQT_USERID_KEY -from simcore_service_webserver.utils_aiohttp import envelope_json_response from yarl import URL -from .._constants import RQ_PRODUCT_KEY from .._meta import API_VTAG as VTAG -from ..invitations import api +from ..constants import RQ_PRODUCT_KEY from ..login.decorators import login_required from ..security.decorators import permission_required from ..users.api import get_user_name_and_email +from ..utils_aiohttp import envelope_json_response +from . 
import api routes = web.RouteTableDef() @@ -37,7 +37,7 @@ class _ProductsRequestContext(RequestParameters): @permission_required("product.invitations.create") async def generate_invitation(request: web.Request): req_ctx = _ProductsRequestContext.model_validate(request) - body = await parse_request_body_as(GenerateInvitation, request) + body = await parse_request_body_as(InvitationGenerate, request) _, user_email = await get_user_name_and_email(request.app, user_id=req_ctx.user_id) diff --git a/services/web/server/src/simcore_service_webserver/invitations/_service.py b/services/web/server/src/simcore_service_webserver/invitations/_service.py index ae7cabaf616..f29bf595efc 100644 --- a/services/web/server/src/simcore_service_webserver/invitations/_service.py +++ b/services/web/server/src/simcore_service_webserver/invitations/_service.py @@ -11,7 +11,7 @@ from pydantic import AnyHttpUrl, TypeAdapter, ValidationError from ..groups.api import is_user_by_email_in_group -from ..products.api import Product +from ..products.models import Product from ._client import get_invitations_service_api from .errors import ( MSG_INVALID_INVITATION_URL, diff --git a/services/web/server/src/simcore_service_webserver/invitations/plugin.py b/services/web/server/src/simcore_service_webserver/invitations/plugin.py index fd20f0f8601..344f652ac83 100644 --- a/services/web/server/src/simcore_service_webserver/invitations/plugin.py +++ b/services/web/server/src/simcore_service_webserver/invitations/plugin.py @@ -6,11 +6,11 @@ from aiohttp import web from servicelib.aiohttp.application_setup import ModuleCategory, app_module_setup -from simcore_service_webserver.products.plugin import setup_products -from .._constants import APP_SETTINGS_KEY +from ..constants import APP_SETTINGS_KEY from ..db.plugin import setup_db from ..products.plugin import setup_products +from . 
import _rest from ._client import invitations_service_api_cleanup_ctx _logger = logging.getLogger(__name__) @@ -28,4 +28,6 @@ def setup_invitations(app: web.Application): setup_db(app) setup_products(app) + app.router.add_routes(_rest.routes) + app.cleanup_ctx.append(invitations_service_api_cleanup_ctx) diff --git a/services/web/server/src/simcore_service_webserver/invitations/settings.py b/services/web/server/src/simcore_service_webserver/invitations/settings.py index 02755291910..9401a1912f8 100644 --- a/services/web/server/src/simcore_service_webserver/invitations/settings.py +++ b/services/web/server/src/simcore_service_webserver/invitations/settings.py @@ -17,7 +17,7 @@ URLPart, ) -from .._constants import APP_SETTINGS_KEY +from ..constants import APP_SETTINGS_KEY _INVITATION_VTAG_V1: Final[VersionTag] = TypeAdapter(VersionTag).validate_python("v1") diff --git a/services/web/server/src/simcore_service_webserver/licenses/_common/models.py b/services/web/server/src/simcore_service_webserver/licenses/_common/models.py index 887a6db6f59..2469eb14614 100644 --- a/services/web/server/src/simcore_service_webserver/licenses/_common/models.py +++ b/services/web/server/src/simcore_service_webserver/licenses/_common/models.py @@ -1,13 +1,5 @@ -import logging -from datetime import datetime -from typing import Any, NamedTuple, cast - from models_library.basic_types import IDStr -from models_library.licensed_items import ( - VIP_DETAILS_EXAMPLE, - LicensedItemID, - LicensedResourceType, -) +from models_library.licenses import LicensedItemID from models_library.resource_tracker import PricingPlanId, PricingUnitId from models_library.resource_tracker_licensed_items_purchases import ( LicensedItemPurchaseID, @@ -21,50 +13,10 @@ from models_library.rest_pagination import PageQueryParameters from models_library.users import UserID from models_library.wallets import WalletID -from pydantic import BaseModel, ConfigDict, Field, PositiveInt -from pydantic.config import JsonDict +from pydantic import BaseModel, ConfigDict, Field from servicelib.request_keys import RQT_USERID_KEY -from ..._constants import RQ_PRODUCT_KEY - -_logger = logging.getLogger(__name__) - - -class LicensedItem(BaseModel): - licensed_item_id: LicensedItemID - display_name: str - licensed_resource_name: str - licensed_resource_type: LicensedResourceType - licensed_resource_data: dict[str, Any] - pricing_plan_id: PricingPlanId - created_at: datetime - modified_at: datetime - - @staticmethod - def _update_json_schema_extra(schema: JsonDict) -> None: - schema.update( - { - "examples": [ - { - "licensed_item_id": "0362b88b-91f8-4b41-867c-35544ad1f7a1", - "display_name": "my best model", - "licensed_resource_name": "best-model", - "licensed_resource_type": f"{LicensedResourceType.VIP_MODEL}", - "licensed_resource_data": cast(JsonDict, VIP_DETAILS_EXAMPLE), - "pricing_plan_id": "15", - "created_at": "2024-12-12 09:59:26.422140", - "modified_at": "2024-12-12 09:59:26.422140", - } - ] - } - ) - - model_config = ConfigDict(json_schema_extra=_update_json_schema_extra) - - -class LicensedItemPage(NamedTuple): - items: list[LicensedItem] - total: PositiveInt +from ...constants import RQ_PRODUCT_KEY class LicensedItemsRequestContext(RequestParameters): diff --git a/services/web/server/src/simcore_service_webserver/licenses/_itis_vip_models.py b/services/web/server/src/simcore_service_webserver/licenses/_itis_vip_models.py new file mode 100644 index 00000000000..c3d6ba39732 --- /dev/null +++ 
b/services/web/server/src/simcore_service_webserver/licenses/_itis_vip_models.py @@ -0,0 +1,120 @@ +import re +from typing import Annotated, Any, Literal, NamedTuple, TypeAlias, cast + +from models_library.basic_types import IDStr +from models_library.licenses import VIP_DETAILS_EXAMPLE, FeaturesDict +from pydantic import ( + BaseModel, + BeforeValidator, + ConfigDict, + Field, + HttpUrl, + StringConstraints, + TypeAdapter, +) +from pydantic.config import JsonDict + +_max_str_adapter: TypeAdapter[str] = TypeAdapter( + Annotated[str, StringConstraints(strip_whitespace=True, max_length=1_000)] +) + + +def _clean_dict_data(data_dict): + """ + Strips leading/trailing whitespace from all string values + and removes keys whose stripped value is empty. + """ + cleaned = {} + for k, v in data_dict.items(): + if v is not None: + if isinstance(v, str): + v_stripped = v.strip() + # Keep the key only if it's not empty after strip + if v_stripped: + cleaned[k] = v_stripped + else: + # If it's not a string, just copy the value as is + cleaned[k] = v + return cleaned + + +def _feature_descriptor_to_dict(descriptor: str) -> dict[str, Any]: + # NOTE: this is manually added on the server side, so be extra robust to errors + descriptor = _max_str_adapter.validate_python(descriptor.strip("{}")) + pattern = r"(\w{1,100}):\s*([^,]{1,100})" + matches = re.findall(pattern, descriptor) + return dict(matches) + + +class ItisVipData(BaseModel): + # Designed to parse items from the VIP-API response + id: Annotated[int, Field(alias="ID")] + description: Annotated[str, Field(alias="Description")] + thumbnail: Annotated[str, Field(alias="Thumbnail")] + features: Annotated[ + FeaturesDict, + BeforeValidator(_clean_dict_data), + BeforeValidator(_feature_descriptor_to_dict), + Field(alias="Features"), + ] + doi: Annotated[str | None, Field(alias="DOI")] + license_key: Annotated[ + str, + Field( + alias="LicenseKey", + description="NOTE: skips VIP w/o license key", + ), + ] + license_version: Annotated[ + str, + Field( + alias="LicenseVersion", + description="NOTE: skips VIP w/o license version", + ), + ] + protection: Annotated[Literal["Code", "PayPal"], Field(alias="Protection")] + available_from_url: Annotated[HttpUrl | None, Field(alias="AvailableFromURL")] + + @staticmethod + def _update_json_schema_extra(schema: JsonDict) -> None: + schema.update( + { + "examples": [ + # complete + cast(JsonDict, VIP_DETAILS_EXAMPLE), + # minimal + { + "id": 1, + "description": "A detailed description of the VIP model", + "thumbnail": "https://example.com/thumbnail.jpg", + "features": {"date": "2013-02-01"}, + "doi": "null", + "license_key": "ABC123XYZ", + "license_version": "1.0", + "protection": "Code", + "available_from_url": "null", + }, + ] + } + ) + + model_config = ConfigDict(json_schema_extra=_update_json_schema_extra) + + +class ItisVipResourceData(BaseModel): + category_id: IDStr + category_display: str + source: Annotated[ + ItisVipData, Field(description="Original published data in the api") + ] + terms_of_use_url: HttpUrl | None = None + + +CategoryID: TypeAlias = IDStr +CategoryDisplay: TypeAlias = str + + +class CategoryTuple(NamedTuple): + url: HttpUrl + id: CategoryID + display: CategoryDisplay diff --git a/services/web/server/src/simcore_service_webserver/licenses/_itis_vip_service.py b/services/web/server/src/simcore_service_webserver/licenses/_itis_vip_service.py new file mode 100644 index 00000000000..228738f629c --- /dev/null +++ b/services/web/server/src/simcore_service_webserver/licenses/_itis_vip_service.py @@
-0,0 +1,50 @@ +import logging +from typing import Annotated + +import httpx +from pydantic import BaseModel, Field, HttpUrl, ValidationError +from tenacity import ( + retry, + retry_if_exception_cause_type, + stop_after_attempt, + wait_exponential, +) + +from ._itis_vip_models import ItisVipData + +_logger = logging.getLogger(__name__) + + +class _ItisVipApiResponse(BaseModel): + msg: int | None = None # still not used + available_downloads: Annotated[list[dict], Field(alias="availableDownloads")] + + +@retry( + wait=wait_exponential(multiplier=1, min=4, max=10), + stop=stop_after_attempt(5), + retry=retry_if_exception_cause_type(httpx.RequestError), +) +async def get_category_items( + client: httpx.AsyncClient, url: HttpUrl +) -> list[ItisVipData]: + """ + + Raises: + httpx.HTTPStatusError + pydantic.ValidationError + """ + response = await client.post(f"{url}") + response.raise_for_status() + + data = _ItisVipApiResponse.model_validate(response.json()) + + # Filters only downloads with ItisVipData guarantees + category_items = [] + for download in data.available_downloads: + try: + category_items.append(ItisVipData.model_validate(download)) + except ValidationError as err: + _logger.debug("Skipped %s because %s", download, err) + + return category_items diff --git a/services/web/server/src/simcore_service_webserver/licenses/_itis_vip_settings.py b/services/web/server/src/simcore_service_webserver/licenses/_itis_vip_settings.py new file mode 100644 index 00000000000..e93ab7b4817 --- /dev/null +++ b/services/web/server/src/simcore_service_webserver/licenses/_itis_vip_settings.py @@ -0,0 +1,58 @@ +from typing import Annotated + +from pydantic import AfterValidator, HttpUrl +from settings_library.base import BaseCustomSettings + +from ._itis_vip_models import CategoryDisplay, CategoryID, CategoryTuple + + +def _validate_url_contains_category(url: str) -> str: + if "{category}" not in url: + msg = "URL must contain '{category}'" + raise ValueError(msg) + return url + + +def _to_categories( + api_url: str, category_map: dict[CategoryID, CategoryDisplay] +) -> list[CategoryTuple]: + return [ + CategoryTuple( + url=HttpUrl(api_url.format(category=category_id)), + id=category_id, + display=category_display, + ) + for category_id, category_display in category_map.items() + ] + + +class ItisVipSettings(BaseCustomSettings): + LICENSES_ITIS_VIP_API_URL: Annotated[ + str, AfterValidator(_validate_url_contains_category) + ] + LICENSES_ITIS_VIP_CATEGORIES: dict[CategoryID, CategoryDisplay] + + def get_urls(self) -> list[HttpUrl]: + return [ + HttpUrl(self.LICENSES_ITIS_VIP_API_URL.format(category=category)) + for category in self.LICENSES_ITIS_VIP_CATEGORIES + ] + + def to_categories(self) -> list[CategoryTuple]: + return _to_categories( + self.LICENSES_ITIS_VIP_API_URL, + self.LICENSES_ITIS_VIP_CATEGORIES, + ) + + +class SpeagPhantomsSettings(BaseCustomSettings): + LICENSES_SPEAG_PHANTOMS_API_URL: Annotated[ + str, AfterValidator(_validate_url_contains_category) + ] + LICENSES_SPEAG_PHANTOMS_CATEGORIES: dict[CategoryID, CategoryDisplay] + + def to_categories(self) -> list[CategoryTuple]: + return _to_categories( + self.LICENSES_SPEAG_PHANTOMS_API_URL, + self.LICENSES_SPEAG_PHANTOMS_CATEGORIES, + ) diff --git a/services/web/server/src/simcore_service_webserver/licenses/_itis_vip_syncer_service.py b/services/web/server/src/simcore_service_webserver/licenses/_itis_vip_syncer_service.py new file mode 100644 index 00000000000..6bcd772cdf5 --- /dev/null +++ 
b/services/web/server/src/simcore_service_webserver/licenses/_itis_vip_syncer_service.py @@ -0,0 +1,120 @@ +import asyncio +import datetime +import logging +from datetime import timedelta + +from aiohttp import web +from httpx import AsyncClient +from models_library.licenses import LicensedResourceType +from servicelib.async_utils import cancel_wait_task +from servicelib.background_task_utils import exclusive_periodic +from servicelib.logging_utils import log_catch, log_context +from simcore_service_webserver.licenses import ( + _itis_vip_service, + _licensed_resources_service, +) + +from ..redis import get_redis_lock_manager_client_sdk, setup_redis +from ._itis_vip_models import CategoryTuple, ItisVipData, ItisVipResourceData +from ._licensed_resources_service import RegistrationState + +_logger = logging.getLogger(__name__) + + +async def sync_licensed_resources( + app: web.Application, categories: list[CategoryTuple] +): + async with AsyncClient() as http_client: + for category_url, category_id, category_display in categories: + assert f"{category_url}".endswith(category_id) # nosec + + # FETCH & VALIDATION + with log_context( + _logger, logging.INFO, "Fetching %s and validating", category_url + ), log_catch(_logger, reraise=True): + vip_data_items: list[ + ItisVipData + ] = await _itis_vip_service.get_category_items( + http_client, category_url + ) + + # REGISTRATION + for vip_data in vip_data_items: + + licensed_resource_name = f"{category_id}/{vip_data.id}" + + with log_context( + _logger, logging.INFO, "Registering %s", licensed_resource_name + ), log_catch(_logger, reraise=False): + result = await _licensed_resources_service.register_licensed_resource( + app, + licensed_item_display_name=f"{vip_data.features.get('name', 'UNNAMED!!')} " + f"{vip_data.features.get('version', 'UNVERSIONED!!')}", + # RESOURCE unique identifiers + licensed_resource_name=licensed_resource_name, + licensed_resource_type=LicensedResourceType.VIP_MODEL, + # RESOURCE extended data + licensed_resource_data=ItisVipResourceData( + category_id=category_id, + category_display=category_display, + source=vip_data, + ), + ) + + if result.state == RegistrationState.ALREADY_REGISTERED: + # NOTE: not really interesting + _logger.debug(result.message) + + elif result.state == RegistrationState.DIFFERENT_RESOURCE: + # NOTE: notify since a human decision is needed + _logger.warning(result.message) + + else: + assert ( + result.state == RegistrationState.NEWLY_REGISTERED + ) # nosec + # NOTE: inform since it needs curation + _logger.info( + "%s.
New licensed_resource_id=%s pending activation.", + result.message, + result.registered.licensed_resource_id, + ) + + +_BACKGROUND_TASK_NAME = f"{__name__}.itis_vip_syncer_cleanup_ctx._periodic_sync" + + +def setup_itis_vip_syncer( + app: web.Application, + categories: list[CategoryTuple], + resync_after: datetime.timedelta, +): + setup_redis(app) + + async def _lifespan(app_: web.Application): + with ( + log_context( + _logger, + logging.INFO, + f"IT'IS VIP syncing {len(categories)} categories", + ), + log_catch(_logger, reraise=False), + ): + + @exclusive_periodic( + get_redis_lock_manager_client_sdk(app_), + task_interval=resync_after, + retry_after=timedelta(minutes=1), + ) + async def _periodic_sync() -> None: + await sync_licensed_resources(app_, categories=categories) + + background_task = asyncio.create_task( + _periodic_sync(), name=_BACKGROUND_TASK_NAME + ) + + yield + + await cancel_wait_task(background_task) + + app.cleanup_ctx.append(_lifespan) diff --git a/services/web/server/src/simcore_service_webserver/licenses/_licensed_items_checkouts_models.py b/services/web/server/src/simcore_service_webserver/licenses/_licensed_items_checkouts_models.py index 390904c694c..a19c9a22135 100644 --- a/services/web/server/src/simcore_service_webserver/licenses/_licensed_items_checkouts_models.py +++ b/services/web/server/src/simcore_service_webserver/licenses/_licensed_items_checkouts_models.py @@ -2,7 +2,7 @@ from typing import NamedTuple from models_library.basic_types import IDStr -from models_library.licensed_items import LicensedItemID +from models_library.licenses import LicensedItemID from models_library.products import ProductName from models_library.resource_tracker_licensed_items_checkouts import ( LicensedItemCheckoutID, @@ -22,6 +22,8 @@ class LicensedItemCheckoutGet(BaseModel): licensed_item_checkout_id: LicensedItemCheckoutID licensed_item_id: LicensedItemID + key: str + version: str wallet_id: WalletID user_id: UserID user_email: str diff --git a/services/web/server/src/simcore_service_webserver/licenses/_licensed_items_checkouts_rest.py b/services/web/server/src/simcore_service_webserver/licenses/_licensed_items_checkouts_rest.py index 5565ec27be6..31369d7f0f1 100644 --- a/services/web/server/src/simcore_service_webserver/licenses/_licensed_items_checkouts_rest.py +++ b/services/web/server/src/simcore_service_webserver/licenses/_licensed_items_checkouts_rest.py @@ -61,6 +61,8 @@ async def get_licensed_item_checkout(request: web.Request): output = LicensedItemCheckoutRestGet.model_construct( licensed_item_checkout_id=checkout_item.licensed_item_checkout_id, licensed_item_id=checkout_item.licensed_item_id, + key=checkout_item.key, + version=checkout_item.version, wallet_id=checkout_item.wallet_id, user_id=checkout_item.user_id, user_email=checkout_item.user_email, @@ -105,6 +107,8 @@ async def list_licensed_item_checkouts_for_wallet(request: web.Request): LicensedItemCheckoutRestGet.model_construct( licensed_item_checkout_id=checkout_item.licensed_item_checkout_id, licensed_item_id=checkout_item.licensed_item_id, + key=checkout_item.key, + version=checkout_item.version, wallet_id=checkout_item.wallet_id, user_id=checkout_item.user_id, user_email=checkout_item.user_email, diff --git a/services/web/server/src/simcore_service_webserver/licenses/_licensed_items_checkouts_service.py b/services/web/server/src/simcore_service_webserver/licenses/_licensed_items_checkouts_service.py index 53c0f48379b..ed70c51bc8f 100644 ---
a/services/web/server/src/simcore_service_webserver/licenses/_licensed_items_checkouts_service.py +++ b/services/web/server/src/simcore_service_webserver/licenses/_licensed_items_checkouts_service.py @@ -2,7 +2,7 @@ from models_library.api_schemas_resource_usage_tracker import ( licensed_items_checkouts as rut_licensed_items_checkouts, ) -from models_library.licensed_items import LicensedItemID +from models_library.licenses import LicensedItemID from models_library.products import ProductName from models_library.resource_tracker_licensed_items_checkouts import ( LicensedItemCheckoutID, @@ -18,6 +18,7 @@ from ..rabbitmq import get_rabbitmq_rpc_client from ..users.api import get_user from ..wallets.api import get_wallet_by_user +from . import _licensed_items_repository from ._licensed_items_checkouts_models import ( LicensedItemCheckoutGet, LicensedItemCheckoutGetPage, @@ -60,6 +61,8 @@ async def list_licensed_items_checkouts_for_wallet( LicensedItemCheckoutGet.model_construct( licensed_item_checkout_id=checkout_item.licensed_item_checkout_id, licensed_item_id=checkout_item.licensed_item_id, + key=checkout_item.key, + version=checkout_item.version, wallet_id=checkout_item.wallet_id, user_id=checkout_item.user_id, user_email=checkout_item.user_email, @@ -100,6 +103,8 @@ async def get_licensed_item_checkout( return LicensedItemCheckoutGet.model_construct( licensed_item_checkout_id=checkout_item.licensed_item_checkout_id, licensed_item_id=checkout_item.licensed_item_id, + key=checkout_item.key, + version=checkout_item.version, wallet_id=checkout_item.wallet_id, user_id=checkout_item.user_id, user_email=checkout_item.user_email, @@ -132,11 +137,17 @@ async def checkout_licensed_item_for_wallet( user = await get_user(app, user_id=user_id) + licensed_item_db = await _licensed_items_repository.get( + app, licensed_item_id=licensed_item_id, product_name=product_name + ) + rpc_client = get_rabbitmq_rpc_client(app) licensed_item_get: rut_licensed_items_checkouts.LicensedItemCheckoutGet = ( await licensed_items_checkouts.checkout_licensed_item( rpc_client, - licensed_item_id=licensed_item_id, + licensed_item_id=licensed_item_db.licensed_item_id, + key=licensed_item_db.key, + version=licensed_item_db.version, wallet_id=wallet_id, product_name=product_name, num_of_seats=num_of_seats, @@ -149,6 +160,8 @@ async def checkout_licensed_item_for_wallet( return LicensedItemCheckoutGet.model_construct( licensed_item_checkout_id=licensed_item_get.licensed_item_checkout_id, licensed_item_id=licensed_item_get.licensed_item_id, + key=licensed_item_get.key, + version=licensed_item_get.version, wallet_id=licensed_item_get.wallet_id, user_id=licensed_item_get.user_id, user_email=licensed_item_get.user_email, @@ -195,6 +208,8 @@ async def release_licensed_item_for_wallet( return LicensedItemCheckoutGet.model_construct( licensed_item_checkout_id=licensed_item_get.licensed_item_checkout_id, licensed_item_id=licensed_item_get.licensed_item_id, + key=licensed_item_get.key, + version=licensed_item_get.version, wallet_id=licensed_item_get.wallet_id, user_id=licensed_item_get.user_id, user_email=licensed_item_get.user_email, diff --git a/services/web/server/src/simcore_service_webserver/licenses/_licensed_items_purchases_service.py b/services/web/server/src/simcore_service_webserver/licenses/_licensed_items_purchases_service.py index d42ad904851..aaff40c9af0 100644 --- a/services/web/server/src/simcore_service_webserver/licenses/_licensed_items_purchases_service.py +++ 
b/services/web/server/src/simcore_service_webserver/licenses/_licensed_items_purchases_service.py @@ -58,6 +58,8 @@ async def list_licensed_items_purchases( licensed_item_purchase_id=item.licensed_item_purchase_id, product_name=item.product_name, licensed_item_id=item.licensed_item_id, + key=item.key, + version=item.version, wallet_id=item.wallet_id, pricing_unit_cost_id=item.pricing_unit_cost_id, pricing_unit_cost=item.pricing_unit_cost, @@ -102,6 +104,8 @@ async def get_licensed_item_purchase( licensed_item_purchase_id=licensed_item_get.licensed_item_purchase_id, product_name=licensed_item_get.product_name, licensed_item_id=licensed_item_get.licensed_item_id, + key=licensed_item_get.key, + version=licensed_item_get.version, wallet_id=licensed_item_get.wallet_id, pricing_unit_cost_id=licensed_item_get.pricing_unit_cost_id, pricing_unit_cost=licensed_item_get.pricing_unit_cost, diff --git a/services/web/server/src/simcore_service_webserver/licenses/_licensed_items_repository.py b/services/web/server/src/simcore_service_webserver/licenses/_licensed_items_repository.py index b63c9e8c58f..28f9bb6a704 100644 --- a/services/web/server/src/simcore_service_webserver/licenses/_licensed_items_repository.py +++ b/services/web/server/src/simcore_service_webserver/licenses/_licensed_items_repository.py @@ -1,35 +1,37 @@ -""" Database API - - - Adds a layer to the postgres API with a focus on the projects comments - -""" - import logging -from typing import Literal, cast +from typing import cast from aiohttp import web -from models_library.licensed_items import ( +from models_library.licenses import ( + LicensedItem, LicensedItemDB, LicensedItemID, - LicensedItemUpdateDB, + LicensedItemKey, + LicensedItemPatchDB, + LicensedItemVersion, LicensedResourceType, ) from models_library.products import ProductName from models_library.resource_tracker import PricingPlanId from models_library.rest_ordering import OrderBy, OrderDirection from pydantic import NonNegativeInt +from simcore_postgres_database.models.licensed_item_to_resource import ( + licensed_item_to_resource, +) from simcore_postgres_database.models.licensed_items import licensed_items +from simcore_postgres_database.models.licensed_resources import licensed_resources from simcore_postgres_database.utils_repos import ( get_columns_from_db_model, pass_or_acquire_connection, transaction_context, ) from sqlalchemy import asc, desc, func +from sqlalchemy.dialects import postgresql from sqlalchemy.ext.asyncio import AsyncConnection from sqlalchemy.sql import select from ..db.plugin import get_asyncpg_engine -from .errors import LicensedItemNotFoundError +from .errors import LicensedItemNotFoundError, LicensedKeyVersionNotFoundError _logger = logging.getLogger(__name__) @@ -37,32 +39,52 @@ _SELECTION_ARGS = get_columns_from_db_model(licensed_items, LicensedItemDB) +def _create_insert_query( + display_name: str, + key: LicensedItemKey, + version: LicensedItemVersion, + licensed_resource_type: LicensedResourceType, + product_name: ProductName, + pricing_plan_id: PricingPlanId, +): + return ( + postgresql.insert(licensed_items) + .values( + licensed_resource_type=licensed_resource_type, + display_name=display_name, + key=key, + version=version, + pricing_plan_id=pricing_plan_id, + product_name=product_name, + created=func.now(), + modified=func.now(), + ) + .returning(*_SELECTION_ARGS) + ) + + async def create( app: web.Application, connection: AsyncConnection | None = None, *, + key: LicensedItemKey, + version: LicensedItemVersion, display_name: str, - 
licensed_resource_name: str, licensed_resource_type: LicensedResourceType, - licensed_resource_data: dict | None, - product_name: ProductName | None, - pricing_plan_id: PricingPlanId | None, + product_name: ProductName, + pricing_plan_id: PricingPlanId, ) -> LicensedItemDB: + + query = _create_insert_query( + display_name, + key, + version, + licensed_resource_type, + product_name, + pricing_plan_id, + ) async with transaction_context(get_asyncpg_engine(app), connection) as conn: - result = await conn.execute( - licensed_items.insert() - .values( - product_name=product_name, - display_name=display_name, - licensed_resource_name=licensed_resource_name, - licensed_resource_type=licensed_resource_type, - licensed_resource_data=licensed_resource_data, - pricing_plan_id=pricing_plan_id, - created=func.now(), - modified=func.now(), - ) - .returning(*_SELECTION_ARGS) - ) + result = await conn.execute(query) row = result.one() return LicensedItemDB.model_validate(row) @@ -76,8 +98,8 @@ async def list_( limit: NonNegativeInt, order_by: OrderBy, # filters - trashed: Literal["exclude", "only", "include"] = "exclude", - inactive: Literal["exclude", "only", "include"] = "exclude", + filter_by_licensed_resource_type: LicensedResourceType | None = None, + include_hidden_items_on_market: bool = False, ) -> tuple[int, list[LicensedItemDB]]: base_query = ( @@ -86,22 +108,12 @@ .select_from(licensed_items) .where(licensed_items.c.product_name == product_name) ) - # Apply trashed filter - if trashed == "exclude": - base_query = base_query.where(licensed_items.c.trashed.is_(None)) - elif trashed == "only": - base_query = base_query.where(licensed_items.c.trashed.is_not(None)) - - if inactive == "only": - base_query = base_query.where( - licensed_items.c.product_name.is_(None) - | licensed_items.c.licensed_item_id.is_(None) - ) - elif inactive == "exclude": - base_query = base_query.where( - licensed_items.c.product_name.is_not(None) - & licensed_items.c.licensed_item_id.is_not(None) + if filter_by_licensed_resource_type: + base_query = base_query.where( + licensed_items.c.licensed_resource_type == filter_by_licensed_resource_type ) + if not include_hidden_items_on_market: + base_query = base_query.where(licensed_items.c.is_hidden_on_market.is_(False)) # Select total count from base_query subquery = base_query.subquery() @@ -134,7 +146,7 @@ async def get( licensed_item_id: LicensedItemID, product_name: ProductName, ) -> LicensedItemDB: - base_query = ( + select_query = ( select(*_SELECTION_ARGS) .select_from(licensed_items) .where( @@ -144,8 +156,8 @@ ) async with pass_or_acquire_connection(get_asyncpg_engine(app), connection) as conn: - result = await conn.stream(base_query) - row = await result.first() + result = await conn.execute(select_query) + row = result.one_or_none() if row is None: raise LicensedItemNotFoundError(licensed_item_id=licensed_item_id) return LicensedItemDB.model_validate(row) @@ -157,7 +169,7 @@ async def update( *, product_name: ProductName, licensed_item_id: LicensedItemID, - updates: LicensedItemUpdateDB, + updates: LicensedItemPatchDB, ) -> LicensedItemDB: # NOTE: at least 'touch' if updated_values is empty _updates = { @@ -165,11 +177,6 @@ licensed_items.c.modified.name: func.now(), } - # trashing - assert "trash" in dict(LicensedItemUpdateDB.model_fields) # nosec - if trash := _updates.pop("trash", None): - _updates[licensed_items.c.trashed.name] = func.now() if trash else None - async with transaction_context(get_asyncpg_engine(app), connection) as conn: result = await conn.execute(
licensed_items.update() @@ -200,3 +207,164 @@ async def delete( & (licensed_items.c.product_name == product_name) ) ) + + +### LICENSED ITEMS DOMAIN + + +def _create_licensed_resource_subquery(product_name: ProductName): + # Step 1: Create an ordered subquery + _ordered_subquery = ( + select( + licensed_item_to_resource.c.licensed_item_id, + licensed_resources.c.licensed_resource_data, + licensed_resources.c.priority, + licensed_resources.c.licensed_resource_id, + ) + .select_from( + licensed_item_to_resource.join( + licensed_resources, + licensed_resources.c.licensed_resource_id + == licensed_item_to_resource.c.licensed_resource_id, + ) + ) + .where(licensed_item_to_resource.c.product_name == product_name) + .order_by( + licensed_resources.c.priority, licensed_resources.c.licensed_resource_id + ) + ).subquery("ordered_subquery") + + # Step 2: Aggregate the ordered subquery results + _licensed_resource_subquery = ( + select( + _ordered_subquery.c.licensed_item_id, + func.array_agg(_ordered_subquery.c.licensed_resource_data).label( + "licensed_resources" + ), + ).group_by(_ordered_subquery.c.licensed_item_id) + ).subquery("licensed_resource_subquery") + + return _licensed_resource_subquery # noqa: RET504 + + +async def get_licensed_item_by_key_version( + app: web.Application, + connection: AsyncConnection | None = None, + *, + key: LicensedItemKey, + version: LicensedItemVersion, + product_name: ProductName, +) -> LicensedItem: + + _licensed_resource_subquery = _create_licensed_resource_subquery( + product_name=product_name + ) + + select_query = ( + select( + licensed_items.c.licensed_item_id, + licensed_items.c.key, + licensed_items.c.version, + licensed_items.c.display_name, + licensed_items.c.licensed_resource_type, + _licensed_resource_subquery.c.licensed_resources, + licensed_items.c.pricing_plan_id, + licensed_items.c.is_hidden_on_market, + licensed_items.c.created.label("created_at"), + licensed_items.c.modified.label("modified_at"), + ) + .select_from( + licensed_items.join( + _licensed_resource_subquery, + licensed_items.c.licensed_item_id + == _licensed_resource_subquery.c.licensed_item_id, + ) + ) + .where( + (licensed_items.c.key == key) + & (licensed_items.c.version == version) + & (licensed_items.c.product_name == product_name) + ) + ) + + async with pass_or_acquire_connection(get_asyncpg_engine(app), connection) as conn: + result = await conn.execute(select_query) + row = result.one_or_none() + if row is None: + raise LicensedKeyVersionNotFoundError(key=key, version=version) + return LicensedItem.model_validate(dict(row)) + + +async def list_licensed_items( + app: web.Application, + connection: AsyncConnection | None = None, + *, + product_name: ProductName, + offset: NonNegativeInt, + limit: NonNegativeInt, + order_by: OrderBy, + # filters + filter_by_licensed_resource_type: LicensedResourceType | None = None, + include_hidden_items_on_market: bool = False, +) -> tuple[int, list[LicensedItem]]: + + _licensed_resource_subquery = _create_licensed_resource_subquery( + product_name=product_name + ) + + base_query = ( + select( + licensed_items.c.licensed_item_id, + licensed_items.c.key, + licensed_items.c.version, + licensed_items.c.display_name, + licensed_items.c.licensed_resource_type, + _licensed_resource_subquery.c.licensed_resources, + licensed_items.c.pricing_plan_id, + licensed_items.c.is_hidden_on_market, + licensed_items.c.created.label("created_at"), + licensed_items.c.modified.label("modified_at"), + ) + .select_from( + licensed_items.join( + 
_licensed_resource_subquery, + licensed_items.c.licensed_item_id + == _licensed_resource_subquery.c.licensed_item_id, + ) + ) + .where(licensed_items.c.product_name == product_name) + ) + + if filter_by_licensed_resource_type: + base_query = base_query.where( + licensed_items.c.licensed_resource_type == filter_by_licensed_resource_type + ) + if not include_hidden_items_on_market: + base_query = base_query.where(licensed_items.c.is_hidden_on_market.is_(False)) + + # Select total count from base_query + subquery = base_query.subquery() + count_query = select(func.count()).select_from(subquery) + + # Ordering and pagination + if order_by.direction == OrderDirection.ASC: + list_query = base_query.order_by( + asc(getattr(licensed_items.c, order_by.field)), + licensed_items.c.licensed_item_id, + ) + else: + list_query = base_query.order_by( + desc(getattr(licensed_items.c, order_by.field)), + licensed_items.c.licensed_item_id, + ) + list_query = list_query.offset(offset).limit(limit) + + async with pass_or_acquire_connection(get_asyncpg_engine(app), connection) as conn: + total_count = await conn.scalar(count_query) + + result = await conn.stream(list_query) + items: list[LicensedItem] = [ + LicensedItem.model_validate(dict(row)) async for row in result + ] + + return cast(int, total_count), items diff --git a/services/web/server/src/simcore_service_webserver/licenses/_licensed_items_rest.py b/services/web/server/src/simcore_service_webserver/licenses/_licensed_items_rest.py index 1ed9613317f..505e23cd6a4 100644 --- a/services/web/server/src/simcore_service_webserver/licenses/_licensed_items_rest.py +++ b/services/web/server/src/simcore_service_webserver/licenses/_licensed_items_rest.py @@ -1,14 +1,14 @@ import logging from aiohttp import web -from models_library.api_schemas_webserver.licensed_items import ( - LicensedItemRestGet, - LicensedItemRestGetPage, +from models_library.api_schemas_webserver.licensed_items import LicensedItemRestGet +from models_library.api_schemas_webserver.licensed_items_purchases import ( + LicensedItemPurchaseGet, ) +from models_library.licenses import LicensedItemPage from models_library.rest_ordering import OrderBy from models_library.rest_pagination import Page from models_library.rest_pagination_utils import paginate_data -from servicelib.aiohttp import status from servicelib.aiohttp.requests_validation import ( parse_request_body_as, parse_request_path_parameters_as, @@ -24,8 +24,6 @@ from . 
import _licensed_items_service from ._common.exceptions_handlers import handle_plugin_requests_exceptions from ._common.models import ( - LicensedItem, - LicensedItemPage, LicensedItemsBodyParams, LicensedItemsListQueryParams, LicensedItemsPathParams, @@ -52,34 +50,23 @@ async def list_licensed_items(request: web.Request): await _licensed_items_service.list_licensed_items( app=request.app, product_name=req_ctx.product_name, + include_hidden_items_on_market=False, offset=query_params.offset, limit=query_params.limit, order_by=OrderBy.model_construct(**query_params.order_by.model_dump()), ) ) - licensed_item_get_page: LicensedItemRestGetPage = LicensedItemRestGetPage( - items=[ - LicensedItemRestGet.model_construct( - licensed_item_id=licensed_item.licensed_item_id, - display_name=licensed_item.display_name, - licensed_resource_type=licensed_item.licensed_resource_type, - licensed_resource_data=licensed_item.licensed_resource_data, - pricing_plan_id=licensed_item.pricing_plan_id, - created_at=licensed_item.created_at, - modified_at=licensed_item.modified_at, - ) - for licensed_item in licensed_item_page.items - ], - total=licensed_item_page.total, - ) page = Page[LicensedItemRestGet].model_validate( paginate_data( - chunk=licensed_item_get_page.items, - request_url=request.url, - total=licensed_item_get_page.total, + chunk=[ + LicensedItemRestGet.from_domain_model(licensed_item) + for licensed_item in licensed_item_page.items + ], + total=licensed_item_page.total, limit=query_params.limit, offset=query_params.offset, + request_url=request.url, ) ) return web.Response( @@ -88,34 +75,6 @@ async def list_licensed_items(request: web.Request): ) -@routes.get( - f"/{VTAG}/catalog/licensed-items/{{licensed_item_id}}", name="get_licensed_item" -) -@login_required -@permission_required("catalog/licensed-items.*") -@handle_plugin_requests_exceptions -async def get_licensed_item(request: web.Request): - req_ctx = LicensedItemsRequestContext.model_validate(request) - path_params = parse_request_path_parameters_as(LicensedItemsPathParams, request) - - licensed_item: LicensedItem = await _licensed_items_service.get_licensed_item( - app=request.app, - licensed_item_id=path_params.licensed_item_id, - product_name=req_ctx.product_name, - ) - licensed_item_get = LicensedItemRestGet.model_construct( - licensed_item_id=licensed_item.licensed_item_id, - display_name=licensed_item.display_name, - licensed_resource_type=licensed_item.licensed_resource_type, - pricing_plan_id=licensed_item.pricing_plan_id, - licensed_resource_data=licensed_item.licensed_resource_data, - created_at=licensed_item.created_at, - modified_at=licensed_item.modified_at, - ) - - return envelope_json_response(licensed_item_get) - - @routes.post( f"/{VTAG}/catalog/licensed-items/{{licensed_item_id}}:purchase", name="purchase_licensed_item", @@ -128,11 +87,30 @@ async def purchase_licensed_item(request: web.Request): path_params = parse_request_path_parameters_as(LicensedItemsPathParams, request) body_params = await parse_request_body_as(LicensedItemsBodyParams, request) - await _licensed_items_service.purchase_licensed_item( + purchased_item = await _licensed_items_service.purchase_licensed_item( app=request.app, user_id=req_ctx.user_id, licensed_item_id=path_params.licensed_item_id, product_name=req_ctx.product_name, body_params=body_params, ) - return web.json_response(status=status.HTTP_204_NO_CONTENT) + + output = LicensedItemPurchaseGet( + licensed_item_purchase_id=purchased_item.licensed_item_purchase_id, + 
product_name=purchased_item.product_name, + licensed_item_id=purchased_item.licensed_item_id, + key=purchased_item.key, + version=purchased_item.version, + wallet_id=purchased_item.wallet_id, + pricing_unit_cost_id=purchased_item.pricing_unit_cost_id, + pricing_unit_cost=purchased_item.pricing_unit_cost, + start_at=purchased_item.start_at, + expire_at=purchased_item.expire_at, + num_of_seats=purchased_item.num_of_seats, + purchased_by_user=purchased_item.purchased_by_user, + user_email=purchased_item.user_email, + purchased_at=purchased_item.purchased_at, + modified_at=purchased_item.modified, + ) + + return envelope_json_response(output) diff --git a/services/web/server/src/simcore_service_webserver/licenses/_licensed_items_service.py b/services/web/server/src/simcore_service_webserver/licenses/_licensed_items_service.py index f530b4cb910..90303ff1369 100644 --- a/services/web/server/src/simcore_service_webserver/licenses/_licensed_items_service.py +++ b/services/web/server/src/simcore_service_webserver/licenses/_licensed_items_service.py @@ -4,8 +4,21 @@ from datetime import UTC, datetime, timedelta from aiohttp import web -from models_library.licensed_items import LicensedItemID +from models_library.api_schemas_resource_usage_tracker.licensed_items_purchases import ( + LicensedItemPurchaseGet, +) +from models_library.licenses import ( + LicensedItem, + LicensedItemID, + LicensedItemKey, + LicensedItemPage, + LicensedItemVersion, +) from models_library.products import ProductName +from models_library.resource_tracker import ( + PricingPlanClassification, + UnitExtraInfoLicense, +) from models_library.resource_tracker_licensed_items_purchases import ( LicensedItemsPurchasesCreate, ) @@ -17,13 +30,17 @@ ) from ..rabbitmq import get_rabbitmq_rpc_client -from ..resource_usage.service import get_pricing_plan_unit +from ..resource_usage.service import get_pricing_plan, get_pricing_plan_unit from ..users.api import get_user from ..wallets.api import get_wallet_with_available_credits_by_user_and_wallet from ..wallets.errors import WalletNotEnoughCreditsError from . 
import _licensed_items_repository -from ._common.models import LicensedItem, LicensedItemPage, LicensedItemsBodyParams -from .errors import LicensedItemPricingPlanMatchError +from ._common.models import LicensedItemsBodyParams +from .errors import ( + LicensedItemNumOfSeatsMatchError, + LicensedItemPricingPlanConfigurationError, + LicensedItemPricingPlanMatchError, +) _logger = logging.getLogger(__name__) @@ -31,22 +48,13 @@ async def get_licensed_item( app: web.Application, *, - licensed_item_id: LicensedItemID, + key: LicensedItemKey, + version: LicensedItemVersion, product_name: ProductName, ) -> LicensedItem: - licensed_item_db = await _licensed_items_repository.get( - app, licensed_item_id=licensed_item_id, product_name=product_name - ) - return LicensedItem.model_construct( - licensed_item_id=licensed_item_db.licensed_item_id, - display_name=licensed_item_db.display_name, - licensed_resource_name=licensed_item_db.licensed_resource_name, - licensed_resource_type=licensed_item_db.licensed_resource_type, - licensed_resource_data=licensed_item_db.licensed_resource_data, - pricing_plan_id=licensed_item_db.pricing_plan_id, - created_at=licensed_item_db.created, - modified_at=licensed_item_db.modified, + return await _licensed_items_repository.get_licensed_item_by_key_version( + app, key=key, version=version, product_name=product_name ) @@ -54,33 +62,21 @@ async def list_licensed_items( app: web.Application, *, product_name: ProductName, + include_hidden_items_on_market: bool, offset: NonNegativeInt, limit: int, order_by: OrderBy, ) -> LicensedItemPage: - total_count, items = await _licensed_items_repository.list_( + total_count, items = await _licensed_items_repository.list_licensed_items( app, product_name=product_name, + include_hidden_items_on_market=include_hidden_items_on_market, offset=offset, limit=limit, order_by=order_by, - trashed="exclude", - inactive="exclude", ) return LicensedItemPage( - items=[ - LicensedItem.model_construct( - licensed_item_id=licensed_item_db.licensed_item_id, - display_name=licensed_item_db.display_name, - licensed_resource_name=licensed_item_db.licensed_resource_name, - licensed_resource_type=licensed_item_db.licensed_resource_type, - licensed_resource_data=licensed_item_db.licensed_resource_data, - pricing_plan_id=licensed_item_db.pricing_plan_id, - created_at=licensed_item_db.created, - modified_at=licensed_item_db.modified, - ) - for licensed_item_db in items - ], + items=items, total=total_count, ) @@ -92,20 +88,34 @@ async def purchase_licensed_item( user_id: UserID, licensed_item_id: LicensedItemID, body_params: LicensedItemsBodyParams, -) -> None: +) -> LicensedItemPurchaseGet: # Check user wallet permissions wallet = await get_wallet_with_available_credits_by_user_and_wallet( app, user_id=user_id, wallet_id=body_params.wallet_id, product_name=product_name ) - licensed_item = await get_licensed_item( + licensed_item_db = await _licensed_items_repository.get( app, licensed_item_id=licensed_item_id, product_name=product_name ) + licensed_item = await get_licensed_item( + app, + key=licensed_item_db.key, + version=licensed_item_db.version, + product_name=product_name, + ) if licensed_item.pricing_plan_id != body_params.pricing_plan_id: raise LicensedItemPricingPlanMatchError( pricing_plan_id=body_params.pricing_plan_id, - licensed_item_id=licensed_item_id, + licensed_item_id=licensed_item.licensed_item_id, + ) + + pricing_plan = await get_pricing_plan( + app, product_name=product_name, pricing_plan_id=body_params.pricing_plan_id + ) + if 
pricing_plan.classification is not PricingPlanClassification.LICENSE: + raise LicensedItemPricingPlanConfigurationError( + pricing_plan_id=body_params.pricing_plan_id ) pricing_unit = await get_pricing_plan_unit( @@ -114,6 +124,12 @@ async def purchase_licensed_item( pricing_plan_id=body_params.pricing_plan_id, pricing_unit_id=body_params.pricing_unit_id, ) + assert isinstance(pricing_unit.unit_extra_info, UnitExtraInfoLicense) # nosec + if pricing_unit.unit_extra_info.num_of_seats != body_params.num_of_seats: + raise LicensedItemNumOfSeatsMatchError( + num_of_seats=body_params.num_of_seats, + pricing_unit_id=body_params.pricing_unit_id, + ) # Check whether wallet has enough credits if wallet.available_credits - pricing_unit.current_cost_per_unit < 0: @@ -125,7 +141,9 @@ async def purchase_licensed_item( _data = LicensedItemsPurchasesCreate( product_name=product_name, - licensed_item_id=licensed_item_id, + licensed_item_id=licensed_item.licensed_item_id, + key=licensed_item_db.key, + version=licensed_item_db.version, wallet_id=wallet.wallet_id, wallet_name=wallet.name, pricing_plan_id=body_params.pricing_plan_id, @@ -133,12 +151,13 @@ async def purchase_licensed_item( pricing_unit_cost_id=pricing_unit.current_cost_per_unit_id, pricing_unit_cost=pricing_unit.current_cost_per_unit, start_at=datetime.now(tz=UTC), - expire_at=datetime.now(tz=UTC) - + timedelta(days=30), # <-- Temporary agreement with OM for proof of concept + expire_at=datetime.now(tz=UTC) + timedelta(days=365), num_of_seats=body_params.num_of_seats, purchased_by_user=user_id, user_email=user["email"], purchased_at=datetime.now(tz=UTC), ) rpc_client = get_rabbitmq_rpc_client(app) - await licensed_items_purchases.create_licensed_item_purchase(rpc_client, data=_data) + return await licensed_items_purchases.create_licensed_item_purchase( + rpc_client, data=_data + ) diff --git a/services/web/server/src/simcore_service_webserver/licenses/_licensed_resources_repository.py b/services/web/server/src/simcore_service_webserver/licenses/_licensed_resources_repository.py new file mode 100644 index 00000000000..c735eb253bc --- /dev/null +++ b/services/web/server/src/simcore_service_webserver/licenses/_licensed_resources_repository.py @@ -0,0 +1,142 @@ +import logging +from typing import Any + +from aiohttp import web +from models_library.licenses import ( + LicensedResourceDB, + LicensedResourceID, + LicensedResourcePatchDB, + LicensedResourceType, +) +from simcore_postgres_database.models.licensed_resources import licensed_resources +from simcore_postgres_database.utils_repos import ( + get_columns_from_db_model, + pass_or_acquire_connection, + transaction_context, +) +from sqlalchemy import func +from sqlalchemy.dialects import postgresql +from sqlalchemy.ext.asyncio import AsyncConnection +from sqlalchemy.sql import select + +from ..db.plugin import get_asyncpg_engine +from .errors import LicensedResourceNotFoundError + +_logger = logging.getLogger(__name__) + + +_SELECTION_ARGS = get_columns_from_db_model(licensed_resources, LicensedResourceDB) + + +def _create_insert_query( + display_name: str, + licensed_resource_name: str, + licensed_resource_type: LicensedResourceType, + licensed_resource_data: dict[str, Any] | None, +): + return ( + postgresql.insert(licensed_resources) + .values( + licensed_resource_name=licensed_resource_name, + licensed_resource_type=licensed_resource_type, + licensed_resource_data=licensed_resource_data, + display_name=display_name, + created=func.now(), + modified=func.now(), + ) + 
.returning(*_SELECTION_ARGS) + ) + + +async def create_if_not_exists( + app: web.Application, + connection: AsyncConnection | None = None, + *, + display_name: str, + licensed_resource_name: str, + licensed_resource_type: LicensedResourceType, + licensed_resource_data: dict[str, Any] | None = None, +) -> LicensedResourceDB: + + insert_or_none_query = _create_insert_query( + display_name, + licensed_resource_name, + licensed_resource_type, + licensed_resource_data, + ).on_conflict_do_nothing() + + async with transaction_context(get_asyncpg_engine(app), connection) as conn: + result = await conn.execute(insert_or_none_query) + row = result.one_or_none() + + if row is None: + select_query = select(*_SELECTION_ARGS).where( + (licensed_resources.c.licensed_resource_name == licensed_resource_name) + & ( + licensed_resources.c.licensed_resource_type + == licensed_resource_type + ) + ) + + result = await conn.execute(select_query) + row = result.one() + + assert row is not None # nosec + return LicensedResourceDB.model_validate(row) + + +async def get_by_resource_identifier( + app: web.Application, + connection: AsyncConnection | None = None, + *, + licensed_resource_name: str, + licensed_resource_type: LicensedResourceType, +) -> LicensedResourceDB: + select_query = select(*_SELECTION_ARGS).where( + (licensed_resources.c.licensed_resource_name == licensed_resource_name) + & (licensed_resources.c.licensed_resource_type == licensed_resource_type) + ) + + async with pass_or_acquire_connection(get_asyncpg_engine(app), connection) as conn: + result = await conn.execute(select_query) + row = result.one_or_none() + if row is None: + raise LicensedResourceNotFoundError( + licensed_resource_id="Unknown", + licensed_resource_name=licensed_resource_name, + licensed_resource_type=licensed_resource_type, + ) + return LicensedResourceDB.model_validate(row) + + +async def update( + app: web.Application, + connection: AsyncConnection | None = None, + *, + licensed_resource_id: LicensedResourceID, + updates: LicensedResourcePatchDB, +) -> LicensedResourceDB: + # NOTE: at least 'touch' if updated_values is empty + _updates = { + **updates.model_dump(exclude_unset=True), + licensed_resources.c.modified.name: func.now(), + } + + # trashing + assert "trash" in dict(LicensedResourcePatchDB.model_fields) # nosec + if trash := _updates.pop("trash", None): + _updates[licensed_resources.c.trashed.name] = func.now() if trash else None + + async with transaction_context(get_asyncpg_engine(app), connection) as conn: + result = await conn.execute( + licensed_resources.update() + .values(**_updates) + .where(licensed_resources.c.licensed_resource_id == licensed_resource_id) + .returning(*_SELECTION_ARGS) + ) + row = result.one_or_none() + if row is None: + raise LicensedResourceNotFoundError( + licensed_resource_id=licensed_resource_id + ) + return LicensedResourceDB.model_validate(row) diff --git a/services/web/server/src/simcore_service_webserver/licenses/_licensed_resources_service.py b/services/web/server/src/simcore_service_webserver/licenses/_licensed_resources_service.py new file mode 100644 index 00000000000..11cd0495e68 --- /dev/null +++ b/services/web/server/src/simcore_service_webserver/licenses/_licensed_resources_service.py @@ -0,0 +1,124 @@ +# pylint: disable=unused-argument + +import logging +from enum import Enum, auto +from pprint import pformat +from typing import NamedTuple + +from aiohttp import web +from deepdiff import DeepDiff # type: ignore[attr-defined] +from models_library.licenses import ( + 
LicensedResourceDB, + LicensedResourceID, + LicensedResourcePatchDB, + LicensedResourceType, +) +from pydantic import BaseModel + +from . import _licensed_resources_repository +from .errors import LicensedResourceNotFoundError + +_logger = logging.getLogger(__name__) + + +class RegistrationState(Enum): + ALREADY_REGISTERED = auto() + DIFFERENT_RESOURCE = auto() + NEWLY_REGISTERED = auto() + + +class RegistrationResult(NamedTuple): + registered: LicensedResourceDB + state: RegistrationState + message: str | None + + +async def register_licensed_resource( + app: web.Application, + *, + licensed_resource_name: str, + licensed_resource_type: LicensedResourceType, + licensed_resource_data: BaseModel, + licensed_item_display_name: str, +) -> RegistrationResult: + # NOTE about the implementation choice: + # Using `create_if_not_exists` (INSERT with IGNORE_ON_CONFLICT) would have been an option, + # but it generates excessive error logs due to conflicts. + # + # To avoid this, we first attempt to retrieve the resource using `get_by_resource_identifier` (GET). + # If the resource does not exist, we proceed with `create_if_not_exists` (INSERT with IGNORE_ON_CONFLICT). + # + # This approach not only reduces unnecessary error logs but also helps prevent race conditions + # when multiple concurrent calls attempt to register the same resource. + + resource_key = f"{licensed_resource_type}, {licensed_resource_name}" + new_licensed_resource_data = licensed_resource_data.model_dump( + mode="json", + exclude_unset=True, + ) + + try: + licensed_resource = ( + await _licensed_resources_repository.get_by_resource_identifier( + app, + licensed_resource_name=licensed_resource_name, + licensed_resource_type=licensed_resource_type, + ) + ) + + if licensed_resource.licensed_resource_data != new_licensed_resource_data: + ddiff = DeepDiff( + licensed_resource.licensed_resource_data, new_licensed_resource_data + ) + msg = ( + f"DIFFERENT_RESOURCE: {resource_key=} found in licensed_resource_id={licensed_resource.licensed_resource_id} with different data. 
" + f"Diff:\n\t{pformat(ddiff, indent=2, width=200)}" + ) + return RegistrationResult( + licensed_resource, RegistrationState.DIFFERENT_RESOURCE, msg + ) + + return RegistrationResult( + licensed_resource, + RegistrationState.ALREADY_REGISTERED, + f"ALREADY_REGISTERED: {resource_key=} found in licensed_resource_id={licensed_resource.licensed_resource_id}", + ) + + except LicensedResourceNotFoundError: + licensed_resource = await _licensed_resources_repository.create_if_not_exists( + app, + display_name=licensed_item_display_name, + licensed_resource_name=licensed_resource_name, + licensed_resource_type=licensed_resource_type, + licensed_resource_data=new_licensed_resource_data, + ) + + return RegistrationResult( + licensed_resource, + RegistrationState.NEWLY_REGISTERED, + f"NEWLY_REGISTERED: {resource_key=} registered with licensed_resource_id={licensed_resource.licensed_resource_id}", + ) + + +async def trash_licensed_resource( + app: web.Application, + *, + licensed_resource_id: LicensedResourceID, +) -> None: + await _licensed_resources_repository.update( + app, + licensed_resource_id=licensed_resource_id, + updates=LicensedResourcePatchDB(trash=True), + ) + + +async def untrash_licensed_resource( + app: web.Application, + *, + licensed_resource_id: LicensedResourceID, +) -> None: + await _licensed_resources_repository.update( + app, + licensed_resource_id=licensed_resource_id, + updates=LicensedResourcePatchDB(trash=True), + ) diff --git a/services/web/server/src/simcore_service_webserver/licenses/_rpc.py b/services/web/server/src/simcore_service_webserver/licenses/_rpc.py index 218cc1428e9..e86129ac3f3 100644 --- a/services/web/server/src/simcore_service_webserver/licenses/_rpc.py +++ b/services/web/server/src/simcore_service_webserver/licenses/_rpc.py @@ -3,12 +3,13 @@ from models_library.api_schemas_webserver.licensed_items import ( LicensedItemRpcGet, LicensedItemRpcGetPage, + LicensedResource, ) from models_library.api_schemas_webserver.licensed_items_checkouts import ( LicensedItemCheckoutRpcGet, ) from models_library.basic_types import IDStr -from models_library.licensed_items import LicensedItemID +from models_library.licenses import LicensedItemID, LicensedItemPage from models_library.products import ProductName from models_library.resource_tracker_licensed_items_checkouts import ( LicensedItemCheckoutID, @@ -28,7 +29,6 @@ from ..rabbitmq import get_rabbitmq_rpc_server from . 
import _licensed_items_checkouts_service, _licensed_items_service -from ._common.models import LicensedItemPage router = RPCRouter() @@ -45,20 +45,27 @@ async def get_licensed_items( await _licensed_items_service.list_licensed_items( app=app, product_name=product_name, + include_hidden_items_on_market=True, offset=offset, limit=limit, - order_by=OrderBy(field=IDStr("licensed_resource_name")), + order_by=OrderBy(field=IDStr("display_name")), ) ) licensed_item_get_page: LicensedItemRpcGetPage = LicensedItemRpcGetPage( items=[ - LicensedItemRpcGet.model_construct( + LicensedItemRpcGet( licensed_item_id=licensed_item.licensed_item_id, + key=licensed_item.key, + version=licensed_item.version, display_name=licensed_item.display_name, licensed_resource_type=licensed_item.licensed_resource_type, - licensed_resource_data=licensed_item.licensed_resource_data, + licensed_resources=[ + LicensedResource(**resource) + for resource in licensed_item.licensed_resources + ], pricing_plan_id=licensed_item.pricing_plan_id, + is_hidden_on_market=licensed_item.is_hidden_on_market, created_at=licensed_item.created_at, modified_at=licensed_item.modified_at, ) @@ -102,9 +109,9 @@ async def checkout_licensed_item_for_wallet( licensed_item_get = ( await _licensed_items_checkouts_service.checkout_licensed_item_for_wallet( app, - licensed_item_id=licensed_item_id, wallet_id=wallet_id, product_name=product_name, + licensed_item_id=licensed_item_id, num_of_seats=num_of_seats, service_run_id=service_run_id, user_id=user_id, @@ -113,6 +120,8 @@ async def checkout_licensed_item_for_wallet( return LicensedItemCheckoutRpcGet.model_construct( licensed_item_checkout_id=licensed_item_get.licensed_item_checkout_id, licensed_item_id=licensed_item_get.licensed_item_id, + key=licensed_item_get.key, + version=licensed_item_get.version, wallet_id=licensed_item_get.wallet_id, user_id=licensed_item_get.user_id, product_name=licensed_item_get.product_name, @@ -141,6 +150,8 @@ async def release_licensed_item_for_wallet( return LicensedItemCheckoutRpcGet.model_construct( licensed_item_checkout_id=licensed_item_get.licensed_item_checkout_id, licensed_item_id=licensed_item_get.licensed_item_id, + key=licensed_item_get.key, + version=licensed_item_get.version, wallet_id=licensed_item_get.wallet_id, user_id=licensed_item_get.user_id, product_name=licensed_item_get.product_name, diff --git a/services/web/server/src/simcore_service_webserver/licenses/errors.py b/services/web/server/src/simcore_service_webserver/licenses/errors.py index 18c57966123..e6e292e1931 100644 --- a/services/web/server/src/simcore_service_webserver/licenses/errors.py +++ b/services/web/server/src/simcore_service_webserver/licenses/errors.py @@ -6,8 +6,24 @@ class LicensesValueError(WebServerBaseError, ValueError): class LicensedItemNotFoundError(LicensesValueError): - msg_template = "License good {licensed_item_id} not found" + msg_template = "License item {licensed_item_id} not found" + + +class LicensedKeyVersionNotFoundError(LicensesValueError): + msg_template = "License key {key} version {version} not found" + + +class LicensedResourceNotFoundError(LicensesValueError): + msg_template = "License resource {licensed_resource_id} not found" class LicensedItemPricingPlanMatchError(LicensesValueError): msg_template = "The provided pricing plan {pricing_plan_id} does not match the one associated with the licensed item {licensed_item_id}." 
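For context on how these exceptions surface to callers: the license errors above and below follow the codebase's `msg_template` convention, where the kwargs passed at construction are interpolated into the class-level template to build the user-facing message. A minimal, self-contained sketch of that pattern (the `_BaseError` shim below is a hypothetical stand-in for the actual `WebServerBaseError` machinery, which is assumed to behave similarly):

```python
# Sketch only: illustrates the msg_template convention used by these errors.
class _BaseError(ValueError):
    msg_template: str = ""

    def __init__(self, **ctx) -> None:
        # Interpolate constructor kwargs into the class-level template
        super().__init__(self.msg_template.format(**ctx))


class LicensedItemPricingPlanMatchError(_BaseError):
    msg_template = (
        "The provided pricing plan {pricing_plan_id} does not match the one "
        "associated with the licensed item {licensed_item_id}."
    )


try:
    # Hypothetical values, mirroring the check in purchase_licensed_item
    raise LicensedItemPricingPlanMatchError(pricing_plan_id=11, licensed_item_id="abc-123")
except LicensedItemPricingPlanMatchError as err:
    print(err)  # -> The provided pricing plan 11 does not match ...
```
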
+ + +class LicensedItemPricingPlanConfigurationError(LicensesValueError): + msg_template = "Pricing plan {pricing_plan_id} is incorrectly configured. Please contact support." + + +class LicensedItemNumOfSeatsMatchError(LicensesValueError): + msg_template = "The number of seats provided by the client ({num_of_seats}) does not match the one associated with pricing unit {pricing_unit_id}." diff --git a/services/web/server/src/simcore_service_webserver/licenses/plugin.py b/services/web/server/src/simcore_service_webserver/licenses/plugin.py index 859a52bf1bd..2911564007f 100644 --- a/services/web/server/src/simcore_service_webserver/licenses/plugin.py +++ b/services/web/server/src/simcore_service_webserver/licenses/plugin.py @@ -10,11 +10,13 @@ from ..rabbitmq import setup_rabbitmq from ..rest.plugin import setup_rest from . import ( + _itis_vip_syncer_service, _licensed_items_checkouts_rest, _licensed_items_purchases_rest, _licensed_items_rest, _rpc, ) +from .settings import LicensesSettings, get_plugin_settings _logger = logging.getLogger(__name__) @@ -26,7 +28,7 @@ logger=_logger, ) def setup_licenses(app: web.Application): - assert app[APP_SETTINGS_KEY].WEBSERVER_LICENSES # nosec + settings: LicensesSettings = get_plugin_settings(app) # routes setup_rest(app) @@ -37,3 +39,23 @@ def setup_licenses(app: web.Application): setup_rabbitmq(app) if app[APP_SETTINGS_KEY].WEBSERVER_RABBITMQ: app.on_startup.append(_rpc.register_rpc_routes_on_startup) + + if settings.LICENSES_ITIS_VIP_SYNCER_ENABLED: + categories = [] + if settings.LICENSES_ITIS_VIP: + categories += settings.LICENSES_ITIS_VIP.to_categories() + + if settings.LICENSES_SPEAG_PHANTOMS: + categories += settings.LICENSES_SPEAG_PHANTOMS.to_categories() + + if categories: + _itis_vip_syncer_service.setup_itis_vip_syncer( + app, + categories=categories, + resync_after=settings.LICENSES_ITIS_VIP_SYNCER_PERIODICITY, + ) + else: + _logger.warning( + "Skipping setup_itis_vip_syncer: no categories were provided in settings %s", + settings.model_dump_json(indent=1), + ) diff --git a/services/web/server/src/simcore_service_webserver/licenses/settings.py b/services/web/server/src/simcore_service_webserver/licenses/settings.py new file mode 100644 index 00000000000..3882c88da1d --- /dev/null +++ b/services/web/server/src/simcore_service_webserver/licenses/settings.py @@ -0,0 +1,42 @@ +import datetime +from typing import Annotated + +from aiohttp import web +from pydantic import Field +from servicelib.aiohttp.application_keys import APP_SETTINGS_KEY +from settings_library.base import BaseCustomSettings + +from ._itis_vip_settings import ItisVipSettings, SpeagPhantomsSettings + + +class LicensesSettings(BaseCustomSettings): + # ITIS - VIP + LICENSES_ITIS_VIP: Annotated[ + ItisVipSettings | None, + Field( + description="Settings for VIP licensed models", + json_schema_extra={"auto_default_from_env": True}, + ), + ] + LICENSES_ITIS_VIP_SYNCER_ENABLED: bool = False + LICENSES_ITIS_VIP_SYNCER_PERIODICITY: datetime.timedelta = datetime.timedelta( + days=1 + ) + + # SPEAG - PHANTOMS + LICENSES_SPEAG_PHANTOMS: Annotated[ + SpeagPhantomsSettings | None, + Field( + description="Settings for SPEAG licensed phantoms", + json_schema_extra={"auto_default_from_env": True}, + ), + ] + + # other licensed resources come here ... + + +def get_plugin_settings(app: web.Application) -> LicensesSettings: + settings = app[APP_SETTINGS_KEY].WEBSERVER_LICENSES + assert settings, "setup_settings not called?" 
# nosec + assert isinstance(settings, LicensesSettings) # nosec + return settings diff --git a/services/web/server/src/simcore_service_webserver/login/_2fa_api.py b/services/web/server/src/simcore_service_webserver/login/_2fa_api.py index fc844dd79f6..cda2bc1721d 100644 --- a/services/web/server/src/simcore_service_webserver/login/_2fa_api.py +++ b/services/web/server/src/simcore_service_webserver/login/_2fa_api.py @@ -1,4 +1,4 @@ -""" two-factor-authentication utils +"""two-factor-authentication utils Currently includes two parts: @@ -10,6 +10,7 @@ import asyncio import logging +import twilio.rest # type: ignore[import-untyped] from aiohttp import web from models_library.users import UserID from pydantic import BaseModel, Field @@ -17,10 +18,9 @@ from servicelib.utils_secrets import generate_passcode from settings_library.twilio import TwilioSettings from twilio.base.exceptions import TwilioException # type: ignore[import-untyped] -from twilio.rest import Client # type: ignore[import-untyped] from ..login.errors import SendingVerificationEmailError, SendingVerificationSmsError -from ..products.api import Product +from ..products.models import Product from ..redis import get_redis_validation_code_client from .utils_email import get_template_path, send_email_from_template @@ -118,7 +118,8 @@ def _sender(): # # SEE https://www.twilio.com/docs/sms/quickstart/python # - client = Client( + # NOTE: accessed via the module (instead of importing Client directly) so that tests can mock twilio.rest.Client + client = twilio.rest.Client( twilio_auth.TWILIO_ACCOUNT_SID, twilio_auth.TWILIO_AUTH_TOKEN ) message = client.messages.create(**create_kwargs) diff --git a/services/web/server/src/simcore_service_webserver/login/_2fa_handlers.py b/services/web/server/src/simcore_service_webserver/login/_2fa_handlers.py index 7d35b2e7cca..83c2119dab3 100644 --- a/services/web/server/src/simcore_service_webserver/login/_2fa_handlers.py +++ b/services/web/server/src/simcore_service_webserver/login/_2fa_handlers.py @@ -9,7 +9,8 @@ from servicelib.aiohttp.requests_validation import parse_request_body_as from servicelib.mimetype_constants import MIMETYPE_APPLICATION_JSON -from ..products.api import Product, get_current_product +from ..products import products_web +from ..products.models import Product from ..session.access_policies import session_access_required from ._2fa_api import ( create_2fa_code, @@ -51,7 +52,7 @@ class Resend2faBody(InputSchema): @handle_login_exceptions async def resend_2fa_code(request: web.Request): """Resends 2FA code via SMS/Email""" - product: Product = get_current_product(request) + product: Product = products_web.get_current_product(request) settings: LoginSettingsForProduct = get_plugin_settings( request.app, product_name=product.name ) diff --git a/services/web/server/src/simcore_service_webserver/login/_auth_api.py b/services/web/server/src/simcore_service_webserver/login/_auth_api.py index a5de2c1abc5..5e00ae0b9e6 100644 --- a/services/web/server/src/simcore_service_webserver/login/_auth_api.py +++ b/services/web/server/src/simcore_service_webserver/login/_auth_api.py @@ -8,7 +8,7 @@ from ..db.plugin import get_database_engine from ..groups.api import is_user_by_email_in_group -from ..products.api import Product +from ..products.models import Product from ..security.api import check_password, encrypt_password from ._constants import MSG_UNKNOWN_EMAIL, MSG_WRONG_PASSWORD from .storage import AsyncpgStorage, get_plugin_storage diff --git a/services/web/server/src/simcore_service_webserver/login/_auth_handlers.py 
b/services/web/server/src/simcore_service_webserver/login/_auth_handlers.py index db8ee3421e3..fe1794363d8 100644 --- a/services/web/server/src/simcore_service_webserver/login/_auth_handlers.py +++ b/services/web/server/src/simcore_service_webserver/login/_auth_handlers.py @@ -13,7 +13,8 @@ from simcore_postgres_database.models.users import UserRole from .._meta import API_VTAG -from ..products.api import Product, get_current_product +from ..products import products_web +from ..products.models import Product from ..security.api import forget_identity from ..session.access_policies import ( on_success_grant_session_access_to, @@ -96,7 +97,7 @@ async def login(request: web.Request): If 2FA is enabled, then the login continues with a second request to login_2fa """ - product: Product = get_current_product(request) + product: Product = products_web.get_current_product(request) settings: LoginSettingsForProduct = get_plugin_settings( request.app, product_name=product.name ) @@ -235,7 +236,7 @@ class LoginTwoFactorAuthBody(InputSchema): ) async def login_2fa(request: web.Request): """Login (continuation): Submits 2FA code""" - product: Product = get_current_product(request) + product: Product = products_web.get_current_product(request) settings: LoginSettingsForProduct = get_plugin_settings( request.app, product_name=product.name ) diff --git a/services/web/server/src/simcore_service_webserver/login/_confirmation.py b/services/web/server/src/simcore_service_webserver/login/_confirmation.py index fa9401778e7..386629a7482 100644 --- a/services/web/server/src/simcore_service_webserver/login/_confirmation.py +++ b/services/web/server/src/simcore_service_webserver/login/_confirmation.py @@ -1,9 +1,9 @@ -""" Confirmation codes/tokens tools +"""Confirmation codes/tokens tools - Codes are inserted in confirmation tables and they are associated to a user and an action - Used to validate some action (e.g. register, invitation, etc) - Codes can be used one time - Codes have expiration date (duration time is configurable) +Codes are inserted in confirmation tables and they are associated to a user and an action +Used to validate some action (e.g. register, invitation, etc) +Codes can be used one time +Codes have expiration date (duration time is configurable) """ import logging @@ -11,11 +11,11 @@ from urllib.parse import quote from aiohttp import web +from models_library.users import UserID from yarl import URL -from ..db.models import ConfirmationAction from .settings import LoginOptions -from .storage import AsyncpgStorage, ConfirmationTokenDict +from .storage import ActionLiteralStr, AsyncpgStorage, ConfirmationTokenDict log = logging.getLogger(__name__) @@ -61,22 +61,29 @@ def get_expiration_date( return confirmation["created_at"] + lifetime -async def is_confirmation_allowed( - cfg: LoginOptions, db: AsyncpgStorage, user, action: ConfirmationAction -): +async def get_or_create_confirmation( + cfg: LoginOptions, + db: AsyncpgStorage, + user_id: UserID, + action: ActionLiteralStr, +) -> ConfirmationTokenDict: + confirmation: ConfirmationTokenDict | None = await db.get_confirmation( - {"user": user, "action": action} + {"user": {"id": user_id}, "action": action} ) - if not confirmation: - return True - if is_confirmation_expired(cfg, confirmation): + + if confirmation is not None and is_confirmation_expired(cfg, confirmation): await db.delete_confirmation(confirmation) log.warning( "Used expired token [%s]. 
Deleted from confirmations table.", confirmation, ) - return True - return False + confirmation = None + + if confirmation is None: + confirmation = await db.create_confirmation(user_id, action=action) + + return confirmation def is_confirmation_expired(cfg: LoginOptions, confirmation: ConfirmationTokenDict): diff --git a/services/web/server/src/simcore_service_webserver/login/_constants.py b/services/web/server/src/simcore_service_webserver/login/_constants.py index 99cab2bb95c..cc10d6ed340 100644 --- a/services/web/server/src/simcore_service_webserver/login/_constants.py +++ b/services/web/server/src/simcore_service_webserver/login/_constants.py @@ -1,80 +1,80 @@ from typing import Final -MSG_2FA_CODE_SENT: Final[str] = "Code sent by SMS to {phone_number}" -MSG_2FA_UNAVAILABLE_OEC: Final[ - str -] = "Currently we cannot use 2FA, please try again later ({error_code})" -MSG_ACTIVATED: Final[str] = "Your account is activated" -MSG_ACTIVATION_REQUIRED: Final[ - str -] = "You have to activate your account via email, before you can login" -MSG_AUTH_FAILED: Final[str] = "Authorization failed" -MSG_CANT_SEND_MAIL: Final[str] = "Can't send email, try a little later" -MSG_CHANGE_EMAIL_REQUESTED: Final[ - str -] = "Please click on the verification link we sent to your new email address" -MSG_EMAIL_CHANGED: Final[str] = "Your email is changed" -MSG_EMAIL_ALREADY_REGISTERED: Final[ - str -] = "The email you have provided is already registered" # NOTE: avoid the wording 'product'. User only tries to register in a website. -MSG_EMAIL_SENT: Final[ - str -] = "An email has been sent to {email} with further instructions" -MSG_LOGGED_IN: Final[str] = "You are logged in" -MSG_LOGGED_OUT: Final[str] = "You are logged out" +MSG_2FA_CODE_SENT: Final[str] = "A code was sent by SMS to {phone_number}." +MSG_2FA_UNAVAILABLE: Final[str] = "Two-factor authentication is temporarily unavailable" +MSG_ACTIVATED: Final[str] = "Your account has been activated." +MSG_ACTIVATION_REQUIRED: Final[str] = ( + "Please activate your account via the email we sent before logging in." +) +MSG_AUTH_FAILED: Final[str] = ( + "Authorization was not successful. Please check your credentials and try again." +) +MSG_CANT_SEND_MAIL: Final[str] = ( + "Unable to send email at this time. Please try again later." +) +MSG_CHANGE_EMAIL_REQUESTED: Final[str] = ( + "Please click the verification link sent to your new email address." +) +MSG_EMAIL_CHANGED: Final[str] = "Your email address has been updated." +MSG_EMAIL_ALREADY_REGISTERED: Final[str] = ( + "This email address is already registered. Try logging in or use a different address." +) +MSG_EMAIL_SENT: Final[str] = "An email was sent to {email} with further instructions." +MSG_LOGGED_IN: Final[str] = "You have successfully logged in." +MSG_LOGGED_OUT: Final[str] = "You have successfully logged out." MSG_OFTEN_RESET_PASSWORD: Final[str] = ( - "You can not request of restoring your password so often. Please use" - " the link we sent you recently" + "You've requested a password reset recently. Please use the link we sent you or wait before requesting again." 
) MSG_PASSWORD_CHANGE_NOT_ALLOWED: Final[str] = ( - "Cannot reset password: permissions were expired or were removed" - "Please retry and if the problem persist contact {support_email}" -) -MSG_PASSWORD_CHANGED: Final[str] = "Your password is changed" -MSG_PASSWORD_MISMATCH: Final[str] = "Password and confirmation do not match" -MSG_PHONE_MISSING: Final[str] = "No phone was registered for this user" -MSG_UNAUTHORIZED_CODE_RESEND_2FA: Final[ - str -] = "Unauthorized: you cannot resend 2FA code anymore, please restart." -MSG_UNAUTHORIZED_LOGIN_2FA: Final[ - str -] = "Unauthorized: you cannot submit the code anymore, please restart." -MSG_UNAUTHORIZED_REGISTER_PHONE: Final[ - str -] = "Unauthorized: you cannot register the phone anymore, please restart." -MSG_UNAUTHORIZED_PHONE_CONFIRMATION: Final[ - str -] = "Unauthorized: you cannot submit the code anymore, please restart." -MSG_UNKNOWN_EMAIL: Final[str] = "This email is not registered" -MSG_USER_DELETED: Final[ - str -] = "This account was requested for deletion. To reactivate or further information please contact support: {support_email}" -MSG_USER_BANNED: Final[ - str -] = "This user does not have anymore access. Please contact support for further details: {support_email}" -MSG_USER_EXPIRED: Final[ - str -] = "This account has expired and does not have anymore access. Please contact support for further details: {support_email}" - -MSG_USER_DISABLED: Final[ - str -] = "This account was disabled and cannot be registered. Please contact support for further details: {support_email}" - -MSG_WRONG_2FA_CODE__INVALID: Final[ - str -] = "Invalid code. Please provide valid code or generate new code." -MSG_WRONG_2FA_CODE__EXPIRED: Final[str] = "Expired code. Please generate new code." -MSG_WRONG_CAPTCHA__INVALID: Final[ - str -] = "The CAPTCHA code entered was incorrect. Please try again." -MSG_WRONG_PASSWORD: Final[str] = "Wrong password" -MSG_WEAK_PASSWORD: Final[ - str -] = "Password must be at least {LOGIN_PASSWORD_MIN_LENGTH} characters long" - -MSG_INVITATIONS_CONTACT_SUFFIX: Final[ - str -] = "Please contact our support team to get a new invitation." + "Unable to reset password. Permissions may have expired or been removed. " + "Please try again, or contact support if the problem continues: {support_email}" +) +MSG_PASSWORD_CHANGED: Final[str] = "Your password has been updated." +MSG_PASSWORD_MISMATCH: Final[str] = ( + "Password and confirmation do not match. Please try again." +) +MSG_PHONE_MISSING: Final[str] = "No phone number is associated with this account." +MSG_UNAUTHORIZED_CODE_RESEND_2FA: Final[str] = ( + "You can no longer resend the code. Please restart the verification process." +) +MSG_UNAUTHORIZED_LOGIN_2FA: Final[str] = ( + "You can no longer submit a code. Please restart the login process." +) +MSG_UNAUTHORIZED_REGISTER_PHONE: Final[str] = ( + "Phone registration is no longer allowed. Please restart the registration process." +) +MSG_UNAUTHORIZED_PHONE_CONFIRMATION: Final[str] = ( + "You can no longer submit a code. Please restart the confirmation process." +) +MSG_UNKNOWN_EMAIL: Final[str] = "This email address is not registered." +MSG_USER_DELETED: Final[str] = ( + "This account is scheduled for deletion. To reactivate it or for more information, please contact support: {support_email}" +) +MSG_USER_BANNED: Final[str] = ( + "Access to this account is no longer available. 
Please contact support for more information: {support_email}" +) +MSG_USER_EXPIRED: Final[str] = ( + "This account has expired and access is no longer available. Please contact support for assistance: {support_email}" +) +MSG_USER_DISABLED: Final[str] = ( + "This account has been disabled and cannot be registered again. Please contact support for details: {support_email}" +) +MSG_WRONG_2FA_CODE__INVALID: Final[str] = ( + "The code entered is not valid. Please enter a valid code or generate a new one." +) +MSG_WRONG_2FA_CODE__EXPIRED: Final[str] = ( + "The code has expired. Please generate a new code." +) +MSG_WRONG_CAPTCHA__INVALID: Final[str] = ( + "The CAPTCHA entered is incorrect. Please try again." +) +MSG_WRONG_PASSWORD: Final[str] = "The password is incorrect. Please try again." +MSG_WEAK_PASSWORD: Final[str] = ( + "Password must be at least {LOGIN_PASSWORD_MIN_LENGTH} characters long." +) +MSG_INVITATIONS_CONTACT_SUFFIX: Final[str] = ( + "Please contact our support team to request a new invitation." +) # Login Accepted Response Codes: # - These string codes are used to identify next step in the login (e.g. login_2fa or register_phone?) @@ -86,9 +86,9 @@ # App keys for login plugin # Naming convention: APP_LOGIN_...KEY -APP_LOGIN_SETTINGS_PER_PRODUCT_KEY: Final[ - str -] = f"{__name__}.LOGIN_SETTINGS_PER_PRODUCT" +APP_LOGIN_SETTINGS_PER_PRODUCT_KEY: Final[str] = ( + f"{__name__}.LOGIN_SETTINGS_PER_PRODUCT" +) # maximum amount the user can resend the code via email or phone diff --git a/services/web/server/src/simcore_service_webserver/login/_registration.py b/services/web/server/src/simcore_service_webserver/login/_registration.py index 6471757d183..0e924def7b1 100644 --- a/services/web/server/src/simcore_service_webserver/login/_registration.py +++ b/services/web/server/src/simcore_service_webserver/login/_registration.py @@ -1,7 +1,7 @@ -""" Core functionality and tools for user's registration +"""Core functionality and tools for user's registration - - registration code - - invitation code +- registration code +- invitation code """ import logging @@ -39,7 +39,7 @@ InvalidInvitationError, InvitationsServiceUnavailableError, ) -from ..products.api import Product +from ..products.models import Product from ._confirmation import is_confirmation_expired, validate_confirmation_code from ._constants import ( MSG_EMAIL_ALREADY_REGISTERED, @@ -214,9 +214,7 @@ def _invitations_request_context(invitation_code: str) -> Iterator[URL]: except (ValidationError, InvalidInvitationError) as err: error_code = create_error_code(err) - user_error_msg = ( - f"Invalid invitation. {MSG_INVITATIONS_CONTACT_SUFFIX} [{error_code}]" - ) + user_error_msg = f"Invalid invitation. 
{MSG_INVITATIONS_CONTACT_SUFFIX}" _logger.exception( **create_troubleshotting_log_kwargs( @@ -233,7 +231,7 @@ def _invitations_request_context(invitation_code: str) -> Iterator[URL]: except InvitationsServiceUnavailableError as err: error_code = create_error_code(err) - user_error_msg = f"Unable to process your invitation since the invitations service is currently unavailable [{error_code}]" + user_error_msg = "Unable to process your invitation since the invitations service is currently unavailable" _logger.exception( **create_troubleshotting_log_kwargs( diff --git a/services/web/server/src/simcore_service_webserver/login/_registration_api.py b/services/web/server/src/simcore_service_webserver/login/_registration_api.py index 2d538942680..3e248fcabee 100644 --- a/services/web/server/src/simcore_service_webserver/login/_registration_api.py +++ b/services/web/server/src/simcore_service_webserver/login/_registration_api.py @@ -13,7 +13,8 @@ from servicelib.utils_secrets import generate_passcode from ..email.utils import send_email_from_template -from ..products.api import Product, get_current_product, get_product_template_path +from ..products import products_web +from ..products.models import Product _logger = logging.getLogger(__name__) @@ -25,8 +26,10 @@ async def send_close_account_email( retention_days: PositiveInt, ): template_name = "close_account.jinja2" - email_template_path = await get_product_template_path(request, template_name) - product = get_current_product(request) + email_template_path = await products_web.get_product_template_path( + request, template_name + ) + product: Product = products_web.get_current_product(request) try: await send_email_from_template( @@ -64,7 +67,9 @@ async def send_account_request_email_to_support( ): template_name = "request_account.jinja2" destination_email = product.product_owners_email or product.support_email - email_template_path = await get_product_template_path(request, template_name) + email_template_path = await products_web.get_product_template_path( + request, template_name + ) try: user_email = TypeAdapter(LowerCaseEmailStr).validate_python( request_form.get("email", None) diff --git a/services/web/server/src/simcore_service_webserver/login/_registration_handlers.py b/services/web/server/src/simcore_service_webserver/login/_registration_handlers.py index 42e8229e7a6..2cbb69db5ee 100644 --- a/services/web/server/src/simcore_service_webserver/login/_registration_handlers.py +++ b/services/web/server/src/simcore_service_webserver/login/_registration_handlers.py @@ -16,9 +16,10 @@ from servicelib.request_keys import RQT_USERID_KEY from servicelib.utils import fire_and_forget_task -from .._constants import RQ_PRODUCT_KEY from .._meta import API_VTAG -from ..products.api import Product, get_current_product +from ..constants import RQ_PRODUCT_KEY +from ..products import products_web +from ..products.models import Product from ..security.api import check_password, forget_identity from ..security.decorators import permission_required from ..session.api import get_session @@ -62,7 +63,7 @@ def _get_ipinfo(request: web.Request) -> dict[str, Any]: ) @global_rate_limit_route(number_of_requests=30, interval_seconds=MINUTE) async def request_product_account(request: web.Request): - product = get_current_product(request) + product = products_web.get_current_product(request) session = await get_session(request) body = await parse_request_body_as(AccountRequestInfo, request) @@ -101,7 +102,7 @@ async def unregister_account(request: web.Request): req_ctx 
= _AuthenticatedContext.model_validate(request) body = await parse_request_body_as(UnregisterCheck, request) - product: Product = get_current_product(request) + product: Product = products_web.get_current_product(request) settings: LoginSettingsForProduct = get_plugin_settings( request.app, product_name=product.name ) diff --git a/services/web/server/src/simcore_service_webserver/login/decorators.py b/services/web/server/src/simcore_service_webserver/login/decorators.py index 1fd2bc90871..e5be70e1efb 100644 --- a/services/web/server/src/simcore_service_webserver/login/decorators.py +++ b/services/web/server/src/simcore_service_webserver/login/decorators.py @@ -7,7 +7,7 @@ from servicelib.aiohttp.typing_extension import HandlerAnyReturn from servicelib.request_keys import RQT_USERID_KEY -from ..products.api import get_product_name +from ..products import products_web from ..security.api import ( PERMISSION_PRODUCT_LOGIN_KEY, AuthContextDict, @@ -62,7 +62,7 @@ async def _wrapper(request: web.Request): request, PERMISSION_PRODUCT_LOGIN_KEY, context=AuthContextDict( - product_name=get_product_name(request), + product_name=products_web.get_product_name(request), authorized_uid=user_id, ), ) diff --git a/services/web/server/src/simcore_service_webserver/login/errors.py b/services/web/server/src/simcore_service_webserver/login/errors.py index 56588b87df6..835c971d312 100644 --- a/services/web/server/src/simcore_service_webserver/login/errors.py +++ b/services/web/server/src/simcore_service_webserver/login/errors.py @@ -7,13 +7,12 @@ from servicelib.mimetype_constants import MIMETYPE_APPLICATION_JSON from ..errors import WebServerBaseError -from ._constants import MSG_2FA_UNAVAILABLE_OEC +from ._constants import MSG_2FA_UNAVAILABLE _logger = logging.getLogger(__name__) -class LoginError(WebServerBaseError, ValueError): - ... +class LoginError(WebServerBaseError, ValueError): ... 
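
Aside: the import swap above (`MSG_2FA_UNAVAILABLE_OEC` to `MSG_2FA_UNAVAILABLE`) is part of a pattern applied throughout this PR, visible again in the hunk just below: operator error codes are dropped from user-facing strings and kept only in the structured logs. A minimal sketch of that pattern, with illustrative names rather than the actual `servicelib` helpers:

```python
import functools
import logging
import uuid

_logger = logging.getLogger(__name__)

# illustrative constant; the real one lives in login/_constants.py
MSG_2FA_UNAVAILABLE = "Two-factor authentication is temporarily unavailable"


class SendingVerificationSmsError(ValueError):
    """Raised when the 2FA SMS could not be delivered (sketch)."""


def handle_2fa_delivery_error(handler):
    @functools.wraps(handler)
    async def _wrapper(request):
        try:
            return await handler(request)
        except SendingVerificationSmsError:
            # the support code stays in the server logs only ...
            error_code = uuid.uuid4().hex[:8].upper()
            _logger.exception("2FA delivery failed [OEC:%s]", error_code)
            # ... while the client gets a stable, generic message
            raise RuntimeError(MSG_2FA_UNAVAILABLE) from None

    return _wrapper
```

The user still gets an actionable message, and support can correlate a report with the `[OEC:...]` entry in the logs.
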
class SendingVerificationSmsError(LoginError): @@ -32,7 +31,7 @@ async def wrapper(request: web.Request) -> web.StreamResponse: except (SendingVerificationSmsError, SendingVerificationEmailError) as exc: error_code = exc.error_code() - front_end_msg = MSG_2FA_UNAVAILABLE_OEC.format(error_code=error_code) + front_end_msg = MSG_2FA_UNAVAILABLE # in these cases I want to log the cause _logger.exception( **create_troubleshotting_log_kwargs( diff --git a/services/web/server/src/simcore_service_webserver/login/handlers_change.py b/services/web/server/src/simcore_service_webserver/login/handlers_change.py index 75c93ff990e..6710022bf74 100644 --- a/services/web/server/src/simcore_service_webserver/login/handlers_change.py +++ b/services/web/server/src/simcore_service_webserver/login/handlers_change.py @@ -5,24 +5,26 @@ from models_library.emails import LowerCaseEmailStr from pydantic import SecretStr, field_validator from servicelib.aiohttp.requests_validation import parse_request_body_as +from servicelib.logging_errors import create_troubleshotting_log_kwargs from servicelib.mimetype_constants import MIMETYPE_APPLICATION_JSON from servicelib.request_keys import RQT_USERID_KEY from simcore_postgres_database.utils_users import UsersRepo -from simcore_service_webserver.db.plugin import get_database_engine from .._meta import API_VTAG -from ..products.api import Product, get_current_product +from ..db.plugin import get_database_engine +from ..products import products_web +from ..products.models import Product from ..security.api import check_password, encrypt_password +from ..users import api as users_service from ..utils import HOUR from ..utils_rate_limiting import global_rate_limit_route -from ._confirmation import is_confirmation_allowed, make_confirmation_link +from ._confirmation import get_or_create_confirmation, make_confirmation_link from ._constants import ( MSG_CANT_SEND_MAIL, MSG_CHANGE_EMAIL_REQUESTED, MSG_EMAIL_SENT, MSG_OFTEN_RESET_PASSWORD, MSG_PASSWORD_CHANGED, - MSG_UNKNOWN_EMAIL, MSG_WRONG_PASSWORD, ) from ._models import InputSchema, create_password_match_validator @@ -32,7 +34,6 @@ from .utils import ( ACTIVE, CHANGE_EMAIL, - RESET_PASSWORD, flash_response, validate_user_status, ) @@ -45,71 +46,147 @@ class ResetPasswordBody(InputSchema): - email: str + email: LowerCaseEmailStr -@routes.post(f"/{API_VTAG}/auth/reset-password", name="auth_reset_password") -@global_rate_limit_route(number_of_requests=10, interval_seconds=HOUR) -async def submit_request_to_reset_password(request: web.Request): - """ - 1. confirm user exists - 2. check user status - 3. send email with link to reset password - 4. user clicks confirmation link -> auth/confirmation/{} -> reset_password_allowed - - Follows guidelines from [1]: https://postmarkapp.com/guides/password-reset-email-best-practices - - You would never want to confirm or deny the existence of an account with a given email or username. - - Expiration of link - - Support contact information - - Who requested the reset? 
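
The rewritten handler below is throttled with `@global_rate_limit_route(number_of_requests=10, interval_seconds=HOUR, error_msg=...)`. The real decorator lives in `..utils_rate_limiting` and is not reproduced in this diff; as a rough sketch only, a global sliding-window limiter can look like this:

```python
import time
from collections import deque
from functools import wraps


def global_rate_limit_sketch(number_of_requests: int, interval_seconds: float, error_msg: str):
    """Sliding-window limiter shared across ALL clients of the decorated handler (sketch)."""
    calls: deque[float] = deque()

    def _decorator(handler):
        @wraps(handler)
        async def _wrapper(request):
            now = time.monotonic()
            # drop timestamps that have fallen out of the window
            while calls and now - calls[0] > interval_seconds:
                calls.popleft()
            if len(calls) >= number_of_requests:
                # the real decorator answers HTTP 429 with error_msg
                raise RuntimeError(error_msg)
            calls.append(now)
            return await handler(request)

        return _wrapper

    return _decorator
```
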
+@routes.post(f"/{API_VTAG}/auth/reset-password", name="initiate_reset_password")
+@global_rate_limit_route(
+    number_of_requests=10, interval_seconds=HOUR, error_msg=MSG_OFTEN_RESET_PASSWORD
+)
+async def initiate_reset_password(request: web.Request):
+    """First of the "Two-Step Action Confirmation pattern": initiate_reset_password + complete_reset_password(code)
+
+
+    ```mermaid
+    sequenceDiagram
+        participant User
+        participant Frontend
+        participant Backend
+        participant Email
+
+        User->>Backend: POST initiate_password_reset(email)
+        Backend->>Email: Send confirmation link with code
+        Note right of Email: Link: GET /auth_confirmation?code=XXX
+
+        User->>Backend: GET auth_confirmation?code=XXX
+        Backend-->>User: Redirect to /#35;reset-password?code=XXX
+
+        User->>Frontend: Access /#35;reset-password?code=XXX
+        Frontend->>User: Show form for new password (x2)
+
+        User->>Frontend: Enters new password and confirms
+        Frontend->>Backend: POST complete_password_reset(code, new_password)
+
+        Backend-->>User: Password reset confirmation
+        Backend->>Backend: Update user's password in database
+    ```
+
+
+    Follows guidelines from https://postmarkapp.com/guides/password-reset-email-best-practices
+    - 1. You would never want to confirm or deny the existence of an account with a given email or username.
+    - 2. Expiration of link
+    - 3. Support contact information
+    - 4. Who requested the reset?
     """
     db: AsyncpgStorage = get_plugin_storage(request.app)
     cfg: LoginOptions = get_plugin_options(request.app)
-    product: Product = get_current_product(request)
+    product: Product = products_web.get_current_product(request)

     request_body = await parse_request_body_as(ResetPasswordBody, request)

+    _error_msg_prefix, _error_msg_suffix = (
+        "Password reset initiated",
+        "Ignoring request.",
+    )
+
+    def _get_error_context(
+        user=None,
+    ) -> dict[str, str]:
+        # NOTE: Guideline #4
+        ctx = {
+            "user_email": request_body.email,
+            "product_name": product.name,
+            "request.remote": f"{request.remote}",
+            "request.method": f"{request.method}",
+            "request.path": f"{request.path}",
+        }
+
+        if user:
+            ctx.update(
+                {
+                    "user_email": request_body.email,
+                    "user_id": user["id"],
+                    "user_status": user["status"],
+                    "user_role": user["role"],
+                }
+            )
+        return ctx
+
+    ok = True
+
+    # CHECK user exists
     user = await db.get_user({"email": request_body.email})
-    try:
-        if not user:
-            raise web.HTTPUnprocessableEntity(
-                reason=MSG_UNKNOWN_EMAIL, content_type=MIMETYPE_APPLICATION_JSON
-            )  # 422
+    if not user:
+        _logger.warning(
+            **create_troubleshotting_log_kwargs(
+                f"{_error_msg_prefix} for non-existent email. {_error_msg_suffix}",
+                error=Exception("No user found with this email"),
+                error_context=_get_error_context(),
+            )
+        )
+        ok = False

-        validate_user_status(user=dict(user), support_email=product.support_email)
+    if ok:
+        assert user  # nosec
+        assert user["email"] == request_body.email  # nosec
+
+        # CHECK user state
+        try:
+            validate_user_status(user=dict(user), support_email=product.support_email)
+        except web.HTTPError as err:
+            # NOTE: we abuse here a bit by reusing `validate_user_status` and catching http errors that we
+            # do not want to forward but rather log due to the special rules in this entrypoint
+            _logger.warning(
+                **create_troubleshotting_log_kwargs(
+                    f"{_error_msg_prefix} for invalid user. 
{_error_msg_suffix}.", + error=err, + error_context=_get_error_context(user), + ) + ) + ok = False + + if ok: + assert user # nosec assert user["status"] == ACTIVE # nosec - assert user["email"] == request_body.email # nosec + assert isinstance(user["id"], int) # nosec + + # CHECK access to product + if not await users_service.is_user_in_product( + request.app, user_id=user["id"], product_name=product.name + ): + _logger.warning( + **create_troubleshotting_log_kwargs( + f"{_error_msg_prefix} for a user with NO access to this product. {_error_msg_suffix}.", + error=Exception("User cannot access this product"), + error_context=_get_error_context(user), + ) + ) + ok = False - if not await is_confirmation_allowed(cfg, db, user, action=RESET_PASSWORD): - raise web.HTTPUnauthorized( - reason=MSG_OFTEN_RESET_PASSWORD, - content_type=MIMETYPE_APPLICATION_JSON, - ) # 401 + if ok: + assert user # nosec - except web.HTTPError as err: try: - await send_email_from_template( - request, - from_=product.support_email, - to=request_body.email, - template=await get_template_path( - request, "reset_password_email_failed.jinja2" - ), - context={ - "host": request.host, - "reason": err.reason, - "product": product, - }, + # Confirmation token that includes code to `complete_reset_password`. + # Recreated if non-existent or expired (Guideline #2) + confirmation = await get_or_create_confirmation( + cfg, db, user_id=user["id"], action="RESET_PASSWORD" ) - except Exception as err_mail: # pylint: disable=broad-except - _logger.exception("Cannot send email") - raise web.HTTPServiceUnavailable(reason=MSG_CANT_SEND_MAIL) from err_mail - else: - confirmation = await db.create_confirmation(user["id"], action=RESET_PASSWORD) - link = make_confirmation_link(request, confirmation) - try: + + # Produce a link so that the front-end can hit `complete_reset_password` + link = make_confirmation_link(request, confirmation) + # primary reset email with a URL and the normal instructions. 
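
A note on `get_or_create_confirmation` used above: reusing a still-valid pending token instead of minting a new row per request is what implements guideline #2 (link expiration). The actual helper sits in `_confirmation.py`; a hedged sketch of the expected behavior, assuming a storage object with `get_confirmation`/`delete_confirmation`/`create_confirmation` coroutines:

```python
from datetime import datetime, timedelta, timezone


async def get_or_create_confirmation_sketch(db, *, user_id: int, action: str, lifetime: timedelta):
    """Reuse a pending confirmation while it is still valid, otherwise replace it (sketch)."""
    confirmation = await db.get_confirmation({"user_id": user_id, "action": action})

    if confirmation and datetime.now(timezone.utc) - confirmation["created_at"] > lifetime:
        # expired: discard it so that a fresh code is issued
        await db.delete_confirmation(confirmation)
        confirmation = None

    if confirmation is None:
        confirmation = await db.create_confirmation(user_id=user_id, action=action)

    return confirmation
```
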
                await send_email_from_template(
                    request,
@@ -119,16 +196,24 @@ async def submit_request_to_reset_password(request: web.Request):
                        request, "reset_password_email.jinja2"
                    ),
                    context={
+                        "name": user.get("first_name") or user["name"],
                        "host": request.host,
                        "link": link,
+                        # NOTE: Guideline #3
                        "product": product,
                    },
                )
            except Exception as err:  # pylint: disable=broad-except
-                _logger.exception("Can not send email")
-                await db.delete_confirmation(confirmation)
+                _logger.exception(
+                    **create_troubleshotting_log_kwargs(
+                        "Unable to send email",
+                        error=err,
+                        error_context=_get_error_context(user),
+                    )
+                )
                raise web.HTTPServiceUnavailable(reason=MSG_CANT_SEND_MAIL) from err

+    # NOTE: Always same response: guideline #1
    return flash_response(MSG_EMAIL_SENT.format(email=request_body.email), "INFO")

@@ -139,7 +224,7 @@ class ChangeEmailBody(InputSchema):
 async def submit_request_to_change_email(request: web.Request):
     # NOTE: This code has been intentionally disabled in https://github.com/ITISFoundation/osparc-simcore/pull/5472
     db: AsyncpgStorage = get_plugin_storage(request.app)
-    product: Product = get_current_product(request)
+    product: Product = products_web.get_current_product(request)

     request_body = await parse_request_body_as(ChangeEmailBody, request)

@@ -160,7 +245,7 @@ async def submit_request_to_change_email(request: web.Request):
     # create new confirmation to ensure email is actually valid
     confirmation = await db.create_confirmation(
-        user["id"], CHANGE_EMAIL, request_body.email
+        user_id=user["id"], action="CHANGE_EMAIL", data=request_body.email
     )
     link = make_confirmation_link(request, confirmation)
     try:
diff --git a/services/web/server/src/simcore_service_webserver/login/handlers_confirmation.py b/services/web/server/src/simcore_service_webserver/login/handlers_confirmation.py
index 2fe63036378..f4b9bb755a8 100644
--- a/services/web/server/src/simcore_service_webserver/login/handlers_confirmation.py
+++ b/services/web/server/src/simcore_service_webserver/login/handlers_confirmation.py
@@ -23,13 +23,14 @@
 )
 from servicelib.logging_errors import create_troubleshotting_log_kwargs
 from servicelib.mimetype_constants import MIMETYPE_APPLICATION_JSON
-from simcore_postgres_database.errors import UniqueViolation
+from simcore_postgres_database.aiopg_errors import UniqueViolation
 from yarl import URL

-from ..products.api import Product, get_current_product
+from ..products import products_web
+from ..products.models import Product
 from ..security.api import encrypt_password
 from ..session.access_policies import session_access_required
-from ..utils import MINUTE
+from ..utils import HOUR, MINUTE
 from ..utils_aiohttp import create_redirect_to_page_response
 from ..utils_rate_limiting import global_rate_limit_route
 from ._2fa_api import delete_2fa_code, get_2fa_code
@@ -138,7 +139,7 @@ async def validate_confirmation_and_redirect(request: web.Request):
     """
     db: AsyncpgStorage = get_plugin_storage(request.app)
     cfg: LoginOptions = get_plugin_options(request.app)
-    product: Product = get_current_product(request)
+    product: Product = products_web.get_current_product(request)

     path_params = parse_request_path_parameters_as(_PathParam, request)

@@ -186,8 +187,7 @@ async def validate_confirmation_and_redirect(request: web.Request):
         error_code = create_error_code(err)
         user_error_msg = (
             f"Sorry, we cannot confirm your {action}."
-            "Please try again in a few moments. "
-            f"If the problem persist please contact support attaching this code ({error_code})"
+            "Please try again in a few moments."
) _logger.exception( @@ -224,7 +224,7 @@ class PhoneConfirmationBody(InputSchema): unauthorized_reason=MSG_UNAUTHORIZED_PHONE_CONFIRMATION, ) async def phone_confirmation(request: web.Request): - product: Product = get_current_product(request) + product: Product = products_web.get_current_product(request) settings: LoginSettingsForProduct = get_plugin_settings( request.app, product_name=product.name ) @@ -272,15 +272,17 @@ class ResetPasswordConfirmation(InputSchema): _password_confirm_match = field_validator("confirm")(check_confirm_password_match) -@routes.post("/v0/auth/reset-password/{code}", name="auth_reset_password_allowed") -async def reset_password(request: web.Request): - """Changes password using a token code without being logged in +@routes.post("/v0/auth/reset-password/{code}", name="complete_reset_password") +@global_rate_limit_route(number_of_requests=10, interval_seconds=HOUR) +async def complete_reset_password(request: web.Request): + """Last of the "Two-Step Action Confirmation pattern": initiate_reset_password + complete_reset_password(code) - Code is provided via email by calling first submit_request_to_reset_password + - Changes password using a token code without login + - Code is provided via email by calling first initiate_reset_password """ db: AsyncpgStorage = get_plugin_storage(request.app) cfg: LoginOptions = get_plugin_options(request.app) - product: Product = get_current_product(request) + product: Product = products_web.get_current_product(request) path_params = parse_request_path_parameters_as(_PathParam, request) request_body = await parse_request_body_as(ResetPasswordConfirmation, request) @@ -294,8 +296,8 @@ async def reset_password(request: web.Request): assert user # nosec await db.update_user( - dict(user), - { + user={"id": user["id"]}, + updates={ "password_hash": encrypt_password( request_body.password.get_secret_value() ) diff --git a/services/web/server/src/simcore_service_webserver/login/handlers_registration.py b/services/web/server/src/simcore_service_webserver/login/handlers_registration.py index 3d00ab57c03..e91556f4424 100644 --- a/services/web/server/src/simcore_service_webserver/login/handlers_registration.py +++ b/services/web/server/src/simcore_service_webserver/login/handlers_registration.py @@ -23,7 +23,8 @@ from .._meta import API_VTAG from ..groups.api import auto_add_user_to_groups, auto_add_user_to_product_group from ..invitations.api import is_service_invitation_code -from ..products.api import Product, get_current_product +from ..products import products_web +from ..products.models import Product from ..session.access_policies import ( on_success_grant_session_access_to, session_access_required, @@ -58,7 +59,6 @@ ) from .storage import AsyncpgStorage, ConfirmationTokenDict, get_plugin_storage from .utils import ( - REGISTRATION, envelope_response, flash_response, get_user_name_from_email, @@ -94,7 +94,7 @@ async def check_registration_invitation(request: web.Request): raises HTTPForbidden, HTTPServiceUnavailable """ - product: Product = get_current_product(request) + product: Product = products_web.get_current_product(request) settings: LoginSettingsForProduct = get_plugin_settings( request.app, product_name=product.name ) @@ -145,7 +145,7 @@ async def register(request: web.Request): An email with a link to 'email_confirmation' is sent to complete registration """ - product: Product = get_current_product(request) + product: Product = products_web.get_current_product(request) settings: LoginSettingsForProduct = 
get_plugin_settings(
        request.app, product_name=product.name
    )

@@ -249,7 +249,9 @@ async def register(request: web.Request):
     if settings.LOGIN_REGISTRATION_CONFIRMATION_REQUIRED:
         # Confirmation required: send confirmation email
         _confirmation: ConfirmationTokenDict = await db.create_confirmation(
-            user["id"], REGISTRATION, data=invitation.model_dump_json() if invitation else None
+            user_id=user["id"],
+            action="REGISTRATION",
+            data=invitation.model_dump_json() if invitation else None,
         )

         try:
@@ -272,7 +274,7 @@ async def register(request: web.Request):
             )
         except Exception as err:  # pylint: disable=broad-except
             error_code = create_error_code(err)
-            user_error_msg = f"{MSG_CANT_SEND_MAIL} [{error_code}]"
+            user_error_msg = MSG_CANT_SEND_MAIL

             _logger.exception(
                 **create_troubleshotting_log_kwargs(
@@ -358,7 +360,7 @@ async def register_phone(request: web.Request):
     - sends a code
     - registration is completed requesting to 'phone_confirmation' route with the code received
     """
-    product: Product = get_current_product(request)
+    product: Product = products_web.get_current_product(request)
     settings: LoginSettingsForProduct = get_plugin_settings(
         request.app, product_name=product.name
     )
@@ -414,7 +416,7 @@ async def register_phone(request: web.Request):
     except Exception as err:  # pylint: disable=broad-except
         # Unhandled errors -> 503
         error_code = create_error_code(err)
-        user_error_msg = f"Currently we cannot register phone numbers [{error_code}]"
+        user_error_msg = "Currently we cannot register phone numbers"

         _logger.exception(
             **create_troubleshotting_log_kwargs(
diff --git a/services/web/server/src/simcore_service_webserver/login/plugin.py b/services/web/server/src/simcore_service_webserver/login/plugin.py
index ef0c77c2f18..149780b668e 100644
--- a/services/web/server/src/simcore_service_webserver/login/plugin.py
+++ b/services/web/server/src/simcore_service_webserver/login/plugin.py
@@ -8,7 +8,7 @@
 from settings_library.email import SMTPSettings
 from settings_library.postgres import PostgresSettings

-from .._constants import (
+from ..constants import (
     APP_PUBLIC_CONFIG_PER_PRODUCT,
     APP_SETTINGS_KEY,
     INDEX_RESOURCE_NAME,
@@ -18,7 +18,8 @@
 from ..email.plugin import setup_email
 from ..email.settings import get_plugin_settings as get_email_plugin_settings
 from ..invitations.plugin import setup_invitations
-from ..products.api import ProductName, list_products
+from ..products import products_service
+from ..products.models import ProductName
 from ..products.plugin import setup_products
 from ..redis import setup_redis
 from ..rest.plugin import setup_rest
@@ -90,7 +91,7 @@ async def _resolve_login_settings_per_product(app: web.Application):
     # compose app and product settings
     errors = {}
-    for product in list_products(app):
+    for product in products_service.list_products(app):
         try:
             login_settings_per_product[
                 product.name
diff --git a/services/web/server/src/simcore_service_webserver/login/utils.py b/services/web/server/src/simcore_service_webserver/login/utils.py
index 22d0a57cf3e..259795c8cc3 100644
--- a/services/web/server/src/simcore_service_webserver/login/utils.py
+++ b/services/web/server/src/simcore_service_webserver/login/utils.py
@@ -21,7 +21,7 @@
 )


-def _to_names(enum_cls, names):
+def _to_names(enum_cls, names) -> list[str]:
     """ensures names are in enum by retrieving each of them"""
     return [getattr(enum_cls, att).name for att in names.split()]

diff --git a/services/web/server/src/simcore_service_webserver/login/utils_email.py 
b/services/web/server/src/simcore_service_webserver/login/utils_email.py index a5746e2fde8..9aef8317104 100644 --- a/services/web/server/src/simcore_service_webserver/login/utils_email.py +++ b/services/web/server/src/simcore_service_webserver/login/utils_email.py @@ -5,7 +5,7 @@ from .._resources import webserver_resources from ..email.utils import AttachmentTuple, send_email_from_template -from ..products.api import get_product_template_path +from ..products import products_web log = logging.getLogger(__name__) @@ -16,7 +16,7 @@ def themed(dirname: str, template: str) -> Path: async def get_template_path(request: web.Request, filename: str) -> Path: - return await get_product_template_path(request, filename) + return await products_web.get_product_template_path(request, filename) # prevents auto-removal by pycln diff --git a/services/web/server/src/simcore_service_webserver/long_running_tasks.py b/services/web/server/src/simcore_service_webserver/long_running_tasks.py index 29dd8d7caec..c2f842eab7a 100644 --- a/services/web/server/src/simcore_service_webserver/long_running_tasks.py +++ b/services/web/server/src/simcore_service_webserver/long_running_tasks.py @@ -29,7 +29,7 @@ async def _test_task_context_decorator( def setup_long_running_tasks(app: web.Application) -> None: setup( app, - router_prefix=f"/{API_VTAG}/tasks", + router_prefix=f"/{API_VTAG}/tasks-legacy", handler_check_decorator=login_required, task_request_context_decorator=_webserver_request_context_decorator, ) diff --git a/services/web/server/src/simcore_service_webserver/meta_modeling/_function_nodes.py b/services/web/server/src/simcore_service_webserver/meta_modeling/_function_nodes.py deleted file mode 100644 index 029a733c6a3..00000000000 --- a/services/web/server/src/simcore_service_webserver/meta_modeling/_function_nodes.py +++ /dev/null @@ -1,48 +0,0 @@ -""" Nodes (services) in a project implemented with python functions (denoted meta-function nodes) - - -So far, nodes in a project could be front-end, computational or dynamic. The first -was fully implemented in the web-client (e.g. file-picker) while the two last were implemented -independently as black-boxes (i.e. basically only i/o description known) inside of containers. Here -we start a new type of nodes declared as "front-end" but implemented as python functions in the backend. - -Meta-function nodes are evaluated in the backend in the pre-run stage of a meta-project run. - -An example of meta-function is the "Integers Iterator" node. -""" - -from copy import deepcopy - -from models_library.function_services_catalog import catalog, is_iterator_service -from models_library.projects_nodes import Node -from models_library.services_types import ServiceKey, ServiceVersion - -# META-FUNCTIONS --------------------------------------------------- -assert catalog # nosec - -# UTILS --------------------------------------------------------------- - - -def create_param_node_from_iterator_with_outputs(iterator_node: Node) -> Node: - """ - Converts an iterator_node with outputs (i.e. evaluated) to a parameter-node - that represents a constant value. - """ - assert is_iterator_service(iterator_node.key) # nosec - assert iterator_node.version == "1.0.0" # nosec - - return Node( - key=ServiceKey("simcore/services/frontend/parameter/integer"), - version=ServiceVersion("1.0.0"), - label=iterator_node.label, - inputs={}, - input_nodes=[], - thumbnail="", # NOTE: hack due to issue in projects json-schema - outputs=deepcopy(iterator_node.outputs), - ) - - -__all__: tuple[str, ...] 
= ( - "catalog", - "create_param_node_from_iterator_with_outputs", -) diff --git a/services/web/server/src/simcore_service_webserver/meta_modeling/_handlers.py b/services/web/server/src/simcore_service_webserver/meta_modeling/_handlers.py deleted file mode 100644 index 847395e6acd..00000000000 --- a/services/web/server/src/simcore_service_webserver/meta_modeling/_handlers.py +++ /dev/null @@ -1,410 +0,0 @@ -""" web-api handler functions added by the meta app's module - -""" -import logging -from collections.abc import Callable -from typing import NamedTuple - -from aiohttp import web -from models_library.projects import ProjectID -from models_library.rest_pagination import Page, PageQueryParameters -from models_library.rest_pagination_utils import paginate_data -from pydantic import BaseModel, ValidationError, field_validator -from pydantic.fields import Field -from pydantic.networks import HttpUrl -from servicelib.rest_constants import RESPONSE_MODEL_POLICY - -from .._meta import API_VTAG as VTAG -from ..login.decorators import login_required -from ..security.decorators import permission_required -from ..utils_aiohttp import create_url_for_function, envelope_json_response -from ..version_control.models import CheckpointID, CommitID, TagProxy -from ..version_control.vc_tags import parse_workcopy_project_tag_name -from ._iterations import IterationID, ProjectIteration -from ._results import ExtractedResults, extract_project_results -from ._version_control import VersionControlForMetaModeling - -_logger = logging.getLogger(__name__) - -# HANDLER'S CORE IMPLEMENTATION ------------------------------------------------------------ - - -class ParametersModel(PageQueryParameters): - project_uuid: ProjectID - ref_id: CommitID - - @field_validator("ref_id", mode="before") - @classmethod - def tags_as_refid_not_implemented(cls, v): - try: - return CommitID(v) - except ValueError as err: - # e.g. HEAD - msg = "cannot convert ref (e.g. HEAD) -> commit id" - raise NotImplementedError(msg) from err - - -def parse_query_parameters(request: web.Request) -> ParametersModel: - try: - return ParametersModel(**request.match_info) - except ValidationError as err: - raise web.HTTPUnprocessableEntity( - reason=f"Invalid query parameters: {err}" - ) from err - - -class _NotTaggedAsIterationError(Exception): - """A commit does not contain the tags - to be identified as an iterator - """ - - -class IterationItem(NamedTuple): - project_id: ProjectID - commit_id: CommitID - iteration_index: IterationID - - -class _IterationsRange(NamedTuple): - items: list[IterationItem] - total_count: int - - -async def _get_project_iterations_range( - vc_repo: VersionControlForMetaModeling, - project_uuid: ProjectID, - commit_id: CommitID, - offset: int = 0, - limit: int | None = None, -) -> _IterationsRange: - assert offset >= 0 # nosec - - repo_id = await vc_repo.get_repo_id(project_uuid) - assert repo_id is not None - - total_number_of_iterations = 0 - - # Searches all subsequent commits (i.e. 
children) and retrieve their tags - tags_per_child: list[list[TagProxy]] = await vc_repo.get_children_tags( - repo_id, commit_id - ) - - iter_items: list[IterationItem] = [] - for n, tags in enumerate(tags_per_child): - try: - iteration: ProjectIteration | None = None - workcopy_id: ProjectID | None = None - - for tag in tags: - if pim := ProjectIteration.from_tag_name( - tag.name, return_none_if_fails=True - ): - if iteration: - msg = f"This commit_id={commit_id!r} has more than one iteration tag={tag!r}" - raise _NotTaggedAsIterationError(msg) - iteration = pim - elif pid := parse_workcopy_project_tag_name(tag.name): - if workcopy_id: - msg = f"This commit_id={commit_id!r} has more than one workcopy tag={tag!r}" - raise _NotTaggedAsIterationError(msg) - workcopy_id = pid - else: - _logger.debug( - "Got %s for children of %s", f"{tag=}", f"{commit_id=}" - ) - - if not workcopy_id: - msg = f"No workcopy tag found in tags={tags!r}" - raise _NotTaggedAsIterationError(msg) - if not iteration: - msg = f"No iteration tag found in tags={tags!r}" - raise _NotTaggedAsIterationError(msg) - - iter_items.append( - IterationItem( - project_id=workcopy_id, - commit_id=iteration.repo_commit_id, - iteration_index=iteration.iteration_index, - ) - ) - - except _NotTaggedAsIterationError as err: - _logger.warning( - "Skipping %d-th child since is not tagged as an iteration of %s/%s: %s", - n, - f"{repo_id=}", - f"{commit_id=}", - f"{err=}", - ) - - # Selects range on those tagged as iterations and returned their assigned workcopy id - total_number_of_iterations = len(iter_items) - - # sort and select. If requested interval is outside of range, it returns empty - iter_items.sort(key=lambda item: item.iteration_index) - - if limit is None: - return _IterationsRange( - items=iter_items[offset:], - total_count=total_number_of_iterations, - ) - - return _IterationsRange( - items=iter_items[offset : (offset + limit)], - total_count=total_number_of_iterations, - ) - - -async def create_or_get_project_iterations( - vc_repo: VersionControlForMetaModeling, - project_uuid: ProjectID, - commit_id: CommitID, -) -> list[IterationItem]: - raise NotImplementedError - - -# MODELS ------------------------------------------------------------ - - -class ParentMetaProjectRef(BaseModel): - project_id: ProjectID - ref_id: CheckpointID - - -class _BaseModelGet(BaseModel): - name: str = Field(..., description="Iteration's resource API name") - parent: ParentMetaProjectRef = Field( - ..., description="Reference to the the meta-project that created this iteration" - ) - - -class ProjectIterationItem(_BaseModelGet): - iteration_index: IterationID = Field(...) - - workcopy_project_id: ProjectID = Field( - ..., - description="ID to this iteration's working copy." 
- "A working copy is a real project where this iteration is run", - ) - - workcopy_project_url: HttpUrl = Field( - ..., description="reference to a working copy project" - ) - - @classmethod - def create_iteration( - cls, - meta_project_uuid, - meta_project_commit_id, - iteration_index, - project_id, - url_for: Callable, - ): - return cls( - name=f"projects/{meta_project_uuid}/checkpoint/{meta_project_commit_id}/iterations/{iteration_index}", - parent=ParentMetaProjectRef( - project_id=meta_project_uuid, ref_id=meta_project_commit_id - ), - iteration_index=iteration_index, - workcopy_project_id=project_id, - workcopy_project_url=url_for( - "get_project", - project_id=project_id, - ), - ) - - -class ProjectIterationResultItem(ProjectIterationItem): - results: ExtractedResults - - @classmethod - def create_result( # pylint: disable=arguments-differ - cls, - meta_project_uuid, - meta_project_commit_id, - iteration_index, - project_id, - results, - url_for: Callable, - ): - return cls( - name=f"projects/{meta_project_uuid}/checkpoint/{meta_project_commit_id}/iterations/{iteration_index}/results", - parent=ParentMetaProjectRef( - project_id=meta_project_uuid, ref_id=meta_project_commit_id - ), - iteration_index=iteration_index, - workcopy_project_id=project_id, - results=results, - workcopy_project_url=url_for( - "get_project", - project_id=project_id, - ), - ) - - -# ROUTES ------------------------------------------------------------ - -routes = web.RouteTableDef() - - -@routes.get( - f"/{VTAG}/projects/{{project_uuid}}/checkpoint/{{ref_id}}/iterations", - name="list_project_iterations", -) -@login_required -@permission_required("project.snapshot.read") -async def list_project_iterations(request: web.Request) -> web.Response: - # SEE https://github.com/ITISFoundation/osparc-simcore/issues/2735 - - # parse and validate request ---- - q = parse_query_parameters(request) - meta_project_uuid = q.project_uuid - meta_project_commit_id = q.ref_id - - url_for = create_url_for_function(request) - vc_repo = VersionControlForMetaModeling.create_from_request(request) - - # core function ---- - iterations_range = await _get_project_iterations_range( - vc_repo, - meta_project_uuid, - meta_project_commit_id, - offset=q.offset, - limit=q.limit, - ) - - if iterations_range.total_count == 0: - raise web.HTTPNotFound( - reason=f"No iterations found for project {meta_project_uuid=}/{meta_project_commit_id=}" - ) - - assert len(iterations_range.items) <= q.limit # nosec - - # parse and validate response ---- - page_items = [ - ProjectIterationItem.create_iteration( - meta_project_uuid, - meta_project_commit_id, - item.iteration_index, - item.project_id, - url_for, - ) - for item in iterations_range.items - ] - - page = Page[ProjectIterationItem].model_validate( - paginate_data( - chunk=page_items, - request_url=request.url, - total=iterations_range.total_count, - limit=q.limit, - offset=q.offset, - ) - ) - return web.Response( - text=page.model_dump_json(**RESPONSE_MODEL_POLICY), - content_type="application/json", - ) - - -# NOTE: Enable when create_or_get_project_iterations is implemented -# SEE https://github.com/ITISFoundation/osparc-simcore/issues/2735 -# -# @routes.post( -@permission_required("project.snapshot.create") -async def create_project_iteration(request: web.Request) -> web.Response: - q = parse_query_parameters(request) - meta_project_uuid = q.project_uuid - meta_project_commit_id = q.ref_id - - url_for = create_url_for_function(request) - vc_repo = 
VersionControlForMetaModeling.create_from_request(request) - - # core function ---- - project_iterations = await create_or_get_project_iterations( - vc_repo, meta_project_uuid, meta_project_commit_id - ) - - # parse and validate response ---- - iterations_items = [ - ProjectIterationItem.create_iteration( - meta_project_uuid, - meta_project_commit_id, - item.iteration_index, - item.project_id, - url_for, - ) - for item in project_iterations - ] - - return envelope_json_response(iterations_items, web.HTTPCreated) - - -@routes.get( - f"/{VTAG}/projects/{{project_uuid}}/checkpoint/{{ref_id}}/iterations/-/results", - name="list_project_iterations_results", -) -@login_required -@permission_required("project.snapshot.read") -async def list_project_iterations_results( - request: web.Request, -) -> web.Response: - # parse and validate request ---- - q = parse_query_parameters(request) - meta_project_uuid = q.project_uuid - meta_project_commit_id = q.ref_id - - url_for = create_url_for_function(request) - vc_repo = VersionControlForMetaModeling.create_from_request(request) - - # core function ---- - iterations_range = await _get_project_iterations_range( - vc_repo, - meta_project_uuid, - meta_project_commit_id, - offset=q.offset, - limit=q.limit, - ) - - if iterations_range.total_count == 0: - raise web.HTTPNotFound( - reason=f"No iterations found for projects/{meta_project_uuid}/checkpoint/{meta_project_commit_id}" - ) - - assert len(iterations_range.items) <= q.limit # nosec - - # get every project from the database and extract results - _prj_data = {} - for item in iterations_range.items: - prj = await vc_repo.get_project(f"{item.project_id}", include=["workbench"]) - _prj_data[item.project_id] = prj["workbench"] - - def _get_project_results(project_id) -> ExtractedResults: - return extract_project_results(_prj_data[project_id]) - - # parse and validate response ---- - page_items = [ - ProjectIterationResultItem.create_result( - meta_project_uuid, - meta_project_commit_id, - item.iteration_index, - item.project_id, - _get_project_results(item.project_id), - url_for, - ) - for item in iterations_range.items - ] - - page = Page[ProjectIterationResultItem].model_validate( - paginate_data( - chunk=page_items, - request_url=request.url, - total=iterations_range.total_count, - limit=q.limit, - offset=q.offset, - ) - ) - return web.Response( - text=page.model_dump_json(**RESPONSE_MODEL_POLICY), - content_type="application/json", - ) diff --git a/services/web/server/src/simcore_service_webserver/meta_modeling/_iterations.py b/services/web/server/src/simcore_service_webserver/meta_modeling/_iterations.py deleted file mode 100644 index 4d271a5c9f7..00000000000 --- a/services/web/server/src/simcore_service_webserver/meta_modeling/_iterations.py +++ /dev/null @@ -1,344 +0,0 @@ -""" Implements parts of the pre-run stage that iterates a meta-project and versions every iteration - -""" - -import itertools -import logging -import re -from collections.abc import Iterator -from copy import deepcopy -from typing import Any, Literal, Optional - -from aiohttp import web -from common_library.json_serialization import json_dumps -from models_library.basic_types import KeyIDStr, SHA1Str -from models_library.function_services_catalog import is_iterator_service -from models_library.projects import ProjectID -from models_library.projects_nodes import Node, OutputID, OutputTypes -from models_library.projects_nodes_io import NodeID -from models_library.services import ServiceMetaDataPublished -from pydantic import 
BaseModel, ValidationError -from pydantic.fields import Field -from pydantic.types import PositiveInt - -from .._constants import RQ_PRODUCT_KEY -from ..projects.models import ProjectDict -from ..utils import compute_sha1_on_small_dataset, now_str -from ..version_control.errors import UserUndefinedError -from ..version_control.models import CommitID -from . import _function_nodes -from ._version_control import VersionControlForMetaModeling - -_logger = logging.getLogger(__name__) - - -NodesDict = dict[NodeID, Node] -NodeOutputsDict = dict[OutputID, OutputTypes] -Parameters = tuple[NodeOutputsDict] -_ParametersNodesPair = tuple[Parameters, NodesDict] - - -def _compute_params_checksum(parameters: Parameters) -> SHA1Str: - # NOTE: parameters are within a project's dataset which can - # be considered small (based on test_compute_sh1_on_small_dataset) - return compute_sha1_on_small_dataset(parameters) - - -def _build_project_iterations(project_nodes: NodesDict) -> list[_ParametersNodesPair]: - """Builds changing instances (i.e. iterations) of the meta-project - - This interface only knows about project/node models and parameters - """ - - # select iterable nodes - iterable_nodes_defs: list[ - ServiceMetaDataPublished - ] = [] # schemas of iterable nodes - iterable_nodes: list[Node] = [] # iterable nodes - iterable_nodes_ids: list[NodeID] = [] - - for node_id, node in project_nodes.items(): - if is_iterator_service(node.key): - node_def = _function_nodes.catalog.get_metadata(node.key, node.version) - # save - iterable_nodes_defs.append(node_def) - iterable_nodes.append(node) - iterable_nodes_ids.append(node_id) - - # for each iterable node create generator - nodes_generators = [] - - for node, node_def in zip(iterable_nodes, iterable_nodes_defs): - assert node.inputs # nosec - assert node_def.inputs # nosec - - node_call = _function_nodes.catalog.get_implementation(node.key, node.version) - assert node_call # nosec - g = node_call( - **{f"{name}": node.inputs[KeyIDStr(name)] for name in node_def.inputs} - ) - assert isinstance(g, Iterator) # nosec - nodes_generators.append(g) - - updated_nodes_per_iter: list[NodesDict] = [] - parameters_per_iter: list[tuple[NodeOutputsDict]] = [] - - for parameters in itertools.product(*nodes_generators): - # Q: what if iter are infinite? - # Q: preview & crop iterations? 
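
Stripped of the version-control plumbing, the loop above in the removed `_build_project_iterations` reduces to a cartesian product over the per-node parameter generators; for example:

```python
import itertools


def expand_iterations(node_generators):
    """One iteration per element of the cartesian product of all iterator nodes."""
    yield from itertools.product(*node_generators)


# e.g. an integer iterator with 3 values and a boolean parameter -> 6 iterations
print(list(expand_iterations([range(3), (True, False)])))
# [(0, True), (0, False), (1, True), (1, False), (2, True), (2, False)]
```
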
- - node_results: NodeOutputsDict - updated_nodes: NodesDict = {} - - for node_results, node_def, node_id in zip( - parameters, iterable_nodes_defs, iterable_nodes_ids - ): - assert node_def.outputs # nosec - assert 1 <= len(node_results) <= len(node_def.outputs) # nosec - - # override outputs with the parametrization results - _iter_node = deepcopy(project_nodes[node_id]) - _iter_node.outputs = _iter_node.outputs or {} - _iter_node.outputs.update(node_results) - - # NOTE: Replacing iter_node by a param_node, it avoid re-running matching iterations - # Currently it does not work because front-end needs to change - # SEE https://github.com/ITISFoundation/osparc-simcore/issues/2735 - # - updated_nodes[node_id] = _iter_node - - parameters_per_iter.append(parameters) - updated_nodes_per_iter.append(updated_nodes) - - return list(zip(parameters_per_iter, updated_nodes_per_iter)) - - -def extract_parameters( - vc_repo: VersionControlForMetaModeling, - project_uuid: ProjectID, - commit_id: CommitID, -) -> Parameters: - raise NotImplementedError - # SEE https://github.com/ITISFoundation/osparc-simcore/issues/2735 - - -# DOMAIN MODEL for project iteration ------------------------------------------------------------ - -IterationID = PositiveInt - - -class ProjectIteration(BaseModel): - """ - - Keeps a reference of the version: vc repo/commit and workcopy - - Keeps a reference of the interation: order, parameters (for the moment, only hash), ... - - - de/serializes from/to a vc tag - """ - - # version-control info - repo_id: int | None = None - repo_commit_id: CommitID = Field( - ..., - description="this id makes it unique but does not guarantees order. See iter_index for that", - ) - - # iteration info - iteration_index: IterationID = Field( - ..., - description="Index that allows iterations to be sortable", - ) - total_count: int | Literal["unbound"] = "unbound" - parameters_checksum: SHA1Str = Field(...) - - @classmethod - def from_tag_name( - cls, tag_name: str, *, return_none_if_fails: bool = False - ) -> Optional["ProjectIteration"]: - """Parses iteration info from tag name""" - try: - return cls.model_validate(parse_iteration_tag_name(tag_name)) - except ValidationError as err: - if return_none_if_fails: - _logger.debug("%s", f"{err=}") - return None - raise - - def to_tag_name(self) -> str: - """Composes unique tag name for this iteration""" - return compose_iteration_tag_name( - repo_commit_id=self.repo_commit_id, - iteration_index=self.iteration_index, - total_count=self.total_count, - parameters_checksum=self.parameters_checksum, - ) - - -# NOTE: compose_/parse_ functions are basically serialization functions for ProjectIteration -# into/from string tags. 
An alternative approach would be simply using json.dump/load -# but we should guarantee backwards compatibilty with old tags -def compose_iteration_tag_name( - repo_commit_id: CommitID, - iteration_index: IterationID, - total_count: int | str, - parameters_checksum: SHA1Str, -) -> str: - """Composes unique tag name for iter_index-th iteration of repo_commit_id out of total_count""" - return f"iteration:{repo_commit_id}/{iteration_index}/{total_count}/{parameters_checksum}" - - -def parse_iteration_tag_name(name: str) -> dict[str, Any]: - if m := re.match( - r"^iteration:(?P\d+)/(?P\d+)/(?P-*\d+)/(?P.*)$", - name, - ): - return m.groupdict() - return {} - - -# GET/CREATE iterations ------------------------------------------------------------ - - -async def get_or_create_runnable_projects( - request: web.Request, - project_uuid: ProjectID, -) -> tuple[list[ProjectID], list[CommitID]]: - """ - Returns ids and refid of projects that can run - If project_uuid is a std-project, then it returns itself - If project_uuid is a meta-project, then it returns iterations - """ - - vc_repo = VersionControlForMetaModeling.create_from_request(request) - assert vc_repo.user_id # nosec - product_name = request[RQ_PRODUCT_KEY] - - try: - project: ProjectDict = await vc_repo.get_project(str(project_uuid)) - except UserUndefinedError as err: - raise web.HTTPForbidden(reason="Unauthenticated request") from err - - project_nodes: dict[NodeID, Node] = { - nid: Node.model_validate(n) for nid, n in project["workbench"].items() - } - - # init returns - runnable_project_vc_commits: list[CommitID] = [] - runnable_project_ids: list[ProjectID] = [ - project_uuid, - ] - - # auto-commit - # because it will run in parallel -> needs an independent working copy - repo_id = await vc_repo.get_repo_id(project_uuid) - if repo_id is None: - repo_id = await vc_repo.init_repo(project_uuid) - - main_commit_id = await vc_repo.commit( - repo_id, - tag=f"auto:main/{project_uuid}", - message=f"auto-commit {now_str()}", - ) - runnable_project_vc_commits.append(main_commit_id) - - # std-project - is_meta_project = any( - is_iterator_service(node.key) and not node.outputs - for node in project_nodes.values() - ) - if not is_meta_project: - return runnable_project_ids, runnable_project_vc_commits - - # meta-project: resolve project iterations - runnable_project_ids = [] - runnable_project_vc_commits = [] - - iterations = _build_project_iterations(project_nodes) - _logger.debug( - "Project %s with %s parameters, produced %s variants", - project_uuid, - len(iterations[0]) if iterations else 0, - len(iterations), - ) - - # Each iteration generates a set of 'parameters' - # - parameters are set in the corresponding outputs of the meta-nodes - # - parameters: Parameters - updated_nodes: NodesDict - total_count = len(iterations) - original_name = project["name"] - - for iteration_index, (parameters, updated_nodes) in enumerate(iterations, start=1): - _logger.debug( - "Creating snapshot of project %s with parameters=%s [%s]", - f"{project_uuid=}", - f"{parameters=}", - f"{updated_nodes=}", - ) - - project["name"] = f"{original_name}/{iteration_index}" - project["workbench"].update( - { - # converts model in dict patching first thumbnail - nid: n.model_copy(update={"thumbnail": n.thumbnail or ""}).model_dump( - by_alias=True, exclude_unset=True - ) - for nid, n in updated_nodes.items() - } - ) - - project_iteration = ProjectIteration( - repo_id=repo_id, - repo_commit_id=main_commit_id, - iteration_index=iteration_index, - total_count=total_count, - 
parameters_checksum=_compute_params_checksum(parameters), - ) - - # tag to identify this iteration - branch_name = tag_name = project_iteration.to_tag_name() - - commit_id = await vc_repo.create_workcopy_and_branch_from_commit( - repo_id, - start_commit_id=main_commit_id, - project=project, - branch_name=branch_name, - tag_name=tag_name, - tag_message=json_dumps(parameters), - product_name=product_name, - ) - - workcopy_project_id = await vc_repo.get_workcopy_project_id(repo_id, commit_id) - - runnable_project_ids.append(ProjectID(workcopy_project_id)) - runnable_project_vc_commits.append(commit_id) - - return runnable_project_ids, runnable_project_vc_commits - - -async def get_runnable_projects_ids( - request: web.Request, - project_uuid: ProjectID, -) -> list[ProjectID]: - vc_repo = VersionControlForMetaModeling.create_from_request(request) - assert vc_repo.user_id # nosec - - project: ProjectDict = await vc_repo.get_project(str(project_uuid)) - assert project["uuid"] == str(project_uuid) # nosec - project_nodes: dict[NodeID, Node] = { - nid: Node.model_validate(n) for nid, n in project["workbench"].items() - } - - # init returns - runnable_project_ids: list[ProjectID] = [] - - # std-project - is_meta_project = any( - is_iterator_service(node.key) for node in project_nodes.values() - ) - if not is_meta_project: - runnable_project_ids.append(project_uuid) - return runnable_project_ids - - raise NotImplementedError - # SEE https://github.com/ITISFoundation/osparc-simcore/issues/2735 diff --git a/services/web/server/src/simcore_service_webserver/meta_modeling/_projects.py b/services/web/server/src/simcore_service_webserver/meta_modeling/_projects.py deleted file mode 100644 index cd2a09b195b..00000000000 --- a/services/web/server/src/simcore_service_webserver/meta_modeling/_projects.py +++ /dev/null @@ -1,99 +0,0 @@ -""" Access to the to projects module - - - Adds a middleware to intercept /projects/* requests - - Implements a MetaProjectRunPolicy policy (see director_v2_abc.py) to define how meta-projects run - -""" - - -import logging -import re - -from aiohttp import web -from aiohttp.typedefs import Handler -from models_library.basic_regex import UUID_RE -from models_library.projects import ProjectID - -from .._meta import API_VTAG as VTAG -from ..director_v2.api import AbstractProjectRunPolicy -from ..projects._crud_handlers import RQ_REQUESTED_REPO_PROJECT_UUID_KEY -from ..version_control.models import CommitID -from ._iterations import get_or_create_runnable_projects, get_runnable_projects_ids -from ._version_control import VersionControlForMetaModeling - -_logger = logging.getLogger(__name__) - - -# SEE https://github.com/ITISFoundation/osparc-simcore/blob/master/services/web/server/src/simcore_service_webserver/api/v0/openapi.yaml#L8563 -URL_PATTERN = re.compile(rf"^\/{VTAG}\/projects\/({UUID_RE})[\/]{{0,1}}") - - -def _match_project_id(request: web.Request): - # OAS uses both 'project_id' and also 'project_uuid' :-( - for path_param in ("project_id", "project_uuid"): - if project_id := request.match_info.get(path_param): - return project_id, path_param - return None, None - - -@web.middleware -async def projects_redirection_middleware(request: web.Request, handler: Handler): - """Intercepts /projects/{project_uuid}* requests and redirect them to the copy @HEAD - - Any given project has a unique identifier 'project_id' but, when activated, - it also has a version history (denoted 'checkpoints' in the API). 
- - In that case, GET /projects/1234 shall refer to the HEAD version of the project - with id 1234, also denoted the project's working copy (in short 'workcopy project') - - All metaprojects are versioned so this middleware intercepts calls to GET project - and ensures that the response body includes the correct workcopy of the requested - project. - """ - - if URL_PATTERN.match(f"{request.rel_url}"): - # - # WARNING: because hierarchical design is not guaranteed, we find ourselves with - # entries like /v0/computations/{project_id}:start which might also neeed - # indirection - # - - project_id, path_param = _match_project_id(request) - if project_id and path_param: - vc_repo = VersionControlForMetaModeling.create_from_request(request) - - if repo_id := await vc_repo.get_repo_id(ProjectID(project_id)): - # Changes resolved project_id parameter with working copy instead - workcopy_project_id = await vc_repo.get_workcopy_project_id(repo_id) - request.match_info[path_param] = f"{workcopy_project_id}" - - if f"{workcopy_project_id}" != f"{project_id}": - request[RQ_REQUESTED_REPO_PROJECT_UUID_KEY] = workcopy_project_id - _logger.debug( - "Redirecting request with %s to working copy %s", - f"{project_id=}", - f"{workcopy_project_id=}", - ) - - response = await handler(request) - - return response - - -class MetaProjectRunPolicy(AbstractProjectRunPolicy): - async def get_runnable_projects_ids( - self, - request: web.Request, - project_uuid: ProjectID, - ) -> list[ProjectID]: - return await get_runnable_projects_ids(request, project_uuid) - - async def get_or_create_runnable_projects( - self, - request: web.Request, - project_uuid: ProjectID, - ) -> tuple[list[ProjectID], list[CommitID]]: - return await get_or_create_runnable_projects(request, project_uuid) - - -meta_project_policy = MetaProjectRunPolicy() diff --git a/services/web/server/src/simcore_service_webserver/meta_modeling/_results.py b/services/web/server/src/simcore_service_webserver/meta_modeling/_results.py deleted file mode 100644 index 150c2b8f680..00000000000 --- a/services/web/server/src/simcore_service_webserver/meta_modeling/_results.py +++ /dev/null @@ -1,114 +0,0 @@ -""" Access to the to projects module - - - Adds a middleware to intercept /projects/* requests - - Implements a MetaProjectRunPolicy policy (see director_v2_abc.py) to define how meta-projects run - -""" - - -import logging -from typing import Annotated, Any - -from models_library.projects_nodes import OutputsDict -from models_library.projects_nodes_io import NodeIDStr -from pydantic import BaseModel, ConfigDict, Field - -_logger = logging.getLogger(__name__) - - -ProgressInt = Annotated[int, Field(ge=0, le=100)] - - -class ExtractedResults(BaseModel): - progress: dict[NodeIDStr, ProgressInt] = Field( - ..., description="Progress in each computational node" - ) - labels: dict[NodeIDStr, str] = Field( - ..., description="Maps captured node with a label" - ) - values: dict[NodeIDStr, OutputsDict] = Field( - ..., description="Captured outputs per node" - ) - model_config = ConfigDict( - json_schema_extra={ - "example": { - # sample with 2 computational services, 2 data sources (iterator+parameter) and 2 observers (probes) - "progress": { - "4c08265a-427b-4ac3-9eab-1d11c822ada4": 0, - "e33c6880-1b1d-4419-82d7-270197738aa9": 100, - }, - "labels": { - "0f1e38c9-dcb7-443c-a745-91b97ac28ccc": "Integer iterator", - "2d0ce8b9-c9c3-43ce-ad2f-ad493898de37": "Probe Sensor - Integer", - "445b44d1-59b3-425c-ac48-7c13e0f2ea5b": "Probe Sensor - Integer_2", - 
"d76fca06-f050-4790-88a8-0aac10c87b39": "Boolean Parameter", - }, - "values": { - "0f1e38c9-dcb7-443c-a745-91b97ac28ccc": { - "out_1": 1, - "out_2": [3, 4], - }, - "2d0ce8b9-c9c3-43ce-ad2f-ad493898de37": {"in_1": 7}, - "445b44d1-59b3-425c-ac48-7c13e0f2ea5b": {"in_1": 1}, - "d76fca06-f050-4790-88a8-0aac10c87b39": {"out_1": True}, - }, - } - } - ) - - -def extract_project_results(workbench: dict[str, Any]) -> ExtractedResults: - """Extracting results from a project's workbench section (i.e. pipeline). Specifically: - - - data sources (e.g. outputs from iterators, paramters) - - progress of evaluators (e.g. a computational service) - - data observers (basically inputs from probes) - - NOTE: all projects produces from iterations preserve the same node uuids so - running this extraction on all projects from a iterations allows to create a - row for a table of results - """ - # nodeid -> % progress - progress = {} - # nodeid -> label (this map is necessary because cannot guaratee labels to be unique) - labels = {} - # nodeid -> { port: value , ...} # results have two levels deep: node/port - results = {} - - for noid, node in workbench.items(): - key_parts = node["key"].split("/") - - # evaluate progress - if "comp" in key_parts: - progress[noid] = node.get("progress", 0) - - # evaluate results - if "probe" in key_parts: - label = node["label"] - values = {} - for port_name, node_input in node["inputs"].items(): - try: - values[port_name] = workbench[node_input["nodeUuid"]]["outputs"][ - node_input["output"] - ] - except KeyError: - # if not run, we know name but NOT value - values[port_name] = "n/a" - results[noid], labels[noid] = values, label - - elif "data-iterator" in key_parts: - label = node["label"] - try: - values = node["outputs"] # {oid: value, ...} - except KeyError: - # if not iterated, we do not know NEITHER name NOT values - values = {} - results[noid], labels[noid] = values, label - - elif "parameter" in key_parts: - label = node["label"] - values = node["outputs"] - results[noid], labels[noid] = values, label - - res = ExtractedResults(progress=progress, labels=labels, values=results) - return res diff --git a/services/web/server/src/simcore_service_webserver/meta_modeling/_version_control.py b/services/web/server/src/simcore_service_webserver/meta_modeling/_version_control.py deleted file mode 100644 index 2f0d73d104e..00000000000 --- a/services/web/server/src/simcore_service_webserver/meta_modeling/_version_control.py +++ /dev/null @@ -1,193 +0,0 @@ -""" Access to the to version_control add-on - -""" - -import logging -from types import SimpleNamespace -from typing import cast - -from aiopg.sa.result import RowProxy -from models_library.products import ProductName -from models_library.projects import ProjectIDStr -from models_library.utils.fastapi_encoders import jsonable_encoder -from simcore_postgres_database.models.projects_to_products import projects_to_products - -from ..projects.models import ProjectDict -from ..version_control.db import VersionControlRepository -from ..version_control.errors import UserUndefinedError -from ..version_control.models import CommitID, TagProxy -from ..version_control.vc_changes import ( - compute_workbench_checksum, - eval_workcopy_project_id, -) -from ..version_control.vc_tags import compose_workcopy_project_tag_name - -_logger = logging.getLogger(__name__) - - -class VersionControlForMetaModeling(VersionControlRepository): - async def get_workcopy_project_id( - self, repo_id: int, commit_id: int | None = None - ) -> ProjectIDStr: - async 
with self.engine.acquire() as conn: - if commit_id is None: - commit = await self._get_HEAD_commit(repo_id, conn) - assert commit # nosec - commit_id = commit.id - assert commit_id - - return await self._fetch_workcopy_project_id(repo_id, commit_id, conn) - - async def get_workcopy_project(self, repo_id: int, commit_id: int) -> ProjectDict: - async with self.engine.acquire() as conn: - project_id = await self._fetch_workcopy_project_id(repo_id, commit_id, conn) - project = await self.ProjectsOrm(conn).set_filter(uuid=project_id).fetch() - assert project # nosec - return dict(project.items()) - - async def get_project( - self, project_id: ProjectIDStr, *, include: list[str] | None = None - ) -> ProjectDict: - async with self.engine.acquire() as conn: - if self.user_id is None: - raise UserUndefinedError - - if include is None: - include = [ - "type", - "uuid", - "name", - "description", - "thumbnail", - "prj_owner", - "access_rights", - "workbench", - "ui", - "classifiers", - "dev", - "quality", - "published", - "hidden", - ] - - project = ( - await self.ProjectsOrm(conn) - .set_filter(uuid=f"{project_id}", prj_owner=self.user_id) - .fetch(include) - ) - assert project # nosec - project_as_dict = dict(project.items()) - - # ------------- - # NOTE: hack to avoid validation error. Revisit when models_library.utils.pydantic_models_factory is - # used to create a reliable project's model to validate http API - if "thumbnail" in project_as_dict: - project_as_dict["thumbnail"] = project_as_dict["thumbnail"] or "" - # --------------- - return project_as_dict - - async def create_workcopy_and_branch_from_commit( - self, - repo_id: int, - start_commit_id: int, - project: ProjectDict, - branch_name: str, - tag_name: str, - tag_message: str, - product_name: ProductName, - ) -> CommitID: - """Creates a new branch with an explicit working copy 'project' on 'start_commit_id'""" - IS_INTERNAL_OPERATION = True - - # NOTE: this avoids having non-compatible types embedded in the dict that - # make operations with the db fail - # SEE https://fastapi.tiangolo.com/tutorial/encoder/ - project = jsonable_encoder(project, sqlalchemy_safe=True) - - async with self.engine.acquire() as conn: - # existence check prevents errors later - if ( - existing_tag := await self.TagsOrm(conn) - .set_filter(name=tag_name) - .fetch() - ): - return cast(CommitID, existing_tag.commit_id) - - # get workcopy for start_commit_id and update with 'project' - repo = ( - await self.ReposOrm(conn).set_filter(id=repo_id).fetch("project_uuid") - ) - assert repo # nosec - - async with conn.begin(): - # take snapshot of forced project - snapshot_checksum = compute_workbench_checksum(project["workbench"]) - - await self._upsert_snapshot( - snapshot_checksum, SimpleNamespace(**project), conn - ) - - # commit new snapshot in history - commit_id = await self.CommitsOrm(conn).insert( - repo_id=repo_id, - parent_commit_id=start_commit_id, - message=tag_message, - snapshot_checksum=snapshot_checksum, - ) - assert commit_id # nosec - assert isinstance(commit_id, int) # nosec - - # creates unique identifier for variant - project["uuid"] = eval_workcopy_project_id( - repo.project_uuid, snapshot_checksum - ) - project["hidden"] = True - - # creates runnable version in project - await self.ProjectsOrm(conn).insert(**project) - - await conn.execute( - projects_to_products.insert().values( - project_uuid=project["uuid"], product_name=product_name - ) - ) - - # create branch and set head to last commit_id - branch = await self.BranchesOrm(conn).insert( -
returning_cols="id head_commit_id", - repo_id=repo_id, - head_commit_id=commit_id, - name=branch_name, - ) - assert isinstance(branch, RowProxy) # nosec - - for tag in [ - tag_name, - compose_workcopy_project_tag_name(project["uuid"]), - ]: - await self.TagsOrm(conn).insert( - repo_id=repo_id, - commit_id=commit_id, - name=tag, - message=tag_message if tag == tag_name else None, - hidden=IS_INTERNAL_OPERATION, - ) - - return cast(CommitID, branch.head_commit_id) - - async def get_children_tags( - self, repo_id: int, commit_id: int - ) -> list[list[TagProxy]]: - async with self.engine.acquire() as conn: - commits = ( - await self.CommitsOrm(conn) - .set_filter(repo_id=repo_id, parent_commit_id=commit_id) - .fetch_all(returning_cols="id") - ) - tags = [] - for commit in commits: - tags_in_commit = ( - await self.TagsOrm(conn).set_filter(commit_id=commit.id).fetch_all() - ) - tags.append(tags_in_commit) - return tags diff --git a/services/web/server/src/simcore_service_webserver/meta_modeling/plugin.py b/services/web/server/src/simcore_service_webserver/meta_modeling/plugin.py deleted file mode 100644 index c17ac9fdbf4..00000000000 --- a/services/web/server/src/simcore_service_webserver/meta_modeling/plugin.py +++ /dev/null @@ -1,41 +0,0 @@ -""" Meta-modeling app module - - Manages version control of studies, both the project document and the associated data - -""" -import logging - -from aiohttp import web -from servicelib.aiohttp.application_setup import ModuleCategory, app_module_setup - -from .._constants import APP_SETTINGS_KEY -from ..director_v2.api import get_project_run_policy, set_project_run_policy -from . import _handlers -from ._projects import meta_project_policy, projects_redirection_middleware - -_logger = logging.getLogger(__name__) - - -@app_module_setup( - __name__, - ModuleCategory.ADDON, - depends=[ - "simcore_service_webserver.projects", - ], - settings_name="WEBSERVER_META_MODELING", - logger=_logger, -) -def setup_meta_modeling(app: web.Application): - assert app[APP_SETTINGS_KEY].WEBSERVER_META_MODELING # nosec - - _logger.warning( - "'meta_modeling' plugin is STILL UNDER DEVELOPMENT and should not be used in production." 
- "Can only be activated with WEBSERVER_DEV_FEATURES_ENABLED=1" - ) - - app.add_routes(_handlers.routes) - app.middlewares.append(projects_redirection_middleware) - - # Overrides run-policy from directorv2 - assert get_project_run_policy(app) # nosec - set_project_run_policy(app, meta_project_policy) diff --git a/services/web/server/src/simcore_service_webserver/models.py b/services/web/server/src/simcore_service_webserver/models.py index 48ffd369586..0b816268baa 100644 --- a/services/web/server/src/simcore_service_webserver/models.py +++ b/services/web/server/src/simcore_service_webserver/models.py @@ -3,7 +3,7 @@ from pydantic import Field from servicelib.request_keys import RQT_USERID_KEY -from ._constants import RQ_PRODUCT_KEY +from .constants import RQ_PRODUCT_KEY class RequestContext(RequestParameters): diff --git a/services/web/server/src/simcore_service_webserver/notifications/_rabbitmq_exclusive_queue_consumers.py b/services/web/server/src/simcore_service_webserver/notifications/_rabbitmq_exclusive_queue_consumers.py index 4193c6fce7f..7c3925b8c50 100644 --- a/services/web/server/src/simcore_service_webserver/notifications/_rabbitmq_exclusive_queue_consumers.py +++ b/services/web/server/src/simcore_service_webserver/notifications/_rabbitmq_exclusive_queue_consumers.py @@ -18,7 +18,7 @@ from servicelib.rabbitmq import RabbitMQClient from servicelib.utils import logged_gather -from ..projects import projects_service +from ..projects import _projects_service from ..projects.exceptions import ProjectNotFoundError from ..rabbitmq import get_rabbitmq_client from ..socketio.messages import ( @@ -30,7 +30,7 @@ send_message_to_user, ) from ..socketio.models import WebSocketNodeProgress, WebSocketProjectProgress -from ..wallets import api as wallets_api +from ..wallets import api as wallets_service from ._rabbitmq_consumers_common import SubcribeArgumentsTuple, subscribe_to_rabbitmq _logger = logging.getLogger(__name__) @@ -42,7 +42,7 @@ async def _convert_to_node_update_event( app: web.Application, message: ProgressRabbitMessageNode ) -> SocketMessageDict | None: try: - project = await projects_service.get_project_for_user( + project = await _projects_service.get_project_for_user( app, f"{message.project_id}", message.user_id ) if f"{message.node_id}" in project["workbench"]: @@ -127,7 +127,7 @@ async def _events_message_parser(app: web.Application, data: bytes) -> bool: async def _osparc_credits_message_parser(app: web.Application, data: bytes) -> bool: rabbit_message = TypeAdapter(WalletCreditsMessage).validate_json(data) - wallet_groups = await wallets_api.list_wallet_groups_with_read_access_by_wallet( + wallet_groups = await wallets_service.list_wallet_groups_with_read_access_by_wallet( app, wallet_id=rabbit_message.wallet_id ) rooms_to_notify: Generator[GroupID, None, None] = ( diff --git a/services/web/server/src/simcore_service_webserver/payments/_events.py b/services/web/server/src/simcore_service_webserver/payments/_events.py index fbc4ebc2047..e9f63d26c20 100644 --- a/services/web/server/src/simcore_service_webserver/payments/_events.py +++ b/services/web/server/src/simcore_service_webserver/payments/_events.py @@ -6,7 +6,7 @@ from aiohttp import web -from ..products.api import list_products +from ..products import products_service from ..products.errors import BelowMinimumPaymentError from .settings import get_plugin_settings @@ -16,7 +16,7 @@ async def validate_prices_in_product_settings_on_startup(app: web.Application): payment_settings = get_plugin_settings(app) - for product 
in list_products(app): + for product in products_service.list_products(app): if product.min_payment_amount_usd is not None: if ( product.min_payment_amount_usd diff --git a/services/web/server/src/simcore_service_webserver/payments/_methods_db.py b/services/web/server/src/simcore_service_webserver/payments/_methods_db.py index 3b2bcf8ede8..135eaf41a9e 100644 --- a/services/web/server/src/simcore_service_webserver/payments/_methods_db.py +++ b/services/web/server/src/simcore_service_webserver/payments/_methods_db.py @@ -1,7 +1,7 @@ import datetime import logging -import simcore_postgres_database.errors as db_errors +import simcore_postgres_database.aiopg_errors as db_errors import sqlalchemy as sa from aiohttp import web from aiopg.sa.result import ResultProxy diff --git a/services/web/server/src/simcore_service_webserver/payments/_onetime_api.py b/services/web/server/src/simcore_service_webserver/payments/_onetime_api.py index fbe69e07f83..488189a81a5 100644 --- a/services/web/server/src/simcore_service_webserver/payments/_onetime_api.py +++ b/services/web/server/src/simcore_service_webserver/payments/_onetime_api.py @@ -24,7 +24,7 @@ from yarl import URL from ..db.plugin import get_database_engine -from ..products.api import get_product_stripe_info +from ..products import products_service from ..resource_usage.service import add_credits_to_wallet from ..users.api import get_user_display_and_id_names, get_user_invoice_address from ..wallets.api import get_wallet_by_user, get_wallet_with_permissions_by_user @@ -296,7 +296,9 @@ async def init_creation_of_wallet_payment( user_invoice_address = await get_user_invoice_address(app, user_id=user_id) # stripe info - product_stripe_info = await get_product_stripe_info(app, product_name=product_name) + product_stripe_info = await products_service.get_product_stripe_info( + app, product_name=product_name + ) settings: PaymentsSettings = get_plugin_settings(app) payment_inited: WalletPaymentInitiated @@ -378,7 +380,9 @@ async def pay_with_payment_method( assert user_wallet.wallet_id == wallet_id # nosec # stripe info - product_stripe_info = await get_product_stripe_info(app, product_name=product_name) + product_stripe_info = await products_service.get_product_stripe_info( + app, product_name=product_name + ) # user info user = await get_user_display_and_id_names(app, user_id=user_id) diff --git a/services/web/server/src/simcore_service_webserver/payments/_rpc_invoice.py b/services/web/server/src/simcore_service_webserver/payments/_rpc_invoice.py index 359f8cbf4cb..d799d04fe6f 100644 --- a/services/web/server/src/simcore_service_webserver/payments/_rpc_invoice.py +++ b/services/web/server/src/simcore_service_webserver/payments/_rpc_invoice.py @@ -4,11 +4,12 @@ from models_library.api_schemas_webserver import WEBSERVER_RPC_NAMESPACE from models_library.emails import LowerCaseEmailStr from models_library.payments import InvoiceDataGet, UserInvoiceAddress -from models_library.products import CreditResultGet, ProductName, ProductStripeInfoGet +from models_library.products import ProductName from models_library.users import UserID from servicelib.rabbitmq import RPCRouter -from ..products.api import get_credit_amount, get_product_stripe_info +from ..products import products_service +from ..products.models import CreditResult from ..rabbitmq import get_rabbitmq_rpc_server from ..users.api import get_user_display_and_id_names, get_user_invoice_address @@ -23,10 +24,10 @@ async def get_invoice_data( dollar_amount: Decimal, product_name: ProductName, ) -> 
InvoiceDataGet: - credit_result_get: CreditResultGet = await get_credit_amount( + credit_result: CreditResult = await products_service.get_credit_amount( app, dollar_amount=dollar_amount, product_name=product_name ) - product_stripe_info_get: ProductStripeInfoGet = await get_product_stripe_info( + product_stripe_info = await products_service.get_product_stripe_info( app, product_name=product_name ) user_invoice_address: UserInvoiceAddress = await get_user_invoice_address( @@ -35,9 +36,9 @@ async def get_invoice_data( user_info = await get_user_display_and_id_names(app, user_id=user_id) return InvoiceDataGet( - credit_amount=credit_result_get.credit_amount, - stripe_price_id=product_stripe_info_get.stripe_price_id, - stripe_tax_rate_id=product_stripe_info_get.stripe_tax_rate_id, + credit_amount=credit_result.credit_amount, + stripe_price_id=product_stripe_info.stripe_price_id, + stripe_tax_rate_id=product_stripe_info.stripe_tax_rate_id, user_invoice_address=user_invoice_address, user_display_name=user_info.full_name, user_email=LowerCaseEmailStr(user_info.email), diff --git a/services/web/server/src/simcore_service_webserver/payments/plugin.py b/services/web/server/src/simcore_service_webserver/payments/plugin.py index 777ba5b599a..3e8bbecc56e 100644 --- a/services/web/server/src/simcore_service_webserver/payments/plugin.py +++ b/services/web/server/src/simcore_service_webserver/payments/plugin.py @@ -6,11 +6,11 @@ from aiohttp import web from servicelib.aiohttp.application_setup import ModuleCategory, app_module_setup -from simcore_service_webserver.rabbitmq import setup_rabbitmq -from .._constants import APP_SETTINGS_KEY +from ..constants import APP_SETTINGS_KEY from ..db.plugin import setup_db from ..products.plugin import setup_products +from ..rabbitmq import setup_rabbitmq from ..users.plugin import setup_users from . 
import _events, _rpc_invoice from ._tasks import create_background_task_to_fake_payment_completion diff --git a/services/web/server/src/simcore_service_webserver/payments/settings.py b/services/web/server/src/simcore_service_webserver/payments/settings.py index ef825a5c1e9..1d424000d57 100644 --- a/services/web/server/src/simcore_service_webserver/payments/settings.py +++ b/services/web/server/src/simcore_service_webserver/payments/settings.py @@ -22,7 +22,7 @@ URLPart, ) -from .._constants import APP_SETTINGS_KEY +from ..constants import APP_SETTINGS_KEY class PaymentsSettings(BaseCustomSettings, MixinServiceSettings): diff --git a/services/web/server/src/simcore_service_webserver/products/_api.py b/services/web/server/src/simcore_service_webserver/products/_api.py deleted file mode 100644 index ed5b08b5ee1..00000000000 --- a/services/web/server/src/simcore_service_webserver/products/_api.py +++ /dev/null @@ -1,168 +0,0 @@ -from decimal import Decimal -from pathlib import Path -from typing import cast - -import aiofiles -from aiohttp import web -from models_library.products import CreditResultGet, ProductName, ProductStripeInfoGet -from simcore_postgres_database.utils_products_prices import ProductPriceInfo - -from .._constants import APP_PRODUCTS_KEY, RQ_PRODUCT_KEY -from .._resources import webserver_resources -from ._db import ProductRepository -from ._events import APP_PRODUCTS_TEMPLATES_DIR_KEY -from ._model import Product -from .errors import BelowMinimumPaymentError, ProductPriceNotDefinedError - - -def get_product_name(request: web.Request) -> str: - """Returns the product name in the request (might be undefined)""" - product_name: str = request[RQ_PRODUCT_KEY] - return product_name - - -def get_product(app: web.Application, product_name: ProductName) -> Product: - product: Product = app[APP_PRODUCTS_KEY][product_name] - return product - - -def get_current_product(request: web.Request) -> Product: - """Returns the product associated with the current request""" - product_name: ProductName = get_product_name(request) - current_product: Product = get_product(request.app, product_name=product_name) - return current_product - - -def list_products(app: web.Application) -> list[Product]: - products: list[Product] = list(app[APP_PRODUCTS_KEY].values()) - return products - - -async def get_current_product_credit_price_info( - request: web.Request, -) -> ProductPriceInfo | None: - """Gets the latest credit price for this product. - - NOTE: Contrary to other product api functions (e.g. get_current_product) this function - gets the latest update from the database. Otherwise, products are loaded - on startup and cached; in that case, a restart - of the service is required for the latest changes to take effect. - """ - current_product_name = get_product_name(request) - repo = ProductRepository.create_from_request(request) - return cast( # mypy: not sure why - ProductPriceInfo | None, - await repo.get_product_latest_price_info_or_none(current_product_name), - ) - - -async def get_credit_amount( - app: web.Application, - *, - dollar_amount: Decimal, - product_name: ProductName, -) -> CreditResultGet: - """Computes the credit amount for the provided dollar amount and product. - - NOTE: Contrary to other product api functions (e.g. get_current_product) this function - gets the latest update from the database. Otherwise, products are loaded - on startup and cached; in that case, a restart - of the service is required for the latest changes to take effect. 
- - Raises: - ProductPriceNotDefinedError - BelowMinimumPaymentError - - """ - repo = ProductRepository.create_from_app(app) - price_info = await repo.get_product_latest_price_info_or_none(product_name) - if price_info is None or not price_info.usd_per_credit: - # '0 or None' should raise - raise ProductPriceNotDefinedError( - reason=f"Product {product_name} usd_per_credit is either not defined or zero" - ) - - if dollar_amount < price_info.min_payment_amount_usd: - raise BelowMinimumPaymentError( - amount_usd=dollar_amount, - min_payment_amount_usd=price_info.min_payment_amount_usd, - ) - - credit_amount = dollar_amount / price_info.usd_per_credit - return CreditResultGet(product_name=product_name, credit_amount=credit_amount) - - -async def get_product_stripe_info( - app: web.Application, *, product_name: ProductName -) -> ProductStripeInfoGet: - repo = ProductRepository.create_from_app(app) - product_stripe_info = await repo.get_product_stripe_info(product_name) - if ( - not product_stripe_info - or "missing!!" in product_stripe_info.stripe_price_id - or "missing!!" in product_stripe_info.stripe_tax_rate_id - ): - msg = f"Missing product stripe info for product {product_name}" - raise ValueError(msg) - return cast(ProductStripeInfoGet, product_stripe_info) # mypy: not sure why - - -# -# helpers for get_product_template_path -# - - -def _themed(dirname: str, template: str) -> Path: - path: Path = webserver_resources.get_path(f"{Path(dirname) / template}") - return path - - -async def _get_content(request: web.Request, template_name: str): - repo = ProductRepository.create_from_request(request) - content = await repo.get_template_content(template_name) - if not content: - msg = f"Missing template {template_name} for product" - raise ValueError(msg) - return content - - -def _safe_get_current_product(request: web.Request) -> Product | None: - try: - product: Product = get_current_product(request) - return product - except KeyError: - return None - - -async def get_product_template_path(request: web.Request, filename: str) -> Path: - if product := _safe_get_current_product(request): - if template_name := product.get_template_name_for(filename): - template_dir: Path = request.app[APP_PRODUCTS_TEMPLATES_DIR_KEY] - template_path = template_dir / template_name - if not template_path.exists(): - # cache - content = await _get_content(request, template_name) - try: - async with aiofiles.open(template_path, "wt") as fh: - await fh.write(content) - except Exception: - # fails to write - if template_path.exists(): - template_path.unlink() - raise - - return template_path - - # check static resources under templates/ - if ( - template_path := _themed(f"templates/{product.name}", filename) - ) and template_path.exists(): - return template_path - - # If no product is set or no template is defined for it, we fall back to common templates - common_template = _themed("templates/common", filename) - if not common_template.exists(): - msg = f"{filename} is not part of the templates/common" - raise ValueError(msg) - - return common_template diff --git a/services/web/server/src/simcore_service_webserver/products/_controller/__init__.py b/services/web/server/src/simcore_service_webserver/products/_controller/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/services/web/server/src/simcore_service_webserver/products/_controller/rest.py b/services/web/server/src/simcore_service_webserver/products/_controller/rest.py new file mode 100644 index 00000000000..77c72afe3b0 --- /dev/null +++ 
b/services/web/server/src/simcore_service_webserver/products/_controller/rest.py @@ -0,0 +1,81 @@ +import logging + +from aiohttp import web +from models_library.api_schemas_webserver.products import ( + CreditPriceGet, + ProductGet, + ProductUIGet, +) +from servicelib.aiohttp.requests_validation import parse_request_path_parameters_as + +from ..._meta import API_VTAG as VTAG +from ...login.decorators import login_required +from ...security.decorators import permission_required +from ...utils_aiohttp import envelope_json_response +from .. import _service, products_web +from .._repository import ProductRepository +from ..models import Product +from .rest_exceptions import handle_rest_requests_exceptions +from .rest_schemas import ProductsRequestContext, ProductsRequestParams + +routes = web.RouteTableDef() + + +_logger = logging.getLogger(__name__) + + +@routes.get(f"/{VTAG}/credits-price", name="get_current_product_price") +@login_required +@permission_required("product.price.read") +@handle_rest_requests_exceptions +async def _get_current_product_price(request: web.Request): + req_ctx = ProductsRequestContext.model_validate(request) + price_info = await products_web.get_current_product_credit_price_info(request) + + credit_price = CreditPriceGet( + product_name=req_ctx.product_name, + usd_per_credit=price_info.usd_per_credit if price_info else None, + min_payment_amount_usd=( + price_info.min_payment_amount_usd # type: ignore[arg-type] + if price_info + else None + ), + ) + return envelope_json_response(credit_price) + + +@routes.get(f"/{VTAG}/products/{{product_name}}", name="get_product") +@login_required +@permission_required("product.details.*") +@handle_rest_requests_exceptions +async def _get_product(request: web.Request): + req_ctx = ProductsRequestContext.model_validate(request) + path_params = parse_request_path_parameters_as(ProductsRequestParams, request) + + if path_params.product_name == "current": + product_name = req_ctx.product_name + else: + product_name = path_params.product_name + + product: Product = _service.get_product(request.app, product_name=product_name) + + assert "extra" in ProductGet.model_config # nosec + assert ProductGet.model_config["extra"] == "ignore" # nosec + data = ProductGet(**product.model_dump(), templates=[]) + return envelope_json_response(data) + + +@routes.get(f"/{VTAG}/products/current/ui", name="get_current_product_ui") +@login_required +@permission_required("product.ui.read") +@handle_rest_requests_exceptions +async def _get_current_product_ui(request: web.Request): + req_ctx = ProductsRequestContext.model_validate(request) + product_name = req_ctx.product_name + + ui = await _service.get_product_ui( + ProductRepository.create_from_request(request), product_name=product_name + ) + + data = ProductUIGet(product_name=product_name, ui=ui) + return envelope_json_response(data) diff --git a/services/web/server/src/simcore_service_webserver/products/_controller/rest_exceptions.py b/services/web/server/src/simcore_service_webserver/products/_controller/rest_exceptions.py new file mode 100644 index 00000000000..a9e8cb13f00 --- /dev/null +++ b/services/web/server/src/simcore_service_webserver/products/_controller/rest_exceptions.py @@ -0,0 +1,26 @@ +from servicelib.aiohttp import status + +from ...constants import MSG_TRY_AGAIN_OR_SUPPORT +from ...exception_handling import ( + ExceptionToHttpErrorMap, + HttpErrorInfo, + exception_handling_decorator, + to_exceptions_handlers_map, +) +from ..errors import MissingStripeConfigError, ProductNotFoundError 
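For context, the new rest_exceptions module follows the webserver's declare-once error-mapping pattern: domain exceptions are listed in an ExceptionToHttpErrorMap and a decorator converts them into HTTP responses for every wrapped handler. A minimal sketch of how such a map is consumed, using a hypothetical _MyError and assuming `from aiohttp import web` in addition to the helpers imported above:

    class _MyError(Exception):
        ...

    _EXAMPLE_MAP: ExceptionToHttpErrorMap = {
        _MyError: HttpErrorInfo(status.HTTP_404_NOT_FOUND, "resource was not found"),
    }

    _handle_example_exceptions = exception_handling_decorator(
        to_exceptions_handlers_map(_EXAMPLE_MAP)
    )

    @_handle_example_exceptions
    async def _example_handler(request: web.Request) -> web.Response:
        raise _MyError  # converted by the decorator into a 404 response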
+ +_TO_HTTP_ERROR_MAP: ExceptionToHttpErrorMap = { + ProductNotFoundError: HttpErrorInfo( + status.HTTP_404_NOT_FOUND, + "{product_name} was not found", + ), + MissingStripeConfigError: HttpErrorInfo( + status.HTTP_503_SERVICE_UNAVAILABLE, + "{product_name} service is currently unavailable." + MSG_TRY_AGAIN_OR_SUPPORT, + ), +} + + +handle_rest_requests_exceptions = exception_handling_decorator( + to_exceptions_handlers_map(_TO_HTTP_ERROR_MAP) +) diff --git a/services/web/server/src/simcore_service_webserver/products/_controller/rest_schemas.py b/services/web/server/src/simcore_service_webserver/products/_controller/rest_schemas.py new file mode 100644 index 00000000000..6a4ac2100b1 --- /dev/null +++ b/services/web/server/src/simcore_service_webserver/products/_controller/rest_schemas.py @@ -0,0 +1,26 @@ +import logging +from typing import Annotated, Literal + +from aiohttp import web +from models_library.basic_types import IDStr +from models_library.products import ProductName +from models_library.rest_base import RequestParameters, StrictRequestParameters +from models_library.users import UserID +from pydantic import Field +from servicelib.request_keys import RQT_USERID_KEY + +from ...constants import RQ_PRODUCT_KEY + +routes = web.RouteTableDef() + + +_logger = logging.getLogger(__name__) + + +class ProductsRequestContext(RequestParameters): + user_id: Annotated[UserID, Field(alias=RQT_USERID_KEY)] + product_name: Annotated[ProductName, Field(..., alias=RQ_PRODUCT_KEY)] + + +class ProductsRequestParams(StrictRequestParameters): + product_name: IDStr | Literal["current"] diff --git a/services/web/server/src/simcore_service_webserver/products/_controller/rpc.py b/services/web/server/src/simcore_service_webserver/products/_controller/rpc.py new file mode 100644 index 00000000000..852cf2e4f8c --- /dev/null +++ b/services/web/server/src/simcore_service_webserver/products/_controller/rpc.py @@ -0,0 +1,38 @@ +from decimal import Decimal + +from aiohttp import web +from models_library.api_schemas_webserver import WEBSERVER_RPC_NAMESPACE +from models_library.api_schemas_webserver.products import CreditResultRpcGet +from models_library.products import ProductName +from servicelib.rabbitmq import RPCRouter + +from ...constants import APP_SETTINGS_KEY +from ...rabbitmq import get_rabbitmq_rpc_server, setup_rabbitmq +from .. 
import _service +from .._models import CreditResult + +router = RPCRouter() + + +@router.expose() +async def get_credit_amount( + app: web.Application, + *, + dollar_amount: Decimal, + product_name: ProductName, +) -> CreditResultRpcGet: + credit_result: CreditResult = await _service.get_credit_amount( + app, dollar_amount=dollar_amount, product_name=product_name + ) + return CreditResultRpcGet.model_validate(credit_result, from_attributes=True) + + +async def _register_rpc_routes_on_startup(app: web.Application): + rpc_server = get_rabbitmq_rpc_server(app) + await rpc_server.register_router(router, WEBSERVER_RPC_NAMESPACE, app) + + +def setup_rpc(app: web.Application): + setup_rabbitmq(app) + if app[APP_SETTINGS_KEY].WEBSERVER_RABBITMQ: + app.on_startup.append(_register_rpc_routes_on_startup) diff --git a/services/web/server/src/simcore_service_webserver/products/_db.py b/services/web/server/src/simcore_service_webserver/products/_db.py deleted file mode 100644 index a59f6077dfc..00000000000 --- a/services/web/server/src/simcore_service_webserver/products/_db.py +++ /dev/null @@ -1,153 +0,0 @@ -import logging -from decimal import Decimal -from typing import AsyncIterator, NamedTuple - -import sqlalchemy as sa -from aiopg.sa.connection import SAConnection -from aiopg.sa.result import ResultProxy, RowProxy -from models_library.products import ProductName, ProductStripeInfoGet -from simcore_postgres_database.constants import QUANTIZE_EXP_ARG -from simcore_postgres_database.models.jinja2_templates import jinja2_templates -from simcore_postgres_database.utils_products_prices import ( - ProductPriceInfo, - get_product_latest_price_info_or_none, - get_product_latest_stripe_info, -) - -from ..db.base_repository import BaseRepository -from ..db.models import products -from ._model import Product - -_logger = logging.getLogger(__name__) - - -# -# REPOSITORY -# - -# NOTE: This also asserts that all model fields are in sync with sqlalchemy columns -_PRODUCTS_COLUMNS = [ - products.c.name, - products.c.display_name, - products.c.short_name, - products.c.host_regex, - products.c.support_email, - products.c.product_owners_email, - products.c.twilio_messaging_sid, - products.c.vendor, - products.c.issues, - products.c.manuals, - products.c.support, - products.c.login_settings, - products.c.registration_email_template, - products.c.max_open_studies_per_user, - products.c.group_id, -] - - -class PaymentFieldsTuple(NamedTuple): - enabled: bool - credits_per_usd: Decimal | None - min_payment_amount_usd: Decimal | None - - -async def get_product_payment_fields( - conn: SAConnection, product_name: ProductName -) -> PaymentFieldsTuple: - price_info = await get_product_latest_price_info_or_none( - conn, product_name=product_name - ) - if price_info is None or price_info.usd_per_credit == 0: - return PaymentFieldsTuple( - enabled=False, - credits_per_usd=None, - min_payment_amount_usd=None, - ) - - assert price_info.usd_per_credit > 0 - assert price_info.min_payment_amount_usd > 0 - - return PaymentFieldsTuple( - enabled=True, - credits_per_usd=Decimal(1 / price_info.usd_per_credit).quantize( - QUANTIZE_EXP_ARG - ), - min_payment_amount_usd=price_info.min_payment_amount_usd, - ) - - -async def iter_products(conn: SAConnection) -> AsyncIterator[ResultProxy]: - """Iterates on products sorted by priority i.e. 
the first is considered the default""" - async for row in conn.execute( - sa.select(*_PRODUCTS_COLUMNS).order_by(products.c.priority) - ): - assert row # nosec - yield row - - -class ProductRepository(BaseRepository): - async def get_product(self, product_name: str) -> Product | None: - async with self.engine.acquire() as conn: - result: ResultProxy = await conn.execute( - sa.select(*_PRODUCTS_COLUMNS).where(products.c.name == product_name) - ) - row: RowProxy | None = await result.first() - if row: - # NOTE: MD Observation: Currently we are not defensive: we automatically assume - # that the product is not billable when there is no entry in the products_prices table - # or its price is 0. We should change this and always assume that the product is billable, unless - # explicitly stated that it is free - payments = await get_product_payment_fields(conn, product_name=row.name) - return Product( - **dict(row.items()), - is_payment_enabled=payments.enabled, - credits_per_usd=payments.credits_per_usd, - ) - return None - - async def get_product_latest_price_info_or_none( - self, product_name: str - ) -> ProductPriceInfo | None: - """newest price of a product or None if not billable""" - async with self.engine.acquire() as conn: - return await get_product_latest_price_info_or_none( - conn, product_name=product_name - ) - - async def get_product_stripe_info(self, product_name: str) -> ProductStripeInfoGet: - async with self.engine.acquire() as conn: - row = await get_product_latest_stripe_info(conn, product_name=product_name) - return ProductStripeInfoGet( - stripe_price_id=row[0], stripe_tax_rate_id=row[1] - ) - - async def get_template_content( - self, - template_name: str, - ) -> str | None: - async with self.engine.acquire() as conn: - template_content: str | None = await conn.scalar( - sa.select(jinja2_templates.c.content).where( - jinja2_templates.c.name == template_name - ) - ) - return template_content - - async def get_product_template_content( - self, - product_name: str, - product_template: sa.Column = products.c.registration_email_template, - ) -> str | None: - async with self.engine.acquire() as conn: - oj = sa.join( - products, - jinja2_templates, - product_template == jinja2_templates.c.name, - isouter=True, - ) - content = await conn.scalar( - sa.select(jinja2_templates.c.content) - .select_from(oj) - .where(products.c.name == product_name) - ) - return f"{content}" if content else None diff --git a/services/web/server/src/simcore_service_webserver/products/_events.py b/services/web/server/src/simcore_service_webserver/products/_events.py deleted file mode 100644 index 836e43a902f..00000000000 --- a/services/web/server/src/simcore_service_webserver/products/_events.py +++ /dev/null @@ -1,108 +0,0 @@ -import logging -import tempfile -from collections import OrderedDict -from pathlib import Path - -from aiohttp import web -from aiopg.sa.engine import Engine -from aiopg.sa.result import RowProxy -from pydantic import ValidationError -from servicelib.exceptions import InvalidConfig -from simcore_postgres_database.utils_products import ( - get_default_product_name, - get_or_create_product_group, -) - -from .._constants import APP_PRODUCTS_KEY -from ..db.plugin import get_database_engine -from ..statics._constants import FRONTEND_APP_DEFAULT, FRONTEND_APPS_AVAILABLE -from ._db import get_product_payment_fields, iter_products -from ._model import Product - -_logger = logging.getLogger(__name__) - -APP_PRODUCTS_TEMPLATES_DIR_KEY = f"{__name__}.template_dir" - - -async def 
setup_product_templates(app: web.Application): - """ - builds a directory and downloads product templates - """ - with tempfile.TemporaryDirectory( - suffix=APP_PRODUCTS_TEMPLATES_DIR_KEY - ) as templates_dir: - app[APP_PRODUCTS_TEMPLATES_DIR_KEY] = Path(templates_dir) - - yield - - # cleanup - - -async def auto_create_products_groups(app: web.Application) -> None: - """Ensures all products have associated group ids - - Avoids having undefined groups in products with the new products.group_id column - - NOTE: could not add this in 'setup_groups' (groups plugin) - since it has to be executed BEFORE 'load_products_on_startup' - """ - engine = get_database_engine(app) - - async with engine.acquire() as connection: - async for row in iter_products(connection): - product_name = row.name # type: ignore[attr-defined] # sqlalchemy - product_group_id = await get_or_create_product_group( - connection, product_name - ) - _logger.debug( - "Product with %s has an associated group with %s", - f"{product_name=}", - f"{product_group_id=}", - ) - - -def _set_app_state( - app: web.Application, - app_products: OrderedDict[str, Product], - default_product_name: str, -): - app[APP_PRODUCTS_KEY] = app_products - assert default_product_name in app_products # nosec - app[f"{APP_PRODUCTS_KEY}_default"] = default_product_name - - -async def load_products_on_startup(app: web.Application): - """ - Loads info on products stored in the database into app's storage (i.e. memory) - """ - app_products: OrderedDict[str, Product] = OrderedDict() - engine: Engine = get_database_engine(app) - async with engine.acquire() as connection: - async for row in iter_products(connection): - assert isinstance(row, RowProxy) # nosec - try: - name = row.name - - payments = await get_product_payment_fields( - connection, product_name=name - ) - - app_products[name] = Product( - **dict(row.items()), - is_payment_enabled=payments.enabled, - credits_per_usd=payments.credits_per_usd, - ) - - assert name in FRONTEND_APPS_AVAILABLE # nosec - - except ValidationError as err: - msg = f"Invalid product configuration in db '{row}':\n {err}" - raise InvalidConfig(msg) from err - - assert FRONTEND_APP_DEFAULT in app_products # nosec - - default_product_name = await get_default_product_name(connection) - - _set_app_state(app, app_products, default_product_name) - - _logger.debug("Product loaded: %s", [p.name for p in app_products.values()]) diff --git a/services/web/server/src/simcore_service_webserver/products/_handlers.py b/services/web/server/src/simcore_service_webserver/products/_handlers.py deleted file mode 100644 index 738dcd3c84f..00000000000 --- a/services/web/server/src/simcore_service_webserver/products/_handlers.py +++ /dev/null @@ -1,95 +0,0 @@ -import logging -from typing import Literal - -from aiohttp import web -from models_library.api_schemas_webserver.product import GetCreditPrice, GetProduct -from models_library.basic_types import IDStr -from models_library.rest_base import RequestParameters, StrictRequestParameters -from models_library.users import UserID -from pydantic import Field -from servicelib.aiohttp.requests_validation import parse_request_path_parameters_as -from servicelib.request_keys import RQT_USERID_KEY - -from .._constants import RQ_PRODUCT_KEY -from .._meta import API_VTAG as VTAG -from ..login.decorators import login_required -from ..security.decorators import permission_required -from ..utils_aiohttp import envelope_json_response -from . 
import _api, api -from ._model import Product - -routes = web.RouteTableDef() - - -_logger = logging.getLogger(__name__) - - -class _ProductsRequestContext(RequestParameters): - user_id: UserID = Field(..., alias=RQT_USERID_KEY) # type: ignore[literal-required] - product_name: str = Field(..., alias=RQ_PRODUCT_KEY) # type: ignore[literal-required] - - -@routes.get(f"/{VTAG}/credits-price", name="get_current_product_price") -@login_required -@permission_required("product.price.read") -async def _get_current_product_price(request: web.Request): - req_ctx = _ProductsRequestContext.model_validate(request) - price_info = await _api.get_current_product_credit_price_info(request) - - credit_price = GetCreditPrice( - product_name=req_ctx.product_name, - usd_per_credit=price_info.usd_per_credit if price_info else None, - min_payment_amount_usd=( - price_info.min_payment_amount_usd # type: ignore[arg-type] - if price_info - else None - ), - ) - return envelope_json_response(credit_price) - - -class _ProductsRequestParams(StrictRequestParameters): - product_name: IDStr | Literal["current"] - - -@routes.get(f"/{VTAG}/products/{{product_name}}", name="get_product") -@login_required -@permission_required("product.details.*") -async def _get_product(request: web.Request): - req_ctx = _ProductsRequestContext.model_validate(request) - path_params = parse_request_path_parameters_as(_ProductsRequestParams, request) - - if path_params.product_name == "current": - product_name = req_ctx.product_name - else: - product_name = path_params.product_name - - try: - product: Product = api.get_product(request.app, product_name=product_name) - except KeyError as err: - raise web.HTTPNotFound(reason=f"{product_name=} not found") from err - - assert "extra" in GetProduct.model_config # nosec - assert GetProduct.model_config["extra"] == "ignore" # nosec - data = GetProduct(**product.model_dump(), templates=[]) - return envelope_json_response(data) - - -class _ProductTemplateParams(_ProductsRequestParams): - template_id: IDStr - - -@routes.put( - f"/{VTAG}/products/{{product_name}}/templates/{{template_id}}", - name="update_product_template", -) -@login_required -@permission_required("product.details.*") -async def update_product_template(request: web.Request): - req_ctx = _ProductsRequestContext.model_validate(request) - path_params = parse_request_path_parameters_as(_ProductTemplateParams, request) - - assert req_ctx # nosec - assert path_params # nosec - - raise NotImplementedError diff --git a/services/web/server/src/simcore_service_webserver/products/_model.py b/services/web/server/src/simcore_service_webserver/products/_model.py deleted file mode 100644 index ef4b7f9498c..00000000000 --- a/services/web/server/src/simcore_service_webserver/products/_model.py +++ /dev/null @@ -1,289 +0,0 @@ -import logging -import re -import string -from typing import ( # noqa: UP035 # pydantic does not validate with re.Pattern - Annotated, - Any, -) - -from models_library.basic_regex import ( - PUBLIC_VARIABLE_NAME_RE, - TWILIO_ALPHANUMERIC_SENDER_ID_RE, -) -from models_library.basic_types import NonNegativeDecimal -from models_library.emails import LowerCaseEmailStr -from models_library.products import ProductName -from models_library.utils.change_case import snake_to_camel -from pydantic import ( - BaseModel, - BeforeValidator, - ConfigDict, - Field, - PositiveInt, - field_serializer, - field_validator, -) -from simcore_postgres_database.models.products import ( - EmailFeedback, - Forum, - IssueTracker, - Manual, - 
ProductLoginSettingsDict, - Vendor, - WebFeedback, -) -from sqlalchemy import Column - -from ..db.models import products -from ..statics._constants import FRONTEND_APPS_AVAILABLE - -_logger = logging.getLogger(__name__) - - -class Product(BaseModel): - """Model used to parse a row of pg product's table - - The info in this model is static and read-only - - SEE descriptions in packages/postgres-database/src/simcore_postgres_database/models/products.py - """ - - name: ProductName = Field(pattern=PUBLIC_VARIABLE_NAME_RE, validate_default=True) - - display_name: Annotated[str, Field(..., description="Long display name")] - short_name: str | None = Field( - None, - pattern=re.compile(TWILIO_ALPHANUMERIC_SENDER_ID_RE), - min_length=2, - max_length=11, - description="Short display name for SMS", - ) - - host_regex: Annotated[re.Pattern, BeforeValidator(str.strip)] = Field( - ..., description="Host regex" - ) - - support_email: Annotated[ - LowerCaseEmailStr, - Field( - description="Main support email." - " Other support emails can be defined under 'support' field", - ), - ] - - product_owners_email: Annotated[ - LowerCaseEmailStr | None, - Field(description="Used e.g. for account request forms"), - ] = None - - twilio_messaging_sid: str | None = Field( - default=None, min_length=34, max_length=34, description="Identifier for SMS" - ) - - vendor: Vendor | None = Field( - None, - description="Vendor information such as company name, address, copyright, ...", - ) - - issues: list[IssueTracker] | None = None - - manuals: list[Manual] | None = None - - support: list[Forum | EmailFeedback | WebFeedback] | None = Field(None) - - login_settings: ProductLoginSettingsDict = Field( - ..., - description="Product customization of login settings. " - "Note that these are NOT the final plugin settings but those are obtained from login.settings.get_plugin_settings", - ) - - registration_email_template: str | None = Field( - None, json_schema_extra={"x_template_name": "registration_email"} - ) - - max_open_studies_per_user: PositiveInt | None = Field( - default=None, - description="Limits the number of studies a user may have open concurrently (disabled if NULL)", - ) - - group_id: int | None = Field( - default=None, description="Groups associated to this product" - ) - - is_payment_enabled: bool = Field( - default=False, - description="True if this product offers credits", - ) - - credits_per_usd: NonNegativeDecimal | None = Field( - default=None, - description="Price of the credits in this product given in credit/USD. None for free product.", - ) - - min_payment_amount_usd: NonNegativeDecimal | None = Field( - default=None, - description="Price of the credits in this product given in credit/USD. 
None for free product.", - ) - - @field_validator("*", mode="before") - @classmethod - def _parse_empty_string_as_null(cls, v): - """Safe measure: database entries are sometimes left blank instead of null""" - if isinstance(v, str) and len(v.strip()) == 0: - return None - return v - - @field_validator("name", mode="before") - @classmethod - def _validate_name(cls, v): - if v not in FRONTEND_APPS_AVAILABLE: - msg = f"{v} is not in available front-end apps {FRONTEND_APPS_AVAILABLE}" - raise ValueError(msg) - return v - - @field_serializer("issues", "vendor") - @staticmethod - def _preserve_snake_case(v: Any) -> Any: - return v - - @property - def twilio_alpha_numeric_sender_id(self) -> str: - return self.short_name or self.display_name.replace(string.punctuation, "")[:11] - - model_config = ConfigDict( - alias_generator=snake_to_camel, - populate_by_name=True, - str_strip_whitespace=True, - frozen=True, - from_attributes=True, - extra="ignore", - json_schema_extra={ - "examples": [ - { - # fake mandatory - "name": "osparc", - "host_regex": r"([\.-]{0,1}osparc[\.-])", - "twilio_messaging_sid": "1" * 34, - "registration_email_template": "osparc_registration_email", - "login_settings": { - "LOGIN_2FA_REQUIRED": False, - }, - # defaults from sqlalchemy table - **{ - str(c.name): c.server_default.arg # type: ignore[union-attr] - for c in products.columns - if isinstance(c, Column) - and c.server_default - and isinstance(c.server_default.arg, str) # type: ignore[union-attr] - }, - }, - # Example of data in the database with a url set with blanks - { - "name": "tis", - "display_name": "TI PT", - "short_name": "TIPI", - "host_regex": r"(^tis[\.-])|(^ti-solutions\.)|(^ti-plan\.)", - "support_email": "support@foo.com", - "manual_url": "https://foo.com", - "issues_login_url": None, - "issues_new_url": "https://foo.com/new", - "feedback_form_url": "", # <-- blanks - "login_settings": { - "LOGIN_2FA_REQUIRED": False, - }, - }, - # full example - { - "name": "osparc", - "display_name": "o²S²PARC FOO", - "short_name": "osparcf", - "host_regex": "([\\.-]{0,1}osparcf[\\.-])", - "support_email": "foo@osparcf.io", - "vendor": { - "url": "https://acme.com", - "license_url": "https://acme.com/license", - "invitation_form": True, - "name": "ACME", - "copyright": "© ACME correcaminos", - }, - "issues": [ - { - "label": "github", - "login_url": "https://github.com/ITISFoundation/osparc-simcore", - "new_url": "https://github.com/ITISFoundation/osparc-simcore/issues/new/choose", - }, - { - "label": "fogbugz", - "login_url": "https://fogbugz.com/login", - "new_url": "https://fogbugz.com/new?project=123", - }, - ], - "manuals": [ - {"url": "doc.acme.com", "label": "main"}, - {"url": "yet-another-manual.acme.com", "label": "z43"}, - ], - "support": [ - { - "url": "forum.acme.com", - "kind": "forum", - "label": "forum", - }, - { - "kind": "email", - "email": "more-support@acme.com", - "label": "email", - }, - { - "url": "support.acme.com", - "kind": "web", - "label": "web-form", - }, - ], - "login_settings": { - "LOGIN_2FA_REQUIRED": False, - }, - "group_id": 12345, - "is_payment_enabled": False, - }, - ] - }, - ) - - # helpers ---- - - def to_statics(self) -> dict[str, Any]: - """ - Selects **public** fields from product's info - and prefixes it with its name to produce - items for statics.json (reachable by front-end) - """ - - # SECURITY WARNING: do not expose sensitive information here - # keys will be named as e.g. displayName, supportEmail, ... 
- return self.model_dump( - include={ - "display_name": True, - "support_email": True, - "vendor": True, - "issues": True, - "manuals": True, - "support": True, - "is_payment_enabled": True, - "is_dynamic_services_telemetry_enabled": True, - }, - exclude_none=True, - exclude_unset=True, - by_alias=True, - ) - - def get_template_name_for(self, filename: str) -> str | None: - """Checks for field marked with 'x_template_name' that fits the argument""" - template_name = filename.removesuffix(".jinja2") - for name, field in self.model_fields.items(): - if ( - field.json_schema_extra - and field.json_schema_extra.get("x_template_name") == template_name # type: ignore[union-attr] - ): - template_name_attribute: str = getattr(self, name) - return template_name_attribute - return None diff --git a/services/web/server/src/simcore_service_webserver/products/_models.py b/services/web/server/src/simcore_service_webserver/products/_models.py new file mode 100644 index 00000000000..dbab8b60a9b --- /dev/null +++ b/services/web/server/src/simcore_service_webserver/products/_models.py @@ -0,0 +1,334 @@ +import logging +import re +import string +from dataclasses import dataclass +from decimal import Decimal +from typing import Annotated, Any + +from models_library.basic_regex import ( + PUBLIC_VARIABLE_NAME_RE, + TWILIO_ALPHANUMERIC_SENDER_ID_RE, +) +from models_library.basic_types import NonNegativeDecimal +from models_library.emails import LowerCaseEmailStr +from models_library.products import ProductName, StripePriceID, StripeTaxRateID +from models_library.utils.change_case import snake_to_camel +from pydantic import ( + BaseModel, + BeforeValidator, + ConfigDict, + Field, + PositiveInt, + field_serializer, + field_validator, +) +from pydantic.config import JsonDict +from simcore_postgres_database.models.products import ( + EmailFeedback, + Forum, + IssueTracker, + Manual, + ProductLoginSettingsDict, + Vendor, + WebFeedback, + products, +) + +from ..constants import FRONTEND_APPS_AVAILABLE + +_logger = logging.getLogger(__name__) + + +@dataclass(frozen=True) +class CreditResult: + product_name: ProductName + credit_amount: Decimal + + +@dataclass(frozen=True) +class ProductStripeInfo: + stripe_price_id: StripePriceID + stripe_tax_rate_id: StripeTaxRateID + + +@dataclass(frozen=True) +class PaymentFields: + enabled: bool + credits_per_usd: Decimal | None + min_payment_amount_usd: Decimal | None + + +class Product(BaseModel): + """Model used to parse a row of pg product's table + + The info in this model is static and read-only + + SEE descriptions in packages/postgres-database/src/simcore_postgres_database/models/products.py + """ + + name: Annotated[ + ProductName, + Field(pattern=PUBLIC_VARIABLE_NAME_RE, validate_default=True), + ] + + display_name: Annotated[str, Field(..., description="Long display name")] + short_name: Annotated[ + str | None, + Field( + None, + pattern=re.compile(TWILIO_ALPHANUMERIC_SENDER_ID_RE), + min_length=2, + max_length=11, + description="Short display name for SMS", + ), + ] + + host_regex: Annotated[ + re.Pattern, BeforeValidator(str.strip), Field(..., description="Host regex") + ] + + support_email: Annotated[ + LowerCaseEmailStr, + Field( + description="Main support email." + " Other support emails can be defined under 'support' field", + ), + ] + + product_owners_email: Annotated[ + LowerCaseEmailStr | None, + Field(description="Used e.g. 
for account request forms"), + ] = None + + twilio_messaging_sid: Annotated[ + str | None, + Field(min_length=34, max_length=34, description="Identifier for SMS"), + ] = None + + vendor: Annotated[ + Vendor | None, + Field( + description="Vendor information such as company name, address, copyright, ...", + ), + ] = None + + issues: list[IssueTracker] | None = None + + manuals: list[Manual] | None = None + + support: list[Forum | EmailFeedback | WebFeedback] | None = Field(None) + + login_settings: Annotated[ + ProductLoginSettingsDict, + Field( + description="Product customization of login settings. " + "Note that these are NOT the final plugin settings but those are obtained from login.settings.get_plugin_settings", + ), + ] + + registration_email_template: Annotated[ + str | None, Field(json_schema_extra={"x_template_name": "registration_email"}) + ] = None + + max_open_studies_per_user: Annotated[ + PositiveInt | None, + Field( + description="Limits the number of studies a user may have open concurrently (disabled if NULL)", + ), + ] = None + + group_id: Annotated[ + int | None, Field(description="Groups associated to this product") + ] = None + + is_payment_enabled: Annotated[ + bool, + Field( + description="True if this product offers credits", + ), + ] = False + + credits_per_usd: Annotated[ + NonNegativeDecimal | None, + Field( + description="Price of the credits in this product given in credit/USD. None for free product.", + ), + ] = None + + min_payment_amount_usd: Annotated[ + NonNegativeDecimal | None, + Field( + description="Minimum payment amount accepted in this product, in USD. None for free product.", + ), + ] = None + + ## Guarantees when loaded from a database --------------- + + @field_validator("*", mode="before") + @classmethod + def _parse_empty_string_as_null(cls, v): + """Safe measure: database entries are sometimes left blank instead of null""" + if isinstance(v, str) and len(v.strip()) == 0: + return None + return v + + @field_validator("name", mode="before") + @classmethod + def _check_is_valid_product_name(cls, v): + if v not in FRONTEND_APPS_AVAILABLE: + msg = f"{v} is not in available front-end apps {FRONTEND_APPS_AVAILABLE}" + raise ValueError(msg) + return v + + @field_serializer("issues", "vendor") + @staticmethod + def _preserve_snake_case(v: Any) -> Any: + return v + + @property + def twilio_alpha_numeric_sender_id(self) -> str: + return self.short_name or self.display_name.replace(string.punctuation, "")[:11] + + @staticmethod + def _update_json_schema_extra(schema: JsonDict) -> None: + from sqlalchemy import Column + + schema.update( + { + "examples": [ + { + # fake mandatory + "name": "osparc", + "host_regex": r"([\.-]{0,1}osparc[\.-])", + "twilio_messaging_sid": "1" * 34, + "registration_email_template": "osparc_registration_email", + "login_settings": { + "LOGIN_2FA_REQUIRED": False, + }, + # defaults from sqlalchemy table + **{ + str(c.name): c.server_default.arg # type: ignore[union-attr] + for c in products.columns + if isinstance(c, Column) + and c.server_default + and isinstance(c.server_default.arg, str) # type: ignore[union-attr] + }, + }, + # Example of data in the database with a url set with blanks + { + "name": "tis", + "display_name": "TI PT", + "short_name": "TIPI", + "host_regex": r"(^tis[\.-])|(^ti-solutions\.)|(^ti-plan\.)", + "support_email": "support@foo.com", + "manual_url": "https://foo.com", + "issues_login_url": None, + "issues_new_url": "https://foo.com/new", + "feedback_form_url": "", # <-- blanks + "login_settings": { 
"LOGIN_2FA_REQUIRED": False, + }, + }, + # Full example + { + "name": "osparc", + "display_name": "o²S²PARC FOO", + "short_name": "osparcf", + "host_regex": "([\\.-]{0,1}osparcf[\\.-])", + "support_email": "foo@osparcf.io", + "vendor": { + "url": "https://acme.com", + "license_url": "https://acme.com/license", + "invitation_form": True, + "name": "ACME", + "copyright": "© ACME correcaminos", + }, + "issues": [ + { + "label": "github", + "login_url": "https://github.com/ITISFoundation/osparc-simcore", + "new_url": "https://github.com/ITISFoundation/osparc-simcore/issues/new/choose", + }, + { + "label": "fogbugz", + "login_url": "https://fogbugz.com/login", + "new_url": "https://fogbugz.com/new?project=123", + }, + ], + "manuals": [ + {"url": "doc.acme.com", "label": "main"}, + {"url": "yet-another-manual.acme.com", "label": "z43"}, + ], + "support": [ + { + "url": "forum.acme.com", + "kind": "forum", + "label": "forum", + }, + { + "kind": "email", + "email": "more-support@acme.com", + "label": "email", + }, + { + "url": "support.acme.com", + "kind": "web", + "label": "web-form", + }, + ], + "login_settings": { + "LOGIN_2FA_REQUIRED": False, + }, + "group_id": 12345, + "is_payment_enabled": False, + }, + ] + }, + ) + + model_config = ConfigDict( + alias_generator=snake_to_camel, + populate_by_name=True, + str_strip_whitespace=True, + frozen=True, + from_attributes=True, + extra="ignore", + json_schema_extra=_update_json_schema_extra, + ) + + def to_statics(self) -> dict[str, Any]: + """ + Selects **public** fields from product's info + and prefixes it with its name to produce + items for statics.json (reachable by front-end) + """ + + # SECURITY WARNING: do not expose sensitive information here + # keys will be named as e.g. displayName, supportEmail, ... 
+ return self.model_dump( + include={ + "display_name": True, + "support_email": True, + "vendor": True, + "issues": True, + "manuals": True, + "support": True, + "is_payment_enabled": True, + "is_dynamic_services_telemetry_enabled": True, + }, + exclude_none=True, + exclude_unset=True, + by_alias=True, + ) + + def get_template_name_for(self, filename: str) -> str | None: + """Checks for field marked with 'x_template_name' that fits the argument""" + template_name = filename.removesuffix(".jinja2") + for name, field in self.model_fields.items(): + if ( + field.json_schema_extra + and field.json_schema_extra.get("x_template_name") == template_name # type: ignore[union-attr] + ): + template_name_attribute: str = getattr(self, name) + return template_name_attribute + return None diff --git a/services/web/server/src/simcore_service_webserver/products/_repository.py b/services/web/server/src/simcore_service_webserver/products/_repository.py new file mode 100644 index 00000000000..16a677b0c82 --- /dev/null +++ b/services/web/server/src/simcore_service_webserver/products/_repository.py @@ -0,0 +1,233 @@ +import logging +from decimal import Decimal +from typing import Any + +import sqlalchemy as sa +from models_library.groups import GroupID +from models_library.products import ProductName +from simcore_postgres_database.constants import QUANTIZE_EXP_ARG +from simcore_postgres_database.models.jinja2_templates import jinja2_templates +from simcore_postgres_database.models.products import products +from simcore_postgres_database.utils_products import ( + get_default_product_name, + get_or_create_product_group, +) +from simcore_postgres_database.utils_products_prices import ( + ProductPriceInfo, + get_product_latest_price_info_or_none, + get_product_latest_stripe_info_or_none, +) +from simcore_postgres_database.utils_repos import ( + pass_or_acquire_connection, + transaction_context, +) +from sqlalchemy.engine import Row +from sqlalchemy.ext.asyncio import AsyncConnection + +from ..constants import FRONTEND_APPS_AVAILABLE +from ..db.base_repository import BaseRepository +from ._models import PaymentFields, Product, ProductStripeInfo + +_logger = logging.getLogger(__name__) + + +# +# REPOSITORY +# + +# NOTE: This also asserts that all model fields are in sync with sqlalchemy columns +_PRODUCTS_COLUMNS = [ + products.c.name, + products.c.display_name, + products.c.short_name, + products.c.host_regex, + products.c.support_email, + products.c.product_owners_email, + products.c.twilio_messaging_sid, + products.c.vendor, + products.c.issues, + products.c.manuals, + products.c.support, + products.c.login_settings, + products.c.registration_email_template, + products.c.max_open_studies_per_user, + products.c.group_id, +] + +assert {column.name for column in _PRODUCTS_COLUMNS}.issubset( # nosec + set(Product.model_fields) +) + + +def _to_domain(products_row: Row, payments: PaymentFields) -> Product: + return Product( + **products_row._asdict(), + is_payment_enabled=payments.enabled, + credits_per_usd=payments.credits_per_usd, + ) + + +async def _get_product_payment_fields( + conn: AsyncConnection, product_name: ProductName +) -> PaymentFields: + price_info = await get_product_latest_price_info_or_none( + conn, product_name=product_name + ) + if price_info is None or price_info.usd_per_credit == 0: + return PaymentFields( + enabled=False, + credits_per_usd=None, + min_payment_amount_usd=None, + ) + + assert price_info.usd_per_credit > 0 # 
nosec + assert price_info.min_payment_amount_usd > 0 # nosec + + return PaymentFields( + enabled=True, + credits_per_usd=Decimal(1 / price_info.usd_per_credit).quantize( + QUANTIZE_EXP_ARG + ), + min_payment_amount_usd=price_info.min_payment_amount_usd, + ) + + +class ProductRepository(BaseRepository): + + async def list_products( + self, + connection: AsyncConnection | None = None, + ) -> list[Product]: + """ + Raises: + ValidationError:if products are not setup correctly in the database + """ + app_products: list[Product] = [] + + query = sa.select(*_PRODUCTS_COLUMNS).order_by(products.c.priority) + + async with pass_or_acquire_connection(self.engine, connection) as conn: + rows = await conn.stream(query) + async for row in rows: + name = row.name + payments = await _get_product_payment_fields(conn, product_name=name) + app_products.append(_to_domain(row, payments)) + + assert name in FRONTEND_APPS_AVAILABLE # nosec + + return app_products + + async def list_products_names( + self, + connection: AsyncConnection | None = None, + ) -> list[ProductName]: + query = sa.select(products.c.name).order_by(products.c.priority) + + async with pass_or_acquire_connection(self.engine, connection) as conn: + rows = await conn.stream(query) + return [ProductName(row.name) async for row in rows] + + async def get_product( + self, product_name: str, connection: AsyncConnection | None = None + ) -> Product | None: + query = sa.select(*_PRODUCTS_COLUMNS).where(products.c.name == product_name) + + async with pass_or_acquire_connection(self.engine, connection) as conn: + result = await conn.execute(query) + if row := result.one_or_none(): + payments = await _get_product_payment_fields( + conn, product_name=row.name + ) + return _to_domain(row, payments) + return None + + async def get_default_product_name( + self, connection: AsyncConnection | None = None + ) -> ProductName: + async with pass_or_acquire_connection(self.engine, connection) as conn: + return await get_default_product_name(conn) + + async def get_product_latest_price_info_or_none( + self, product_name: str, connection: AsyncConnection | None = None + ) -> ProductPriceInfo | None: + async with pass_or_acquire_connection(self.engine, connection) as conn: + return await get_product_latest_price_info_or_none( + conn, product_name=product_name + ) + + async def get_product_stripe_info_or_none( + self, product_name: str, connection: AsyncConnection | None = None + ) -> ProductStripeInfo | None: + async with pass_or_acquire_connection(self.engine, connection) as conn: + latest_stripe_info = await get_product_latest_stripe_info_or_none( + conn, product_name=product_name + ) + if latest_stripe_info is None: + return None + + stripe_price_id, stripe_tax_rate_id = latest_stripe_info + return ProductStripeInfo( + stripe_price_id=stripe_price_id, stripe_tax_rate_id=stripe_tax_rate_id + ) + + async def get_template_content( + self, template_name: str, connection: AsyncConnection | None = None + ) -> str | None: + query = sa.select(jinja2_templates.c.content).where( + jinja2_templates.c.name == template_name + ) + + async with pass_or_acquire_connection(self.engine, connection) as conn: + template_content: str | None = await conn.scalar(query) + return template_content + + async def get_product_template_content( + self, + product_name: str, + product_template: sa.Column = products.c.registration_email_template, + connection: AsyncConnection | None = None, + ) -> str | None: + query = ( + sa.select(jinja2_templates.c.content) + .select_from( + sa.join( + 
products, + jinja2_templates, + product_template == jinja2_templates.c.name, + isouter=True, + ) + ) + .where(products.c.name == product_name) + ) + + async with pass_or_acquire_connection(self.engine, connection) as conn: + template_content: str | None = await conn.scalar(query) + return template_content + + async def get_product_ui( + self, product_name: ProductName, connection: AsyncConnection | None = None + ) -> dict[str, Any] | None: + query = sa.select(products.c.ui).where(products.c.name == product_name) + + async with pass_or_acquire_connection(self.engine, connection) as conn: + result = await conn.execute(query) + row = result.one_or_none() + return dict(**row.ui) if row else None + + async def auto_create_products_groups( + self, + connection: AsyncConnection | None = None, + ) -> dict[ProductName, GroupID]: + product_groups_map: dict[ProductName, GroupID] = {} + + product_names = await self.list_products_names(connection) + for product_name in product_names: + # NOTE: transaction is per product. fail-fast! + async with transaction_context(self.engine, connection) as conn: + product_group_id: GroupID = await get_or_create_product_group( + conn, product_name + ) + product_groups_map[product_name] = product_group_id + + return product_groups_map
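Reviewer note: every read-only method above funnels connection handling through `pass_or_acquire_connection`, so callers can either compose several repository calls on a single `AsyncConnection` or let each call acquire and release its own. A minimal sketch of that pattern under the same assumptions as this PR (the table and query below are illustrative, not part of the change):

```python
# Sketch of the optional-connection pattern used by ProductRepository.
# `pass_or_acquire_connection` reuses `connection` when one is passed in;
# otherwise it acquires a connection from the engine and releases it on exit.
import sqlalchemy as sa
from sqlalchemy.ext.asyncio import AsyncConnection, AsyncEngine

from simcore_postgres_database.utils_repos import pass_or_acquire_connection

demo_table = sa.table("demo_table", sa.column("name"))  # illustrative table


async def list_names(
    engine: AsyncEngine, connection: AsyncConnection | None = None
) -> list[str]:
    async with pass_or_acquire_connection(engine, connection) as conn:
        result = await conn.execute(sa.select(demo_table.c.name))
        return [row.name for row in result]
```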
diff --git a/services/web/server/src/simcore_service_webserver/products/_rpc.py b/services/web/server/src/simcore_service_webserver/products/_rpc.py deleted file mode 100644 index 4a4ee46a655..00000000000 --- a/services/web/server/src/simcore_service_webserver/products/_rpc.py +++ /dev/null @@ -1,29 +0,0 @@ -from decimal import Decimal - -from aiohttp import web -from models_library.api_schemas_webserver import WEBSERVER_RPC_NAMESPACE -from models_library.products import CreditResultGet, ProductName -from servicelib.rabbitmq import RPCRouter - -from ..rabbitmq import get_rabbitmq_rpc_server -from . import _api - -router = RPCRouter() - - -@router.expose() -async def get_credit_amount( - app: web.Application, - *, - dollar_amount: Decimal, - product_name: ProductName, -) -> CreditResultGet: - credit_result_get: CreditResultGet = await _api.get_credit_amount( - app, dollar_amount=dollar_amount, product_name=product_name - ) - return credit_result_get - - -async def register_rpc_routes_on_startup(app: web.Application): - rpc_server = get_rabbitmq_rpc_server(app) - await rpc_server.register_router(router, WEBSERVER_RPC_NAMESPACE, app) diff --git a/services/web/server/src/simcore_service_webserver/products/_service.py b/services/web/server/src/simcore_service_webserver/products/_service.py new file mode 100644 index 00000000000..032f20d8083 --- /dev/null +++ b/services/web/server/src/simcore_service_webserver/products/_service.py @@ -0,0 +1,143 @@ +from decimal import Decimal +from typing import Any + +from aiohttp import web +from models_library.groups import GroupID +from models_library.products import ProductName +from pydantic import ValidationError +from servicelib.exceptions import InvalidConfig +from simcore_postgres_database.utils_products_prices import ProductPriceInfo + +from ..constants import APP_PRODUCTS_KEY +from ._models import CreditResult, ProductStripeInfo +from ._repository import ProductRepository +from .errors import ( + BelowMinimumPaymentError, + MissingStripeConfigError, + ProductNotFoundError, + ProductPriceNotDefinedError, + ProductTemplateNotFoundError, +) +from .models import Product + + +async def load_products(app: web.Application) -> list[Product]: + repo = ProductRepository.create_from_app(app) + try: + # NOTE: list_products is implemented as fail-fast! + return await repo.list_products() + except ValidationError as err: + msg = f"Invalid product configuration in db:\n {err}" + raise InvalidConfig(msg) from err + + +async def get_default_product_name(app: web.Application) -> ProductName: + repo = ProductRepository.create_from_app(app) + return await repo.get_default_product_name() + + +def get_product(app: web.Application, product_name: ProductName) -> Product: + try: + product: Product = app[APP_PRODUCTS_KEY][product_name] + return product + except KeyError as exc: + raise ProductNotFoundError(product_name=product_name) from exc + + +def list_products(app: web.Application) -> list[Product]: + products: list[Product] = list(app[APP_PRODUCTS_KEY].values()) + return products + + +async def list_products_names(app: web.Application) -> list[ProductName]: + repo = ProductRepository.create_from_app(app) + names: list[ProductName] = await repo.list_products_names() + return names + + +async def get_credit_price_info( + app: web.Application, product_name: ProductName +) -> ProductPriceInfo | None: + repo = ProductRepository.create_from_app(app) + return await repo.get_product_latest_price_info_or_none(product_name) + + +async def get_product_ui( + repo: ProductRepository, product_name: ProductName +) -> dict[str, Any]: + ui = await repo.get_product_ui(product_name=product_name) + if ui is not None: + return ui + + raise ProductNotFoundError(product_name=product_name) + + +async def get_credit_amount( + app: web.Application, + *, + dollar_amount: Decimal, + product_name: ProductName, +) -> CreditResult: + """Computes the credit amount for a given dollar amount and product. + + NOTE: Contrary to other product API functions (e.g. get_current_product), this + function reads the latest values from the database. The other functions use + products that are loaded on startup and cached, so they would require a restart + of the service for the latest changes to take effect. + + Raises: + ProductPriceNotDefinedError + BelowMinimumPaymentError + + """ + repo = ProductRepository.create_from_app(app) + price_info = await repo.get_product_latest_price_info_or_none(product_name) + if price_info is None or not price_info.usd_per_credit: + # '0 or None' should raise + raise ProductPriceNotDefinedError( + reason=f"Product {product_name} usd_per_credit is either not defined or zero" + ) + + if dollar_amount < price_info.min_payment_amount_usd: + raise BelowMinimumPaymentError( + amount_usd=dollar_amount, + min_payment_amount_usd=price_info.min_payment_amount_usd, + ) + + credit_amount = dollar_amount / price_info.usd_per_credit + return CreditResult(product_name=product_name, credit_amount=credit_amount) + + +async def get_product_stripe_info( + app: web.Application, *, product_name: ProductName +) -> ProductStripeInfo: + repo = ProductRepository.create_from_app(app) + + product_stripe_info = await repo.get_product_stripe_info_or_none(product_name) + if ( + product_stripe_info is None + or "missing!!" in product_stripe_info.stripe_price_id + or "missing!!" in product_stripe_info.stripe_tax_rate_id + ): + exc = MissingStripeConfigError( + product_name=product_name, + product_stripe_info=product_stripe_info, + ) + exc.add_note("Probably the Stripe side is not configured") + raise exc + return product_stripe_info + + +async def get_template_content(app: web.Application, *, template_name: str) -> str: + repo = ProductRepository.create_from_app(app) + content = await repo.get_template_content(template_name) + if not content: + raise ProductTemplateNotFoundError(template_name=template_name) + return content + + +async def auto_create_products_groups( + app: web.Application, +) -> dict[ProductName, GroupID]: + repo = ProductRepository.create_from_app(app) + return await repo.auto_create_products_groups()
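As a sanity check of the pricing logic above, here is a worked example with made-up numbers (0.50 USD per credit and a 10 USD payment minimum; both values are illustrative, not actual product prices):

```python
from decimal import Decimal

# Illustrative price info, mirroring the ProductPriceInfo fields used above
usd_per_credit = Decimal("0.50")
min_payment_amount_usd = Decimal("10")

dollar_amount = Decimal("25")
assert dollar_amount >= min_payment_amount_usd  # else BelowMinimumPaymentError
credit_amount = dollar_amount / usd_per_credit
assert credit_amount == Decimal("50")

# A 5 USD payment would fall below the minimum and raise BelowMinimumPaymentError
```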
diff --git a/services/web/server/src/simcore_service_webserver/products/_web_events.py b/services/web/server/src/simcore_service_webserver/products/_web_events.py new file mode 100644 index 00000000000..7000cb21b1e --- /dev/null +++ b/services/web/server/src/simcore_service_webserver/products/_web_events.py @@ -0,0 +1,79 @@ +import logging +import tempfile +from pathlib import Path +from pprint import pformat + +from aiohttp import web +from models_library.products import ProductName + +from ..constants import APP_PRODUCTS_KEY +from . import _service +from ._models import Product + +_logger = logging.getLogger(__name__) + +APP_PRODUCTS_TEMPLATES_DIR_KEY = f"{__name__}.template_dir" + + +async def _auto_create_products_groups(app: web.Application) -> None: + """Ensures all products have associated group ids + + Avoids having products with undefined groups now that the products.group_id column exists + + NOTE: could not add this in 'setup_groups' (groups plugin) + since it has to be executed BEFORE 'load_products_on_startup' + """ + product_groups_map = await _service.auto_create_products_groups(app) + _logger.debug("Products group IDs: %s", pformat(product_groups_map)) + + +def _set_app_state( + app: web.Application, + app_products: dict[ProductName, Product], + default_product_name: str, +): + # NOTE: products are checked on every request, therefore we + # cache them in the `app` upon startup + app[APP_PRODUCTS_KEY] = app_products + assert default_product_name in app_products # nosec + app[f"{APP_PRODUCTS_KEY}_default"] = default_product_name + + +async def _load_products_on_startup(app: web.Application): + """ + Loads info on products stored in the database into the app's storage (i.e. memory) + """ + app_products: dict[ProductName, Product] = { + product.name: product for product in await _service.load_products(app) + } + + default_product_name = await _service.get_default_product_name(app) + + _set_app_state(app, app_products, default_product_name) + assert APP_PRODUCTS_KEY in app # nosec + + _logger.debug("Products loaded: %s", list(app_products)) + + +async def _setup_product_templates(app: web.Application): + """ + Builds a temporary directory into which product templates are downloaded + """ + with tempfile.TemporaryDirectory( + suffix=APP_PRODUCTS_TEMPLATES_DIR_KEY + ) as templates_dir: + app[APP_PRODUCTS_TEMPLATES_DIR_KEY] = Path(templates_dir) + + yield + + # cleanup + + +def setup_web_events(app: web.Application): + + app.on_startup.append( + # NOTE: must go BEFORE _load_products_on_startup + _auto_create_products_groups + ) + app.on_startup.append(_load_products_on_startup) + app.cleanup_ctx.append(_setup_product_templates)
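`_setup_product_templates` above relies on aiohttp's cleanup-context protocol: everything before the `yield` runs on startup, everything after it runs on shutdown, so the `TemporaryDirectory` is removed when the generator resumes. A self-contained sketch of that protocol (the names here are illustrative, not from this PR):

```python
import tempfile
from pathlib import Path

from aiohttp import web


async def demo_templates_dir_ctx(app: web.Application):
    # startup: create the directory and expose it via the app state
    with tempfile.TemporaryDirectory() as tmp_dir:
        app["demo_templates_dir"] = Path(tmp_dir)
        yield  # the application serves requests while the directory exists
    # cleanup: leaving the with-block has removed the directory


app = web.Application()
app.cleanup_ctx.append(demo_templates_dir_ctx)
```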
diff --git a/services/web/server/src/simcore_service_webserver/products/_web_helpers.py b/services/web/server/src/simcore_service_webserver/products/_web_helpers.py new file mode 100644 index 00000000000..859793d9e0a --- /dev/null +++ b/services/web/server/src/simcore_service_webserver/products/_web_helpers.py @@ -0,0 +1,117 @@ +import contextlib +from pathlib import Path + +import aiofiles +from aiohttp import web +from models_library.products import ProductName +from simcore_postgres_database.utils_products_prices import ProductPriceInfo + +from .._resources import webserver_resources +from ..constants import RQ_PRODUCT_KEY +from . import _service +from ._web_events import APP_PRODUCTS_TEMPLATES_DIR_KEY +from .errors import ( + FileTemplateNotFoundError, + ProductNotFoundError, + UnknownProductError, +) +from .models import Product + + +def get_product_name(request: web.Request) -> str: + """Returns the product name set in the request (it might be undefined)""" + # NOTE: introduced by middleware + try: + product_name: str = request[RQ_PRODUCT_KEY] + except KeyError as exc: + error = UnknownProductError() + error.add_note("TIP: Check products middleware") + raise error from exc + return product_name + + +def get_current_product(request: web.Request) -> Product: + """Returns the product associated with the current request""" + product_name: ProductName = get_product_name(request) + current_product: Product = _service.get_product( + request.app, product_name=product_name + ) + return current_product + + +def _get_current_product_or_none(request: web.Request) -> Product | None: + with contextlib.suppress(ProductNotFoundError, UnknownProductError): + product: Product = get_current_product(request) + return product + return None + + +async def get_current_product_credit_price_info( + request: web.Request, +) -> ProductPriceInfo | None: + """Gets the latest credit price for this product. + + NOTE: Contrary to other product API functions (e.g. get_current_product), this + function reads the latest values from the database. The other functions use + products that are loaded on startup and cached, so they would require a restart + of the service for the latest changes to take effect. + """ + current_product_name = get_product_name(request) + return await _service.get_credit_price_info( + request.app, product_name=current_product_name + ) + + +def _themed(dirname: str, template: str) -> Path: + path: Path = webserver_resources.get_path(f"{Path(dirname) / template}") + return path + + +async def _get_common_template_path(filename: str) -> Path: + common_template = _themed("templates/common", filename) + if not common_template.exists(): + raise FileTemplateNotFoundError(filename=filename) + return common_template + + +async def _cache_template_content( + request: web.Request, template_path: Path, template_name: str +) -> None: + content = await _service.get_template_content( + request.app, template_name=template_name + ) + try: + async with aiofiles.open(template_path, "w") as fh: + await fh.write(content) + except Exception: + if template_path.exists(): + template_path.unlink() + raise + + +async def _get_product_specific_template_path( + request: web.Request, product: Product, filename: str +) -> Path | None: + if template_name := product.get_template_name_for(filename): + template_dir: Path = request.app[APP_PRODUCTS_TEMPLATES_DIR_KEY] + template_path = template_dir / template_name + if not template_path.exists(): + await _cache_template_content(request, template_path, template_name) + return template_path + + template_path = _themed(f"templates/{product.name}", filename) + if template_path.exists(): + return template_path + + return None + + +async def get_product_template_path(request: web.Request, filename: str) -> Path: + if (product := _get_current_product_or_none(request)) and ( + template_path := await _get_product_specific_template_path( + request, product, filename + ) + ): + return template_path + + return await _get_common_template_path(filename) diff --git a/services/web/server/src/simcore_service_webserver/products/_middlewares.py b/services/web/server/src/simcore_service_webserver/products/_web_middlewares.py similarity index 97% rename from services/web/server/src/simcore_service_webserver/products/_middlewares.py rename to 
services/web/server/src/simcore_service_webserver/products/_web_middlewares.py index 5a962e25ef7..e82a1a54f5b 100644 --- a/services/web/server/src/simcore_service_webserver/products/_middlewares.py +++ b/services/web/server/src/simcore_service_webserver/products/_web_middlewares.py @@ -6,9 +6,9 @@ from servicelib.aiohttp.typing_extension import Handler from servicelib.rest_constants import X_PRODUCT_NAME_HEADER -from .._constants import APP_PRODUCTS_KEY, RQ_PRODUCT_KEY from .._meta import API_VTAG -from ._model import Product +from ..constants import APP_PRODUCTS_KEY, RQ_PRODUCT_KEY +from .models import Product _logger = logging.getLogger(__name__) diff --git a/services/web/server/src/simcore_service_webserver/products/api.py b/services/web/server/src/simcore_service_webserver/products/api.py deleted file mode 100644 index 81b7718dc5e..00000000000 --- a/services/web/server/src/simcore_service_webserver/products/api.py +++ /dev/null @@ -1,26 +0,0 @@ -from models_library.products import ProductName - -from ._api import ( - get_credit_amount, - get_current_product, - get_product, - get_product_name, - get_product_stripe_info, - get_product_template_path, - list_products, -) -from ._model import Product - -__all__: tuple[str, ...] = ( - "get_credit_amount", - "get_current_product", - "get_product_name", - "get_product_stripe_info", - "get_product_template_path", - "get_product", - "list_products", - "Product", - "ProductName", -) - -# nopycln: file diff --git a/services/web/server/src/simcore_service_webserver/products/errors.py b/services/web/server/src/simcore_service_webserver/products/errors.py index 77c24849965..3b0da3564f5 100644 --- a/services/web/server/src/simcore_service_webserver/products/errors.py +++ b/services/web/server/src/simcore_service_webserver/products/errors.py @@ -1,13 +1,15 @@ -""" - API plugin errors -""" +from ..errors import WebServerBaseError -from ..errors import WebServerBaseError +class ProductError(WebServerBaseError, ValueError): ... + +class UnknownProductError(ProductError): + msg_template = "Cannot determine the product in the current context" -class ProductError(WebServerBaseError, ValueError): - ... + +class ProductNotFoundError(ProductError): + msg_template = "Undefined product '{product_name}'" class ProductPriceNotDefinedError(ProductError): @@ -16,3 +18,18 @@ class ProductPriceNotDefinedError(ProductError): msg_template = "Payment of {amount_usd} USD is below the required minimum of {min_payment_amount_usd} USD" + + +class ProductTemplateNotFoundError(ProductError): + msg_template = "Missing template {template_name} for product" + + +class MissingStripeConfigError(ProductError): + msg_template = ( + "Missing Stripe configuration for product {product_name}.\n" + "NOTE: This is currently set up manually by the operator in the pg database (via adminer) and in the Stripe platform." + ) + + +class FileTemplateNotFoundError(ProductError): + msg_template = "{filename} is not part of the templates/common" diff --git a/services/web/server/src/simcore_service_webserver/products/models.py b/services/web/server/src/simcore_service_webserver/products/models.py new file mode 100644 index 00000000000..4625012a484 --- /dev/null +++ b/services/web/server/src/simcore_service_webserver/products/models.py @@ -0,0 +1,11 @@ +from models_library.products import ProductName + +from ._models import CreditResult, Product, ProductStripeInfo + +__all__: tuple[str, ...] 
= ( + "CreditResult", + "Product", + "ProductName", + "ProductStripeInfo", +) +# nopycln: file diff --git a/services/web/server/src/simcore_service_webserver/products/plugin.py b/services/web/server/src/simcore_service_webserver/products/plugin.py index 70483623419..5aea6edcf7e 100644 --- a/services/web/server/src/simcore_service_webserver/products/plugin.py +++ b/services/web/server/src/simcore_service_webserver/products/plugin.py @@ -8,22 +8,11 @@ """ - import logging from aiohttp import web from servicelib.aiohttp.application_setup import ModuleCategory, app_module_setup -from .._constants import APP_SETTINGS_KEY -from ..rabbitmq import setup_rabbitmq -from . import _handlers, _invitations_handlers, _rpc -from ._events import ( - auto_create_products_groups, - load_products_on_startup, - setup_product_templates, -) -from ._middlewares import discover_product_middleware - _logger = logging.getLogger(__name__) @@ -35,24 +24,20 @@ logger=_logger, ) def setup_products(app: web.Application): + # + # NOTE: internal import speeds up booting the app, + # especially if this plugin is not set up to be loaded + # + from ..constants import APP_SETTINGS_KEY + from . import _web_events, _web_middlewares + from ._controller import rest, rpc + assert app[APP_SETTINGS_KEY].WEBSERVER_PRODUCTS is True # nosec - # middlewares - app.middlewares.append(discover_product_middleware) - - # routes - app.router.add_routes(_handlers.routes) - app.router.add_routes(_invitations_handlers.routes) - - # rpc api - setup_rabbitmq(app) - if app[APP_SETTINGS_KEY].WEBSERVER_RABBITMQ: - app.on_startup.append(_rpc.register_rpc_routes_on_startup) - - # events - app.on_startup.append( - # NOTE: must go BEFORE load_products_on_startup - auto_create_products_groups - ) - app.on_startup.append(load_products_on_startup) - app.cleanup_ctx.append(setup_product_templates) + app.middlewares.append(_web_middlewares.discover_product_middleware) + + app.router.add_routes(rest.routes) + + rpc.setup_rpc(app) + + _web_events.setup_web_events(app) diff --git a/services/web/server/src/simcore_service_webserver/products/products_service.py b/services/web/server/src/simcore_service_webserver/products/products_service.py new file mode 100644 index 00000000000..d21a0e9a27e --- /dev/null +++ b/services/web/server/src/simcore_service_webserver/products/products_service.py @@ -0,0 +1,19 @@ +from ._service import ( + get_credit_amount, + get_product, + get_product_stripe_info, + get_product_ui, + list_products, + list_products_names, +) + +__all__: tuple[str, ...] = ( + "get_credit_amount", + "get_product", + "get_product_stripe_info", + "get_product_ui", + "list_products", + "list_products_names", +) + +# nopycln: file diff --git a/services/web/server/src/simcore_service_webserver/products/products_web.py b/services/web/server/src/simcore_service_webserver/products/products_web.py new file mode 100644 index 00000000000..38ddb1634ec --- /dev/null +++ b/services/web/server/src/simcore_service_webserver/products/products_web.py @@ -0,0 +1,14 @@ +from ._web_helpers import ( + get_current_product, + get_current_product_credit_price_info, + get_product_name, + get_product_template_path, +) + +__all__: tuple[str, ...] 
= ( + "get_current_product", + "get_current_product_credit_price_info", + "get_product_name", + "get_product_template_path", +) +# nopycln: file diff --git a/services/web/server/src/simcore_service_webserver/projects/_access_rights_db.py b/services/web/server/src/simcore_service_webserver/projects/_access_rights_repository.py similarity index 100% rename from services/web/server/src/simcore_service_webserver/projects/_access_rights_db.py rename to services/web/server/src/simcore_service_webserver/projects/_access_rights_repository.py diff --git a/services/web/server/src/simcore_service_webserver/projects/_access_rights_api.py b/services/web/server/src/simcore_service_webserver/projects/_access_rights_service.py similarity index 95% rename from services/web/server/src/simcore_service_webserver/projects/_access_rights_api.py rename to services/web/server/src/simcore_service_webserver/projects/_access_rights_service.py index 805b46fa65e..72f94eba0dd 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_access_rights_api.py +++ b/services/web/server/src/simcore_service_webserver/projects/_access_rights_service.py @@ -2,12 +2,12 @@ from models_library.products import ProductName from models_library.projects import ProjectID from models_library.users import UserID -from simcore_service_webserver.projects._db_utils import PermissionStr from ..db.plugin import get_database_engine from ..workspaces.api import get_workspace -from ._access_rights_db import get_project_owner -from .db import APP_PROJECT_DBAPI, ProjectDBAPI +from ._access_rights_repository import get_project_owner +from ._projects_repository_legacy import APP_PROJECT_DBAPI, ProjectDBAPI +from ._projects_repository_legacy_utils import PermissionStr from .exceptions import ProjectInvalidRightsError, ProjectNotFoundError from .models import UserProjectAccessRightsWithWorkspace diff --git a/services/web/server/src/simcore_service_webserver/projects/_comments_db.py b/services/web/server/src/simcore_service_webserver/projects/_comments_repository.py similarity index 96% rename from services/web/server/src/simcore_service_webserver/projects/_comments_db.py rename to services/web/server/src/simcore_service_webserver/projects/_comments_repository.py index 0cc52bea1e7..1a871f12ed3 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_comments_db.py +++ b/services/web/server/src/simcore_service_webserver/projects/_comments_repository.py @@ -1,8 +1,3 @@ -""" Database API - - - Adds a layer to the postgres API with a focus on the projects comments - -""" import logging from aiopg.sa.result import ResultProxy diff --git a/services/web/server/src/simcore_service_webserver/projects/_comments_api.py b/services/web/server/src/simcore_service_webserver/projects/_comments_service.py similarity index 92% rename from services/web/server/src/simcore_service_webserver/projects/_comments_api.py rename to services/web/server/src/simcore_service_webserver/projects/_comments_service.py index 55cfedac30c..7999d1e591c 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_comments_api.py +++ b/services/web/server/src/simcore_service_webserver/projects/_comments_service.py @@ -10,7 +10,7 @@ from models_library.users import UserID from pydantic import PositiveInt -from .db import APP_PROJECT_DBAPI, ProjectDBAPI +from ._projects_repository_legacy import APP_PROJECT_DBAPI, ProjectDBAPI log = logging.getLogger(__name__) @@ -39,9 +39,9 @@ async def list_project_comments( ) -> list[ProjectsCommentsAPI]: db: 
ProjectDBAPI = request.app[APP_PROJECT_DBAPI] - projects_comments_db_model: list[ - ProjectsCommentsDB - ] = await db.list_project_comments(project_uuid, offset, limit) + projects_comments_db_model: list[ProjectsCommentsDB] = ( + await db.list_project_comments(project_uuid, offset, limit) + ) projects_comments_api_model = [ ProjectsCommentsAPI(**comment.model_dump()) for comment in projects_comments_db_model diff --git a/services/web/server/src/simcore_service_webserver/projects/_controller/__init__.py b/services/web/server/src/simcore_service_webserver/projects/_controller/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/services/web/server/src/simcore_service_webserver/projects/_controller/_rest_exceptions.py b/services/web/server/src/simcore_service_webserver/projects/_controller/_rest_exceptions.py new file mode 100644 index 00000000000..ad2db124670 --- /dev/null +++ b/services/web/server/src/simcore_service_webserver/projects/_controller/_rest_exceptions.py @@ -0,0 +1,217 @@ +import itertools +import logging +from collections import Counter + +from servicelib.aiohttp import status +from servicelib.rabbitmq.rpc_interfaces.catalog.errors import ( + CatalogForbiddenError, + CatalogItemNotFoundError, + CatalogNotAvailableError, +) + +from ...exception_handling import ( + ExceptionToHttpErrorMap, + HttpErrorInfo, + exception_handling_decorator, + to_exceptions_handlers_map, +) +from ...folders.errors import FolderAccessForbiddenError, FolderNotFoundError +from ...resource_usage.errors import DefaultPricingPlanNotFoundError +from ...users.exceptions import UserDefaultWalletNotFoundError +from ...wallets.errors import WalletAccessForbiddenError, WalletNotEnoughCreditsError +from ...workspaces.errors import WorkspaceAccessForbiddenError, WorkspaceNotFoundError +from ..exceptions import ( + ClustersKeeperNotAvailableError, + DefaultPricingUnitNotFoundError, + NodeNotFoundError, + ParentNodeNotFoundError, + ProjectDeleteError, + ProjectGroupNotFoundError, + ProjectInDebtCanNotChangeWalletError, + ProjectInDebtCanNotOpenError, + ProjectInvalidRightsError, + ProjectInvalidUsageError, + ProjectNodeRequiredInputsNotSetError, + ProjectNotFoundError, + ProjectOwnerNotFoundInTheProjectAccessRightsError, + ProjectStartsTooManyDynamicNodesError, + ProjectTooManyProjectOpenedError, + ProjectWalletPendingTransactionError, + WrongTagIdsInQueryError, +) + +_logger = logging.getLogger(__name__) + + +_FOLDER_ERRORS: ExceptionToHttpErrorMap = { + FolderAccessForbiddenError: HttpErrorInfo( + status.HTTP_403_FORBIDDEN, + "Access to folder forbidden", + ), + FolderNotFoundError: HttpErrorInfo( + status.HTTP_404_NOT_FOUND, + "Folder not found: {reason}", + ), +} + + +_NODE_ERRORS: ExceptionToHttpErrorMap = { + NodeNotFoundError: HttpErrorInfo( + status.HTTP_404_NOT_FOUND, + "Node '{node_uuid}' not found in project '{project_uuid}'", + ), + ParentNodeNotFoundError: HttpErrorInfo( + status.HTTP_404_NOT_FOUND, + "Parent node '{node_uuid}' not found", + ), + ProjectNodeRequiredInputsNotSetError: HttpErrorInfo( + status.HTTP_409_CONFLICT, + "Project node is required but input is not set", + ), +} + + +_PROJECT_ERRORS: ExceptionToHttpErrorMap = { + ProjectDeleteError: HttpErrorInfo( + status.HTTP_409_CONFLICT, + "Failed to complete deletion of '{project_uuid}': {reason}", + ), + ProjectGroupNotFoundError: HttpErrorInfo( + status.HTTP_404_NOT_FOUND, + "Project group not found: {reason}", + ), + ProjectInvalidRightsError: HttpErrorInfo( + status.HTTP_403_FORBIDDEN, + "Do not have sufficient 
access rights on project {project_uuid} for this action", + ), + ProjectInvalidUsageError: HttpErrorInfo( + status.HTTP_422_UNPROCESSABLE_ENTITY, + "Invalid usage for project", + ), + ProjectNotFoundError: HttpErrorInfo( + status.HTTP_404_NOT_FOUND, + "Project {project_uuid} not found", + ), + ProjectOwnerNotFoundInTheProjectAccessRightsError: HttpErrorInfo( + status.HTTP_400_BAD_REQUEST, + "Project owner identifier was not found in the project's access-rights field", + ), + ProjectTooManyProjectOpenedError: HttpErrorInfo( + status.HTTP_409_CONFLICT, + "You cannot open more than {max_num_projects} study/ies at once. Please close another study and retry.", + ), + ProjectStartsTooManyDynamicNodesError: HttpErrorInfo( + status.HTTP_409_CONFLICT, + "The maximal amount of concurrently running dynamic services was reached. Please manually stop a service and retry.", + ), + ProjectWalletPendingTransactionError: HttpErrorInfo( + status.HTTP_409_CONFLICT, + "Project has currently pending transactions. It is forbidden to change wallet.", + ), + ProjectInDebtCanNotChangeWalletError: HttpErrorInfo( + status.HTTP_402_PAYMENT_REQUIRED, + "Unable to change the credit account linked to the project. The project is embargoed because the last transaction of {debt_amount} resulted in the credit account going negative.", + ), + ProjectInDebtCanNotOpenError: HttpErrorInfo( + status.HTTP_402_PAYMENT_REQUIRED, + "Unable to open the project. The project is embargoed because the last transaction of {debt_amount} resulted in the credit account going negative.", + ), + WrongTagIdsInQueryError: HttpErrorInfo( + status.HTTP_400_BAD_REQUEST, + "Wrong tag IDs in query", + ), +} + + +_WORKSPACE_ERRORS: ExceptionToHttpErrorMap = { + WorkspaceAccessForbiddenError: HttpErrorInfo( + status.HTTP_403_FORBIDDEN, + "Access to workspace forbidden: {reason}", + ), + WorkspaceNotFoundError: HttpErrorInfo( + status.HTTP_404_NOT_FOUND, + "Workspace not found: {reason}", + ), +} + + +_WALLET_ERRORS: ExceptionToHttpErrorMap = { + UserDefaultWalletNotFoundError: HttpErrorInfo( + status.HTTP_404_NOT_FOUND, + "Wallet not found: {reason}", + ), + WalletAccessForbiddenError: HttpErrorInfo( + status.HTTP_403_FORBIDDEN, + "Payment required, but the user lacks access to the project's linked wallet: Wallet access forbidden. {reason}", + ), + WalletNotEnoughCreditsError: HttpErrorInfo( + status.HTTP_402_PAYMENT_REQUIRED, + "Wallet does not have enough credits. 
{reason}", + ), +} + + +_PRICING_ERRORS: ExceptionToHttpErrorMap = { + DefaultPricingPlanNotFoundError: HttpErrorInfo( + status.HTTP_404_NOT_FOUND, + "Default pricing plan not found", + ), + DefaultPricingUnitNotFoundError: HttpErrorInfo( + status.HTTP_404_NOT_FOUND, + "Default pricing unit not found", + ), +} + + +_OTHER_ERRORS: ExceptionToHttpErrorMap = { + CatalogNotAvailableError: HttpErrorInfo( + status.HTTP_503_SERVICE_UNAVAILABLE, + "This service is currently not available", + ), + ClustersKeeperNotAvailableError: HttpErrorInfo( + status.HTTP_503_SERVICE_UNAVAILABLE, + "Clusters-keeper service is not available", + ), + CatalogForbiddenError: HttpErrorInfo( + status.HTTP_403_FORBIDDEN, + "Catalog forbidden: Insufficient access rights for {name}", + ), + CatalogItemNotFoundError: HttpErrorInfo( + status.HTTP_404_NOT_FOUND, "{name} was not found" + ), +} + + +_ERRORS = [ + _FOLDER_ERRORS, + _NODE_ERRORS, + _OTHER_ERRORS, + _PRICING_ERRORS, + _PROJECT_ERRORS, + _WALLET_ERRORS, + _WORKSPACE_ERRORS, +] + + +def _assert_duplicate(): + duplicates = { + exc.__name__: count + for exc, count in Counter(itertools.chain(*[d.keys() for d in _ERRORS])).items() + if count > 1 + } + if duplicates: + msg = f"Found duplicated exceptions: {duplicates}" + raise AssertionError(msg) + return True + + +assert _assert_duplicate() # nosec + +_TO_HTTP_ERROR_MAP: ExceptionToHttpErrorMap = { + k: v for dikt in _ERRORS for k, v in dikt.items() +} + + +handle_plugin_requests_exceptions = exception_handling_decorator( + to_exceptions_handlers_map(_TO_HTTP_ERROR_MAP) +) diff --git a/services/web/server/src/simcore_service_webserver/projects/_common/models.py b/services/web/server/src/simcore_service_webserver/projects/_controller/_rest_schemas.py similarity index 70% rename from services/web/server/src/simcore_service_webserver/projects/_common/models.py rename to services/web/server/src/simcore_service_webserver/projects/_controller/_rest_schemas.py index 6f358378f60..9618a73bb4c 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_common/models.py +++ b/services/web/server/src/simcore_service_webserver/projects/_controller/_rest_schemas.py @@ -1,11 +1,9 @@ -""" Handlers for STANDARD methods on /projects colletions - -Standard methods or CRUD that states for Create+Read(Get&List)+Update+Delete - -""" - from models_library.projects import ProjectID -from pydantic import BaseModel, ConfigDict, Field +from pydantic import ( + BaseModel, + ConfigDict, + Field, +) from ...models import RequestContext diff --git a/services/web/server/src/simcore_service_webserver/projects/_controller/_rest_utils.py b/services/web/server/src/simcore_service_webserver/projects/_controller/_rest_utils.py new file mode 100644 index 00000000000..beab5959668 --- /dev/null +++ b/services/web/server/src/simcore_service_webserver/projects/_controller/_rest_utils.py @@ -0,0 +1,48 @@ +from aiohttp import web +from models_library.api_schemas_webserver.projects import ProjectListItem +from models_library.rest_pagination import Page +from models_library.rest_pagination_utils import paginate_data +from servicelib.mimetype_constants import MIMETYPE_APPLICATION_JSON +from servicelib.rest_constants import RESPONSE_MODEL_POLICY + +from .. 
import _permalink_service +from .._crud_api_read import _paralell_update +from ..models import ProjectDict + + +async def aggregate_data_to_projects_from_request( + request: web.Request, + projects: list[ProjectDict], +) -> list[ProjectDict]: + + update_permalink_per_project = [ + # permalink + _permalink_service.aggregate_permalink_in_project(request, project=prj) + for prj in projects + ] + + updated_projects: list[ProjectDict] = await _paralell_update( + *update_permalink_per_project, + ) + return updated_projects + + +def create_page_response(projects, request_url, total, limit, offset) -> web.Response: + page = Page[ProjectListItem].model_validate( + paginate_data( + chunk=[ + ProjectListItem.from_domain_model(prj).model_dump( + by_alias=True, exclude_unset=True + ) + for prj in projects + ], + request_url=request_url, + total=total, + limit=limit, + offset=offset, + ) + ) + return web.Response( + text=page.model_dump_json(**RESPONSE_MODEL_POLICY), + content_type=MIMETYPE_APPLICATION_JSON, + ) diff --git a/services/web/server/src/simcore_service_webserver/projects/_comments_handlers.py b/services/web/server/src/simcore_service_webserver/projects/_controller/comments_rest.py similarity index 78% rename from services/web/server/src/simcore_service_webserver/projects/_comments_handlers.py rename to services/web/server/src/simcore_service_webserver/projects/_controller/comments_rest.py index 04ac3d5ca35..183cf1fa3b6 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_comments_handlers.py +++ b/services/web/server/src/simcore_service_webserver/projects/_controller/comments_rest.py @@ -1,8 +1,3 @@ -""" Handlers for project comments operations - -""" - -import functools import logging from typing import Any @@ -22,35 +17,19 @@ parse_request_path_parameters_as, parse_request_query_parameters_as, ) -from servicelib.aiohttp.typing_extension import Handler from servicelib.mimetype_constants import MIMETYPE_APPLICATION_JSON from servicelib.rest_constants import RESPONSE_MODEL_POLICY -from .._meta import API_VTAG as VTAG -from ..login.decorators import login_required -from ..security.decorators import permission_required -from ..utils_aiohttp import envelope_json_response -from . import _comments_api, projects_service -from ._common.models import RequestContext -from .exceptions import ProjectInvalidRightsError, ProjectNotFoundError +from ..._meta import API_VTAG as VTAG +from ...login.decorators import login_required +from ...security.decorators import permission_required +from ...utils_aiohttp import envelope_json_response +from .. 
import _comments_service, _projects_service +from ._rest_exceptions import handle_plugin_requests_exceptions +from ._rest_schemas import RequestContext _logger = logging.getLogger(__name__) - -def _handle_project_comments_exceptions(handler: Handler): - @functools.wraps(handler) - async def wrapper(request: web.Request) -> web.StreamResponse: - try: - return await handler(request) - - except ProjectNotFoundError as exc: - raise web.HTTPNotFound(reason=f"{exc}") from exc - except ProjectInvalidRightsError as exc: - raise web.HTTPForbidden(reason=f"{exc}") from exc - - return wrapper - - # # projects/*/comments COLLECTION ------------------------- # @@ -79,21 +58,21 @@ class _ProjectCommentsBodyParams(BaseModel): ) @login_required @permission_required("project.read") -@_handle_project_comments_exceptions +@handle_plugin_requests_exceptions async def create_project_comment(request: web.Request): req_ctx = RequestContext.model_validate(request) path_params = parse_request_path_parameters_as(_ProjectCommentsPathParams, request) body_params = await parse_request_body_as(_ProjectCommentsBodyParams, request) # ensure the project exists - await projects_service.get_project_for_user( + await _projects_service.get_project_for_user( request.app, project_uuid=f"{path_params.project_uuid}", user_id=req_ctx.user_id, include_state=False, ) - comment_id = await _comments_api.create_project_comment( + comment_id = await _comments_service.create_project_comment( request=request, project_uuid=path_params.project_uuid, user_id=req_ctx.user_id, @@ -119,7 +98,7 @@ class _ListProjectCommentsQueryParams(BaseModel): @routes.get(f"/{VTAG}/projects/{{project_uuid}}/comments", name="list_project_comments") @login_required @permission_required("project.read") -@_handle_project_comments_exceptions +@handle_plugin_requests_exceptions async def list_project_comments(request: web.Request): req_ctx = RequestContext.model_validate(request) path_params = parse_request_path_parameters_as(_ProjectCommentsPathParams, request) @@ -128,19 +107,19 @@ async def list_project_comments(request: web.Request): ) # ensure the project exists - await projects_service.get_project_for_user( + await _projects_service.get_project_for_user( request.app, project_uuid=f"{path_params.project_uuid}", user_id=req_ctx.user_id, include_state=False, ) - total_project_comments = await _comments_api.total_project_comments( + total_project_comments = await _comments_service.total_project_comments( request=request, project_uuid=path_params.project_uuid, ) - project_comments = await _comments_api.list_project_comments( + project_comments = await _comments_service.list_project_comments( request=request, project_uuid=path_params.project_uuid, offset=query_params.offset, @@ -168,6 +147,7 @@ async def list_project_comments(request: web.Request): ) @login_required @permission_required("project.read") +@handle_plugin_requests_exceptions async def update_project_comment(request: web.Request): req_ctx = RequestContext.model_validate(request) path_params = parse_request_path_parameters_as( @@ -176,19 +156,20 @@ async def update_project_comment(request: web.Request): body_params = await parse_request_body_as(_ProjectCommentsBodyParams, request) # ensure the project exists - await projects_service.get_project_for_user( + await _projects_service.get_project_for_user( request.app, project_uuid=f"{path_params.project_uuid}", user_id=req_ctx.user_id, include_state=False, ) - return await _comments_api.update_project_comment( + updated_comment = await 
_comments_service.update_project_comment( request=request, comment_id=path_params.comment_id, project_uuid=path_params.project_uuid, contents=body_params.contents, ) + return envelope_json_response(updated_comment) @routes.delete( @@ -197,7 +178,7 @@ async def update_project_comment(request: web.Request): ) @login_required @permission_required("project.read") -@_handle_project_comments_exceptions +@handle_plugin_requests_exceptions async def delete_project_comment(request: web.Request): req_ctx = RequestContext.model_validate(request) path_params = parse_request_path_parameters_as( @@ -205,14 +186,14 @@ async def delete_project_comment(request: web.Request): ) # ensure the project exists - await projects_service.get_project_for_user( + await _projects_service.get_project_for_user( request.app, project_uuid=f"{path_params.project_uuid}", user_id=req_ctx.user_id, include_state=False, ) - await _comments_api.delete_project_comment( + await _comments_service.delete_project_comment( request=request, comment_id=path_params.comment_id, ) @@ -225,7 +206,7 @@ async def delete_project_comment(request: web.Request): ) @login_required @permission_required("project.read") -@_handle_project_comments_exceptions +@handle_plugin_requests_exceptions async def get_project_comment(request: web.Request): req_ctx = RequestContext.model_validate(request) path_params = parse_request_path_parameters_as( @@ -233,14 +214,15 @@ async def get_project_comment(request: web.Request): ) # ensure the project exists - await projects_service.get_project_for_user( + await _projects_service.get_project_for_user( request.app, project_uuid=f"{path_params.project_uuid}", user_id=req_ctx.user_id, include_state=False, ) - return await _comments_api.get_project_comment( + comment = await _comments_service.get_project_comment( request=request, comment_id=path_params.comment_id, ) + return envelope_json_response(comment) diff --git a/services/web/server/src/simcore_service_webserver/projects/_folders_handlers.py b/services/web/server/src/simcore_service_webserver/projects/_controller/folders_rest.py similarity index 60% rename from services/web/server/src/simcore_service_webserver/projects/_folders_handlers.py rename to services/web/server/src/simcore_service_webserver/projects/_controller/folders_rest.py index c4f1828237b..7d7a7f6f954 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_folders_handlers.py +++ b/services/web/server/src/simcore_service_webserver/projects/_controller/folders_rest.py @@ -1,4 +1,3 @@ -import functools import logging from aiohttp import web @@ -8,33 +7,17 @@ from pydantic import BaseModel, ConfigDict, field_validator from servicelib.aiohttp import status from servicelib.aiohttp.requests_validation import parse_request_path_parameters_as -from servicelib.aiohttp.typing_extension import Handler -from .._meta import api_version_prefix as VTAG -from ..login.decorators import login_required -from ..security.decorators import permission_required -from . import _folders_api -from ._common.models import RequestContext -from .exceptions import ProjectGroupNotFoundError, ProjectNotFoundError +from ..._meta import api_version_prefix as VTAG +from ...login.decorators import login_required +from ...security.decorators import permission_required +from .. 
import _folders_service +from ._rest_exceptions import handle_plugin_requests_exceptions +from ._rest_schemas import RequestContext _logger = logging.getLogger(__name__) -def _handle_projects_folders_exceptions(handler: Handler): - @functools.wraps(handler) - async def wrapper(request: web.Request) -> web.StreamResponse: - try: - return await handler(request) - - except ProjectGroupNotFoundError as exc: - raise web.HTTPNotFound(reason=f"{exc}") from exc - - except ProjectNotFoundError as exc: - raise web.HTTPForbidden(reason=f"{exc}") from exc - - return wrapper - - routes = web.RouteTableDef() @@ -55,12 +38,12 @@ class _ProjectsFoldersPathParams(BaseModel): ) @login_required @permission_required("project.folders.*") -@_handle_projects_folders_exceptions +@handle_plugin_requests_exceptions async def replace_project_folder(request: web.Request): req_ctx = RequestContext.model_validate(request) path_params = parse_request_path_parameters_as(_ProjectsFoldersPathParams, request) - await _folders_api.move_project_into_folder( + await _folders_service.move_project_into_folder( app=request.app, user_id=req_ctx.user_id, project_id=path_params.project_id, diff --git a/services/web/server/src/simcore_service_webserver/projects/_groups_handlers.py b/services/web/server/src/simcore_service_webserver/projects/_controller/groups_rest.py similarity index 66% rename from services/web/server/src/simcore_service_webserver/projects/_groups_handlers.py rename to services/web/server/src/simcore_service_webserver/projects/_controller/groups_rest.py index d507a2b1eff..7d79461144a 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_groups_handlers.py +++ b/services/web/server/src/simcore_service_webserver/projects/_controller/groups_rest.py @@ -1,8 +1,3 @@ -""" Handlers for project comments operations - -""" - -import functools import logging from aiohttp import web @@ -14,39 +9,19 @@ parse_request_body_as, parse_request_path_parameters_as, ) -from servicelib.aiohttp.typing_extension import Handler -from .._meta import api_version_prefix as VTAG -from ..login.decorators import login_required -from ..security.decorators import permission_required -from ..utils_aiohttp import envelope_json_response -from . import _groups_api -from ._common.models import ProjectPathParams, RequestContext -from ._groups_api import ProjectGroupGet -from .exceptions import ProjectGroupNotFoundError, ProjectNotFoundError +from ..._meta import api_version_prefix as VTAG +from ...login.decorators import login_required +from ...security.decorators import permission_required +from ...utils_aiohttp import envelope_json_response +from .. 
import _groups_service +from .._groups_service import ProjectGroupGet +from ._rest_exceptions import handle_plugin_requests_exceptions +from ._rest_schemas import ProjectPathParams, RequestContext _logger = logging.getLogger(__name__) -def _handle_projects_groups_exceptions(handler: Handler): - @functools.wraps(handler) - async def wrapper(request: web.Request) -> web.StreamResponse: - try: - return await handler(request) - - except ProjectGroupNotFoundError as exc: - raise web.HTTPNotFound(reason=f"{exc}") from exc - - except ProjectNotFoundError as exc: - raise web.HTTPForbidden(reason=f"{exc}") from exc - - return wrapper - - -# -# projects groups COLLECTION ------------------------- -# - routes = web.RouteTableDef() @@ -68,13 +43,13 @@ class _ProjectsGroupsBodyParams(BaseModel): ) @login_required @permission_required("project.access_rights.update") -@_handle_projects_groups_exceptions +@handle_plugin_requests_exceptions async def create_project_group(request: web.Request): req_ctx = RequestContext.model_validate(request) path_params = parse_request_path_parameters_as(_ProjectsGroupsPathParams, request) body_params = await parse_request_body_as(_ProjectsGroupsBodyParams, request) - project_groups: ProjectGroupGet = await _groups_api.create_project_group( + project_groups: ProjectGroupGet = await _groups_service.create_project_group( request.app, user_id=req_ctx.user_id, project_id=path_params.project_id, @@ -91,18 +66,18 @@ async def create_project_group(request: web.Request): @routes.get(f"/{VTAG}/projects/{{project_id}}/groups", name="list_project_groups") @login_required @permission_required("project.read") -@_handle_projects_groups_exceptions +@handle_plugin_requests_exceptions async def list_project_groups(request: web.Request): req_ctx = RequestContext.model_validate(request) path_params = parse_request_path_parameters_as(ProjectPathParams, request) - project_groups: list[ - ProjectGroupGet - ] = await _groups_api.list_project_groups_by_user_and_project( - request.app, - user_id=req_ctx.user_id, - project_id=path_params.project_id, - product_name=req_ctx.product_name, + project_groups: list[ProjectGroupGet] = ( + await _groups_service.list_project_groups_by_user_and_project( + request.app, + user_id=req_ctx.user_id, + project_id=path_params.project_id, + product_name=req_ctx.product_name, + ) ) return envelope_json_response(project_groups, web.HTTPOk) @@ -114,13 +89,13 @@ async def list_project_groups(request: web.Request): ) @login_required @permission_required("project.access_rights.update") -@_handle_projects_groups_exceptions +@handle_plugin_requests_exceptions async def replace_project_group(request: web.Request): req_ctx = RequestContext.model_validate(request) path_params = parse_request_path_parameters_as(_ProjectsGroupsPathParams, request) body_params = await parse_request_body_as(_ProjectsGroupsBodyParams, request) - return await _groups_api.replace_project_group( + return await _groups_service.replace_project_group( app=request.app, user_id=req_ctx.user_id, project_id=path_params.project_id, @@ -138,12 +113,12 @@ async def replace_project_group(request: web.Request): ) @login_required @permission_required("project.access_rights.update") -@_handle_projects_groups_exceptions +@handle_plugin_requests_exceptions async def delete_project_group(request: web.Request): req_ctx = RequestContext.model_validate(request) path_params = parse_request_path_parameters_as(_ProjectsGroupsPathParams, request) - await _groups_api.delete_project_group( + await 
_groups_service.delete_project_group( app=request.app, user_id=req_ctx.user_id, project_id=path_params.project_id, diff --git a/services/web/server/src/simcore_service_webserver/projects/_metadata_handlers.py b/services/web/server/src/simcore_service_webserver/projects/_controller/metadata_rest.py similarity index 60% rename from services/web/server/src/simcore_service_webserver/projects/_metadata_handlers.py rename to services/web/server/src/simcore_service_webserver/projects/_controller/metadata_rest.py index df139c6fd30..f03b711f56d 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_metadata_handlers.py +++ b/services/web/server/src/simcore_service_webserver/projects/_controller/metadata_rest.py @@ -10,7 +10,6 @@ - Get and Update methods only """ -import functools import logging from aiohttp import web @@ -22,67 +21,33 @@ parse_request_body_as, parse_request_path_parameters_as, ) -from servicelib.aiohttp.typing_extension import Handler from servicelib.logging_utils import log_catch -from .._meta import api_version_prefix -from ..login.decorators import login_required -from ..security.decorators import permission_required -from ..utils_aiohttp import envelope_json_response -from . import _metadata_api -from ._common.models import ProjectPathParams, RequestContext -from .exceptions import ( - NodeNotFoundError, - ParentNodeNotFoundError, - ProjectInvalidRightsError, - ProjectInvalidUsageError, - ProjectNotFoundError, -) +from ..._meta import api_version_prefix +from ...login.decorators import login_required +from ...security.decorators import permission_required +from ...utils_aiohttp import envelope_json_response +from .. import _metadata_service +from ._rest_exceptions import handle_plugin_requests_exceptions +from ._rest_schemas import ProjectPathParams, RequestContext routes = web.RouteTableDef() _logger = logging.getLogger(__name__) -def _handle_project_exceptions(handler: Handler): - """Transforms project errors -> http errors""" - - @functools.wraps(handler) - async def wrapper(request: web.Request) -> web.StreamResponse: - try: - return await handler(request) - - except ( - ProjectNotFoundError, - NodeNotFoundError, - ParentNodeNotFoundError, - ) as exc: - raise web.HTTPNotFound(reason=f"{exc}") from exc - except ProjectInvalidRightsError as exc: - raise web.HTTPUnauthorized(reason=f"{exc}") from exc - except ProjectInvalidUsageError as exc: - raise web.HTTPUnprocessableEntity(reason=f"{exc}") from exc - - return wrapper - - -# -# projects/*/custom-metadata -# - - @routes.get( f"/{api_version_prefix}/projects/{{project_id}}/metadata", name="get_project_metadata", ) @login_required @permission_required("project.read") -@_handle_project_exceptions +@handle_plugin_requests_exceptions async def get_project_metadata(request: web.Request) -> web.Response: req_ctx = RequestContext.model_validate(request) path_params = parse_request_path_parameters_as(ProjectPathParams, request) - custom_metadata = await _metadata_api.get_project_custom_metadata( + custom_metadata = await _metadata_service.get_project_custom_metadata( request.app, user_id=req_ctx.user_id, project_uuid=path_params.project_id ) @@ -97,20 +62,20 @@ async def get_project_metadata(request: web.Request) -> web.Response: ) @login_required @permission_required("project.update") -@_handle_project_exceptions +@handle_plugin_requests_exceptions async def update_project_metadata(request: web.Request) -> web.Response: req_ctx = RequestContext.model_validate(request) path_params = 
parse_request_path_parameters_as(ProjectPathParams, request) update = await parse_request_body_as(ProjectMetadataUpdate, request) - custom_metadata = await _metadata_api.set_project_custom_metadata( + custom_metadata = await _metadata_service.set_project_custom_metadata( request.app, user_id=req_ctx.user_id, project_uuid=path_params.project_id, value=update.custom, ) with log_catch(_logger, reraise=False): - await _metadata_api.set_project_ancestors_from_custom_metadata( + await _metadata_service.set_project_ancestors_from_custom_metadata( request.app, user_id=req_ctx.user_id, project_uuid=path_params.project_id, diff --git a/services/web/server/src/simcore_service_webserver/projects/_projects_nodes_pricing_unit_handlers.py b/services/web/server/src/simcore_service_webserver/projects/_controller/nodes_pricing_unit_rest.py similarity index 73% rename from services/web/server/src/simcore_service_webserver/projects/_projects_nodes_pricing_unit_handlers.py rename to services/web/server/src/simcore_service_webserver/projects/_controller/nodes_pricing_unit_rest.py index 66b17383bba..6476389c4d7 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_projects_nodes_pricing_unit_handlers.py +++ b/services/web/server/src/simcore_service_webserver/projects/_controller/nodes_pricing_unit_rest.py @@ -1,8 +1,3 @@ -""" Handlers for CRUD operations on /projects/{*}/nodes/{*}/pricing-unit - -""" - -import functools import logging from aiohttp import web @@ -13,45 +8,28 @@ from models_library.resource_tracker import PricingPlanId, PricingUnitId from pydantic import BaseModel, ConfigDict from servicelib.aiohttp.requests_validation import parse_request_path_parameters_as -from servicelib.aiohttp.typing_extension import Handler - -from .._meta import API_VTAG -from ..login.decorators import login_required -from ..resource_usage import service as rut_api -from ..security.decorators import permission_required -from ..utils_aiohttp import envelope_json_response -from . import projects_service -from ._common.models import RequestContext -from ._nodes_handlers import NodePathParams -from .db import ProjectDBAPI -from .exceptions import ProjectInvalidRightsError, ProjectNotFoundError + +from ..._meta import API_VTAG +from ...login.decorators import login_required +from ...resource_usage import service as rut_api +from ...security.decorators import permission_required +from ...utils_aiohttp import envelope_json_response +from .. import _projects_service +from .._projects_repository_legacy import ProjectDBAPI +from ._rest_exceptions import handle_plugin_requests_exceptions +from ._rest_schemas import RequestContext +from .nodes_rest import NodePathParams _logger = logging.getLogger(__name__) -class PricingUnitError(OsparcErrorMixin, ValueError): - ... +class PricingUnitError(OsparcErrorMixin, ValueError): ... 
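Note that this handler module no longer defines its own try/except wrapper: `handle_plugin_requests_exceptions` applies the declarative map from `_rest_exceptions.py`, where each domain error is registered once with a status code and a message template. A condensed sketch of that mechanism, reusing the names introduced in this diff (the handler and the error arguments are illustrative):

```python
from servicelib.aiohttp import status

from simcore_service_webserver.exception_handling import (
    ExceptionToHttpErrorMap,
    HttpErrorInfo,
    exception_handling_decorator,
    to_exceptions_handlers_map,
)
from simcore_service_webserver.projects.exceptions import ProjectNotFoundError

# one declaration per domain error: status code + message template
_MAP: ExceptionToHttpErrorMap = {
    ProjectNotFoundError: HttpErrorInfo(
        status.HTTP_404_NOT_FOUND, "Project {project_uuid} not found"
    ),
}

handle_exceptions = exception_handling_decorator(to_exceptions_handlers_map(_MAP))


@handle_exceptions
async def demo_handler(request):
    # raising a mapped domain error is converted into a 404 response
    raise ProjectNotFoundError(project_uuid="...")
```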
class PricingUnitNotFoundError(PricingUnitError): msg_template = "Pricing unit not found" -def _handle_projects_nodes_pricing_unit_exceptions(handler: Handler): - @functools.wraps(handler) - async def wrapper(request: web.Request) -> web.StreamResponse: - try: - return await handler(request) - - except ProjectNotFoundError as exc: - raise web.HTTPNotFound(reason=f"{exc}") from exc - - except (PricingUnitNotFoundError, ProjectInvalidRightsError) as exc: - raise web.HTTPForbidden(reason=f"{exc}") from exc - - return wrapper - - routes = web.RouteTableDef() @@ -61,14 +39,14 @@ async def wrapper(request: web.Request) -> web.StreamResponse: ) @login_required @permission_required("project.wallet.*") -@_handle_projects_nodes_pricing_unit_exceptions +@handle_plugin_requests_exceptions async def get_project_node_pricing_unit(request: web.Request): db: ProjectDBAPI = ProjectDBAPI.get_from_app_context(request.app) req_ctx = RequestContext.model_validate(request) path_params = parse_request_path_parameters_as(NodePathParams, request) # ensure the project exists - await projects_service.get_project_for_user( + await _projects_service.get_project_for_user( request.app, project_uuid=f"{path_params.project_id}", user_id=req_ctx.user_id, @@ -108,7 +86,7 @@ class _ProjectNodePricingUnitPathParams(BaseModel): ) @login_required @permission_required("project.wallet.*") -@_handle_projects_nodes_pricing_unit_exceptions +@handle_plugin_requests_exceptions async def connect_pricing_unit_to_project_node(request: web.Request): db: ProjectDBAPI = ProjectDBAPI.get_from_app_context(request.app) req_ctx = RequestContext.model_validate(request) @@ -117,7 +95,7 @@ async def connect_pricing_unit_to_project_node(request: web.Request): ) # ensure the project exists - project = await projects_service.get_project_for_user( + project = await _projects_service.get_project_for_user( request.app, project_uuid=f"{path_params.project_id}", user_id=req_ctx.user_id, @@ -143,7 +121,7 @@ async def connect_pricing_unit_to_project_node(request: web.Request): node_data = project["workbench"][NodeIDStr(f"{path_params.node_id}")] - await projects_service.update_project_node_resources_from_hardware_info( + await _projects_service.update_project_node_resources_from_hardware_info( request.app, user_id=req_ctx.user_id, project_id=path_params.project_id, diff --git a/services/web/server/src/simcore_service_webserver/projects/_nodes_handlers.py b/services/web/server/src/simcore_service_webserver/projects/_controller/nodes_rest.py similarity index 79% rename from services/web/server/src/simcore_service_webserver/projects/_nodes_handlers.py rename to services/web/server/src/simcore_service_webserver/projects/_controller/nodes_rest.py index 44ac05a12c7..9642ba581d3 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_nodes_handlers.py +++ b/services/web/server/src/simcore_service_webserver/projects/_controller/nodes_rest.py @@ -1,9 +1,4 @@ -""" Handlers for CRUD operations on /projects/{*}/nodes/{*} - -""" - import asyncio -import functools import logging from aiohttp import web @@ -11,6 +6,7 @@ from models_library.api_schemas_catalog.service_access_rights import ( ServiceAccessRightsGet, ) +from models_library.api_schemas_catalog.services import MyServiceGet from models_library.api_schemas_directorv2.dynamic_services import DynamicServiceGet from models_library.api_schemas_dynamic_scheduler.dynamic_services import ( DynamicServiceStop, @@ -24,12 +20,15 @@ NodeOutputs, NodePatch, NodeRetrieve, + NodeServiceGet, + 
ProjectNodeServicesGet, ) from models_library.groups import EVERYONE_GROUP_ID, Group, GroupID, GroupType from models_library.projects import Project, ProjectID from models_library.projects_nodes_io import NodeID, NodeIDStr from models_library.services import ServiceKeyVersion from models_library.services_resources import ServiceResourcesDict +from models_library.services_types import ServiceKey, ServiceVersion from models_library.utils.fastapi_encoders import jsonable_encoder from pydantic import BaseModel, Field from servicelib.aiohttp import status @@ -42,17 +41,12 @@ parse_request_path_parameters_as, parse_request_query_parameters_as, ) -from servicelib.aiohttp.typing_extension import Handler from servicelib.common_headers import ( UNDEFINED_DEFAULT_SIMCORE_USER_AGENT_VALUE, X_SIMCORE_USER_AGENT, ) from servicelib.mimetype_constants import MIMETYPE_APPLICATION_JSON from servicelib.rabbitmq import RPCServerError -from servicelib.rabbitmq.rpc_interfaces.catalog.errors import ( - CatalogForbiddenError, - CatalogItemNotFoundError, -) from servicelib.rabbitmq.rpc_interfaces.dynamic_scheduler.errors import ( ServiceWaitingForManualInterventionError, ServiceWasNotFoundError, @@ -60,77 +54,30 @@ from servicelib.services_utils import get_status_as_dict from simcore_postgres_database.models.users import UserRole -from .._meta import API_VTAG as VTAG -from ..catalog import client as catalog_client -from ..dynamic_scheduler import api as dynamic_scheduler_api -from ..groups.api import get_group_from_gid, list_all_user_groups_ids -from ..groups.exceptions import GroupNotFoundError -from ..login.decorators import login_required -from ..projects.api import has_user_project_access_rights -from ..resource_usage.errors import DefaultPricingPlanNotFoundError -from ..security.decorators import permission_required -from ..users.api import get_user_id_from_gid, get_user_role -from ..users.exceptions import UserDefaultWalletNotFoundError -from ..utils_aiohttp import envelope_json_response -from ..wallets.errors import WalletAccessForbiddenError, WalletNotEnoughCreditsError -from . import nodes_utils, projects_service -from ._common.models import ProjectPathParams, RequestContext -from ._nodes_api import NodeScreenshot, get_node_screenshots -from .exceptions import ( - ClustersKeeperNotAvailableError, - DefaultPricingUnitNotFoundError, +from ..._meta import API_VTAG as VTAG +from ...catalog import catalog_service +from ...dynamic_scheduler import api as dynamic_scheduler_service +from ...groups.api import get_group_from_gid, list_all_user_groups_ids +from ...groups.exceptions import GroupNotFoundError +from ...login.decorators import login_required +from ...security.decorators import permission_required +from ...users.api import get_user_id_from_gid, get_user_role +from ...utils_aiohttp import envelope_json_response +from .. import _access_rights_service as access_rights_service +from .. 
import _nodes_service, _projects_service, nodes_utils +from .._nodes_service import NodeScreenshot, get_node_screenshots +from ..api import has_user_project_access_rights +from ..exceptions import ( NodeNotFoundError, - ProjectInDebtCanNotChangeWalletError, - ProjectInvalidRightsError, - ProjectNodeRequiredInputsNotSetError, ProjectNodeResourcesInsufficientRightsError, ProjectNodeResourcesInvalidError, - ProjectNotFoundError, - ProjectStartsTooManyDynamicNodesError, ) +from ._rest_exceptions import handle_plugin_requests_exceptions +from ._rest_schemas import ProjectPathParams, RequestContext _logger = logging.getLogger(__name__) -def _handle_project_nodes_exceptions(handler: Handler): - @functools.wraps(handler) - async def wrapper(request: web.Request) -> web.StreamResponse: - try: - return await handler(request) - - except ( - ProjectNotFoundError, - NodeNotFoundError, - UserDefaultWalletNotFoundError, - DefaultPricingPlanNotFoundError, - DefaultPricingUnitNotFoundError, - GroupNotFoundError, - CatalogItemNotFoundError, - ) as exc: - raise web.HTTPNotFound(reason=f"{exc}") from exc - except ( - WalletNotEnoughCreditsError, - ProjectInDebtCanNotChangeWalletError, - ) as exc: - raise web.HTTPPaymentRequired(reason=f"{exc}") from exc - except ProjectInvalidRightsError as exc: - raise web.HTTPUnauthorized(reason=f"{exc}") from exc - except ProjectStartsTooManyDynamicNodesError as exc: - raise web.HTTPConflict(reason=f"{exc}") from exc - except ClustersKeeperNotAvailableError as exc: - raise web.HTTPServiceUnavailable(reason=f"{exc}") from exc - except ProjectNodeRequiredInputsNotSetError as exc: - raise web.HTTPConflict(reason=f"{exc}") from exc - except CatalogForbiddenError as exc: - raise web.HTTPForbidden(reason=f"{exc}") from exc - except WalletAccessForbiddenError as exc: - raise web.HTTPForbidden( - reason=f"Payment required, but the user lacks access to the project's linked wallet.: {exc}" - ) from exc - - return wrapper - - # # projects/*/nodes COLLECTION ------------------------- # @@ -145,13 +92,13 @@ class NodePathParams(ProjectPathParams): @routes.post(f"/{VTAG}/projects/{{project_id}}/nodes", name="create_node") @login_required @permission_required("project.node.create") -@_handle_project_nodes_exceptions +@handle_plugin_requests_exceptions async def create_node(request: web.Request) -> web.Response: req_ctx = RequestContext.model_validate(request) path_params = parse_request_path_parameters_as(ProjectPathParams, request) body = await parse_request_body_as(NodeCreate, request) - if await projects_service.is_service_deprecated( + if await _projects_service.is_service_deprecated( request.app, req_ctx.user_id, body.service_key, @@ -163,13 +110,13 @@ async def create_node(request: web.Request) -> web.Response: ) # ensure the project exists - project_data = await projects_service.get_project_for_user( + project_data = await _projects_service.get_project_for_user( request.app, project_uuid=f"{path_params.project_id}", user_id=req_ctx.user_id, ) data = { - "node_id": await projects_service.add_project_node( + "node_id": await _projects_service.add_project_node( request, project_data, req_ctx.user_id, @@ -187,20 +134,20 @@ async def create_node(request: web.Request) -> web.Response: @routes.get(f"/{VTAG}/projects/{{project_id}}/nodes/{{node_id}}", name="get_node") @login_required @permission_required("project.node.read") -@_handle_project_nodes_exceptions +@handle_plugin_requests_exceptions # NOTE: Careful, this endpoint is actually "get_node_state," and it doesn't return a Node 
resource. async def get_node(request: web.Request) -> web.Response: req_ctx = RequestContext.model_validate(request) path_params = parse_request_path_parameters_as(NodePathParams, request) # ensure the project exists - project = await projects_service.get_project_for_user( + project = await _projects_service.get_project_for_user( request.app, project_uuid=f"{path_params.project_id}", user_id=req_ctx.user_id, ) - if await projects_service.is_project_node_deprecated( + if await _projects_service.is_project_node_deprecated( request.app, req_ctx.user_id, project, @@ -213,7 +160,7 @@ async def get_node(request: web.Request) -> web.Response: ) service_data: NodeGetIdle | NodeGetUnknown | DynamicServiceGet | NodeGet = ( - await dynamic_scheduler_api.get_dynamic_service( + await dynamic_scheduler_service.get_dynamic_service( app=request.app, node_id=path_params.node_id ) ) @@ -226,13 +173,13 @@ async def get_node(request: web.Request) -> web.Response: ) @login_required @permission_required("project.node.update") -@_handle_project_nodes_exceptions +@handle_plugin_requests_exceptions async def patch_project_node(request: web.Request) -> web.Response: req_ctx = RequestContext.model_validate(request) path_params = parse_request_path_parameters_as(NodePathParams, request) node_patch = await parse_request_body_as(NodePatch, request) - await projects_service.patch_project_node( + await _projects_service.patch_project_node( request.app, product_name=req_ctx.product_name, user_id=req_ctx.user_id, @@ -247,18 +194,18 @@ async def patch_project_node(request: web.Request) -> web.Response: @routes.delete(f"/{VTAG}/projects/{{project_id}}/nodes/{{node_id}}", name="delete_node") @login_required @permission_required("project.node.delete") -@_handle_project_nodes_exceptions +@handle_plugin_requests_exceptions async def delete_node(request: web.Request) -> web.Response: req_ctx = RequestContext.model_validate(request) path_params = parse_request_path_parameters_as(NodePathParams, request) # ensure the project exists - await projects_service.get_project_for_user( + await _projects_service.get_project_for_user( request.app, project_uuid=f"{path_params.project_id}", user_id=req_ctx.user_id, ) - await projects_service.delete_project_node( + await _projects_service.delete_project_node( request, path_params.project_id, req_ctx.user_id, @@ -275,14 +222,14 @@ async def delete_node(request: web.Request) -> web.Response: ) @login_required @permission_required("project.node.read") -@_handle_project_nodes_exceptions +@handle_plugin_requests_exceptions async def retrieve_node(request: web.Request) -> web.Response: """Has only effect on nodes associated to dynamic services""" path_params = parse_request_path_parameters_as(NodePathParams, request) retrieve = await parse_request_body_as(NodeRetrieve, request) return web.json_response( - await dynamic_scheduler_api.retrieve_inputs( + await dynamic_scheduler_service.retrieve_inputs( request.app, path_params.node_id, retrieve.port_keys ), dumps=json_dumps, @@ -295,7 +242,7 @@ async def retrieve_node(request: web.Request) -> web.Response: ) @login_required @permission_required("project.node.update") -@_handle_project_nodes_exceptions +@handle_plugin_requests_exceptions async def update_node_outputs(request: web.Request) -> web.Response: req_ctx = RequestContext.model_validate(request) path_params = parse_request_path_parameters_as(NodePathParams, request) @@ -322,13 +269,13 @@ async def update_node_outputs(request: web.Request) -> web.Response: ) @login_required 
@permission_required("project.update") -@_handle_project_nodes_exceptions +@handle_plugin_requests_exceptions async def start_node(request: web.Request) -> web.Response: """Has only effect on nodes associated to dynamic services""" req_ctx = RequestContext.model_validate(request) path_params = parse_request_path_parameters_as(NodePathParams, request) - await projects_service.start_project_node( + await _projects_service.start_project_node( request, product_name=req_ctx.product_name, user_id=req_ctx.user_id, @@ -347,7 +294,7 @@ async def _stop_dynamic_service_task( ): # NOTE: _handle_project_nodes_exceptions only decorate handlers try: - await dynamic_scheduler_api.stop_dynamic_service( + await dynamic_scheduler_service.stop_dynamic_service( app, dynamic_service_stop=dynamic_service_stop ) return web.json_response(status=status.HTTP_204_NO_CONTENT) @@ -366,7 +313,7 @@ async def _stop_dynamic_service_task( ) @login_required @permission_required("project.update") -@_handle_project_nodes_exceptions +@handle_plugin_requests_exceptions async def stop_node(request: web.Request) -> web.Response: """Has only effect on nodes associated to dynamic services""" req_ctx = RequestContext.model_validate(request) @@ -408,13 +355,13 @@ async def stop_node(request: web.Request) -> web.Response: ) @login_required @permission_required("project.node.read") -@_handle_project_nodes_exceptions +@handle_plugin_requests_exceptions async def restart_node(request: web.Request) -> web.Response: """Has only effect on nodes associated to dynamic services""" path_params = parse_request_path_parameters_as(NodePathParams, request) - await dynamic_scheduler_api.restart_user_services( + await dynamic_scheduler_service.restart_user_services( request.app, node_id=path_params.node_id ) @@ -432,13 +379,13 @@ async def restart_node(request: web.Request) -> web.Response: ) @login_required @permission_required("project.node.read") -@_handle_project_nodes_exceptions +@handle_plugin_requests_exceptions async def get_node_resources(request: web.Request) -> web.Response: req_ctx = RequestContext.model_validate(request) path_params = parse_request_path_parameters_as(NodePathParams, request) # ensure the project exists - project = await projects_service.get_project_for_user( + project = await _projects_service.get_project_for_user( request.app, project_uuid=f"{path_params.project_id}", user_id=req_ctx.user_id, @@ -448,13 +395,15 @@ async def get_node_resources(request: web.Request) -> web.Response: node_id = f"{path_params.node_id}" raise NodeNotFoundError(project_uuid=project_uuid, node_uuid=node_id) - resources: ServiceResourcesDict = await projects_service.get_project_node_resources( - request.app, - user_id=req_ctx.user_id, - project_id=path_params.project_id, - node_id=path_params.node_id, - service_key=project["workbench"][f"{path_params.node_id}"]["key"], - service_version=project["workbench"][f"{path_params.node_id}"]["version"], + resources: ServiceResourcesDict = ( + await _projects_service.get_project_node_resources( + request.app, + user_id=req_ctx.user_id, + project_id=path_params.project_id, + node_id=path_params.node_id, + service_key=project["workbench"][f"{path_params.node_id}"]["key"], + service_version=project["workbench"][f"{path_params.node_id}"]["version"], + ) ) return envelope_json_response(resources) @@ -465,14 +414,14 @@ async def get_node_resources(request: web.Request) -> web.Response: ) @login_required @permission_required("project.node.update") -@_handle_project_nodes_exceptions 
+@handle_plugin_requests_exceptions async def replace_node_resources(request: web.Request) -> web.Response: req_ctx = RequestContext.model_validate(request) path_params = parse_request_path_parameters_as(NodePathParams, request) body = await parse_request_body_as(ServiceResourcesDict, request) # ensure the project exists - project = await projects_service.get_project_for_user( + project = await _projects_service.get_project_for_user( request.app, project_uuid=f"{path_params.project_id}", user_id=req_ctx.user_id, @@ -482,7 +431,7 @@ async def replace_node_resources(request: web.Request) -> web.Response: project_uuid=f"{path_params.project_id}", node_uuid=f"{path_params.node_id}" ) try: - new_node_resources = await projects_service.update_project_node_resources( + new_node_resources = await _projects_service.update_project_node_resources( request.app, user_id=req_ctx.user_id, project_id=path_params.project_id, @@ -518,23 +467,64 @@ class _ProjectGroupAccess(BaseModel): inaccessible_services: list[ServiceKeyVersion] | None = Field(default=None) +@routes.get( + f"/{VTAG}/projects/{{project_id}}/nodes/-/services", + name="get_project_services", +) +@login_required +@permission_required("project.read") +@handle_plugin_requests_exceptions +async def get_project_services(request: web.Request) -> web.Response: + req_ctx = RequestContext.model_validate(request) + path_params = parse_request_path_parameters_as(ProjectPathParams, request) + + await access_rights_service.check_user_project_permission( + request.app, + product_name=req_ctx.product_name, + user_id=req_ctx.user_id, + project_id=path_params.project_id, + permission="read", + ) + + services_in_project: list[tuple[ServiceKey, ServiceVersion]] = ( + await _nodes_service.get_project_nodes_services( + request.app, project_uuid=path_params.project_id + ) + ) + + services: list[MyServiceGet] = await catalog_service.batch_get_my_services( + request.app, + product_name=req_ctx.product_name, + user_id=req_ctx.user_id, + services_ids=services_in_project, + ) + + return envelope_json_response( + ProjectNodeServicesGet( + project_uuid=path_params.project_id, + services=[ + NodeServiceGet.model_validate(sv, from_attributes=True) + for sv in services + ], + ) + ) + + @routes.get( f"/{VTAG}/projects/{{project_id}}/nodes/-/services:access", name="get_project_services_access_for_gid", ) @login_required @permission_required("project.read") -@_handle_project_nodes_exceptions -async def get_project_services_access_for_gid( - request: web.Request, -) -> web.Response: +@handle_plugin_requests_exceptions +async def get_project_services_access_for_gid(request: web.Request) -> web.Response: req_ctx = RequestContext.model_validate(request) path_params = parse_request_path_parameters_as(ProjectPathParams, request) query_params: _ServicesAccessQuery = parse_request_query_parameters_as( _ServicesAccessQuery, request ) - project = await projects_service.get_project_for_user( + project = await _projects_service.get_project_for_user( request.app, project_uuid=f"{path_params.project_id}", user_id=req_ctx.user_id, @@ -547,7 +537,7 @@ async def get_project_services_access_for_gid( project_services_access_rights: list[ServiceAccessRightsGet] = await asyncio.gather( *[ - catalog_client.get_service_access_rights( + catalog_service.get_service_access_rights( app=request.app, user_id=req_ctx.user_id, service_key=service.key, @@ -644,14 +634,14 @@ class _ProjectNodePreview(BaseModel): ) @login_required @permission_required("project.read") -@_handle_project_nodes_exceptions 
+@handle_plugin_requests_exceptions async def list_project_nodes_previews(request: web.Request) -> web.Response: req_ctx = RequestContext.model_validate(request) path_params = parse_request_path_parameters_as(ProjectPathParams, request) assert req_ctx # nosec nodes_previews: list[_ProjectNodePreview] = [] - project_data = await projects_service.get_project_for_user( + project_data = await _projects_service.get_project_for_user( request.app, project_uuid=f"{path_params.project_id}", user_id=req_ctx.user_id, @@ -684,13 +674,13 @@ async def list_project_nodes_previews(request: web.Request) -> web.Response: ) @login_required @permission_required("project.read") -@_handle_project_nodes_exceptions +@handle_plugin_requests_exceptions async def get_project_node_preview(request: web.Request) -> web.Response: req_ctx = RequestContext.model_validate(request) path_params = parse_request_path_parameters_as(NodePathParams, request) assert req_ctx # nosec - project_data = await projects_service.get_project_for_user( + project_data = await _projects_service.get_project_for_user( request.app, project_uuid=f"{path_params.project_id}", user_id=req_ctx.user_id, diff --git a/services/web/server/src/simcore_service_webserver/projects/_ports_handlers.py b/services/web/server/src/simcore_service_webserver/projects/_controller/ports_rest.py similarity index 71% rename from services/web/server/src/simcore_service_webserver/projects/_ports_handlers.py rename to services/web/server/src/simcore_service_webserver/projects/_controller/ports_rest.py index b134929a8af..396ba2bbae7 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_ports_handlers.py +++ b/services/web/server/src/simcore_service_webserver/projects/_controller/ports_rest.py @@ -1,15 +1,7 @@ -""" Handlers for some CRUD operations for - - /projects/{*}/inputs - - /projects/{*}/outputs -""" - -import functools import logging -from collections.abc import Awaitable, Callable from typing import Any, Literal from aiohttp import web -from common_library.json_serialization import json_dumps from models_library.api_schemas_webserver.projects_ports import ( ProjectInputGet, ProjectInputUpdate, @@ -28,60 +20,24 @@ parse_request_path_parameters_as, ) -from .._meta import API_VTAG as VTAG -from ..login.decorators import login_required -from ..projects._access_rights_api import check_user_project_permission -from ..security.decorators import permission_required -from . import _ports_api, projects_service -from ._common.models import ProjectPathParams, RequestContext -from .db import ProjectDBAPI -from .exceptions import ( - NodeNotFoundError, - ProjectInvalidRightsError, - ProjectNotFoundError, -) -from .models import ProjectDict +from ..._meta import API_VTAG as VTAG +from ...login.decorators import login_required +from ...security.decorators import permission_required +from ...utils_aiohttp import envelope_json_response +from .. 
import _ports_service, _projects_service +from .._access_rights_service import check_user_project_permission +from .._projects_repository_legacy import ProjectDBAPI +from ..models import ProjectDict +from ._rest_exceptions import handle_plugin_requests_exceptions +from ._rest_schemas import ProjectPathParams, RequestContext log = logging.getLogger(__name__) -def _web_json_response_enveloped(data: Any) -> web.Response: - return web.json_response( - { - "data": jsonable_encoder(data), - }, - dumps=json_dumps, - ) - - -def _handle_project_exceptions( - handler: Callable[[web.Request], Awaitable[web.Response]] -) -> Callable[[web.Request], Awaitable[web.Response]]: - @functools.wraps(handler) - async def wrapper(request: web.Request) -> web.Response: - try: - return await handler(request) - - except ProjectNotFoundError as exc: - raise web.HTTPNotFound( - reason=f"Project '{exc.project_uuid}' not found" - ) from exc - - except ProjectInvalidRightsError as exc: - raise web.HTTPUnauthorized from exc - - except NodeNotFoundError as exc: - raise web.HTTPNotFound( - reason=f"Port '{exc.node_uuid}' not found in node '{exc.project_uuid}'" - ) from exc - - return wrapper - - async def _get_validated_workbench_model( app: web.Application, project_id: ProjectID, user_id: UserID ) -> dict[NodeID, Node]: - project: ProjectDict = await projects_service.get_project_for_user( + project: ProjectDict = await _projects_service.get_project_for_user( app, project_uuid=f"{project_id}", user_id=user_id, @@ -101,7 +57,7 @@ async def _get_validated_workbench_model( @routes.get(f"/{VTAG}/projects/{{project_id}}/inputs", name="get_project_inputs") @login_required @permission_required("project.read") -@_handle_project_exceptions +@handle_plugin_requests_exceptions async def get_project_inputs(request: web.Request) -> web.Response: req_ctx = RequestContext.model_validate(request) path_params = parse_request_path_parameters_as(ProjectPathParams, request) @@ -111,10 +67,10 @@ async def get_project_inputs(request: web.Request) -> web.Response: workbench = await _get_validated_workbench_model( app=request.app, project_id=path_params.project_id, user_id=req_ctx.user_id ) - inputs: dict[NodeID, Any] = _ports_api.get_project_inputs(workbench) + inputs: dict[NodeID, Any] = _ports_service.get_project_inputs(workbench) - return _web_json_response_enveloped( - data={ + return envelope_json_response( + { node_id: ProjectInputGet( key=node_id, label=workbench[node_id].label, value=value ) @@ -126,7 +82,7 @@ async def get_project_inputs(request: web.Request) -> web.Response: @routes.patch(f"/{VTAG}/projects/{{project_id}}/inputs", name="update_project_inputs") @login_required @permission_required("project.update") -@_handle_project_exceptions +@handle_plugin_requests_exceptions async def update_project_inputs(request: web.Request) -> web.Response: db: ProjectDBAPI = ProjectDBAPI.get_from_app_context(request.app) req_ctx = RequestContext.model_validate(request) @@ -138,7 +94,7 @@ async def update_project_inputs(request: web.Request) -> web.Response: workbench = await _get_validated_workbench_model( app=request.app, project_id=path_params.project_id, user_id=req_ctx.user_id ) - current_inputs: dict[NodeID, Any] = _ports_api.get_project_inputs(workbench) + current_inputs: dict[NodeID, Any] = _ports_service.get_project_inputs(workbench) # build workbench patch partial_workbench_data = {} @@ -172,10 +128,10 @@ async def update_project_inputs(request: web.Request) -> web.Response: workbench = TypeAdapter(dict[NodeID, Node]).validate_python( 
updated_project["workbench"] ) - inputs: dict[NodeID, Any] = _ports_api.get_project_inputs(workbench) + inputs: dict[NodeID, Any] = _ports_service.get_project_inputs(workbench) - return _web_json_response_enveloped( - data={ + return envelope_json_response( + { node_id: ProjectInputGet( key=node_id, label=workbench[node_id].label, value=value ) @@ -192,7 +148,7 @@ async def update_project_inputs(request: web.Request) -> web.Response: @routes.get(f"/{VTAG}/projects/{{project_id}}/outputs", name="get_project_outputs") @login_required @permission_required("project.read") -@_handle_project_exceptions +@handle_plugin_requests_exceptions async def get_project_outputs(request: web.Request) -> web.Response: req_ctx = RequestContext.model_validate(request) path_params = parse_request_path_parameters_as(ProjectPathParams, request) @@ -202,12 +158,12 @@ async def get_project_outputs(request: web.Request) -> web.Response: workbench = await _get_validated_workbench_model( app=request.app, project_id=path_params.project_id, user_id=req_ctx.user_id ) - outputs: dict[NodeID, Any] = await _ports_api.get_project_outputs( + outputs: dict[NodeID, Any] = await _ports_service.get_project_outputs( request.app, project_id=path_params.project_id, workbench=workbench ) - return _web_json_response_enveloped( - data={ + return envelope_json_response( + { node_id: ProjectOutputGet( key=node_id, label=workbench[node_id].label, value=value ) @@ -239,7 +195,7 @@ class ProjectMetadataPortGet(BaseModel): ) @login_required @permission_required("project.read") -@_handle_project_exceptions +@handle_plugin_requests_exceptions async def list_project_metadata_ports(request: web.Request) -> web.Response: req_ctx = RequestContext.model_validate(request) path_params = parse_request_path_parameters_as(ProjectPathParams, request) @@ -250,13 +206,13 @@ async def list_project_metadata_ports(request: web.Request) -> web.Response: app=request.app, project_id=path_params.project_id, user_id=req_ctx.user_id ) - return _web_json_response_enveloped( - data=[ + return envelope_json_response( + [ ProjectMetadataPortGet( key=port.node_id, kind=port.kind, content_schema=port.get_schema(), ) - for port in _ports_api.iter_project_ports(workbench) + for port in _ports_service.iter_project_ports(workbench) ] ) diff --git a/services/web/server/src/simcore_service_webserver/projects/_crud_handlers.py b/services/web/server/src/simcore_service_webserver/projects/_controller/projects_rest.py similarity index 55% rename from services/web/server/src/simcore_service_webserver/projects/_crud_handlers.py rename to services/web/server/src/simcore_service_webserver/projects/_controller/projects_rest.py index aa2163bdb04..f20360d1cc0 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_crud_handlers.py +++ b/services/web/server/src/simcore_service_webserver/projects/_controller/projects_rest.py @@ -1,10 +1,3 @@ -"""Handlers for STANDARD methods on /projects colletions - -Standard methods or CRUD that states for Create+Read(Get&List)+Update+Delete - -""" - -import functools import logging from aiohttp import web @@ -14,14 +7,11 @@ ProjectCopyOverride, ProjectCreateNew, ProjectGet, - ProjectListItem, ProjectPatch, ) from models_library.generics import Envelope from models_library.projects_state import ProjectLocked from models_library.rest_ordering import OrderBy -from models_library.rest_pagination import Page -from models_library.rest_pagination_utils import paginate_data from models_library.utils.fastapi_encoders import jsonable_encoder from 
servicelib.aiohttp import status from servicelib.aiohttp.long_running_tasks.server import start_long_running_task @@ -31,44 +21,39 @@ parse_request_path_parameters_as, parse_request_query_parameters_as, ) -from servicelib.aiohttp.typing_extension import Handler from servicelib.common_headers import ( UNDEFINED_DEFAULT_SIMCORE_USER_AGENT_VALUE, X_SIMCORE_USER_AGENT, ) -from servicelib.mimetype_constants import MIMETYPE_APPLICATION_JSON from servicelib.redis import get_project_locked_state -from servicelib.rest_constants import RESPONSE_MODEL_POLICY - -from .._meta import API_VTAG as VTAG -from ..catalog.client import get_services_for_user_in_product -from ..folders.errors import FolderAccessForbiddenError, FolderNotFoundError -from ..login.decorators import login_required -from ..redis import get_redis_lock_manager_client_sdk -from ..resource_manager.user_sessions import PROJECT_ID_KEY, managed_resource -from ..security.api import check_user_permission -from ..security.decorators import permission_required -from ..users.api import get_user_fullname -from ..workspaces.errors import WorkspaceAccessForbiddenError, WorkspaceNotFoundError -from . import _crud_api_create, _crud_api_read, projects_service -from ._common.models import ProjectPathParams, RequestContext -from ._crud_handlers_models import ( + +from ..._meta import API_VTAG as VTAG +from ...catalog import catalog_service +from ...login.decorators import login_required +from ...redis import get_redis_lock_manager_client_sdk +from ...resource_manager.user_sessions import PROJECT_ID_KEY, managed_resource +from ...security.api import check_user_permission +from ...security.decorators import permission_required +from ...users.api import get_user_fullname +from ...utils_aiohttp import envelope_json_response +from .. import _crud_api_create, _crud_api_read, _projects_service +from .._permalink_service import update_or_pop_permalink_in_project +from ..models import ProjectDict +from ..utils import get_project_unavailable_services, project_uses_available_services +from . import _rest_utils +from ._rest_exceptions import handle_plugin_requests_exceptions +from ._rest_schemas import ( + ProjectPathParams, + RequestContext, +) +from .projects_rest_schemas import ( ProjectActiveQueryParams, ProjectCreateHeaders, - ProjectCreateParams, + ProjectCreateQueryParams, ProjectFilters, ProjectsListQueryParams, ProjectsSearchQueryParams, ) -from ._permalink_api import update_or_pop_permalink_in_project -from .exceptions import ( - ProjectDeleteError, - ProjectInvalidRightsError, - ProjectNotFoundError, - ProjectOwnerNotFoundInTheProjectAccessRightsError, - WrongTagIdsInQueryError, -) -from .utils import get_project_unavailable_services, project_uses_available_services # When the user requests a project with a repo, the working copy might differ from # the repo project. 
A middleware in the meta module (if active) will resolve @@ -76,37 +61,9 @@ # response needs to refer to the uuid of the request and this is passed through this request key RQ_REQUESTED_REPO_PROJECT_UUID_KEY = f"{__name__}.RQT_REQUESTED_REPO_PROJECT_UUID_KEY" - _logger = logging.getLogger(__name__) -def _handle_projects_exceptions(handler: Handler): - @functools.wraps(handler) - async def _wrapper(request: web.Request) -> web.StreamResponse: - try: - return await handler(request) - - except ( - ProjectNotFoundError, - FolderNotFoundError, - WorkspaceNotFoundError, - ) as exc: - raise web.HTTPNotFound(reason=f"{exc}") from exc - except ( - ProjectOwnerNotFoundInTheProjectAccessRightsError, - WrongTagIdsInQueryError, - ) as exc: - raise web.HTTPBadRequest(reason=f"{exc}") from exc - except ( - ProjectInvalidRightsError, - FolderAccessForbiddenError, - WorkspaceAccessForbiddenError, - ) as exc: - raise web.HTTPForbidden(reason=f"{exc}") from exc - - return _wrapper - - routes = web.RouteTableDef() @@ -114,14 +71,14 @@ async def _wrapper(request: web.Request) -> web.StreamResponse: @login_required @permission_required("project.create") @permission_required("services.pipeline.*") # due to update_pipeline_db -@_handle_projects_exceptions +@handle_plugin_requests_exceptions async def create_project(request: web.Request): # # - Create https://google.aip.dev/133 # req_ctx = RequestContext.model_validate(request) - query_params: ProjectCreateParams = parse_request_query_parameters_as( - ProjectCreateParams, request + query_params: ProjectCreateQueryParams = parse_request_query_parameters_as( + ProjectCreateQueryParams, request ) header_params = parse_request_headers_as(ProjectCreateHeaders, request) if query_params.as_template: # create template from @@ -131,26 +88,20 @@ async def create_project(request: web.Request): # this entrypoint are in reality multiple entrypoints in one, namely # :create, :copy (w/ and w/o override) # NOTE: see clone_project + predefined_project: ProjectDict | None if not request.can_read_body: # request w/o body predefined_project = None else: # request w/ body (I found cases in which body = {}) - project_create: ( - ProjectCreateNew | ProjectCopyOverride | EmptyModel - ) = await parse_request_body_as( - ProjectCreateNew | ProjectCopyOverride | EmptyModel, # type: ignore[arg-type] # from pydantic v2 --> https://github.com/pydantic/pydantic/discussions/4950 - request, - ) - predefined_project = ( - project_create.model_dump( - exclude_unset=True, - by_alias=True, - exclude_none=True, + project_create: ProjectCreateNew | ProjectCopyOverride | EmptyModel = ( + await parse_request_body_as( + ProjectCreateNew | ProjectCopyOverride | EmptyModel, # type: ignore[arg-type] # from pydantic v2 --> https://github.com/pydantic/pydantic/discussions/4950 + request, ) - or None ) + predefined_project = project_create.to_domain_model() or None return await start_long_running_task( request, @@ -172,31 +123,10 @@ async def create_project(request: web.Request): ) -def _create_page_response(projects, request_url, total, limit, offset) -> web.Response: - page = Page[ProjectListItem].model_validate( - paginate_data( - chunk=[ - ProjectListItem.from_domain_model(prj).model_dump( - by_alias=True, exclude_unset=True - ) - for prj in projects - ], - request_url=request_url, - total=total, - limit=limit, - offset=offset, - ) - ) - return web.Response( - text=page.model_dump_json(**RESPONSE_MODEL_POLICY), - content_type=MIMETYPE_APPLICATION_JSON, - ) - - @routes.get(f"/{VTAG}/projects", 
name="list_projects") @login_required @permission_required("project.read") -@_handle_projects_exceptions +@handle_plugin_requests_exceptions async def list_projects(request: web.Request): # # - List https://google.aip.dev/132 @@ -218,7 +148,7 @@ async def list_projects(request: web.Request): assert query_params.filters # nosec projects, total_number_of_projects = await _crud_api_read.list_projects( - request, + request.app, user_id=req_ctx.user_id, product_name=req_ctx.product_name, project_type=query_params.project_type, @@ -233,7 +163,11 @@ async def list_projects(request: web.Request): order_by=OrderBy.model_construct(**query_params.order_by.model_dump()), ) - return _create_page_response( + projects = await _rest_utils.aggregate_data_to_projects_from_request( + request, projects + ) + + return _rest_utils.create_page_response( projects=projects, request_url=request.url, total=total_number_of_projects, @@ -245,7 +179,7 @@ async def list_projects(request: web.Request): @routes.get(f"/{VTAG}/projects:search", name="list_projects_full_search") @login_required @permission_required("project.read") -@_handle_projects_exceptions +@handle_plugin_requests_exceptions async def list_projects_full_search(request: web.Request): req_ctx = RequestContext.model_validate(request) query_params: ProjectsSearchQueryParams = parse_request_query_parameters_as( @@ -257,7 +191,7 @@ async def list_projects_full_search(request: web.Request): tag_ids_list = query_params.tag_ids_list() projects, total_number_of_projects = await _crud_api_read.list_projects_full_depth( - request, + request.app, user_id=req_ctx.user_id, product_name=req_ctx.product_name, trashed=query_params.filters.trashed, @@ -269,7 +203,11 @@ async def list_projects_full_search(request: web.Request): order_by=OrderBy.model_construct(**query_params.order_by.model_dump()), ) - return _create_page_response( + projects = await _rest_utils.aggregate_data_to_projects_from_request( + request, projects + ) + + return _rest_utils.create_page_response( projects=projects, request_url=request.url, total=total_number_of_projects, @@ -281,7 +219,7 @@ async def list_projects_full_search(request: web.Request): @routes.get(f"/{VTAG}/projects/active", name="get_active_project") @login_required @permission_required("project.read") -@_handle_projects_exceptions +@handle_plugin_requests_exceptions async def get_active_project(request: web.Request) -> web.Response: # # - Get https://google.aip.dev/131 @@ -298,39 +236,35 @@ async def get_active_project(request: web.Request) -> web.Response: ProjectActiveQueryParams, request ) - try: - user_active_projects = [] - with managed_resource( - req_ctx.user_id, query_params.client_session_id, request.app - ) as rt: - # get user's projects - user_active_projects = await rt.find(PROJECT_ID_KEY) - - data = None - if user_active_projects: - project = await projects_service.get_project_for_user( - request.app, - project_uuid=user_active_projects[0], - user_id=req_ctx.user_id, - include_state=True, - include_trashed_by_primary_gid=True, - ) + user_active_projects = [] + with managed_resource( + req_ctx.user_id, query_params.client_session_id, request.app + ) as rt: + # get user's projects + user_active_projects = await rt.find(PROJECT_ID_KEY) - # updates project's permalink field - await update_or_pop_permalink_in_project(request, project) + data = None + if user_active_projects: + project = await _projects_service.get_project_for_user( + request.app, + project_uuid=user_active_projects[0], + user_id=req_ctx.user_id, + 
include_state=True, + include_trashed_by_primary_gid=True, + ) - # updates project's permalink field - await update_or_pop_permalink_in_project(request, project) + data = ProjectGet.from_domain_model(project).data(exclude_unset=True) - return web.json_response({"data": data}, dumps=json_dumps) + # updates project's permalink field + await update_or_pop_permalink_in_project(request, project) - data = ProjectGet.from_domain_model(project).data(exclude_unset=True) + return envelope_json_response(data) - except ProjectNotFoundError as exc: - raise web.HTTPNotFound(reason="Project not found") from exc @routes.get(f"/{VTAG}/projects/{{project_id}}", name="get_project") @login_required @permission_required("project.read") -@_handle_projects_exceptions +@handle_plugin_requests_exceptions async def get_project(request: web.Request): """ @@ -344,50 +278,42 @@ req_ctx = RequestContext.model_validate(request) path_params = parse_request_path_parameters_as(ProjectPathParams, request) - user_available_services: list[dict] = await get_services_for_user_in_product( request.app, req_ctx.user_id, req_ctx.product_name, only_key_versions=True + user_available_services: list[dict] = ( + await catalog_service.get_services_for_user_in_product( request.app, req_ctx.user_id, req_ctx.product_name, only_key_versions=True + ) ) - try: - project = await projects_service.get_project_for_user( - request.app, - project_uuid=f"{path_params.project_id}", - user_id=req_ctx.user_id, - include_state=True, - include_trashed_by_primary_gid=True, + project = await _projects_service.get_project_for_user( + request.app, + project_uuid=f"{path_params.project_id}", + user_id=req_ctx.user_id, + include_state=True, + include_trashed_by_primary_gid=True, + ) + if not await project_uses_available_services(project, user_available_services): + unavailable_services = get_project_unavailable_services( project, user_available_services ) - if not await project_uses_available_services(project, user_available_services): - unavilable_services = get_project_unavailable_services( project, user_available_services ) - formatted_services = ", ".join( f"{service}:{version}" for service, version in unavilable_services ) - # TODO: lack of permissions should be notified with https://httpstatuses.com/403 web.HTTPForbidden - raise web.HTTPNotFound( reason=( f"Project '{path_params.project_id}' uses unavailable services. Please ask " f"for permission for the following services {formatted_services}" ) + formatted_services = ", ".join( f"{service}:{version}" for service, version in unavailable_services ) + # TODO: lack of permissions should be notified with https://httpstatuses.com/403 web.HTTPForbidden + raise web.HTTPNotFound( reason=( f"Project '{path_params.project_id}' uses unavailable services. 
Please ask " + f"for permission for the following services {formatted_services}" ) + ) - if new_uuid := request.get(RQ_REQUESTED_REPO_PROJECT_UUID_KEY): - project["uuid"] = new_uuid - - # Adds permalink - await update_or_pop_permalink_in_project(request, project) + if new_uuid := request.get(RQ_REQUESTED_REPO_PROJECT_UUID_KEY): + project["uuid"] = new_uuid - data = ProjectGet.from_domain_model(project).data(exclude_unset=True) - return web.json_response({"data": data}, dumps=json_dumps) + # Adds permalink + await update_or_pop_permalink_in_project(request, project) - except ProjectInvalidRightsError as exc: - raise web.HTTPForbidden( - reason=f"You do not have sufficient rights to read project {path_params.project_id}" - ) from exc - except ProjectNotFoundError as exc: - raise web.HTTPNotFound( - reason=f"Project {path_params.project_id} not found" - ) from exc + data = ProjectGet.from_domain_model(project).data(exclude_unset=True) + return envelope_json_response(data) @routes.get( @@ -395,11 +321,11 @@ async def get_project(request: web.Request): ) @login_required @permission_required("project.read") -@_handle_projects_exceptions +@handle_plugin_requests_exceptions async def get_project_inactivity(request: web.Request): path_params = parse_request_path_parameters_as(ProjectPathParams, request) - project_inactivity = await projects_service.get_project_inactivity( + project_inactivity = await _projects_service.get_project_inactivity( app=request.app, project_id=path_params.project_id ) return web.json_response(Envelope(data=project_inactivity), dumps=json_dumps) @@ -409,7 +335,7 @@ async def get_project_inactivity(request: web.Request): @login_required @permission_required("project.update") @permission_required("services.pipeline.*") -@_handle_projects_exceptions +@handle_plugin_requests_exceptions async def patch_project(request: web.Request): # # Update https://google.aip.dev/134 @@ -418,7 +344,7 @@ async def patch_project(request: web.Request): path_params = parse_request_path_parameters_as(ProjectPathParams, request) project_patch = await parse_request_body_as(ProjectPatch, request) - await projects_service.patch_project( + await _projects_service.patch_project( request.app, user_id=req_ctx.user_id, project_uuid=path_params.project_id, @@ -432,7 +358,7 @@ async def patch_project(request: web.Request): @routes.delete(f"/{VTAG}/projects/{{project_id}}", name="delete_project") @login_required @permission_required("project.delete") -@_handle_projects_exceptions +@handle_plugin_requests_exceptions async def delete_project(request: web.Request): # Delete https://google.aip.dev/135 """ @@ -450,64 +376,52 @@ async def delete_project(request: web.Request): req_ctx = RequestContext.model_validate(request) path_params = parse_request_path_parameters_as(ProjectPathParams, request) - try: - await projects_service.get_project_for_user( - request.app, - project_uuid=f"{path_params.project_id}", - user_id=req_ctx.user_id, - ) - project_users: set[int] = set() - with managed_resource(req_ctx.user_id, None, request.app) as user_session: - project_users = { - s.user_id - for s in await user_session.find_users_of_resource( - request.app, PROJECT_ID_KEY, f"{path_params.project_id}" - ) - } - # that project is still in use - if req_ctx.user_id in project_users: - raise web.HTTPForbidden( - reason="Project is still open in another tab/browser." - "It cannot be deleted until it is closed." 
- ) - if project_users: - other_user_names = { - f"{await get_user_fullname(request.app, user_id=uid)}" - for uid in project_users - } - raise web.HTTPForbidden( - reason=f"Project is open by {other_user_names}. " - "It cannot be deleted until the project is closed." - ) - - project_locked_state: ProjectLocked | None - if project_locked_state := await get_project_locked_state( - get_redis_lock_manager_client_sdk(request.app), - project_uuid=path_params.project_id, - ): - raise web.HTTPConflict( - reason=f"Project {path_params.project_id} is locked: {project_locked_state=}" + await _projects_service.get_project_for_user( + request.app, + project_uuid=f"{path_params.project_id}", + user_id=req_ctx.user_id, + ) + project_users: set[int] = set() + with managed_resource(req_ctx.user_id, None, request.app) as user_session: + project_users = { + s.user_id + for s in await user_session.find_users_of_resource( + request.app, PROJECT_ID_KEY, f"{path_params.project_id}" ) + } + # that project is still in use + if req_ctx.user_id in project_users: + raise web.HTTPForbidden( + reason="Project is still open in another tab/browser. " + "It cannot be deleted until it is closed." + ) + if project_users: + other_user_names = { + f"{await get_user_fullname(request.app, user_id=uid)}" + for uid in project_users + } + raise web.HTTPForbidden( + reason=f"Project is open by {other_user_names}. " + "It cannot be deleted until the project is closed." + ) - await projects_service.submit_delete_project_task( - request.app, - path_params.project_id, - req_ctx.user_id, - request.headers.get( - X_SIMCORE_USER_AGENT, UNDEFINED_DEFAULT_SIMCORE_USER_AGENT_VALUE - ), + project_locked_state: ProjectLocked | None + if project_locked_state := await get_project_locked_state( + get_redis_lock_manager_client_sdk(request.app), + project_uuid=path_params.project_id, + ): + raise web.HTTPConflict( + reason=f"Project {path_params.project_id} is locked: {project_locked_state=}" ) - except ProjectInvalidRightsError as err: - raise web.HTTPForbidden( - reason="You do not have sufficient rights to delete this project" - ) from err - except ProjectNotFoundError as err: - raise web.HTTPNotFound( - reason=f"Project {path_params.project_id} not found" - ) from err - except ProjectDeleteError as err: - raise web.HTTPConflict(reason=f"{err}") from err + await _projects_service.submit_delete_project_task( + request.app, + project_uuid=path_params.project_id, + user_id=req_ctx.user_id, + simcore_user_agent=request.headers.get( + X_SIMCORE_USER_AGENT, UNDEFINED_DEFAULT_SIMCORE_USER_AGENT_VALUE + ), + ) return web.json_response(status=status.HTTP_204_NO_CONTENT) @@ -523,7 +437,7 @@ async def delete_project(request: web.Request): @login_required @permission_required("project.create") @permission_required("services.pipeline.*") # due to update_pipeline_db -@_handle_projects_exceptions +@handle_plugin_requests_exceptions async def clone_project(request: web.Request): req_ctx = RequestContext.model_validate(request) path_params = parse_request_path_parameters_as(ProjectPathParams, request) diff --git a/services/web/server/src/simcore_service_webserver/projects/_crud_handlers_models.py b/services/web/server/src/simcore_service_webserver/projects/_controller/projects_rest_schemas.py similarity index 95% rename from services/web/server/src/simcore_service_webserver/projects/_crud_handlers_models.py rename to services/web/server/src/simcore_service_webserver/projects/_controller/projects_rest_schemas.py index 188b3cc960b..55834b7b658 100644 --- 
a/services/web/server/src/simcore_service_webserver/projects/_crud_handlers_models.py +++ b/services/web/server/src/simcore_service_webserver/projects/_controller/projects_rest_schemas.py @@ -1,9 +1,3 @@ -""" Handlers for STANDARD methods on /projects colletions - -Standard methods or CRUD that states for Create+Read(Get&List)+Update+Delete - -""" - from typing import Annotated, Self from models_library.basic_types import IDStr @@ -38,8 +32,8 @@ X_SIMCORE_USER_AGENT, ) -from .exceptions import WrongTagIdsInQueryError -from .models import ProjectTypeAPI +from ..exceptions import WrongTagIdsInQueryError +from ..models import ProjectTypeAPI class ProjectCreateHeaders(BaseModel): @@ -73,7 +67,7 @@ def check_parent_valid(self) -> Self: model_config = ConfigDict(populate_by_name=False) -class ProjectCreateParams(BaseModel): +class ProjectCreateQueryParams(BaseModel): from_study: ProjectID | None = Field( None, description="Option to create a project from existing template or study: from_study={study_uuid}", @@ -159,8 +153,7 @@ class ProjectsListQueryParams( ProjectsListOrderParams, # type: ignore[misc, valid-type] FiltersQueryParameters[ProjectFilters], ProjectsListExtraQueryParams, -): - ... +): ... class ProjectActiveQueryParams(BaseModel): diff --git a/services/web/server/src/simcore_service_webserver/projects/_observer.py b/services/web/server/src/simcore_service_webserver/projects/_controller/projects_slot.py similarity index 85% rename from services/web/server/src/simcore_service_webserver/projects/_observer.py rename to services/web/server/src/simcore_service_webserver/projects/_controller/projects_slot.py index f830ae40f6f..8537fbc6616 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_observer.py +++ b/services/web/server/src/simcore_service_webserver/projects/_controller/projects_slot.py @@ -1,6 +1,4 @@ -""" Handlers to events registered in servicelib.observer.event_registry - -""" +"""Handlers to events registered in servicelib.observer.event_registry""" import logging @@ -15,9 +13,9 @@ from servicelib.logging_utils import log_context from servicelib.utils import logged_gather -from ..notifications import project_logs -from ..resource_manager.user_sessions import PROJECT_ID_KEY, managed_resource -from .projects_service import retrieve_and_notify_project_locked_state +from ...notifications import project_logs +from ...resource_manager.user_sessions import PROJECT_ID_KEY, managed_resource +from .._projects_service import retrieve_and_notify_project_locked_state _logger = logging.getLogger(__name__) diff --git a/services/web/server/src/simcore_service_webserver/projects/_states_handlers.py b/services/web/server/src/simcore_service_webserver/projects/_controller/projects_states_rest.py similarity index 67% rename from services/web/server/src/simcore_service_webserver/projects/_states_handlers.py rename to services/web/server/src/simcore_service_webserver/projects/_controller/projects_states_rest.py index 956226d7f32..252708921fe 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_states_handlers.py +++ b/services/web/server/src/simcore_service_webserver/projects/_controller/projects_states_rest.py @@ -1,7 +1,4 @@ -"""handlers for project states""" - import contextlib -import functools import json import logging @@ -14,7 +11,6 @@ parse_request_path_parameters_as, parse_request_query_parameters_as, ) -from servicelib.aiohttp.typing_extension import Handler from servicelib.aiohttp.web_exceptions_extension import HTTPLockedError from 
servicelib.common_headers import ( UNDEFINED_DEFAULT_SIMCORE_USER_AGENT_VALUE, @@ -23,29 +19,20 @@ from simcore_postgres_database.models.users import UserRole from simcore_postgres_database.webserver_models import ProjectType -from .._meta import API_VTAG as VTAG -from ..director_v2.exceptions import DirectorServiceError -from ..login.decorators import login_required -from ..notifications import project_logs -from ..products.api import Product, get_current_product -from ..resource_usage.errors import DefaultPricingPlanNotFoundError -from ..security.decorators import permission_required -from ..users import api -from ..users.exceptions import UserDefaultWalletNotFoundError -from ..utils_aiohttp import envelope_json_response -from ..wallets.errors import WalletNotEnoughCreditsError -from . import api as projects_api -from . import projects_service -from ._common.models import ProjectPathParams, RequestContext -from .exceptions import ( - DefaultPricingUnitNotFoundError, - ProjectInDebtCanNotChangeWalletError, - ProjectInDebtCanNotOpenError, - ProjectInvalidRightsError, - ProjectNotFoundError, - ProjectStartsTooManyDynamicNodesError, - ProjectTooManyProjectOpenedError, -) +from ..._meta import API_VTAG as VTAG +from ...director_v2.exceptions import DirectorServiceError +from ...login.decorators import login_required +from ...notifications import project_logs +from ...products import products_web +from ...products.models import Product +from ...security.decorators import permission_required +from ...users import api +from ...utils_aiohttp import envelope_json_response +from .. import _projects_service +from .. import api as projects_api +from ..exceptions import ProjectStartsTooManyDynamicNodesError +from ._rest_exceptions import handle_plugin_requests_exceptions +from ._rest_schemas import ProjectPathParams, RequestContext _logger = logging.getLogger(__name__) @@ -53,38 +40,6 @@ routes = web.RouteTableDef() -def _handle_project_exceptions(handler: Handler): - """Transforms common project errors -> http errors""" - - @functools.wraps(handler) - async def _wrapper(request: web.Request) -> web.StreamResponse: - try: - return await handler(request) - - except ( - ProjectNotFoundError, - UserDefaultWalletNotFoundError, - DefaultPricingPlanNotFoundError, - DefaultPricingUnitNotFoundError, - ) as exc: - raise web.HTTPNotFound(reason=f"{exc}") from exc - - except ProjectInvalidRightsError as exc: - raise web.HTTPForbidden(reason=f"{exc}") from exc - - except ProjectTooManyProjectOpenedError as exc: - raise web.HTTPConflict(reason=f"{exc}") from exc - - except ( - WalletNotEnoughCreditsError, - ProjectInDebtCanNotChangeWalletError, - ProjectInDebtCanNotOpenError, - ) as exc: - raise web.HTTPPaymentRequired(reason=f"{exc}") from exc - - return _wrapper - - # # open project: custom methods https://google.aip.dev/136 # @@ -97,7 +52,7 @@ class _OpenProjectQuery(BaseModel): @routes.post(f"/{VTAG}/projects/{{project_id}}:open", name="open_project") @login_required @permission_required("project.open") -@_handle_project_exceptions +@handle_plugin_requests_exceptions async def open_project(request: web.Request) -> web.Response: req_ctx = RequestContext.model_validate(request) path_params = parse_request_path_parameters_as(ProjectPathParams, request) @@ -112,7 +67,7 @@ async def open_project(request: web.Request) -> web.Response: raise web.HTTPBadRequest(reason="Invalid request body") from exc try: - project_type: ProjectType = await projects_service.get_project_type( + project_type: ProjectType = await 
_projects_service.get_project_type( request.app, path_params.project_id ) user_role: UserRole = await api.get_user_role( @@ -122,7 +77,7 @@ async def open_project(request: web.Request) -> web.Response: # only USERS/TESTERS can do that raise web.HTTPForbidden(reason="Wrong user role to open/edit a template") - project = await projects_service.get_project_for_user( + project = await _projects_service.get_project_for_user( request.app, project_uuid=f"{path_params.project_id}", user_id=req_ctx.user_id, @@ -138,9 +93,9 @@ async def open_project(request: web.Request) -> web.Response: product_name=req_ctx.product_name, ) - product: Product = get_current_product(request) + product: Product = products_web.get_current_product(request) - if not await projects_service.try_open_project_for_user( + if not await _projects_service.try_open_project_for_user( req_ctx.user_id, project_uuid=path_params.project_id, client_session_id=client_session_id, @@ -150,7 +105,7 @@ async def open_project(request: web.Request) -> web.Response: raise HTTPLockedError(reason="Project is locked, try later") # the project can be opened, let's update its product links - await projects_service.update_project_linked_product( + await _projects_service.update_project_linked_product( request.app, path_params.project_id, req_ctx.product_name ) @@ -163,30 +118,30 @@ async def open_project(request: web.Request) -> web.Response: # NOTE: this method raises that exception when the number of dynamic # services in the project is higher than the maximum allowed per project # the project shall still open though. - await projects_service.run_project_dynamic_services( + await _projects_service.run_project_dynamic_services( request, project, req_ctx.user_id, req_ctx.product_name ) # and let's update the project last change timestamp - await projects_service.update_project_last_change_timestamp( + await _projects_service.update_project_last_change_timestamp( request.app, path_params.project_id ) # notify users that project is now opened - project = await projects_service.add_project_states_for_user( + project = await _projects_service.add_project_states_for_user( user_id=req_ctx.user_id, project=project, is_template=False, app=request.app, ) - await projects_service.notify_project_state_update(request.app, project) + await _projects_service.notify_project_state_update(request.app, project) return envelope_json_response(ProjectGet.from_domain_model(project)) except DirectorServiceError as exc: # there was an issue while accessing the director-v2/director-v0 # ensure the project is closed again - await projects_service.try_close_project_for_user( + await _projects_service.try_close_project_for_user( user_id=req_ctx.user_id, project_uuid=f"{path_params.project_id}", client_session_id=client_session_id, @@ -208,7 +163,7 @@ async def open_project(request: web.Request) -> web.Response: @routes.post(f"/{VTAG}/projects/{{project_id}}:close", name="close_project") @login_required @permission_required("project.close") -@_handle_project_exceptions +@handle_plugin_requests_exceptions async def close_project(request: web.Request) -> web.Response: req_ctx = RequestContext.model_validate(request) path_params = parse_request_path_parameters_as(ProjectPathParams, request) @@ -220,13 +175,13 @@ async def close_project(request: web.Request) -> web.Response: raise web.HTTPBadRequest(reason="Invalid request body") from exc # ensure the project exists - await projects_service.get_project_for_user( + await _projects_service.get_project_for_user( request.app,
project_uuid=f"{path_params.project_id}", user_id=req_ctx.user_id, include_state=False, ) - await projects_service.try_close_project_for_user( + await _projects_service.try_close_project_for_user( req_ctx.user_id, f"{path_params.project_id}", client_session_id, @@ -247,12 +202,13 @@ async def close_project(request: web.Request) -> web.Response: @routes.get(f"/{VTAG}/projects/{{project_id}}/state", name="get_project_state") @login_required @permission_required("project.read") +@handle_plugin_requests_exceptions async def get_project_state(request: web.Request) -> web.Response: req_ctx = RequestContext.model_validate(request) path_params = parse_request_path_parameters_as(ProjectPathParams, request) # check that project exists and queries state - validated_project = await projects_service.get_project_for_user( + validated_project = await _projects_service.get_project_for_user( request.app, project_uuid=f"{path_params.project_id}", user_id=req_ctx.user_id, diff --git a/services/web/server/src/simcore_service_webserver/projects/_tags_handlers.py b/services/web/server/src/simcore_service_webserver/projects/_controller/tags_rest.py similarity index 80% rename from services/web/server/src/simcore_service_webserver/projects/_tags_handlers.py rename to services/web/server/src/simcore_service_webserver/projects/_controller/tags_rest.py index 1eff696a177..9fce75cda63 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_tags_handlers.py +++ b/services/web/server/src/simcore_service_webserver/projects/_controller/tags_rest.py @@ -1,18 +1,15 @@ -""" Handlers for CRUD operations on /projects/{*}/tags/{*} - -""" - import logging from aiohttp import web from models_library.projects import ProjectID from servicelib.request_keys import RQT_USERID_KEY -from simcore_service_webserver.utils_aiohttp import envelope_json_response -from .._meta import API_VTAG -from ..login.decorators import login_required -from ..security.decorators import permission_required -from . import _tags_api as tags_api +from ..._meta import API_VTAG +from ...login.decorators import login_required +from ...security.decorators import permission_required +from ...utils_aiohttp import envelope_json_response +from .. 
import _tags_service as tags_api +from ._rest_exceptions import handle_plugin_requests_exceptions _logger = logging.getLogger(__name__) @@ -25,6 +22,7 @@ ) @login_required @permission_required("project.tag.*") +@handle_plugin_requests_exceptions async def add_project_tag(request: web.Request): user_id: int = request[RQT_USERID_KEY] @@ -51,6 +49,7 @@ async def add_project_tag(request: web.Request): ) @login_required @permission_required("project.tag.*") +@handle_plugin_requests_exceptions async def remove_project_tag(request: web.Request): user_id: int = request[RQT_USERID_KEY] diff --git a/services/web/server/src/simcore_service_webserver/projects/_trash_rest.py b/services/web/server/src/simcore_service_webserver/projects/_controller/trash_rest.py similarity index 70% rename from services/web/server/src/simcore_service_webserver/projects/_trash_rest.py rename to services/web/server/src/simcore_service_webserver/projects/_controller/trash_rest.py index 22368285efc..f1cae188a7d 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_trash_rest.py +++ b/services/web/server/src/simcore_service_webserver/projects/_controller/trash_rest.py @@ -7,28 +7,25 @@ parse_request_query_parameters_as, ) -from .._meta import API_VTAG as VTAG -from ..exception_handling import ( +from ..._meta import API_VTAG as VTAG +from ...exception_handling import ( ExceptionToHttpErrorMap, HttpErrorInfo, exception_handling_decorator, to_exceptions_handlers_map, ) -from ..login.decorators import get_user_id, login_required -from ..products.api import get_product_name -from ..security.decorators import permission_required -from . import _trash_service -from ._common.models import ProjectPathParams, RemoveQueryParams -from .exceptions import ProjectRunningConflictError, ProjectStoppingError +from ...login.decorators import get_user_id, login_required +from ...products import products_web +from ...security.decorators import permission_required +from .. import _trash_service +from ..exceptions import ProjectRunningConflictError, ProjectStoppingError +from ._rest_exceptions import handle_plugin_requests_exceptions +from ._rest_schemas import ProjectPathParams, RemoveQueryParams _logger = logging.getLogger(__name__) -# -# EXCEPTIONS HANDLING -# - -_TO_HTTP_ERROR_MAP: ExceptionToHttpErrorMap = { +_TRASH_ERRORS: ExceptionToHttpErrorMap = { ProjectRunningConflictError: HttpErrorInfo( status.HTTP_409_CONFLICT, "Current study is in use and cannot be trashed [project_id={project_uuid}]. 
Please stop all services first and try again", @@ -39,26 +36,22 @@ ), } - -_handle_exceptions = exception_handling_decorator( - to_exceptions_handlers_map(_TO_HTTP_ERROR_MAP) +_handle_local_request_exceptions = exception_handling_decorator( + to_exceptions_handlers_map(_TRASH_ERRORS) ) -# -# ROUTES -# - routes = web.RouteTableDef() @routes.post(f"/{VTAG}/projects/{{project_id}}:trash", name="trash_project") @login_required @permission_required("project.delete") -@_handle_exceptions +@handle_plugin_requests_exceptions +@_handle_local_request_exceptions async def trash_project(request: web.Request): user_id = get_user_id(request) - product_name = get_product_name(request) + product_name = products_web.get_product_name(request) path_params = parse_request_path_parameters_as(ProjectPathParams, request) query_params: RemoveQueryParams = parse_request_query_parameters_as( RemoveQueryParams, request @@ -79,10 +72,11 @@ async def trash_project(request: web.Request): @routes.post(f"/{VTAG}/projects/{{project_id}}:untrash", name="untrash_project") @login_required @permission_required("project.delete") -@_handle_exceptions +@handle_plugin_requests_exceptions +@_handle_local_request_exceptions async def untrash_project(request: web.Request): user_id = get_user_id(request) - product_name = get_product_name(request) + product_name = products_web.get_product_name(request) path_params = parse_request_path_parameters_as(ProjectPathParams, request) await _trash_service.untrash_project( diff --git a/services/web/server/src/simcore_service_webserver/projects/_wallets_handlers.py b/services/web/server/src/simcore_service_webserver/projects/_controller/wallets_rest.py similarity index 67% rename from services/web/server/src/simcore_service_webserver/projects/_wallets_handlers.py rename to services/web/server/src/simcore_service_webserver/projects/_controller/wallets_rest.py index 346958c3487..a2b734a20cb 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_wallets_handlers.py +++ b/services/web/server/src/simcore_service_webserver/projects/_controller/wallets_rest.py @@ -1,8 +1,3 @@ -""" Handlers for CRUD operations on /projects/{*}/wallet - -""" - -import functools import logging from decimal import Decimal from typing import Annotated @@ -17,49 +12,16 @@ parse_request_body_as, parse_request_path_parameters_as, ) -from servicelib.aiohttp.typing_extension import Handler -from simcore_service_webserver.utils_aiohttp import envelope_json_response - -from .._meta import API_VTAG -from ..login.decorators import login_required -from ..security.decorators import permission_required -from ..wallets.errors import WalletAccessForbiddenError, WalletNotFoundError -from . import _wallets_api as wallets_api -from . import projects_service -from ._common.models import ProjectPathParams, RequestContext -from .exceptions import ( - ProjectInDebtCanNotChangeWalletError, - ProjectInvalidRightsError, - ProjectNotFoundError, - ProjectWalletPendingTransactionError, -) - -_logger = logging.getLogger(__name__) - -def _handle_project_wallet_exceptions(handler: Handler): - @functools.wraps(handler) - async def wrapper(request: web.Request) -> web.StreamResponse: - try: - return await handler(request) +from ..._meta import API_VTAG +from ...login.decorators import login_required +from ...security.decorators import permission_required +from ...utils_aiohttp import envelope_json_response +from .. 
import _projects_service, _wallets_service +from ._rest_exceptions import handle_plugin_requests_exceptions +from ._rest_schemas import ProjectPathParams, RequestContext - except ProjectNotFoundError as exc: - raise web.HTTPNotFound(reason=f"{exc}") from exc - - except WalletNotFoundError as exc: - raise web.HTTPNotFound(reason=f"{exc}") from exc - - except ProjectInDebtCanNotChangeWalletError as exc: - raise web.HTTPPaymentRequired(reason=f"{exc}") from exc - - except ( - WalletAccessForbiddenError, - ProjectInvalidRightsError, - ProjectWalletPendingTransactionError, - ) as exc: - raise web.HTTPForbidden(reason=f"{exc}") from exc - - return wrapper +_logger = logging.getLogger(__name__) routes = web.RouteTableDef() @@ -68,19 +30,19 @@ async def wrapper(request: web.Request) -> web.StreamResponse: @routes.get(f"/{API_VTAG}/projects/{{project_id}}/wallet", name="get_project_wallet") @login_required @permission_required("project.wallet.*") -@_handle_project_wallet_exceptions +@handle_plugin_requests_exceptions async def get_project_wallet(request: web.Request): req_ctx = RequestContext.model_validate(request) path_params = parse_request_path_parameters_as(ProjectPathParams, request) # ensure the project exists - await projects_service.get_project_for_user( + await _projects_service.get_project_for_user( request.app, project_uuid=f"{path_params.project_id}", user_id=req_ctx.user_id, include_state=False, ) - wallet: WalletGet | None = await wallets_api.get_project_wallet( + wallet: WalletGet | None = await _wallets_service.get_project_wallet( request.app, path_params.project_id ) @@ -99,20 +61,20 @@ class _ProjectWalletPathParams(BaseModel): ) @login_required @permission_required("project.wallet.*") -@_handle_project_wallet_exceptions +@handle_plugin_requests_exceptions async def connect_wallet_to_project(request: web.Request): req_ctx = RequestContext.model_validate(request) path_params = parse_request_path_parameters_as(_ProjectWalletPathParams, request) # ensure the project exists - await projects_service.get_project_for_user( + await _projects_service.get_project_for_user( request.app, project_uuid=f"{path_params.project_id}", user_id=req_ctx.user_id, include_state=False, ) - wallet: WalletGet = await wallets_api.connect_wallet_to_project( + wallet: WalletGet = await _wallets_service.connect_wallet_to_project( request.app, product_name=req_ctx.product_name, project_id=path_params.project_id, @@ -134,14 +96,14 @@ class _PayProjectDebtBody(BaseModel): ) @login_required @permission_required("project.wallet.*") -@_handle_project_wallet_exceptions +@handle_plugin_requests_exceptions async def pay_project_debt(request: web.Request): req_ctx = RequestContext.model_validate(request) path_params = parse_request_path_parameters_as(_ProjectWalletPathParams, request) body_params = await parse_request_body_as(_PayProjectDebtBody, request) # Ensure the project exists - await projects_service.get_project_for_user( + await _projects_service.get_project_for_user( request.app, project_uuid=f"{path_params.project_id}", user_id=req_ctx.user_id, @@ -149,7 +111,7 @@ async def pay_project_debt(request: web.Request): ) # Get currently associated wallet with the project - current_wallet: WalletGet | None = await wallets_api.get_project_wallet( + current_wallet: WalletGet | None = await _wallets_service.get_project_wallet( request.app, path_params.project_id ) if not current_wallet: @@ -172,7 +134,7 @@ async def pay_project_debt(request: web.Request): # Steps: # 1.
Transfer the required credits from the specified wallet to the connected wallet. # 2. Mark the project transactions as billed - await wallets_api.pay_debt_with_different_wallet( + await _wallets_service.pay_debt_with_different_wallet( app=request.app, product_name=req_ctx.product_name, project_id=path_params.project_id, diff --git a/services/web/server/src/simcore_service_webserver/projects/_workspaces_handlers.py b/services/web/server/src/simcore_service_webserver/projects/_controller/workspaces_rest.py similarity index 52% rename from services/web/server/src/simcore_service_webserver/projects/_workspaces_handlers.py rename to services/web/server/src/simcore_service_webserver/projects/_controller/workspaces_rest.py index b5a6082cb50..14b085c66b1 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_workspaces_handlers.py +++ b/services/web/server/src/simcore_service_webserver/projects/_controller/workspaces_rest.py @@ -1,4 +1,3 @@ -import functools import logging from typing import Annotated @@ -9,43 +8,17 @@ from pydantic import BaseModel, BeforeValidator, ConfigDict, Field from servicelib.aiohttp import status from servicelib.aiohttp.requests_validation import parse_request_path_parameters_as -from servicelib.aiohttp.typing_extension import Handler -from .._meta import api_version_prefix as VTAG -from ..folders.errors import FolderAccessForbiddenError, FolderNotFoundError -from ..login.decorators import login_required -from ..security.decorators import permission_required -from ..workspaces.errors import WorkspaceAccessForbiddenError, WorkspaceNotFoundError -from . import _workspaces_api -from ._common.models import RequestContext -from .exceptions import ProjectInvalidRightsError, ProjectNotFoundError +from ..._meta import api_version_prefix as VTAG +from ...login.decorators import login_required +from ...security.decorators import permission_required +from .. 
import _workspaces_service +from ._rest_exceptions import handle_plugin_requests_exceptions +from ._rest_schemas import RequestContext _logger = logging.getLogger(__name__) -def _handle_projects_workspaces_exceptions(handler: Handler): - @functools.wraps(handler) - async def wrapper(request: web.Request) -> web.StreamResponse: - try: - return await handler(request) - - except ( - ProjectNotFoundError, - FolderNotFoundError, - WorkspaceNotFoundError, - ) as exc: - raise web.HTTPNotFound(reason=f"{exc}") from exc - - except ( - ProjectInvalidRightsError, - FolderAccessForbiddenError, - WorkspaceAccessForbiddenError, - ) as exc: - raise web.HTTPForbidden(reason=f"{exc}") from exc - - return wrapper - - routes = web.RouteTableDef() @@ -64,14 +37,14 @@ class _ProjectWorkspacesPathParams(BaseModel): ) @login_required @permission_required("project.workspaces.*") -@_handle_projects_workspaces_exceptions +@handle_plugin_requests_exceptions async def move_project_to_workspace(request: web.Request): req_ctx = RequestContext.model_validate(request) path_params = parse_request_path_parameters_as( _ProjectWorkspacesPathParams, request ) - await _workspaces_api.move_project_into_workspace( + await _workspaces_service.move_project_into_workspace( app=request.app, user_id=req_ctx.user_id, project_id=path_params.project_id, diff --git a/services/web/server/src/simcore_service_webserver/projects/_crud_api_create.py b/services/web/server/src/simcore_service_webserver/projects/_crud_api_create.py index feebc745fe6..4bb6f9b938a 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_crud_api_create.py +++ b/services/web/server/src/simcore_service_webserver/projects/_crud_api_create.py @@ -27,10 +27,10 @@ from simcore_postgres_database.webserver_models import ProjectType as ProjectTypeDB from ..application_settings import get_application_settings -from ..catalog import client as catalog_client -from ..director_v2 import api as director_v2_api -from ..dynamic_scheduler import api as dynamic_scheduler_api -from ..folders import _folders_repository as _folders_repository +from ..catalog import catalog_service +from ..director_v2 import director_v2_service +from ..dynamic_scheduler import api as dynamic_scheduler_service +from ..folders import _folders_repository as folders_folders_repository from ..redis import get_redis_lock_manager_client_sdk from ..storage.api import ( copy_data_folders_from_project, @@ -39,11 +39,10 @@ from ..users.api import get_user_fullname from ..workspaces.api import check_user_workspace_access, get_user_workspace from ..workspaces.errors import WorkspaceAccessForbiddenError -from . import _folders_db as project_to_folders_db -from . import projects_service -from ._metadata_api import set_project_ancestors -from ._permalink_api import update_or_pop_permalink_in_project -from .db import ProjectDBAPI +from . 
import _folders_repository, _projects_service +from ._metadata_service import set_project_ancestors +from ._permalink_service import update_or_pop_permalink_in_project +from ._projects_repository_legacy import ProjectDBAPI from .exceptions import ( ParentNodeNotFoundError, ParentProjectNotFoundError, @@ -77,7 +76,7 @@ async def _prepare_project_copy( deep_copy: bool, task_progress: TaskProgress, ) -> tuple[ProjectDict, CopyProjectNodesCoro | None, CopyFileCoro | None]: - source_project = await projects_service.get_project_for_user( + source_project = await _projects_service.get_project_for_user( app, project_uuid=f"{src_project_uuid}", user_id=user_id, @@ -192,7 +191,7 @@ async def _copy() -> None: owner=Owner( user_id=user_id, **await get_user_fullname(app, user_id=user_id) ), - notification_cb=projects_service.create_user_notification_cb( + notification_cb=_projects_service.create_user_notification_cb( user_id, ProjectID(f"{source_project['uuid']}"), app ), )(_copy)() @@ -219,7 +218,7 @@ async def _compose_project_data( NodeID(node_id): ProjectNodeCreate( node_id=NodeID(node_id), required_resources=jsonable_encoder( - await catalog_client.get_service_resources( + await catalog_service.get_service_resources( app, user_id, node_data["key"], node_data["version"] ) ), @@ -293,7 +292,7 @@ async def create_project( # pylint: disable=too-many-arguments,too-many-branche ) if folder_id := predefined_project.get("folderId", None): # Check user has access to folder - await _folders_repository.get_for_user_or_workspace( + await folders_folders_repository.get_for_user_or_workspace( request.app, folder_id=folder_id, product_name=product_name, @@ -322,7 +321,7 @@ async def create_project( # pylint: disable=too-many-arguments,too-many-branche # 1.2 does project belong to some folder? 
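A note on the controller refactoring above: each module used to carry its own try/except wrapper (_handle_project_exceptions, _handle_project_wallet_exceptions, _handle_projects_workspaces_exceptions), and these are all deleted in favor of the shared handle_plugin_requests_exceptions decorator imported from ._rest_exceptions, while module-local maps such as _TRASH_ERRORS in trash_rest.py cover only errors specific to one endpoint family. A minimal, self-contained sketch of the underlying idea, using a hypothetical two-entry error map rather than the project's actual exception set:

import functools

from aiohttp import web


class ProjectNotFoundError(Exception): ...


class ProjectInvalidRightsError(Exception): ...


# hypothetical map: domain exception type -> aiohttp HTTP error class
_ERROR_MAP: dict[type[Exception], type[web.HTTPError]] = {
    ProjectNotFoundError: web.HTTPNotFound,
    ProjectInvalidRightsError: web.HTTPForbidden,
}


def handle_request_exceptions(handler):
    """Translates mapped domain exceptions into HTTP errors (sketch only)."""

    @functools.wraps(handler)
    async def _wrapper(request: web.Request) -> web.StreamResponse:
        try:
            return await handler(request)
        except tuple(_ERROR_MAP) as exc:
            # exact-type lookup keeps the sketch short; the real helpers in
            # ..exception_handling compose per-exception handlers instead
            raise _ERROR_MAP[type(exc)](reason=f"{exc}") from exc

    return _wrapper

Stacked under @login_required and @permission_required, this keeps handler bodies free of transport-level error translation, as trash_rest.py shows by combining the plugin-wide decorator with its local _handle_local_request_exceptions.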
workspace_id = new_project["workspaceId"] - prj_to_folder_db = await project_to_folders_db.get_project_to_folder( + prj_to_folder_db = await _folders_repository.get_project_to_folder( request.app, project_id=from_study, private_workspace_user_id_or_none=( @@ -369,7 +368,7 @@ async def create_project( # pylint: disable=too-many-arguments,too-many-branche # 3.2 move project to proper folder if folder_id: - await project_to_folders_db.insert_project_to_folder( + await _folders_repository.insert_project_to_folder( request.app, project_id=new_project["uuid"], folder_id=folder_id, @@ -390,13 +389,13 @@ async def create_project( # pylint: disable=too-many-arguments,too-many-branche ) # update the network information in director-v2 - await dynamic_scheduler_api.update_projects_networks( + await dynamic_scheduler_service.update_projects_networks( request.app, project_id=ProjectID(new_project["uuid"]) ) task_progress.update() # This is a new project and every new graph needs to be reflected in the pipeline tables - await director_v2_api.create_or_update_pipeline( + await director_v2_service.create_or_update_pipeline( request.app, user_id, new_project["uuid"], product_name ) # get the latest state of the project (lastChangeDate for instance) @@ -404,7 +403,7 @@ async def create_project( # pylint: disable=too-many-arguments,too-many-branche project_uuid=new_project["uuid"] ) # Appends state - new_project = await projects_service.add_project_states_for_user( + new_project = await _projects_service.add_project_states_for_user( user_id=user_id, project=new_project, is_template=as_template, @@ -419,9 +418,9 @@ async def create_project( # pylint: disable=too-many-arguments,too-many-branche user_specific_project_data_db = ( await _projects_repository.get_user_specific_project_data_db( project_uuid=new_project["uuid"], - private_workspace_user_id_or_none=user_id - if workspace_id is None - else None, + private_workspace_user_id_or_none=( + user_id if workspace_id is None else None + ), ) ) new_project["folderId"] = user_specific_project_data_db.folder_id @@ -460,7 +459,7 @@ async def create_project( # pylint: disable=too-many-arguments,too-many-branche except (ParentProjectNotFoundError, ParentNodeNotFoundError) as exc: if project_uuid := new_project.get("uuid"): - await projects_service.submit_delete_project_task( + await _projects_service.submit_delete_project_task( app=request.app, project_uuid=project_uuid, user_id=user_id, @@ -474,7 +473,7 @@ async def create_project( # pylint: disable=too-many-arguments,too-many-branche f"{user_id=}", ) if project_uuid := new_project.get("uuid"): - await projects_service.submit_delete_project_task( + await _projects_service.submit_delete_project_task( app=request.app, project_uuid=project_uuid, user_id=user_id, diff --git a/services/web/server/src/simcore_service_webserver/projects/_crud_api_delete.py b/services/web/server/src/simcore_service_webserver/projects/_crud_api_delete.py index 8fa770ddae9..866609110f3 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_crud_api_delete.py +++ b/services/web/server/src/simcore_service_webserver/projects/_crud_api_delete.py @@ -1,4 +1,4 @@ -""" Implements logic to delete a project (and all associated services, data, etc) +"""Implements logic to delete a project (and all associated services, data, etc) NOTE: this entire module is protected within the `projects` package @@ -13,12 +13,12 @@ from models_library.projects import ProjectID from models_library.users import UserID -from ..director_v2 import api +from 
..director_v2 import director_v2_service from ..storage.api import delete_data_folders_of_project from ..users.api import FullNameDict from ..users.exceptions import UserNotFoundError -from ._access_rights_api import check_user_project_permission -from .db import ProjectDBAPI +from ._access_rights_service import check_user_project_permission +from ._projects_repository_legacy import ProjectDBAPI from .exceptions import ( ProjectDeleteError, ProjectInvalidRightsError, @@ -44,8 +44,7 @@ async def __call__( *, notify_users: bool = True, user_name: FullNameDict | None = None, - ) -> None: - ... + ) -> None: ... async def mark_project_as_deleted( @@ -78,7 +77,7 @@ async def delete_project( app: web.Application, project_uuid: ProjectID, user_id: UserID, - simcore_user_agent, + simcore_user_agent: str, remove_project_dynamic_services: RemoveProjectServicesCallable, ) -> None: """Stops dynamic services, deletes data and finally deletes project @@ -111,7 +110,7 @@ async def delete_project( # stops computational services # - raises DirectorServiceError - await api.delete_pipeline(app, user_id, project_uuid) + await director_v2_service.delete_pipeline(app, user_id, project_uuid) # rm data from storage await delete_data_folders_of_project(app, project_uuid, user_id) diff --git a/services/web/server/src/simcore_service_webserver/projects/_crud_api_read.py b/services/web/server/src/simcore_service_webserver/projects/_crud_api_read.py index 43ba76b6673..45c0837ec03 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_crud_api_read.py +++ b/services/web/server/src/simcore_service_webserver/projects/_crud_api_read.py @@ -1,10 +1,13 @@ -""" Utils to implement READ operations (from cRud) on the project resource +"""Utils to implement READ operations (from cRud) on the project resource Read operations are list, get """ +from collections.abc import Coroutine +from typing import Any + from aiohttp import web from models_library.folders import FolderID, FolderQuery, FolderScope from models_library.projects import ProjectID @@ -15,59 +18,71 @@ from servicelib.utils import logged_gather from simcore_postgres_database.models.projects import ProjectType from simcore_postgres_database.webserver_models import ProjectType as ProjectTypeDB -from simcore_service_webserver.projects._projects_db import ( - batch_get_trashed_by_primary_gid, -) -from ..catalog.client import get_services_for_user_in_product -from ..folders import _folders_repository as _folders_repository +from ..catalog import catalog_service +from ..folders import _folders_repository from ..workspaces._workspaces_service import check_user_workspace_access -from . import projects_service -from ._permalink_api import update_or_pop_permalink_in_project -from .db import ProjectDBAPI +from . 
import _projects_service +from ._projects_repository import batch_get_trashed_by_primary_gid +from ._projects_repository_legacy import ProjectDBAPI from .models import ProjectDict, ProjectTypeAPI -async def _update_project_dict( - request: web.Request, - *, - user_id: UserID, - project: ProjectDict, - is_template: bool, -) -> ProjectDict: - # state - await projects_service.add_project_states_for_user( - user_id=user_id, - project=project, - is_template=is_template, - app=request.app, - ) +def _batch_update( + key: str, + value_per_object: list[Any], + objects: list[dict[str, Any]], +) -> list[dict[str, Any]]: + for obj, value in zip(objects, value_per_object, strict=True): + obj[key] = value + return objects - # permalink - await update_or_pop_permalink_in_project(request, project) - return project +async def _parallel_update(*update_per_object: Coroutine) -> list[Any]: + return await logged_gather( + *update_per_object, + reraise=True, + max_concurrency=100, + ) -async def _batch_update_list_of_project_dict( - app: web.Application, list_of_project_dict: list[ProjectDict] +async def _aggregate_data_to_projects_from_other_sources( + app: web.Application, + *, + db_projects: list[ProjectDict], + db_project_types: list[ProjectTypeDB], + user_id: UserID, ) -> list[ProjectDict]: - - # updating `trashed_by_primary_gid` + """ + Aggregates data to each project from other sources, first as a batch-update and then as a parallel-update. + """ + # updating `project.trashed_by_primary_gid` trashed_by_primary_gid_values = await batch_get_trashed_by_primary_gid( - app, projects_uuids=[ProjectID(p["uuid"]) for p in list_of_project_dict] + app, projects_uuids=[ProjectID(p["uuid"]) for p in db_projects] ) - for project_dict, value in zip( - list_of_project_dict, trashed_by_primary_gid_values, strict=True - ): - project_dict.update(trashed_by_primary_gid=value) + _batch_update("trashed_by_primary_gid", trashed_by_primary_gid_values, db_projects) + + # updating `project.state` + update_state_per_project = [ + _projects_service.add_project_states_for_user( + user_id=user_id, + project=prj, + is_template=prj_type == ProjectTypeDB.TEMPLATE, + app=app, + ) + for prj, prj_type in zip(db_projects, db_project_types, strict=False) + ] + + updated_projects: list[ProjectDict] = await _parallel_update( + *update_state_per_project, + ) - return list_of_project_dict + return updated_projects async def list_projects( # pylint: disable=too-many-arguments - request: web.Request, + app: web.Application, user_id: UserID, product_name: str, *, @@ -87,11 +102,12 @@ async def list_projects( # pylint: disable=too-many-arguments # ordering order_by: OrderBy, ) -> tuple[list[ProjectDict], int]: - app = request.app db = ProjectDBAPI.get_from_app_context(app) - user_available_services: list[dict] = await get_services_for_user_in_product( - app, user_id, product_name, only_key_versions=True + user_available_services: list[dict] = ( + await catalog_service.get_services_for_user_in_product( + app, user_id, product_name, only_key_versions=True + ) ) workspace_is_private = True @@ -145,27 +161,15 @@ async def list_projects( # pylint: disable=too-many-arguments order_by=order_by, ) - db_projects = await _batch_update_list_of_project_dict(app, db_projects) - - projects: list[ProjectDict] = await logged_gather( - *( - _update_project_dict( - request, - user_id=user_id, - project=prj, - is_template=prj_type == ProjectTypeDB.TEMPLATE, - ) - for prj, prj_type in zip(db_projects, db_project_types, strict=False) - ), - reraise=True, -
max_concurrency=100, + projects = await _aggregate_data_to_projects_from_other_sources( + app, db_projects=db_projects, db_project_types=db_project_types, user_id=user_id ) return projects, total_number_projects async def list_projects_full_depth( - request, + app: web.Application, *, user_id: UserID, product_name: str, @@ -180,10 +184,12 @@ async def list_projects_full_depth( search_by_multi_columns: str | None, search_by_project_name: str | None, ) -> tuple[list[ProjectDict], int]: - db = ProjectDBAPI.get_from_app_context(request.app) + db = ProjectDBAPI.get_from_app_context(app) - user_available_services: list[dict] = await get_services_for_user_in_product( - request.app, user_id, product_name, only_key_versions=True + user_available_services: list[dict] = ( + await catalog_service.get_services_for_user_in_product( + app, user_id, product_name, only_key_versions=True + ) ) db_projects, db_project_types, total_number_projects = await db.list_projects_dicts( @@ -202,30 +208,8 @@ async def list_projects_full_depth( order_by=order_by, ) - db_projects = await _batch_update_list_of_project_dict(request.app, db_projects) - - projects: list[ProjectDict] = await logged_gather( - *( - _update_project_dict( - request, - user_id=user_id, - project=prj, - is_template=prj_type == ProjectTypeDB.TEMPLATE, - ) - for prj, prj_type in zip(db_projects, db_project_types, strict=False) - ), - reraise=True, - max_concurrency=100, + projects = await _aggregate_data_to_projects_from_other_sources( + app, db_projects=db_projects, db_project_types=db_project_types, user_id=user_id ) return projects, total_number_projects - - -async def get_project( - request: web.Request, - user_id: UserID, - product_name: str, - project_uuid: ProjectID, - project_type: ProjectTypeAPI, -): - raise NotImplementedError diff --git a/services/web/server/src/simcore_service_webserver/projects/_folders_db.py b/services/web/server/src/simcore_service_webserver/projects/_folders_repository.py similarity index 98% rename from services/web/server/src/simcore_service_webserver/projects/_folders_db.py rename to services/web/server/src/simcore_service_webserver/projects/_folders_repository.py index d4fde1f5ce9..f9c4e689eac 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_folders_db.py +++ b/services/web/server/src/simcore_service_webserver/projects/_folders_repository.py @@ -1,9 +1,3 @@ -""" Database API - - - Adds a layer to the postgres API with a focus on the projects comments - -""" - import logging from datetime import datetime diff --git a/services/web/server/src/simcore_service_webserver/projects/_folders_api.py b/services/web/server/src/simcore_service_webserver/projects/_folders_service.py similarity index 66% rename from services/web/server/src/simcore_service_webserver/projects/_folders_api.py rename to services/web/server/src/simcore_service_webserver/projects/_folders_service.py index 7595f31d94d..88659d68ac5 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_folders_api.py +++ b/services/web/server/src/simcore_service_webserver/projects/_folders_service.py @@ -6,10 +6,10 @@ from models_library.projects import ProjectID from models_library.users import UserID -from ..folders import _folders_repository as _folders_repository -from ..projects._access_rights_api import get_user_project_access_rights -from . import _folders_db as project_to_folders_db -from .db import APP_PROJECT_DBAPI, ProjectDBAPI +from ..folders import _folders_repository as folders_folders_repository +from . 
import _folders_repository +from ._access_rights_service import get_user_project_access_rights +from ._projects_repository_legacy import APP_PROJECT_DBAPI, ProjectDBAPI from .exceptions import ProjectInvalidRightsError _logger = logging.getLogger(__name__) @@ -43,46 +43,48 @@ async def move_project_into_folder( ) workspace_is_private = False + private_workspace_user_id_or_none: UserID | None = ( + user_id if workspace_is_private else None + ) + if folder_id: # Check user has access to folder - await _folders_repository.get_for_user_or_workspace( + await folders_folders_repository.get_for_user_or_workspace( app, folder_id=folder_id, product_name=product_name, - user_id=user_id if workspace_is_private else None, + user_id=private_workspace_user_id_or_none, workspace_id=project_db.workspace_id, ) # Move project to folder - prj_to_folder_db = await project_to_folders_db.get_project_to_folder( + prj_to_folder_db = await _folders_repository.get_project_to_folder( app, project_id=project_id, - private_workspace_user_id_or_none=user_id if workspace_is_private else None, + private_workspace_user_id_or_none=private_workspace_user_id_or_none, ) if prj_to_folder_db is None: if folder_id is None: return - await project_to_folders_db.insert_project_to_folder( + await _folders_repository.insert_project_to_folder( app, project_id=project_id, folder_id=folder_id, - private_workspace_user_id_or_none=user_id if workspace_is_private else None, + private_workspace_user_id_or_none=private_workspace_user_id_or_none, ) else: # Delete old - await project_to_folders_db.delete_project_to_folder( + await _folders_repository.delete_project_to_folder( app, project_id=project_id, folder_id=prj_to_folder_db.folder_id, - private_workspace_user_id_or_none=user_id if workspace_is_private else None, + private_workspace_user_id_or_none=private_workspace_user_id_or_none, ) # Create new if folder_id is not None: - await project_to_folders_db.insert_project_to_folder( + await _folders_repository.insert_project_to_folder( app, project_id=project_id, folder_id=folder_id, - private_workspace_user_id_or_none=( - user_id if workspace_is_private else None - ), + private_workspace_user_id_or_none=private_workspace_user_id_or_none, ) diff --git a/services/web/server/src/simcore_service_webserver/projects/_groups_models.py b/services/web/server/src/simcore_service_webserver/projects/_groups_models.py new file mode 100644 index 00000000000..a9d0828ed5b --- /dev/null +++ b/services/web/server/src/simcore_service_webserver/projects/_groups_models.py @@ -0,0 +1,18 @@ +import logging +from datetime import datetime + +from models_library.groups import GroupID +from pydantic import BaseModel, ConfigDict + +_logger = logging.getLogger(__name__) + + +class ProjectGroupGetDB(BaseModel): + gid: GroupID + read: bool + write: bool + delete: bool + created: datetime + modified: datetime + + model_config = ConfigDict(from_attributes=True) diff --git a/services/web/server/src/simcore_service_webserver/projects/_groups_db.py b/services/web/server/src/simcore_service_webserver/projects/_groups_repository.py similarity index 93% rename from services/web/server/src/simcore_service_webserver/projects/_groups_db.py rename to services/web/server/src/simcore_service_webserver/projects/_groups_repository.py index 86d9c83d781..00f7d467054 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_groups_db.py +++ b/services/web/server/src/simcore_service_webserver/projects/_groups_repository.py @@ -1,16 +1,9 @@ -""" Database API - - - Adds a layer 
to the postgres API with a focus on the projects comments - -""" - import logging -from datetime import datetime from aiohttp import web from models_library.groups import GroupID from models_library.projects import ProjectID -from pydantic import BaseModel, ConfigDict, TypeAdapter +from pydantic import TypeAdapter from simcore_postgres_database.models.project_to_groups import project_to_groups from simcore_postgres_database.utils_repos import transaction_context from sqlalchemy import func, literal_column @@ -19,26 +12,11 @@ from sqlalchemy.sql import select from ..db.plugin import get_asyncpg_engine +from ._groups_models import ProjectGroupGetDB from .exceptions import ProjectGroupNotFoundError _logger = logging.getLogger(__name__) -### Models - - -class ProjectGroupGetDB(BaseModel): - gid: GroupID - read: bool - write: bool - delete: bool - created: datetime - modified: datetime - - model_config = ConfigDict(from_attributes=True) - - -## DB API - async def create_project_group( app: web.Application, diff --git a/services/web/server/src/simcore_service_webserver/projects/_groups_api.py b/services/web/server/src/simcore_service_webserver/projects/_groups_service.py similarity index 82% rename from services/web/server/src/simcore_service_webserver/projects/_groups_api.py rename to services/web/server/src/simcore_service_webserver/projects/_groups_service.py index 355b25481f6..af2d9161b10 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_groups_api.py +++ b/services/web/server/src/simcore_service_webserver/projects/_groups_service.py @@ -8,11 +8,11 @@ from models_library.users import UserID from pydantic import BaseModel -from ..users import api as users_api -from . import _groups_db as projects_groups_db -from ._access_rights_api import check_user_project_permission -from ._groups_db import ProjectGroupGetDB -from .db import APP_PROJECT_DBAPI, ProjectDBAPI +from ..users import api as users_service +from . 
import _groups_repository +from ._access_rights_service import check_user_project_permission +from ._groups_models import ProjectGroupGetDB +from ._projects_repository_legacy import APP_PROJECT_DBAPI, ProjectDBAPI from .exceptions import ProjectInvalidRightsError _logger = logging.getLogger(__name__) @@ -46,7 +46,7 @@ async def create_project_group( permission="write", ) - project_group_db: ProjectGroupGetDB = await projects_groups_db.create_project_group( + project_group_db: ProjectGroupGetDB = await _groups_repository.create_project_group( app=app, project_id=project_id, group_id=group_id, @@ -76,9 +76,9 @@ async def list_project_groups_by_user_and_project( permission="read", ) - project_groups_db: list[ - ProjectGroupGetDB - ] = await projects_groups_db.list_project_groups(app=app, project_id=project_id) + project_groups_db: list[ProjectGroupGetDB] = ( + await _groups_repository.list_project_groups(app=app, project_id=project_id) + ) project_groups_api: list[ProjectGroupGet] = [ ProjectGroupGet.model_validate(group.model_dump()) @@ -109,9 +109,9 @@ async def replace_project_group( project_db: ProjectDBAPI = app[APP_PROJECT_DBAPI] project = await project_db.get_project_db(project_id) - project_owner_user: dict = await users_api.get_user(app, project.prj_owner) + project_owner_user: dict = await users_service.get_user(app, project.prj_owner) if project_owner_user["primary_gid"] == group_id: - user: dict = await users_api.get_user(app, user_id) + user: dict = await users_service.get_user(app, user_id) if user["primary_gid"] != project_owner_user["primary_gid"]: # Only the owner of the project can modify the owner group raise ProjectInvalidRightsError( @@ -121,7 +121,7 @@ async def replace_project_group( ) project_group_db: ProjectGroupGetDB = ( - await projects_groups_db.replace_project_group( + await _groups_repository.replace_project_group( app=app, project_id=project_id, group_id=group_id, @@ -143,7 +143,7 @@ async def delete_project_group( group_id: GroupID, product_name: ProductName, ) -> None: - user: dict = await users_api.get_user(app, user_id=user_id) + user: dict = await users_service.get_user(app, user_id=user_id) if user["primary_gid"] != group_id: await check_user_project_permission( app, @@ -155,7 +155,7 @@ async def delete_project_group( project_db: ProjectDBAPI = app[APP_PROJECT_DBAPI] project = await project_db.get_project_db(project_id) - project_owner_user: dict = await users_api.get_user(app, project.prj_owner) + project_owner_user: dict = await users_service.get_user(app, project.prj_owner) if project_owner_user["primary_gid"] == group_id: if user["primary_gid"] != project_owner_user["primary_gid"]: # Only the owner of the project can delete the owner group @@ -165,7 +165,7 @@ async def delete_project_group( reason=f"User does not have access to modify owner project group in project {project_id}", ) - await projects_groups_db.delete_project_group( + await _groups_repository.delete_project_group( app=app, project_id=project_id, group_id=group_id ) @@ -179,7 +179,7 @@ async def delete_project_group_without_checking_permissions( project_id: ProjectID, group_id: GroupID, ) -> None: - await projects_groups_db.delete_project_group( + await _groups_repository.delete_project_group( app=app, project_id=project_id, group_id=group_id ) @@ -193,7 +193,7 @@ async def create_project_group_without_checking_permissions( write: bool, delete: bool, ) -> None: - await projects_groups_db.update_or_insert_project_group( + await _groups_repository.update_or_insert_project_group( app=app, 
project_id=project_id, group_id=group_id, diff --git a/services/web/server/src/simcore_service_webserver/projects/_metadata_db.py b/services/web/server/src/simcore_service_webserver/projects/_metadata_repository.py similarity index 100% rename from services/web/server/src/simcore_service_webserver/projects/_metadata_db.py rename to services/web/server/src/simcore_service_webserver/projects/_metadata_repository.py diff --git a/services/web/server/src/simcore_service_webserver/projects/_metadata_api.py b/services/web/server/src/simcore_service_webserver/projects/_metadata_service.py similarity index 85% rename from services/web/server/src/simcore_service_webserver/projects/_metadata_api.py rename to services/web/server/src/simcore_service_webserver/projects/_metadata_service.py index f17c7941a1d..6eb8662c841 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_metadata_api.py +++ b/services/web/server/src/simcore_service_webserver/projects/_metadata_service.py @@ -9,8 +9,8 @@ from pydantic import TypeAdapter from ..db.plugin import get_database_engine -from . import _metadata_db -from ._access_rights_api import validate_project_ownership +from . import _metadata_repository +from ._access_rights_service import validate_project_ownership _logger = logging.getLogger(__name__) @@ -20,7 +20,7 @@ async def get_project_custom_metadata( ) -> MetadataDict: await validate_project_ownership(app, user_id=user_id, project_uuid=project_uuid) - return await _metadata_db.get_project_custom_metadata( + return await _metadata_repository.get_project_custom_metadata( engine=get_database_engine(app), project_uuid=project_uuid ) @@ -33,7 +33,7 @@ async def set_project_custom_metadata( ) -> MetadataDict: await validate_project_ownership(app, user_id=user_id, project_uuid=project_uuid) - return await _metadata_db.set_project_custom_metadata( + return await _metadata_repository.set_project_custom_metadata( engine=get_database_engine(app), project_uuid=project_uuid, custom_metadata=value, @@ -48,7 +48,7 @@ async def _project_has_ancestors( ) -> bool: await validate_project_ownership(app, user_id=user_id, project_uuid=project_uuid) - return await _metadata_db.project_has_ancestors( + return await _metadata_repository.project_has_ancestors( engine=get_database_engine(app), project_uuid=project_uuid ) @@ -73,11 +73,11 @@ async def set_project_ancestors_from_custom_metadata( return # let's try to get the parent project UUID - parent_project_uuid = await _metadata_db.get_project_id_from_node_id( + parent_project_uuid = await _metadata_repository.get_project_id_from_node_id( get_database_engine(app), node_id=parent_node_id ) - await _metadata_db.set_project_ancestors( + await _metadata_repository.set_project_ancestors( get_database_engine(app), project_uuid=project_uuid, parent_project_uuid=parent_project_uuid, @@ -94,7 +94,7 @@ async def set_project_ancestors( ) -> None: await validate_project_ownership(app, user_id=user_id, project_uuid=project_uuid) - await _metadata_db.set_project_ancestors( + await _metadata_repository.set_project_ancestors( get_database_engine(app), project_uuid=project_uuid, parent_project_uuid=parent_project_uuid, diff --git a/services/web/server/src/simcore_service_webserver/projects/_nodes_repository.py b/services/web/server/src/simcore_service_webserver/projects/_nodes_repository.py new file mode 100644 index 00000000000..e5060360265 --- /dev/null +++ b/services/web/server/src/simcore_service_webserver/projects/_nodes_repository.py @@ -0,0 +1,18 @@ +from aiohttp import web 
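The new _nodes_repository.py that starts here (its listing continues just below) reduces a project's nodes to the unique (service key, service version) pairs via dict.fromkeys, which drops duplicates while keeping first-seen order. A standalone illustration of the idiom, with made-up service keys:

# dicts preserve insertion order (guaranteed since Python 3.7) and
# fromkeys keeps only the first occurrence of each key, so this works
# as an order-preserving dedup for hashable items
pairs = [
    ("simcore/services/comp/sleeper", "2.0.2"),
    ("simcore/services/dynamic/jupyter", "1.1.0"),
    ("simcore/services/comp/sleeper", "2.0.2"),  # second node, same service
]

unique_pairs = list(dict.fromkeys(pairs))

assert unique_pairs == [
    ("simcore/services/comp/sleeper", "2.0.2"),
    ("simcore/services/dynamic/jupyter", "1.1.0"),
]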
+from models_library.projects import ProjectID +from models_library.services_types import ServiceKey, ServiceVersion +from simcore_postgres_database.utils_projects_nodes import ProjectNodesRepo + +from ..db.plugin import get_database_engine + + +async def get_project_nodes_services( + app: web.Application, *, project_uuid: ProjectID +) -> list[tuple[ServiceKey, ServiceVersion]]: + repo = ProjectNodesRepo(project_uuid=project_uuid) + + async with get_database_engine(app).acquire() as conn: + nodes = await repo.list(conn) + + # removes duplicates while preserving order + return list(dict.fromkeys((node.key, node.version) for node in nodes)) diff --git a/services/web/server/src/simcore_service_webserver/projects/_nodes_api.py b/services/web/server/src/simcore_service_webserver/projects/_nodes_service.py similarity index 95% rename from services/web/server/src/simcore_service_webserver/projects/_nodes_api.py rename to services/web/server/src/simcore_service_webserver/projects/_nodes_service.py index 4815ae19d03..0206e1315cc 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_nodes_api.py +++ b/services/web/server/src/simcore_service_webserver/projects/_nodes_service.py @@ -7,11 +7,12 @@ from aiohttp import web from aiohttp.client import ClientError -from models_library.api_schemas_storage import FileMetaDataGet +from models_library.api_schemas_storage.storage_schemas import FileMetaDataGet from models_library.basic_types import KeyIDStr from models_library.projects import ProjectID from models_library.projects_nodes import Node from models_library.projects_nodes_io import NodeID, SimCoreFileLink +from models_library.services_types import ServiceKey, ServiceVersion from models_library.users import UserID from pydantic import ( BaseModel, @@ -26,6 +27,7 @@ from ..application_settings import get_application_settings from ..storage.api import get_download_link, get_files_in_node_folder from .
import _nodes_repository from .exceptions import ProjectStartsTooManyDynamicNodesError _logger = logging.getLogger(__name__) @@ -71,6 +73,14 @@ def get_total_project_dynamic_nodes_creation_interval( return max_nodes * _NODE_START_INTERVAL_S.total_seconds() +async def get_project_nodes_services( + app: web.Application, *, project_uuid: ProjectID +) -> list[tuple[ServiceKey, ServiceVersion]]: + return await _nodes_repository.get_project_nodes_services( + app, project_uuid=project_uuid + ) + + # # PREVIEWS # diff --git a/services/web/server/src/simcore_service_webserver/projects/_permalink_api.py b/services/web/server/src/simcore_service_webserver/projects/_permalink_service.py similarity index 58% rename from services/web/server/src/simcore_service_webserver/projects/_permalink_api.py rename to services/web/server/src/simcore_service_webserver/projects/_permalink_service.py index 241376ae76a..e6fa6e61a8b 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_permalink_api.py +++ b/services/web/server/src/simcore_service_webserver/projects/_permalink_service.py @@ -1,7 +1,6 @@ import asyncio import logging -from collections.abc import Callable, Coroutine -from typing import Any, cast +from typing import Protocol, cast from aiohttp import web from models_library.api_schemas_webserver.permalinks import ProjectPermalink @@ -13,21 +12,24 @@ _PROJECT_PERMALINK = f"{__name__}" _logger = logging.getLogger(__name__) -_CreateLinkCallable = Callable[ - [web.Request, ProjectID], Coroutine[Any, Any, ProjectPermalink] -] +class CreateLinkCoroutine(Protocol): + async def __call__( + self, request: web.Request, project_uuid: ProjectID + ) -> ProjectPermalink: + ... -def register_factory(app: web.Application, factory_coro: _CreateLinkCallable): + +def register_factory(app: web.Application, factory_coro: CreateLinkCoroutine): if _create := app.get(_PROJECT_PERMALINK): msg = f"Permalink factory can only be set once: registered {_create}" raise PermalinkFactoryError(msg) app[_PROJECT_PERMALINK] = factory_coro -def _get_factory(app: web.Application) -> _CreateLinkCallable: +def _get_factory(app: web.Application) -> CreateLinkCoroutine: if _create := app.get(_PROJECT_PERMALINK): - return cast(_CreateLinkCallable, _create) + return cast(CreateLinkCoroutine, _create) msg = "Undefined permalink factory. Check plugin initialization." raise PermalinkFactoryError(msg) @@ -37,17 +39,18 @@ def _get_factory(app: web.Application) -> _CreateLinkCallable: async def _create_permalink( - request: web.Request, project_id: ProjectID + request: web.Request, project_uuid: ProjectID ) -> ProjectPermalink: - create = _get_factory(request.app) + create_coro: CreateLinkCoroutine = _get_factory(request.app) try: permalink: ProjectPermalink = await asyncio.wait_for( - create(request, project_id), timeout=_PERMALINK_CREATE_TIMEOUT_S + create_coro(request=request, project_uuid=project_uuid), + timeout=_PERMALINK_CREATE_TIMEOUT_S, ) return permalink - except asyncio.TimeoutError as err: - msg = f"Permalink factory callback '{create}' timed out after {_PERMALINK_CREATE_TIMEOUT_S} secs" + except TimeoutError as err: + msg = f"Permalink factory callback '{create_coro}' timed out after {_PERMALINK_CREATE_TIMEOUT_S} secs" raise PermalinkFactoryError(msg) from err @@ -61,7 +64,7 @@ async def _create_permalink( async def update_or_pop_permalink_in_project( If it fails, it pops it from the project (so it is not set in the pydantic model.
SEE ProjectGet.permalink) """ try: - permalink = await _create_permalink(request, project_id=project["uuid"]) + permalink = await _create_permalink(request, project_uuid=project["uuid"]) assert permalink # nosec project["permalink"] = permalink @@ -74,4 +77,14 @@ async def update_or_pop_permalink_in_project( return None +async def aggregate_permalink_in_project( + request: web.Request, project: ProjectDict +) -> ProjectDict: + """ + Adapter to use in parallel aggregation of fields in a project dataset + """ + await update_or_pop_permalink_in_project(request, project) + return project + + __all__: tuple[str, ...] = ("ProjectPermalink",) diff --git a/services/web/server/src/simcore_service_webserver/projects/_ports_api.py b/services/web/server/src/simcore_service_webserver/projects/_ports_service.py similarity index 99% rename from services/web/server/src/simcore_service_webserver/projects/_ports_api.py rename to services/web/server/src/simcore_service_webserver/projects/_ports_service.py index 9ae42c397c8..e00998744ba 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_ports_api.py +++ b/services/web/server/src/simcore_service_webserver/projects/_ports_service.py @@ -26,7 +26,7 @@ from models_library.utils.services_io import JsonSchemaDict, get_service_io_json_schema from pydantic import ConfigDict, ValidationError -from ..director_v2.api import get_batch_tasks_outputs +from ..director_v2.director_v2_service import get_batch_tasks_outputs from .exceptions import InvalidInputValue diff --git a/services/web/server/src/simcore_service_webserver/projects/_projects_db.py b/services/web/server/src/simcore_service_webserver/projects/_projects_db.py deleted file mode 100644 index 95af2e41284..00000000000 --- a/services/web/server/src/simcore_service_webserver/projects/_projects_db.py +++ /dev/null @@ -1,109 +0,0 @@ -import logging - -import sqlalchemy as sa -from aiohttp import web -from models_library.groups import GroupID -from models_library.projects import ProjectID -from simcore_postgres_database.models.projects import projects -from simcore_postgres_database.models.users import users -from simcore_postgres_database.utils_repos import ( - get_columns_from_db_model, - pass_or_acquire_connection, - transaction_context, -) -from sqlalchemy import sql -from sqlalchemy.ext.asyncio import AsyncConnection - -from ..db.plugin import get_asyncpg_engine -from .exceptions import ProjectNotFoundError -from .models import ProjectDB - -_logger = logging.getLogger(__name__) - - -PROJECT_DB_COLS = get_columns_from_db_model( # noqa: RUF012 - # NOTE: MD: I intentionally didn't include the workbench. There is a special interface - # for the workbench, and at some point, this column should be removed from the table. - # The same holds true for access_rights/ui/classifiers/quality, but we have decided to proceed step by step. 
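In the permalink module just above, the _CreateLinkCallable alias built from Callable and Coroutine is replaced by CreateLinkCoroutine, a Protocol with an async __call__. Unlike the bare alias, a callback protocol also pins down parameter names, which is what lets _create_permalink invoke the factory with keyword arguments (request=..., project_uuid=...). A tiny illustration of the same pattern with stand-in types:

from typing import Protocol


class AsyncGreeter(Protocol):
    async def __call__(self, *, name: str) -> str: ...


async def polite_greeter(*, name: str) -> str:
    # structurally satisfies AsyncGreeter: same keyword parameter name
    return f"Hello, {name}"


greeter: AsyncGreeter = polite_greeter  # accepted by static type checkers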
- projects, - ProjectDB, -) - - -async def patch_project( - app: web.Application, - connection: AsyncConnection | None = None, - *, - project_uuid: ProjectID, - new_partial_project_data: dict, -) -> ProjectDB: - - async with transaction_context(get_asyncpg_engine(app), connection) as conn: - result = await conn.stream( - projects.update() - .values(last_change_date=sa.func.now(), **new_partial_project_data) - .where(projects.c.uuid == f"{project_uuid}") - .returning(*PROJECT_DB_COLS) - ) - row = await result.first() - if row is None: - raise ProjectNotFoundError(project_uuid=project_uuid) - return ProjectDB.model_validate(row) - - -def _select_trashed_by_primary_gid_query() -> sql.Select: - return sa.select( - users.c.primary_gid.label("trashed_by_primary_gid"), - ).select_from(projects.outerjoin(users, projects.c.trashed_by == users.c.id)) - - -async def get_trashed_by_primary_gid( - app: web.Application, - connection: AsyncConnection | None = None, - *, - projects_uuid: ProjectID, -) -> GroupID | None: - query = _select_trashed_by_primary_gid_query().where( - projects.c.uuid == projects_uuid - ) - - async with pass_or_acquire_connection(get_asyncpg_engine(app), connection) as conn: - result = await conn.execute(query) - row = result.first() - return row.trashed_by_primary_gid if row else None - - -async def batch_get_trashed_by_primary_gid( - app: web.Application, - connection: AsyncConnection | None = None, - *, - projects_uuids: list[ProjectID], -) -> list[GroupID | None]: - """Batch version of get_trashed_by_primary_gid - - Returns: - values of trashed_by_primary_gid in the SAME ORDER as projects_uuids - """ - if not projects_uuids: - return [] - - projects_uuids_str = [f"{uuid}" for uuid in projects_uuids] - - query = ( - _select_trashed_by_primary_gid_query().where( - projects.c.uuid.in_(projects_uuids_str) - ) - ).order_by( - # Preserves the order of folders_ids - # SEE https://docs.sqlalchemy.org/en/20/core/sqlelement.html#sqlalchemy.sql.expression.case - sa.case( - { - project_uuid: index - for index, project_uuid in enumerate(projects_uuids_str) - }, - value=projects.c.uuid, - ) - ) - async with pass_or_acquire_connection(get_asyncpg_engine(app), connection) as conn: - result = await conn.stream(query) - return [row.trashed_by_primary_gid async for row in result] diff --git a/services/web/server/src/simcore_service_webserver/projects/_projects_repository.py b/services/web/server/src/simcore_service_webserver/projects/_projects_repository.py new file mode 100644 index 00000000000..d3312bdc1eb --- /dev/null +++ b/services/web/server/src/simcore_service_webserver/projects/_projects_repository.py @@ -0,0 +1,211 @@ +import logging +from collections.abc import Callable +from datetime import datetime +from typing import cast + +import sqlalchemy as sa +from aiohttp import web +from common_library.exclude import UnSet, is_set +from models_library.basic_types import IDStr +from models_library.groups import GroupID +from models_library.projects import ProjectID +from models_library.rest_ordering import OrderBy, OrderDirection +from models_library.rest_pagination import MAXIMUM_NUMBER_OF_ITEMS_PER_PAGE +from pydantic import NonNegativeInt, PositiveInt +from simcore_postgres_database.models.projects import projects +from simcore_postgres_database.models.users import users +from simcore_postgres_database.utils_repos import ( + get_columns_from_db_model, + pass_or_acquire_connection, + transaction_context, +) +from sqlalchemy import sql +from sqlalchemy.ext.asyncio import AsyncConnection + 
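Two details of the replacement _projects_repository.py, whose listing continues below, are worth noting: batch_get_trashed_by_primary_gid orders rows with a CASE expression so the database returns them in the caller's uuid order, and it then builds the result through a dict lookup so uuids without a matching row yield None instead of silently shifting the list (a weakness of the deleted aiopg-era version above, which read the rows back positionally). A reduced sketch of the ordering trick against a toy table, assuming SQLAlchemy 2.x:

import sqlalchemy as sa
from sqlalchemy import sql

metadata = sa.MetaData()

# toy stand-in for the real `projects` table
projects_t = sa.Table(
    "projects",
    metadata,
    sa.Column("uuid", sa.String, primary_key=True),
    sa.Column("trashed_by", sa.Integer, nullable=True),
)


def ordered_batch_query(uuids: list[str]) -> sql.Select:
    """Selects rows for `uuids`, returned in the same order as `uuids`."""
    return (
        sql.select(projects_t.c.uuid, projects_t.c.trashed_by)
        .where(projects_t.c.uuid.in_(uuids))
        .order_by(
            # CASE maps each uuid to its position in the input list, so
            # ORDER BY reproduces the caller's ordering server-side
            sql.case(
                {uuid: index for index, uuid in enumerate(uuids)},
                value=projects_t.c.uuid,
            )
        )
    )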
+from ..db.plugin import get_asyncpg_engine +from .exceptions import ProjectNotFoundError +from .models import ProjectDBGet + +_logger = logging.getLogger(__name__) + + +PROJECT_DB_COLS = get_columns_from_db_model( + # NOTE: MD: I intentionally didn't include the workbench. There is a special interface + # for the workbench, and at some point, this column should be removed from the table. + # The same holds true for access_rights/ui/classifiers/quality, but we have decided to proceed step by step. + projects, + ProjectDBGet, +) + +OLDEST_TRASHED_FIRST = OrderBy(field=IDStr("trashed"), direction=OrderDirection.ASC) + + +def _to_sql_expression(table: sa.Table, order_by: OrderBy): + direction_func: Callable = { + OrderDirection.ASC: sql.asc, + OrderDirection.DESC: sql.desc, + }[order_by.direction] + return direction_func(table.columns[order_by.field]) + + +async def list_trashed_projects( + app: web.Application, + connection: AsyncConnection | None = None, + *, + # filter + trashed_explicitly: bool | UnSet = UnSet.VALUE, + trashed_before: datetime | UnSet = UnSet.VALUE, + # pagination + offset: NonNegativeInt = 0, + limit: PositiveInt = MAXIMUM_NUMBER_OF_ITEMS_PER_PAGE, + # order + order_by: OrderBy = OLDEST_TRASHED_FIRST, +) -> tuple[int, list[ProjectDBGet]]: + + base_query = sql.select(*PROJECT_DB_COLS).where(projects.c.trashed.is_not(None)) + + if is_set(trashed_explicitly): + assert isinstance(trashed_explicitly, bool) # nosec + base_query = base_query.where( + projects.c.trashed_explicitly.is_(trashed_explicitly) + ) + + if is_set(trashed_before): + assert isinstance(trashed_before, datetime) # nosec + base_query = base_query.where(projects.c.trashed < trashed_before) + + # Select total count from base_query + count_query = sql.select(sql.func.count()).select_from(base_query.subquery()) + + # Ordering and pagination + list_query = ( + base_query.order_by(_to_sql_expression(projects, order_by)) + .offset(offset) + .limit(limit) + ) + + async with pass_or_acquire_connection(get_asyncpg_engine(app), connection) as conn: + total_count = await conn.scalar(count_query) + + result = await conn.stream(list_query) + projects_list: list[ProjectDBGet] = [ + ProjectDBGet.model_validate(row) async for row in result + ] + return cast(int, total_count), projects_list + + +async def get_project( + app: web.Application, + connection: AsyncConnection | None = None, + *, + project_uuid: ProjectID, +) -> ProjectDBGet: + async with pass_or_acquire_connection(get_asyncpg_engine(app), connection) as conn: + query = sql.select(*PROJECT_DB_COLS).where(projects.c.uuid == f"{project_uuid}") + result = await conn.execute(query) + row = result.one_or_none() + if row is None: + raise ProjectNotFoundError(project_uuid=project_uuid) + return ProjectDBGet.model_validate(row) + + +def _select_trashed_by_primary_gid_query() -> sql.Select: + return sql.select( + projects.c.uuid, + users.c.primary_gid.label("trashed_by_primary_gid"), + ).select_from(projects.outerjoin(users, projects.c.trashed_by == users.c.id)) + + +async def get_trashed_by_primary_gid( + app: web.Application, + connection: AsyncConnection | None = None, + *, + projects_uuid: ProjectID, +) -> GroupID | None: + query = _select_trashed_by_primary_gid_query().where( + projects.c.uuid == f"{projects_uuid}" + ) + + async with pass_or_acquire_connection(get_asyncpg_engine(app), connection) as conn: + result = await conn.execute(query) + row = result.one_or_none() + return row.trashed_by_primary_gid if row else None + + +async def 
batch_get_trashed_by_primary_gid( + app: web.Application, + connection: AsyncConnection | None = None, + *, + projects_uuids: list[ProjectID], +) -> list[GroupID | None]: + """Batch version of get_trashed_by_primary_gid + + Returns: + values of trashed_by_primary_gid in the SAME ORDER as projects_uuids + """ + if not projects_uuids: + return [] + + projects_uuids_str = [f"{uuid}" for uuid in projects_uuids] + + query = ( + _select_trashed_by_primary_gid_query().where( + projects.c.uuid.in_(projects_uuids_str) + ) + ).order_by( + # Preserves the order of projects_uuids + # SEE https://docs.sqlalchemy.org/en/20/core/sqlelement.html#sqlalchemy.sql.expression.case + sql.case( + { + project_uuid: index + for index, project_uuid in enumerate(projects_uuids_str) + }, + value=projects.c.uuid, + ) + ) + async with pass_or_acquire_connection(get_asyncpg_engine(app), connection) as conn: + result = await conn.stream(query) + rows = {row.uuid: row.trashed_by_primary_gid async for row in result} + + return [rows.get(project_uuid) for project_uuid in projects_uuids_str] + + +async def patch_project( + app: web.Application, + connection: AsyncConnection | None = None, + *, + project_uuid: ProjectID, + new_partial_project_data: dict, +) -> ProjectDBGet: + async with transaction_context(get_asyncpg_engine(app), connection) as conn: + result = await conn.stream( + projects.update() + .values( + **new_partial_project_data, + last_change_date=sql.func.now(), + ) + .where(projects.c.uuid == f"{project_uuid}") + .returning(*PROJECT_DB_COLS) + ) + row = await result.one_or_none() + if row is None: + raise ProjectNotFoundError(project_uuid=project_uuid) + return ProjectDBGet.model_validate(row) + + +async def delete_project( + app: web.Application, + connection: AsyncConnection | None = None, + *, + project_uuid: ProjectID, +) -> ProjectDBGet: + async with transaction_context(get_asyncpg_engine(app), connection) as conn: + result = await conn.stream( + projects.delete() + .where(projects.c.uuid == f"{project_uuid}") + .returning(*PROJECT_DB_COLS) + ) + row = await result.one_or_none() + if row is None: + raise ProjectNotFoundError(project_uuid=project_uuid) + return ProjectDBGet.model_validate(row) diff --git a/services/web/server/src/simcore_service_webserver/projects/db.py b/services/web/server/src/simcore_service_webserver/projects/_projects_repository_legacy.py similarity index 98% rename from services/web/server/src/simcore_service_webserver/projects/db.py rename to services/web/server/src/simcore_service_webserver/projects/_projects_repository_legacy.py index 66f285fcfa8..e322c947a1a 100644 --- a/services/web/server/src/simcore_service_webserver/projects/db.py +++ b/services/web/server/src/simcore_service_webserver/projects/_projects_repository_legacy.py @@ -1,7 +1,7 @@ -""" Database API +"""Database API - - Adds a layer to the postgres API with a focus on the projects data - - Shall be used as entry point for all the queries to the database regarding projects +- Adds a layer to the postgres API with a focus on the projects data +- Shall be used as entry point for all the queries to the database regarding projects """ @@ -36,7 +36,7 @@ from pydantic.types import PositiveInt from servicelib.aiohttp.application_keys import APP_AIOPG_ENGINE_KEY from servicelib.logging_utils import get_log_record_extra, log_context -from simcore_postgres_database.errors import UniqueViolation +from simcore_postgres_database.aiopg_errors import UniqueViolation from simcore_postgres_database.models.groups import user_to_groups from 
simcore_postgres_database.models.project_to_groups import project_to_groups from simcore_postgres_database.models.projects_nodes import projects_nodes @@ -66,7 +66,7 @@ from tenacity.retry import retry_if_exception_type from ..utils import now_str -from ._comments_db import ( +from ._comments_repository import ( create_project_comment, delete_project_comment, get_project_comment, @@ -74,7 +74,8 @@ total_project_comments, update_project_comment, ) -from ._db_utils import ( +from ._projects_repository import PROJECT_DB_COLS +from ._projects_repository_legacy_utils import ( ANY_USER_ID_SENTINEL, BaseProjectDB, ProjectAccessRights, @@ -85,7 +86,6 @@ patch_workbench, update_workbench, ) -from ._projects_db import PROJECT_DB_COLS from .exceptions import ( ProjectDeleteError, ProjectInvalidRightsError, @@ -93,10 +93,10 @@ ProjectNotFoundError, ) from .models import ( - ProjectDB, + ProjectDBGet, ProjectDict, UserProjectAccessRightsDB, - UserSpecificProjectDataDB, + UserSpecificProjectDataDBGet, ) _logger = logging.getLogger(__name__) @@ -307,7 +307,6 @@ async def insert_project( # All non-default in projects table insert_values.setdefault("name", "New Study") insert_values.setdefault("workbench", {}) - insert_values.setdefault("workspace_id", None) # must be valid uuid @@ -765,7 +764,7 @@ async def get_project_dict_and_type( project_type, ) - async def get_project_db(self, project_uuid: ProjectID) -> ProjectDB: + async def get_project_db(self, project_uuid: ProjectID) -> ProjectDBGet: async with self.engine.acquire() as conn: result = await conn.execute( sa.select( @@ -776,11 +775,11 @@ async def get_project_db(self, project_uuid: ProjectID) -> ProjectDB: row = await result.fetchone() if row is None: raise ProjectNotFoundError(project_uuid=project_uuid) - return ProjectDB.model_validate(row) + return ProjectDBGet.model_validate(row) async def get_user_specific_project_data_db( self, project_uuid: ProjectID, private_workspace_user_id_or_none: UserID | None - ) -> UserSpecificProjectDataDB: + ) -> UserSpecificProjectDataDBGet: async with self.engine.acquire() as conn: result = await conn.execute( sa.select( @@ -805,7 +804,7 @@ async def get_user_specific_project_data_db( row = await result.fetchone() if row is None: raise ProjectNotFoundError(project_uuid=project_uuid) - return UserSpecificProjectDataDB.model_validate(row) + return UserSpecificProjectDataDBGet.model_validate(row) async def get_pure_project_access_rights_without_workspace( self, user_id: UserID, project_uuid: ProjectID diff --git a/services/web/server/src/simcore_service_webserver/projects/_db_utils.py b/services/web/server/src/simcore_service_webserver/projects/_projects_repository_legacy_utils.py similarity index 99% rename from services/web/server/src/simcore_service_webserver/projects/_db_utils.py rename to services/web/server/src/simcore_service_webserver/projects/_projects_repository_legacy_utils.py index ecc88a5a59e..c92c5910d06 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_db_utils.py +++ b/services/web/server/src/simcore_service_webserver/projects/_projects_repository_legacy_utils.py @@ -24,7 +24,7 @@ from ..db.models import GroupType, groups, projects_tags, user_to_groups, users from ..users.exceptions import UserNotFoundError from ..utils import format_datetime -from ._projects_db import PROJECT_DB_COLS +from ._projects_repository import PROJECT_DB_COLS from .exceptions import ( NodeNotFoundError, ProjectInvalidRightsError, diff --git 
a/services/web/server/src/simcore_service_webserver/projects/projects_service.py b/services/web/server/src/simcore_service_webserver/projects/_projects_service.py similarity index 93% rename from services/web/server/src/simcore_service_webserver/projects/projects_service.py rename to services/web/server/src/simcore_service_webserver/projects/_projects_service.py index 112655b6d03..f079b8fb5f6 100644 --- a/services/web/server/src/simcore_service_webserver/projects/projects_service.py +++ b/services/web/server/src/simcore_service_webserver/projects/_projects_service.py @@ -94,11 +94,10 @@ from simcore_postgres_database.webserver_models import ProjectType from ..application_settings import get_application_settings -from ..catalog import client as catalog_client -from ..director_v2 import api as director_v2_api -from ..dynamic_scheduler import api as dynamic_scheduler_api -from ..products import api as products_api -from ..products.api import get_product_name +from ..catalog import catalog_service +from ..director_v2 import director_v2_service +from ..dynamic_scheduler import api as dynamic_scheduler_service +from ..products import products_web from ..rabbitmq import get_rabbitmq_rpc_client from ..redis import get_redis_lock_manager_client_sdk from ..resource_manager.user_sessions import ( @@ -113,7 +112,7 @@ send_message_to_standard_group, send_message_to_user, ) -from ..storage import api as storage_api +from ..storage import api as storage_service from ..users.api import FullNameDict, get_user, get_user_fullname, get_user_role from ..users.exceptions import UserNotFoundError from ..users.preferences_api import ( @@ -121,23 +120,23 @@ UserDefaultWalletNotFoundError, get_frontend_user_preference, ) -from ..wallets import api as wallets_api +from ..wallets import api as wallets_service from ..wallets.errors import WalletNotEnoughCreditsError -from ..workspaces import _workspaces_repository as workspaces_db +from ..workspaces import _workspaces_repository as workspaces_workspaces_repository from . 
import ( _crud_api_delete, - _nodes_api, - _projects_db, + _nodes_service, _projects_nodes_repository, - _wallets_api, + _projects_repository, + _wallets_service, ) -from ._access_rights_api import ( +from ._access_rights_service import ( check_user_project_permission, has_user_project_access_rights, ) -from ._db_utils import PermissionStr from ._nodes_utils import set_reservation_same_as_limit, validate_new_service_resources -from .db import APP_PROJECT_DBAPI, ProjectDBAPI +from ._projects_repository_legacy import APP_PROJECT_DBAPI, ProjectDBAPI +from ._projects_repository_legacy_utils import PermissionStr from .exceptions import ( ClustersKeeperNotAvailableError, DefaultPricingUnitNotFoundError, @@ -211,7 +210,10 @@ async def get_project_for_user( # adds state if it is not a template if include_state: project = await add_project_states_for_user( - user_id, project, project_type is ProjectType.TEMPLATE, app + user_id=user_id, + project=project, + is_template=project_type is ProjectType.TEMPLATE, + app=app, ) # adds `trashed_by_primary_gid` @@ -220,14 +222,14 @@ async def get_project_for_user( and project.get("trashed_by", project.get("trashedBy")) is not None ): project.update( - trashedByPrimaryGid=await _projects_db.get_trashed_by_primary_gid( + trashedByPrimaryGid=await _projects_repository.get_trashed_by_primary_gid( app, projects_uuid=project["uuid"] ) ) if project["workspaceId"] is not None: workspace: UserWorkspaceWithAccessRights = ( - await workspaces_db.get_workspace_for_user( + await workspaces_workspaces_repository.get_workspace_for_user( app=app, user_id=user_id, workspace_id=project["workspaceId"], @@ -306,7 +308,7 @@ async def patch_project( raise ProjectOwnerNotFoundInTheProjectAccessRightsError # 4. Patch the project - await _projects_db.patch_project( + await _projects_repository.patch_project( app=app, project_uuid=project_uuid, new_partial_project_data=patch_project_data, @@ -318,6 +320,33 @@ async def patch_project( # +async def delete_project_by_user( + app: web.Application, + *, + project_uuid: ProjectID, + user_id: UserID, + simcore_user_agent: str = UNDEFINED_DEFAULT_SIMCORE_USER_AGENT_VALUE, + wait_until_completed: bool = True, +) -> None: + task = await submit_delete_project_task( + app, + project_uuid=project_uuid, + user_id=user_id, + simcore_user_agent=simcore_user_agent, + ) + if wait_until_completed: + await task + + +def get_delete_project_task( + project_uuid: ProjectID, user_id: UserID +) -> asyncio.Task | None: + if tasks := _crud_api_delete.get_scheduled_tasks(project_uuid, user_id): + assert len(tasks) == 1, f"{tasks=}" # nosec + return tasks[0] + return None + + async def submit_delete_project_task( app: web.Application, project_uuid: ProjectID, @@ -353,15 +382,6 @@ async def submit_delete_project_task( return task -def get_delete_project_task( - project_uuid: ProjectID, user_id: UserID -) -> asyncio.Task | None: - if tasks := _crud_api_delete.get_scheduled_tasks(project_uuid, user_id): - assert len(tasks) == 1, f"{tasks=}" # nosec - return tasks[0] - return None - - # # PROJECT NODES ----------------------------------------------------- # @@ -395,9 +415,9 @@ async def _get_default_pricing_and_hardware_info( ) -_MACHINE_TOTAL_RAM_SAFE_MARGIN_RATIO: Final[ - float -] = 0.1 # NOTE: machines always have less available RAM than advertised +_MACHINE_TOTAL_RAM_SAFE_MARGIN_RATIO: Final[float] = ( + 0.1 # NOTE: machines always have less available RAM than advertised +) _SIDECARS_OPS_SAFE_RAM_MARGIN: Final[ByteSize] = TypeAdapter(ByteSize).validate_python( 
"1GiB" ) @@ -420,11 +440,11 @@ async def update_project_node_resources_from_hardware_info( return try: rabbitmq_rpc_client = get_rabbitmq_rpc_client(app) - unordered_list_ec2_instance_types: list[ - EC2InstanceTypeGet - ] = await get_instance_type_details( - rabbitmq_rpc_client, - instance_type_names=set(hardware_info.aws_ec2_instances), + unordered_list_ec2_instance_types: list[EC2InstanceTypeGet] = ( + await get_instance_type_details( + rabbitmq_rpc_client, + instance_type_names=set(hardware_info.aws_ec2_instances), + ) ) assert unordered_list_ec2_instance_types # nosec @@ -612,19 +632,19 @@ async def _start_dynamic_service( # noqa: C901 @exclusive( get_redis_lock_manager_client_sdk(request.app), - lock_key=_nodes_api.get_service_start_lock_key(user_id, project_uuid), + lock_key=_nodes_service.get_service_start_lock_key(user_id, project_uuid), blocking=True, blocking_timeout=datetime.timedelta( - seconds=_nodes_api.get_total_project_dynamic_nodes_creation_interval( + seconds=_nodes_service.get_total_project_dynamic_nodes_creation_interval( get_plugin_settings(request.app).PROJECTS_MAX_NUM_RUNNING_DYNAMIC_NODES ) ), ) async def _() -> None: - project_running_nodes = await dynamic_scheduler_api.list_dynamic_services( + project_running_nodes = await dynamic_scheduler_service.list_dynamic_services( request.app, user_id=user_id, project_id=project_uuid ) - _nodes_api.check_num_service_per_projects_limit( + _nodes_service.check_num_service_per_projects_limit( app=request.app, number_of_services=len(project_running_nodes), user_id=user_id, @@ -633,14 +653,14 @@ async def _() -> None: # Get wallet/pricing/hardware information wallet_info, pricing_info, hardware_info = None, None, None - product = products_api.get_current_product(request) + product = products_web.get_current_product(request) app_settings = get_application_settings(request.app) if ( product.is_payment_enabled and app_settings.WEBSERVER_CREDIT_COMPUTATION_ENABLED ): # Deal with Wallet - project_wallet = await _wallets_api.get_project_wallet( + project_wallet = await _wallets_service.get_project_wallet( request.app, project_id=project_uuid ) if project_wallet is None: @@ -655,7 +675,7 @@ async def _() -> None: project_wallet_id = TypeAdapter(WalletID).validate_python( user_default_wallet_preference.value ) - await _wallets_api.connect_wallet_to_project( + await _wallets_service.connect_wallet_to_project( request.app, product_name=product_name, project_id=project_uuid, @@ -665,13 +685,11 @@ async def _() -> None: else: project_wallet_id = project_wallet.wallet_id # Check whether user has access to the wallet - wallet = ( - await wallets_api.get_wallet_with_available_credits_by_user_and_wallet( - request.app, - user_id=user_id, - wallet_id=project_wallet_id, - product_name=product_name, - ) + wallet = await wallets_service.get_wallet_with_available_credits_by_user_and_wallet( + request.app, + user_id=user_id, + wallet_id=project_wallet_id, + product_name=product_name, ) wallet_info = WalletInfo( wallet_id=project_wallet_id, @@ -746,7 +764,7 @@ async def _() -> None: service_key=service_key, service_version=service_version, ) - await dynamic_scheduler_api.run_dynamic_service( + await dynamic_scheduler_service.run_dynamic_service( app=request.app, dynamic_service_start=DynamicServiceStart( product_name=product_name, @@ -800,7 +818,7 @@ async def add_project_node( ) node_uuid = NodeID(service_id if service_id else f"{uuid4()}") - default_resources = await catalog_client.get_service_resources( + default_resources = await 
catalog_service.get_service_resources( request.app, user_id, service_key, service_version ) db: ProjectDBAPI = ProjectDBAPI.get_from_app_context(request.app) @@ -827,10 +845,10 @@ async def add_project_node( # also ensure the project is updated by director-v2 since services # are due to access comp_tasks at some point see [https://github.com/ITISFoundation/osparc-simcore/issues/3216] - await director_v2_api.create_or_update_pipeline( + await director_v2_service.create_or_update_pipeline( request.app, user_id, project["uuid"], product_name ) - await dynamic_scheduler_api.update_projects_networks( + await dynamic_scheduler_service.update_projects_networks( request.app, project_id=ProjectID(project["uuid"]) ) @@ -885,7 +903,7 @@ async def _remove_service_and_its_data_folders( ) -> None: if stop_service: # no need to save the state of the node when deleting it - await dynamic_scheduler_api.stop_dynamic_service( + await dynamic_scheduler_service.stop_dynamic_service( app, dynamic_service_stop=DynamicServiceStop( user_id=user_id, @@ -897,7 +915,7 @@ async def _remove_service_and_its_data_folders( ) # remove the node's data if any - await storage_api.delete_data_folders_of_project_node( + await storage_service.delete_data_folders_of_project_node( app, f"{project_uuid}", node_uuid, user_id ) @@ -921,10 +939,12 @@ async def delete_project_node( permission="write", ) - list_running_dynamic_services = await dynamic_scheduler_api.list_dynamic_services( - request.app, - user_id=user_id, - project_id=project_uuid, + list_running_dynamic_services = ( + await dynamic_scheduler_service.list_dynamic_services( + request.app, + user_id=user_id, + project_id=project_uuid, + ) ) fire_and_forget_task( @@ -949,11 +969,11 @@ async def delete_project_node( assert db # nosec await db.remove_project_node(user_id, project_uuid, NodeID(node_uuid)) # also ensure the project is updated by director-v2 since services - product_name = get_product_name(request) - await director_v2_api.create_or_update_pipeline( + product_name = products_web.get_product_name(request) + await director_v2_service.create_or_update_pipeline( request.app, user_id, project_uuid, product_name ) - await dynamic_scheduler_api.update_projects_networks( + await dynamic_scheduler_service.update_projects_networks( request.app, project_id=project_uuid ) @@ -1080,11 +1100,13 @@ async def patch_project_node( ) # 4. Make calls to director-v2 to keep data in sync (ex. comp_tasks DB table) - await director_v2_api.create_or_update_pipeline( + await director_v2_service.create_or_update_pipeline( app, user_id, project_id, product_name=product_name ) if _node_patch_exclude_unset.get("label"): - await dynamic_scheduler_api.update_projects_networks(app, project_id=project_id) + await dynamic_scheduler_service.update_projects_networks( + app, project_id=project_id + ) # 5. 
Updates project states for user, if inputs/outputs have been changed if {"inputs", "outputs"} & _node_patch_exclude_unset.keys(): @@ -1188,7 +1210,7 @@ async def _safe_retrieve( app: web.Application, node_id: NodeID, port_keys: list[str] ) -> None: try: - await dynamic_scheduler_api.retrieve_inputs(app, node_id, port_keys) + await dynamic_scheduler_service.retrieve_inputs(app, node_id, port_keys) except RPCServerError as exc: log.warning( "Unable to call :retrieve endpoint on service %s, keys: [%s]: error: [%s]", @@ -1338,7 +1360,7 @@ async def _open_project() -> bool: for uuid in await user_session.find_all_resources_of_user( PROJECT_ID_KEY ) - if uuid != project_uuid + if uuid != f"{project_uuid}" } ) >= max_number_of_studies_per_user @@ -1349,10 +1371,10 @@ async def _open_project() -> bool: # Assign project_id to current_session current_session: UserSessionID = user_session.get_id() - sessions_with_project: list[ - UserSessionID - ] = await user_session.find_users_of_resource( - app, PROJECT_ID_KEY, f"{project_uuid}" + sessions_with_project: list[UserSessionID] = ( + await user_session.find_users_of_resource( + app, PROJECT_ID_KEY, f"{project_uuid}" + ) ) if not sessions_with_project: # no one has the project so we assign it @@ -1401,10 +1423,10 @@ async def try_close_project_for_user( ): with managed_resource(user_id, client_session_id, app) as user_session: current_session: UserSessionID = user_session.get_id() - all_sessions_with_project: list[ - UserSessionID - ] = await user_session.find_users_of_resource( - app, key=PROJECT_ID_KEY, value=project_uuid + all_sessions_with_project: list[UserSessionID] = ( + await user_session.find_users_of_resource( + app, key=PROJECT_ID_KEY, value=project_uuid + ) ) # first check whether other sessions registered this project @@ -1531,11 +1553,10 @@ async def get_project_states_for_user( user_id: int, project_uuid: str, app: web.Application ) -> ProjectState: # for templates: the project is never locked and never opened. 
also the running state is always unknown - lock_state = ProjectLocked(value=False, status=ProjectStatus.CLOSED) running_state = RunningState.UNKNOWN lock_state, computation_task = await logged_gather( _get_project_lock_state(user_id, project_uuid, app), - director_v2_api.get_computation_task(app, user_id, UUID(project_uuid)), + director_v2_service.get_computation_task(app, user_id, UUID(project_uuid)), ) if computation_task: # get the running state @@ -1547,6 +1568,7 @@ async def get_project_states_for_user( async def add_project_states_for_user( + *, user_id: int, project: ProjectDict, is_template: bool, @@ -1562,7 +1584,7 @@ async def add_project_states_for_user( running_state = RunningState.UNKNOWN if not is_template and ( - computation_task := await director_v2_api.get_computation_task( + computation_task := await director_v2_service.get_computation_task( app, user_id, project["uuid"] ) ): @@ -1599,7 +1621,7 @@ async def is_service_deprecated( service_version: str, product_name: str, ) -> bool: - service = await catalog_client.get_service( + service = await catalog_service.get_service( app, user_id, service_key, service_version, product_name ) if deprecation_date := service.get("deprecated"): @@ -1648,7 +1670,7 @@ async def get_project_node_resources( ) if not node_resources: # get default resources - node_resources = await catalog_client.get_service_resources( + node_resources = await catalog_service.get_service_resources( app, user_id, service_key, service_version ) return node_resources @@ -1679,7 +1701,7 @@ async def update_project_node_resources( if not current_resources: # NOTE: this can happen after the migration # get default resources - current_resources = await catalog_client.get_service_resources( + current_resources = await catalog_service.get_service_resources( app, user_id, service_key, service_version ) @@ -1718,7 +1740,7 @@ async def run_project_dynamic_services( project_settings: ProjectsSettings = get_plugin_settings(request.app) running_services_uuids: list[NodeIDStr] = [ f"{d.node_uuid}" - for d in await dynamic_scheduler_api.list_dynamic_services( + for d in await dynamic_scheduler_service.list_dynamic_services( request.app, user_id=user_id, project_id=ProjectID(project["uuid"]) ) ] @@ -1834,7 +1856,7 @@ async def _locked_stop_dynamic_serivces_in_project() -> None: ServiceWasNotFoundError, ): # here RPC exceptions are suppressed. 
in case the service is not found to preserve old behavior - await dynamic_scheduler_api.stop_dynamic_services_in_project( + await dynamic_scheduler_service.stop_dynamic_services_in_project( app=app, user_id=user_id, project_id=project_uuid, @@ -1925,7 +1947,7 @@ async def get_project_inactivity( app: web.Application, project_id: ProjectID ) -> GetProjectInactivityResponse: project_settings: ProjectsSettings = get_plugin_settings(app) - return await dynamic_scheduler_api.get_project_inactivity( + return await dynamic_scheduler_service.get_project_inactivity( app, project_id=project_id, # NOTE: project is considered inactive if all services exposing an /inactivity diff --git a/services/web/server/src/simcore_service_webserver/projects/_projects_service_delete.py b/services/web/server/src/simcore_service_webserver/projects/_projects_service_delete.py new file mode 100644 index 00000000000..bd8d57886e4 --- /dev/null +++ b/services/web/server/src/simcore_service_webserver/projects/_projects_service_delete.py @@ -0,0 +1,106 @@ +import asyncio +import logging +import time +from contextlib import contextmanager +from typing import Any, Protocol + +from aiohttp import web +from models_library.projects import ProjectID +from models_library.users import UserID +from servicelib.common_headers import UNDEFINED_DEFAULT_SIMCORE_USER_AGENT_VALUE +from servicelib.redis._errors import ProjectLockError + +from ..director_v2 import director_v2_service +from . import _projects_repository, _projects_service +from .exceptions import ProjectDeleteError, ProjectNotFoundError + +_logger = logging.getLogger(__name__) + + +@contextmanager +def _monitor_step(steps: dict[str, Any], *, name: str, elapsed: bool = False): + # util + start_time = time.perf_counter() + steps[name] = {"status": "starting"} + try: + yield + except Exception as exc: + steps[name]["status"] = "raised" + steps[name]["exception"] = f"{exc.__class__.__name__}:{exc}" + raise + else: + steps[name]["status"] = "success" + finally: + if elapsed: + steps[name]["elapsed"] = time.perf_counter() - start_time + + +class StopServicesCallback(Protocol): + async def __call__(self, app: web.Application, project_uuid: ProjectID) -> None: ... + + +async def batch_stop_services_in_project( + app: web.Application, *, user_id: UserID, project_uuid: ProjectID +) -> None: + await asyncio.gather( + director_v2_service.stop_pipeline( + app, user_id=user_id, project_id=project_uuid + ), + _projects_service.remove_project_dynamic_services( + user_id=user_id, + project_uuid=f"{project_uuid}", + app=app, + simcore_user_agent=UNDEFINED_DEFAULT_SIMCORE_USER_AGENT_VALUE, + notify_users=False, + ), + ) + + +async def delete_project_as_admin( + app: web.Application, + *, + project_uuid: ProjectID, +): + + state: dict[str, Any] = {} + + try: + # 1. hide + with _monitor_step(state, name="hide"): + project = await _projects_repository.patch_project( + app, + project_uuid=project_uuid, + new_partial_project_data={"hidden": True}, + ) + + # 2. stop + with _monitor_step(state, name="stop", elapsed=True): + # NOTE: this callback could take long or raise whatever! + await batch_stop_services_in_project( + app, user_id=project.prj_owner, project_uuid=project_uuid + ) + + # 3. delete + with _monitor_step(state, name="delete"): + await _projects_repository.delete_project(app, project_uuid=project_uuid) + + except ProjectNotFoundError as err: + _logger.debug( + "Project %s being deleted is already gone. IGNORING error. 
Details: %s", + project_uuid, + err, + ) + + except ProjectLockError as err: + raise ProjectDeleteError( + project_uuid=project_uuid, + reason=f"Cannot delete project {project_uuid} because it is currently in use. Details: {err}", + state=state, + ) from err + + except Exception as err: + raise ProjectDeleteError( + project_uuid=project_uuid, + reason=f"Unexpected error. Deletion sequence: {state=}", + state=state, + ) from err diff --git a/services/web/server/src/simcore_service_webserver/projects/_projects_access.py b/services/web/server/src/simcore_service_webserver/projects/_security_service.py similarity index 78% rename from services/web/server/src/simcore_service_webserver/projects/_projects_access.py rename to services/web/server/src/simcore_service_webserver/projects/_security_service.py index c0054dc5c3f..f3c176b4de8 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_projects_access.py +++ b/services/web/server/src/simcore_service_webserver/projects/_security_service.py @@ -2,12 +2,12 @@ from aiohttp import web from simcore_postgres_database.models.users import UserRole -from ..projects.api import check_user_project_permission from ..security.api import get_access_model -from .db import ProjectDBAPI +from ._projects_repository_legacy import ProjectDBAPI +from .api import check_user_project_permission -async def can_update_node_inputs(context): +async def _can_update_node_inputs(context): """Check function associated to "project.workbench.node.inputs.update" permission label Returns True if user has permission to update inputs @@ -34,17 +34,13 @@ async def can_update_node_inputs(context): diffs = jsondiff.diff(current_project, updated_project) - # TODO: depends on schema. Shall change if schema changes!? if "workbench" in diffs: try: for node in diffs["workbench"]: # can ONLY modify `inputs` fields set as ReadAndWrite access = current_project["workbench"][node]["inputAccess"] inputs = diffs["workbench"][node]["inputs"] - for key in inputs: - if access.get(key) != "ReadAndWrite": - return False - return True + return all(access.get(key) == "ReadAndWrite" for key in inputs) except KeyError: pass return False @@ -58,7 +54,6 @@ def setup_projects_access(app: web.Application): """ hrba = get_access_model(app) - # TODO: add here also named permissions, i.e. 
all project.* operations hrba.roles[UserRole.GUEST].check[ "project.workbench.node.inputs.update" - ] = can_update_node_inputs + ] = _can_update_node_inputs diff --git a/services/web/server/src/simcore_service_webserver/projects/_tags_api.py b/services/web/server/src/simcore_service_webserver/projects/_tags_service.py similarity index 84% rename from services/web/server/src/simcore_service_webserver/projects/_tags_api.py rename to services/web/server/src/simcore_service_webserver/projects/_tags_service.py index 93bf232706b..d7f1af590a2 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_tags_api.py +++ b/services/web/server/src/simcore_service_webserver/projects/_tags_service.py @@ -1,6 +1,4 @@ -""" Handlers for CRUD operations on /projects/{*}/tags/{*} - -""" +"""Handlers for CRUD operations on /projects/{*}/tags/{*}""" import logging @@ -9,9 +7,9 @@ from models_library.users import UserID from models_library.workspaces import UserWorkspaceWithAccessRights -from ..workspaces import _workspaces_repository as workspaces_db -from ._access_rights_api import check_user_project_permission -from .db import ProjectDBAPI +from ..workspaces import _workspaces_repository as workspaces_workspaces_repository +from ._access_rights_service import check_user_project_permission +from ._projects_repository_legacy import ProjectDBAPI from .models import ProjectDict _logger = logging.getLogger(__name__) @@ -37,7 +35,7 @@ async def add_tag( if project["workspaceId"] is not None: workspace: UserWorkspaceWithAccessRights = ( - await workspaces_db.get_workspace_for_user( + await workspaces_workspaces_repository.get_workspace_for_user( app=app, user_id=user_id, workspace_id=project["workspaceId"], @@ -71,7 +69,7 @@ async def remove_tag( if project["workspaceId"] is not None: workspace: UserWorkspaceWithAccessRights = ( - await workspaces_db.get_workspace_for_user( + await workspaces_workspaces_repository.get_workspace_for_user( app=app, user_id=user_id, workspace_id=project["workspaceId"], diff --git a/services/web/server/src/simcore_service_webserver/projects/_trash_service.py b/services/web/server/src/simcore_service_webserver/projects/_trash_service.py index cc6be0de495..a70a52937bb 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_trash_service.py +++ b/services/web/server/src/simcore_service_webserver/projects/_trash_service.py @@ -1,21 +1,31 @@ -import asyncio import logging +from datetime import datetime import arrow from aiohttp import web +from common_library.pagination_tools import iter_pagination_params +from models_library.basic_types import IDStr from models_library.products import ProductName from models_library.projects import ProjectID +from models_library.rest_ordering import OrderBy, OrderDirection +from models_library.rest_pagination import MAXIMUM_NUMBER_OF_ITEMS_PER_PAGE from models_library.users import UserID from servicelib.aiohttp.application_keys import APP_FIRE_AND_FORGET_TASKS_KEY -from servicelib.common_headers import UNDEFINED_DEFAULT_SIMCORE_USER_AGENT_VALUE from servicelib.utils import fire_and_forget_task -from ..director_v2 import api as director_v2_api -from ..dynamic_scheduler import api as dynamic_scheduler_api -from . import projects_service -from ._access_rights_api import check_user_project_permission -from .exceptions import ProjectRunningConflictError -from .models import ProjectPatchInternalExtended +from ..director_v2 import director_v2_service +from ..dynamic_scheduler import api as dynamic_scheduler_service +from . 
import _crud_api_read +from . import _projects_repository as _projects_repository +from . import _projects_service, _projects_service_delete +from ._access_rights_service import check_user_project_permission +from .exceptions import ( + ProjectNotFoundError, + ProjectNotTrashedError, + ProjectRunningConflictError, + ProjectsBatchDeleteError, +) +from .models import ProjectDict, ProjectPatchInternalExtended _logger = logging.getLogger(__name__) @@ -27,11 +37,11 @@ async def _is_project_running( project_id: ProjectID, ) -> bool: return bool( - await director_v2_api.is_pipeline_running( + await director_v2_service.is_pipeline_running( app, user_id=user_id, project_id=project_id ) ) or bool( - await dynamic_scheduler_api.list_dynamic_services( + await dynamic_scheduler_service.list_dynamic_services( app, user_id=user_id, project_id=project_id ) ) @@ -45,7 +55,7 @@ async def trash_project( project_id: ProjectID, force_stop_first: bool, explicit: bool, -): +) -> None: """ Raises: @@ -62,22 +72,10 @@ async def trash_project( if force_stop_first: - async def _schedule(): - await asyncio.gather( - director_v2_api.stop_pipeline( - app, user_id=user_id, project_id=project_id - ), - projects_service.remove_project_dynamic_services( - user_id=user_id, - project_uuid=f"{project_id}", - app=app, - simcore_user_agent=UNDEFINED_DEFAULT_SIMCORE_USER_AGENT_VALUE, - notify_users=False, - ), - ) - fire_and_forget_task( - _schedule(), + _projects_service_delete.batch_stop_services_in_project( + app, user_id=user_id, project_uuid=project_id + ), task_suffix_name=f"trash_project_force_stop_first_{user_id=}_{project_id=}", fire_and_forget_tasks_collection=app[APP_FIRE_AND_FORGET_TASKS_KEY], ) @@ -89,7 +87,7 @@ async def _schedule(): product_name=product_name, ) - await projects_service.patch_project( + await _projects_service.patch_project( app, user_id=user_id, product_name=product_name, @@ -108,9 +106,9 @@ async def untrash_project( product_name: ProductName, user_id: UserID, project_id: ProjectID, -): +) -> None: # NOTE: check_user_project_permission is inside projects_api.patch_project - await projects_service.patch_project( + await _projects_service.patch_project( app, user_id=user_id, product_name=product_name, @@ -119,3 +117,160 @@ async def untrash_project( trashed_at=None, trashed_explicitly=False, trashed_by=None ), ) + + +def _can_delete( + project: ProjectDict, + user_id: UserID, + until_equal_datetime: datetime | None, +) -> bool: + """ + This is the current policy for deleting a trashed project + + """ + trashed_at = project.get("trashed") + trashed_by = project.get("trashedBy") + trashed_explicitly = project.get("trashedExplicitly") + + assert trashed_at is not None # nosec + assert trashed_by is not None # nosec + + is_shared = len(project["accessRights"]) > 1 + + return bool( + trashed_at + and (until_equal_datetime is None or trashed_at < until_equal_datetime) + # NOTE: current policy is more restricted until + # logic is adapted to deal with the other cases + and trashed_by == user_id + and not is_shared + and trashed_explicitly + ) + + +async def list_explicitly_trashed_projects( + app: web.Application, + *, + product_name: ProductName, + user_id: UserID, + until_equal_datetime: datetime | None = None, +) -> list[ProjectID]: + """ + Lists all projects that were trashed before the given datetime (all trashed projects, if None). 
+ """ + trashed_projects: list[ProjectID] = [] + + for page_params in iter_pagination_params(limit=MAXIMUM_NUMBER_OF_ITEMS_PER_PAGE): + ( + projects, + page_params.total_number_of_items, + ) = await _crud_api_read.list_projects_full_depth( + app, + user_id=user_id, + product_name=product_name, + trashed=True, + tag_ids_list=[], + offset=page_params.offset, + limit=page_params.limit, + order_by=OrderBy(field=IDStr("trashed"), direction=OrderDirection.ASC), + search_by_multi_columns=None, + search_by_project_name=None, + ) + + # NOTE: Applying POST-FILTERING because we do not want to modify the interface of + # _crud_api_read.list_projects_full_depth at this time. + # This filtering couldn't be handled at the database level when `projects_repo` + # was refactored, as defining a custom trash_filter was needed to allow more + # flexibility in filtering options. + trashed_projects.extend( + [ + project["uuid"] + for project in projects + if _can_delete(project, user_id, until_equal_datetime) + ] + ) + return trashed_projects + + +async def delete_explicitly_trashed_project( + app: web.Application, + *, + user_id: UserID, + project_id: ProjectID, + until_equal_datetime: datetime | None = None, +) -> None: + """ + Deletes a project that was explicitly trashed by the user from a specific datetime (if provided, otherwise all). + + Raises: + ProjectNotFoundError: If the project is not found. + ProjectNotTrashedError: If the project was not trashed explicitly by the user from the specified datetime. + """ + project = await _projects_service.get_project_for_user( + app, project_uuid=f"{project_id}", user_id=user_id + ) + + if not project: + raise ProjectNotFoundError(project_uuid=project_id, user_id=user_id) + + if not _can_delete(project, user_id, until_equal_datetime): + # safety check + raise ProjectNotTrashedError( + project_uuid=project_id, + user_id=user_id, + reason="Cannot delete trashed project since it does not fit current criteria", + ) + + await _projects_service.delete_project_by_user( + app, + user_id=user_id, + project_uuid=project_id, + ) + + +async def batch_delete_trashed_projects_as_admin( + app: web.Application, + *, + trashed_before: datetime, + fail_fast: bool, +) -> list[ProjectID]: + + deleted_project_ids: list[ProjectID] = [] + errors: list[tuple[ProjectID, Exception]] = [] + + for page_params in iter_pagination_params(limit=MAXIMUM_NUMBER_OF_ITEMS_PER_PAGE): + ( + page_params.total_number_of_items, + expired_trashed_projects, + ) = await _projects_repository.list_trashed_projects( + app, + # both implicit and explicitly trashed + trashed_before=trashed_before, + offset=page_params.offset, + limit=page_params.limit, + order_by=_projects_repository.OLDEST_TRASHED_FIRST, + ) + # BATCH delete + for project in expired_trashed_projects: + + assert project.trashed # nosec + + try: + await _projects_service_delete.delete_project_as_admin( + app, + project_uuid=project.uuid, + ) + deleted_project_ids.append(project.uuid) + except Exception as err: # pylint: disable=broad-exception-caught + if fail_fast: + raise + errors.append((project.uuid, err)) + + if errors: + raise ProjectsBatchDeleteError( + errors=errors, + trashed_before=trashed_before, + deleted_project_ids=deleted_project_ids, + ) + + return deleted_project_ids diff --git a/services/web/server/src/simcore_service_webserver/projects/_wallets_api.py b/services/web/server/src/simcore_service_webserver/projects/_wallets_service.py similarity index 94% rename from 
services/web/server/src/simcore_service_webserver/projects/_wallets_api.py rename to services/web/server/src/simcore_service_webserver/projects/_wallets_service.py index 1610cb4c363..e671b7eac6e 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_wallets_api.py +++ b/services/web/server/src/simcore_service_webserver/projects/_wallets_service.py @@ -16,9 +16,9 @@ ) from ..rabbitmq import get_rabbitmq_rpc_client -from ..users import api as users_api -from ..wallets import _api as wallets_api -from .db import ProjectDBAPI +from ..users import api as users_service +from ..wallets import _api as wallets_service +from ._projects_repository_legacy import ProjectDBAPI from .exceptions import ( ProjectInDebtCanNotChangeWalletError, ProjectInDebtCanNotOpenError, @@ -73,7 +73,7 @@ async def connect_wallet_to_project( db: ProjectDBAPI = ProjectDBAPI.get_from_app_context(app) # ensure the wallet can be used by the user - wallet: WalletGet = await wallets_api.get_wallet_by_user( + wallet: WalletGet = await wallets_service.get_wallet_by_user( app, user_id=user_id, wallet_id=wallet_id, @@ -153,20 +153,20 @@ async def pay_debt_with_different_wallet( assert current_wallet_id != new_wallet_id # nosec # ensure the wallets can be used by the user - new_wallet: WalletGet = await wallets_api.get_wallet_by_user( + new_wallet: WalletGet = await wallets_service.get_wallet_by_user( app, user_id=user_id, wallet_id=new_wallet_id, product_name=product_name, ) - current_wallet: WalletGet = await wallets_api.get_wallet_by_user( + current_wallet: WalletGet = await wallets_service.get_wallet_by_user( app, user_id=user_id, wallet_id=current_wallet_id, product_name=product_name, ) - user = await users_api.get_user(app, user_id=user_id) + user = await users_service.get_user(app, user_id=user_id) # Transfer credits from the source wallet to the connected wallet rpc_client = get_rabbitmq_rpc_client(app) diff --git a/services/web/server/src/simcore_service_webserver/projects/_workspaces_api.py b/services/web/server/src/simcore_service_webserver/projects/_workspaces_service.py similarity index 83% rename from services/web/server/src/simcore_service_webserver/projects/_workspaces_api.py rename to services/web/server/src/simcore_service_webserver/projects/_workspaces_service.py index 1462168fa52..fdf40f27371 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_workspaces_api.py +++ b/services/web/server/src/simcore_service_webserver/projects/_workspaces_service.py @@ -8,12 +8,10 @@ from simcore_postgres_database.utils_repos import transaction_context from ..db.plugin import get_asyncpg_engine -from ..projects._access_rights_api import get_user_project_access_rights from ..users.api import get_user from ..workspaces.api import check_user_workspace_access -from . import _folders_db as project_to_folders_db -from . import _groups_db as project_groups_db -from . import _projects_db +from . import _folders_repository, _groups_repository, _projects_repository +from ._access_rights_service import get_user_project_access_rights from .exceptions import ProjectInvalidRightsError _logger = logging.getLogger(__name__) @@ -46,14 +44,14 @@ async def move_project_into_workspace( async with transaction_context(get_asyncpg_engine(app)) as conn: # 3. Delete project to folders (for everybody) - await project_to_folders_db.delete_all_project_to_folder_by_project_id( + await _folders_repository.delete_all_project_to_folder_by_project_id( app, connection=conn, project_id=project_id, ) # 4. 
Update workspace ID on the project resource - await _projects_db.patch_project( + await _projects_repository.patch_project( app=app, connection=conn, project_uuid=project_id, @@ -62,10 +60,10 @@ async def move_project_into_workspace( # 5. Remove all project permissions, leave only the user who moved the project user = await get_user(app, user_id=user_id) - await project_groups_db.delete_all_project_groups( + await _groups_repository.delete_all_project_groups( app, connection=conn, project_id=project_id ) - await project_groups_db.update_or_insert_project_group( + await _groups_repository.update_or_insert_project_group( app, connection=conn, project_id=project_id, diff --git a/services/web/server/src/simcore_service_webserver/projects/api.py b/services/web/server/src/simcore_service_webserver/projects/api.py index ba5f5ae14fb..bde3efa68d9 100644 --- a/services/web/server/src/simcore_service_webserver/projects/api.py +++ b/services/web/server/src/simcore_service_webserver/projects/api.py @@ -1,32 +1,28 @@ # NOTE: we will slowly move projects_api.py here -from ._access_rights_api import ( +from ._access_rights_service import ( check_user_project_permission, has_user_project_access_rights, ) -from ._groups_api import ( +from ._groups_service import ( create_project_group_without_checking_permissions, delete_project_group_without_checking_permissions, ) -from ._permalink_api import ProjectPermalink -from ._permalink_api import register_factory as register_permalink_factory -from ._wallets_api import ( +from ._wallets_service import ( check_project_financial_status, connect_wallet_to_project, get_project_wallet, ) __all__: tuple[str, ...] = ( + "check_project_financial_status", "check_user_project_permission", "connect_wallet_to_project", "create_project_group_without_checking_permissions", "delete_project_group_without_checking_permissions", "get_project_wallet", "has_user_project_access_rights", - "ProjectPermalink", - "register_permalink_factory", - "check_project_financial_status", ) diff --git a/services/web/server/src/simcore_service_webserver/projects/exceptions.py b/services/web/server/src/simcore_service_webserver/projects/exceptions.py index cc45dabdfb2..8c270f99df5 100644 --- a/services/web/server/src/simcore_service_webserver/projects/exceptions.py +++ b/services/web/server/src/simcore_service_webserver/projects/exceptions.py @@ -79,6 +79,10 @@ def __init__(self, *, project_uuid, reason, **ctx): self.reason = reason +class ProjectsBatchDeleteError(BaseProjectError): + msg_template = "One or more projects could not be deleted in the batch: {errors}" + + class ProjectTrashError(BaseProjectError): ... 
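For orientation, the `ProjectsBatchDeleteError` added above is raised by `batch_delete_trashed_projects_as_admin` (introduced in `_trash_service.py` further down) only after every project in the batch has been attempted: failures are collected per project and re-raised together with the partial results, unless `fail_fast` aborts on the first error. Below is a minimal, self-contained sketch of that collect-then-raise pattern; `BatchDeleteError`, `_delete_one`, and plain strings as ids are hypothetical stand-ins for the real webserver types and services.

import asyncio


class BatchDeleteError(Exception):
    # hypothetical stand-in for ProjectsBatchDeleteError: keeps per-item failures
    def __init__(self, errors: list[tuple[str, Exception]], deleted: list[str]) -> None:
        super().__init__(f"{len(errors)} deletion(s) failed")
        self.errors = errors
        self.deleted = deleted


async def _delete_one(item: str) -> None:
    # hypothetical per-item deletion; fails for one item to show the aggregation
    if item == "in-use":
        raise RuntimeError("cannot delete: in use")


async def batch_delete(items: list[str], *, fail_fast: bool) -> list[str]:
    deleted: list[str] = []
    errors: list[tuple[str, Exception]] = []
    for item in items:
        try:
            await _delete_one(item)
            deleted.append(item)
        except Exception as err:  # pylint: disable=broad-exception-caught
            if fail_fast:
                raise  # first failure aborts the remaining batch
            errors.append((item, err))
    if errors:
        # every item was attempted: report failures and partial results together
        raise BatchDeleteError(errors, deleted)
    return deleted


async def _demo() -> None:
    assert await batch_delete(["a", "b"], fail_fast=False) == ["a", "b"]
    try:
        await batch_delete(["a", "in-use"], fail_fast=False)
    except BatchDeleteError as exc:
        assert exc.deleted == ["a"] and len(exc.errors) == 1


asyncio.run(_demo())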
@@ -93,6 +97,12 @@ class ProjectRunningConflictError(ProjectTrashError): ) +class ProjectNotTrashedError(ProjectTrashError): + msg_template = ( + "Cannot delete project {project_uuid} since it was not trashed first: {reason}" + ) + + class NodeNotFoundError(BaseProjectError): msg_template = "Node '{node_uuid}' not found in project '{project_uuid}'" diff --git a/services/web/server/src/simcore_service_webserver/projects/models.py b/services/web/server/src/simcore_service_webserver/projects/models.py index d9974b436b0..16ff35651d6 100644 --- a/services/web/server/src/simcore_service_webserver/projects/models.py +++ b/services/web/server/src/simcore_service_webserver/projects/models.py @@ -5,10 +5,10 @@ from aiopg.sa.result import RowProxy from common_library.dict_tools import remap_keys from models_library.api_schemas_webserver.projects import ProjectPatch +from models_library.api_schemas_webserver.projects_ui import StudyUI from models_library.folders import FolderID from models_library.groups import GroupID from models_library.projects import ClassifierID, ProjectID -from models_library.projects_ui import StudyUI from models_library.users import UserID from models_library.utils.common_validators import ( empty_str_to_none_pre_validator, @@ -36,7 +36,7 @@ def to_project_type_db(cls, api_type: "ProjectTypeAPI") -> ProjectType | None: }[api_type] -class ProjectDB(BaseModel): +class ProjectDBGet(BaseModel): # NOTE: model intended to read one-to-one columns of the `projects` table id: int type: ProjectType @@ -71,12 +71,12 @@ class ProjectDB(BaseModel): ) -class ProjectWithTrashExtra(ProjectDB): +class ProjectWithTrashExtra(ProjectDBGet): # This field is not part of the tables trashed_by_primary_gid: GroupID | None = None -class UserSpecificProjectDataDB(ProjectDB): +class UserSpecificProjectDataDBGet(ProjectDBGet): folder_id: FolderID | None model_config = ConfigDict(from_attributes=True) diff --git a/services/web/server/src/simcore_service_webserver/projects/nodes_utils.py b/services/web/server/src/simcore_service_webserver/projects/nodes_utils.py index 32531914163..4cf8a690aee 100644 --- a/services/web/server/src/simcore_service_webserver/projects/nodes_utils.py +++ b/services/web/server/src/simcore_service_webserver/projects/nodes_utils.py @@ -12,7 +12,7 @@ from servicelib.logging_utils import log_decorator from servicelib.utils import fire_and_forget_task, logged_gather -from . import projects_service +from . 
import _projects_service from .utils import get_frontend_node_outputs_changes log = logging.getLogger(__name__) @@ -46,7 +46,7 @@ async def update_node_outputs( ui_changed_keys: set[str] | None, ) -> None: # the new outputs might be {}, or {key_name: payload} - project, keys_changed = await projects_service.update_project_node_outputs( + project, keys_changed = await _projects_service.update_project_node_outputs( app, user_id, project_uuid, @@ -55,14 +55,14 @@ async def update_node_outputs( new_run_hash=run_hash, ) - await projects_service.notify_project_node_update( + await _projects_service.notify_project_node_update( app, project, node_uuid, errors=node_errors ) # get depending nodes and notify these as well depending_node_uuids = await project_get_depending_nodes(project, node_uuid) await logged_gather( *[ - projects_service.notify_project_node_update(app, project, nid, errors=None) + _projects_service.notify_project_node_update(app, project, nid, errors=None) for nid in depending_node_uuids ] ) @@ -86,7 +86,7 @@ async def update_node_outputs( ) # fire&forget to notify connected nodes to retrieve their inputs **if necessary** - await projects_service.post_trigger_connected_service_retrieve( + await _projects_service.post_trigger_connected_service_retrieve( app=app, project=project, updated_node_uuid=f"{node_uuid}", changed_keys=keys ) diff --git a/services/web/server/src/simcore_service_webserver/projects/plugin.py b/services/web/server/src/simcore_service_webserver/projects/plugin.py index 5cba65b8a2b..f968908c797 100644 --- a/services/web/server/src/simcore_service_webserver/projects/plugin.py +++ b/services/web/server/src/simcore_service_webserver/projects/plugin.py @@ -1,32 +1,33 @@ -""" projects management subsystem +"""projects management subsystem - A project is a document defining a osparc study - It contains metadata about the study (e.g. name, description, owner, etc) and a workbench section that describes the study pipeline +A project is a document defining an osparc study +It contains metadata about the study (e.g. name, description, owner, etc) and a workbench section that describes the study pipeline """ + import logging from aiohttp import web from servicelib.aiohttp.application_setup import ModuleCategory, app_module_setup -from .._constants import APP_SETTINGS_KEY -from . import ( - _comments_handlers, - _crud_handlers, - _folders_handlers, - _groups_handlers, - _metadata_handlers, - _nodes_handlers, - _ports_handlers, - _projects_nodes_pricing_unit_handlers, - _states_handlers, - _tags_handlers, - _trash_rest, - _wallets_handlers, - _workspaces_handlers, +from ..constants import APP_SETTINGS_KEY +from ._controller import ( + comments_rest, + folders_rest, + groups_rest, + metadata_rest, + nodes_pricing_unit_rest, + nodes_rest, + ports_rest, + projects_rest, + projects_states_rest, + tags_rest, + trash_rest, + wallets_rest, + workspaces_rest, ) -from ._observer import setup_project_observer_events -from ._projects_access import setup_projects_access -from .db import setup_projects_db +from ._controller.projects_slot import setup_project_observer_events +from ._projects_repository_legacy import setup_projects_db +from ._security_service import setup_projects_access logger = logging.getLogger(__name__) @@ -50,18 +51,18 @@ def setup_projects(app: web.Application) -> bool: # registers event handlers (e.g. 
on_user_disconnect) setup_project_observer_events(app) - app.router.add_routes(_states_handlers.routes) - app.router.add_routes(_crud_handlers.routes) - app.router.add_routes(_comments_handlers.routes) - app.router.add_routes(_groups_handlers.routes) - app.router.add_routes(_metadata_handlers.routes) - app.router.add_routes(_ports_handlers.routes) - app.router.add_routes(_nodes_handlers.routes) - app.router.add_routes(_tags_handlers.routes) - app.router.add_routes(_wallets_handlers.routes) - app.router.add_routes(_folders_handlers.routes) - app.router.add_routes(_projects_nodes_pricing_unit_handlers.routes) - app.router.add_routes(_workspaces_handlers.routes) - app.router.add_routes(_trash_rest.routes) + app.router.add_routes(projects_states_rest.routes) + app.router.add_routes(projects_rest.routes) + app.router.add_routes(comments_rest.routes) + app.router.add_routes(groups_rest.routes) + app.router.add_routes(metadata_rest.routes) + app.router.add_routes(ports_rest.routes) + app.router.add_routes(nodes_rest.routes) + app.router.add_routes(tags_rest.routes) + app.router.add_routes(wallets_rest.routes) + app.router.add_routes(folders_rest.routes) + app.router.add_routes(nodes_pricing_unit_rest.routes) + app.router.add_routes(workspaces_rest.routes) + app.router.add_routes(trash_rest.routes) return True diff --git a/services/web/server/src/simcore_service_webserver/projects/projects_permalink_service.py b/services/web/server/src/simcore_service_webserver/projects/projects_permalink_service.py new file mode 100644 index 00000000000..ed242f8c1e3 --- /dev/null +++ b/services/web/server/src/simcore_service_webserver/projects/projects_permalink_service.py @@ -0,0 +1,9 @@ +from ._permalink_service import ProjectPermalink +from ._permalink_service import register_factory as register_permalink_factory + +__all__: tuple[str, ...] = ( + "ProjectPermalink", + "register_permalink_factory", +) + +# nopycln: file diff --git a/services/web/server/src/simcore_service_webserver/projects/projects_trash_service.py b/services/web/server/src/simcore_service_webserver/projects/projects_trash_service.py new file mode 100644 index 00000000000..2270ca66e6c --- /dev/null +++ b/services/web/server/src/simcore_service_webserver/projects/projects_trash_service.py @@ -0,0 +1,13 @@ +from ._trash_service import ( + batch_delete_trashed_projects_as_admin, + delete_explicitly_trashed_project, + list_explicitly_trashed_projects, +) + +__all__: tuple[str, ...] 
= ( + "batch_delete_trashed_projects_as_admin", + "delete_explicitly_trashed_project", + "list_explicitly_trashed_projects", +) + +# nopycln: file diff --git a/services/web/server/src/simcore_service_webserver/projects/settings.py b/services/web/server/src/simcore_service_webserver/projects/settings.py index ace29385602..198afae90a9 100644 --- a/services/web/server/src/simcore_service_webserver/projects/settings.py +++ b/services/web/server/src/simcore_service_webserver/projects/settings.py @@ -4,7 +4,7 @@ from pydantic import ByteSize, Field, NonNegativeInt, TypeAdapter from settings_library.base import BaseCustomSettings -from .._constants import APP_SETTINGS_KEY +from ..constants import APP_SETTINGS_KEY class ProjectsSettings(BaseCustomSettings): diff --git a/services/web/server/src/simcore_service_webserver/publications/_handlers.py b/services/web/server/src/simcore_service_webserver/publications/_rest.py similarity index 95% rename from services/web/server/src/simcore_service_webserver/publications/_handlers.py rename to services/web/server/src/simcore_service_webserver/publications/_rest.py index 2653bba1390..35ccbac61a5 100644 --- a/services/web/server/src/simcore_service_webserver/publications/_handlers.py +++ b/services/web/server/src/simcore_service_webserver/publications/_rest.py @@ -2,7 +2,6 @@ from aiohttp import MultipartReader, hdrs, web from common_library.json_serialization import json_dumps -from json2html import json2html # type: ignore[import-untyped] from servicelib.aiohttp import status from servicelib.mimetype_constants import ( MIMETYPE_APPLICATION_JSON, @@ -14,7 +13,8 @@ from ..login.decorators import login_required from ..login.storage import AsyncpgStorage, get_plugin_storage from ..login.utils_email import AttachmentTuple, send_email_from_template, themed -from ..products.api import get_current_product +from ..products import products_web +from ._utils import json2html _logger = logging.getLogger(__name__) @@ -26,7 +26,7 @@ @routes.post(f"/{VTAG}/publications/service-submission", name="service_submission") @login_required async def service_submission(request: web.Request): - product = get_current_product(request) + product = products_web.get_current_product(request) reader = MultipartReader.from_response(request) # type: ignore[arg-type] # PC, IP Whoever is in charge of this. please have a look. this looks very weird data = None filename = None diff --git a/services/web/server/src/simcore_service_webserver/publications/_utils.py b/services/web/server/src/simcore_service_webserver/publications/_utils.py new file mode 100644 index 00000000000..0e1ab899db1 --- /dev/null +++ b/services/web/server/src/simcore_service_webserver/publications/_utils.py @@ -0,0 +1,240 @@ +""" +This module provides functionality to convert JSON data into an HTML table format. +It is a snapshot of the `json2html` library to avoid compatibility issues with +specific versions of `setuptools`. + +Classes: +- Json2Html: A class that provides methods to convert JSON data into HTML tables + or lists, with options for customization. + +Functions: +---------- +- Json2Html.convert: Converts JSON data into an HTML table or list format. +- Json2Html.column_headers_from_list_of_dicts: Determines column headers for a list of dictionaries. +- Json2Html.convert_json_node: Dispatches JSON input based on its type and processes it into HTML. +- Json2Html.convert_list: Converts a JSON list into an HTML table or list. +- Json2Html.convert_object: Converts a JSON object into an HTML table. 
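As a quick orientation to the vendored API documented above, here is a brief usage sketch; the sample payload and the `pure-table` class are illustrative only, and `json2html` is the module-level instance created at the bottom of this snapshot.

```python
# Hypothetical usage of the vendored converter; values are illustrative.
from simcore_service_webserver.publications._utils import json2html

data = {"sampleData": [{"a": 1, "b": 2, "c": 3}, {"a": 5, "b": 6, "c": 7}]}

# With clubbing enabled (the default), the list of same-keyed dicts is merged
# into a single sub-table instead of one row per entry.
html = json2html.convert(json=data, table_attributes='class="pure-table"')
assert html.startswith('<table class="pure-table">')
```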
+ +Attributes: +----------- +- json2html: An instance of the Json2Html class for direct use. + +Notes: +------ +- This module supports Python 2.7+ and Python 3.x. +- It uses `OrderedDict` to preserve the order of keys in JSON objects. +- The `html_escape` function is used to escape HTML characters in text. + +License: +MIT License + +Source: +------- +Snapshot of https://github.com/softvar/json2html/blob/0a223c7b3e5dce286811fb12bbab681e7212ebfe/json2html/jsonconv.py +JSON 2 HTML Converter +===================== + +(c) Varun Malhotra 2013 +Source Code: https://github.com/softvar/json2html + + +Contributors: +------------- +1. Michel Müller (@muellermichel), https://github.com/softvar/json2html/pull/2 +2. Daniel Lekic (@lekic), https://github.com/softvar/json2html/pull/17 + +LICENSE: MIT +-------- +""" + +# pylint: skip-file +# +# NOTE: Snapshot of https://github.com/softvar/json2html/blob/0a223c7b3e5dce286811fb12bbab681e7212ebfe/json2html/jsonconv.py +# to avoid failure to install this module with `setuptools 78.0.1` due to +# deprecated feature that this library still uses +# + + +import sys + +if sys.version_info[:2] < (2, 7): + import simplejson as json_parser + from ordereddict import OrderedDict +else: + import json as json_parser + from collections import OrderedDict + +if sys.version_info[:2] < (3, 0): + from cgi import escape as html_escape + + text = unicode + text_types = (unicode, str) +else: + from html import escape as html_escape + + text = str + text_types = (str,) + + +class Json2Html: + def convert( + self, + json="", + table_attributes='border="1"', + clubbing=True, + encode=False, + escape=True, + ): + """ + Convert JSON to HTML Table format + """ + # table attributes such as class, id, data-attr-*, etc. + # eg: table_attributes = 'class = "table table-bordered sortable"' + self.table_init_markup = "<table %s>" % table_attributes + self.clubbing = clubbing + self.escape = escape + json_input = None + if not json: + json_input = {} + elif type(json) in text_types: + try: + json_input = json_parser.loads(json, object_pairs_hook=OrderedDict) + except ValueError as e: + # so the string passed here is actually not a json string + # - let's analyze whether we want to pass on the error or use the string as-is as a text node + if "Expecting property name" in text(e): + # if this specific json loads error is raised, then the user probably actually wanted to pass json, but made a mistake + raise e + json_input = json + else: + json_input = json + converted = self.convert_json_node(json_input) + if encode: + return converted.encode("ascii", "xmlcharrefreplace") + return converted + + def column_headers_from_list_of_dicts(self, json_input): + """ + This method is required to implement clubbing. + It tries to come up with column headers for your input + """ + if ( + not json_input + or not hasattr(json_input, "__getitem__") + or not hasattr(json_input[0], "keys") + ): + return None + column_headers = json_input[0].keys() + for entry in json_input: + if ( + not hasattr(entry, "keys") + or not hasattr(entry, "__iter__") + or len(entry.keys()) != len(column_headers) + ): + return None + for header in column_headers: + if header not in entry: + return None + return column_headers
+
+    def convert_json_node(self, json_input):
+        """
+        Dispatch JSON input according to the outermost type and process it
+        to generate the super awesome HTML format.
+        We try to adhere to duck typing such that users can just pass all kinds
+        of funky objects to json2html that *behave* like dicts and lists and other
+        basic JSON types.
+        """
+        if type(json_input) in text_types:
+            if self.escape:
+                return html_escape(text(json_input))
+            else:
+                return text(json_input)
+        if hasattr(json_input, "items"):
+            return self.convert_object(json_input)
+        if hasattr(json_input, "__iter__") and hasattr(json_input, "__getitem__"):
+            return self.convert_list(json_input)
+        return text(json_input)
+
+    def convert_list(self, list_input):
+        """
+        Iterate over the JSON list and process it
+        to generate either an HTML table or a HTML list, depending on what's inside.
+        If suppose some key has array of objects and all the keys are same,
+        instead of creating a new row for each such entry,
+        club such values, thus it makes more sense and more readable table.
+
+        @example:
+        jsonObject = {
+            "sampleData": [
+                {"a":1, "b":2, "c":3},
+                {"a":5, "b":6, "c":7}
+            ]
+        }
+        OUTPUT:
+        _____________________________
+        |            |   |   |   |
+        |            | a | c | b |
+        | sampleData |---|---|---|
+        |            | 1 | 3 | 2 |
+        |            | 5 | 7 | 6 |
+        -----------------------------
+
+        @contributed by: @muellermichel
+        """
+        if not list_input:
+            return ""
+        converted_output = ""
+        column_headers = None
+        if self.clubbing:
+            column_headers = self.column_headers_from_list_of_dicts(list_input)
+        if column_headers is not None:
+            converted_output += self.table_init_markup
+            converted_output += "<thead>"
+            converted_output += (
+                "<tr><th>" + "</th><th>".join(column_headers) + "</th></tr>"
+            )
+            converted_output += "</thead>"
+            converted_output += "<tbody>"
+            for list_entry in list_input:
+                converted_output += "<tr><td>"
+                converted_output += "</td><td>".join(
+                    [
+                        self.convert_json_node(list_entry[column_header])
+                        for column_header in column_headers
+                    ]
+                )
+                converted_output += "</td></tr>"
+            converted_output += "</tbody>"
+            converted_output += "</table>"
+            return converted_output
+
+        # so you don't want or need clubbing eh? This makes @muellermichel very sad... ;(
+        # alright, let's fall back to a basic list here...
+        converted_output = "<ul><li>"
+        converted_output += "</li><li>".join(
+            [self.convert_json_node(child) for child in list_input]
+        )
+        converted_output += "</li></ul>"
+        return converted_output
+
+    def convert_object(self, json_input):
+        """
+        Iterate over the JSON object and process it
+        to generate the super awesome HTML Table format
+        """
+        if not json_input:
+            return ""  # avoid empty tables
+        converted_output = self.table_init_markup + "<tr>"
+        converted_output += "</tr><tr>".join(
+            [
+                "<th>%s</th><td>%s</td>"
+                % (self.convert_json_node(k), self.convert_json_node(v))
+                for k, v in json_input.items()
+            ]
+        )
+        converted_output += "</tr></table>"
+        return converted_output
+
+
+json2html = Json2Html()
diff --git a/services/web/server/src/simcore_service_webserver/publications/plugin.py b/services/web/server/src/simcore_service_webserver/publications/plugin.py index e1460d653dc..a85b83cf3b8 100644 --- a/services/web/server/src/simcore_service_webserver/publications/plugin.py +++ b/services/web/server/src/simcore_service_webserver/publications/plugin.py @@ -1,6 +1,5 @@ -""" publications management subsystem +"""publications management subsystem""" -""" import logging from aiohttp import web @@ -9,9 +8,9 @@ from ..email.plugin import setup_email from ..products.plugin import setup_products -from . import _handlers +from . import _rest -logger = logging.getLogger(__name__) +_logger = logging.getLogger(__name__) @app_module_setup( @@ -19,7 +18,7 @@ ModuleCategory.ADDON, depends=["simcore_service_webserver.rest"], settings_name="WEBSERVER_PUBLICATIONS", - logger=logger, + logger=_logger, ) def setup_publications(app: web.Application): assert app[APP_SETTINGS_KEY].WEBSERVER_PUBLICATIONS # nosec @@ -27,4 +26,4 @@ def setup_publications(app: web.Application): setup_email(app) setup_products(app) - app.router.add_routes(_handlers.routes) + app.router.add_routes(_rest.routes) diff --git a/services/web/server/src/simcore_service_webserver/rabbitmq_settings.py b/services/web/server/src/simcore_service_webserver/rabbitmq_settings.py index a05929f1c1b..79a85b69d5c 100644 --- a/services/web/server/src/simcore_service_webserver/rabbitmq_settings.py +++ b/services/web/server/src/simcore_service_webserver/rabbitmq_settings.py @@ -8,7 +8,7 @@ from aiohttp.web import Application from settings_library.rabbit import RabbitSettings -from ._constants import APP_SETTINGS_KEY +from .constants import APP_SETTINGS_KEY def get_plugin_settings(app: Application) -> RabbitSettings: diff --git a/services/web/server/src/simcore_service_webserver/redis.py b/services/web/server/src/simcore_service_webserver/redis.py index 5caebe02c53..cd66a4e004d 100644 --- a/services/web/server/src/simcore_service_webserver/redis.py +++ b/services/web/server/src/simcore_service_webserver/redis.py @@ -6,8 +6,8 @@ from servicelib.redis import RedisClientSDK, RedisClientsManager, RedisManagerDBConfig from settings_library.redis import RedisDatabase, RedisSettings -from ._constants import APP_SETTINGS_KEY from ._meta import APP_NAME +from .constants import APP_SETTINGS_KEY _logger = logging.getLogger(__name__) diff --git a/services/web/server/src/simcore_service_webserver/resource_usage/_client.py b/services/web/server/src/simcore_service_webserver/resource_usage/_client.py index 63d5187a7d5..d6f5c9598b5 100644 --- a/services/web/server/src/simcore_service_webserver/resource_usage/_client.py +++ b/services/web/server/src/simcore_service_webserver/resource_usage/_client.py @@ -16,8 +16,8 @@ WalletTotalCredits, ) from models_library.api_schemas_resource_usage_tracker.pricing_plans import ( - PricingPlanGet, - PricingUnitGet, + RutPricingPlanGet, + RutPricingUnitGet, ) from models_library.resource_tracker import PricingPlanId, PricingUnitId from
models_library.users import UserID @@ -86,7 +86,7 @@ async def list_service_runs_by_user_and_product_and_wallet( async def get_default_service_pricing_plan( app: web.Application, product_name: str, service_key: str, service_version: str -) -> PricingPlanGet: +) -> RutPricingPlanGet: settings: ResourceUsageTrackerSettings = get_plugin_settings(app) url = URL( f"{settings.api_base_url}/services/{urllib.parse.quote_plus(service_key)}/{service_version}/pricing-plan", @@ -101,7 +101,7 @@ async def get_default_service_pricing_plan( async with session.get(url) as response: response.raise_for_status() body: dict = await response.json() - return PricingPlanGet.model_validate(body) + return RutPricingPlanGet.model_validate(body) except ClientResponseError as e: if e.status == status.HTTP_404_NOT_FOUND: raise DefaultPricingPlanNotFoundError from e @@ -113,7 +113,7 @@ async def get_pricing_plan_unit( product_name: str, pricing_plan_id: PricingPlanId, pricing_unit_id: PricingUnitId, -) -> PricingUnitGet: +) -> RutPricingUnitGet: settings: ResourceUsageTrackerSettings = get_plugin_settings(app) url = ( URL(settings.api_base_url) @@ -130,7 +130,7 @@ async def get_pricing_plan_unit( async with session.get(url) as response: response.raise_for_status() body: dict = await response.json() - return PricingUnitGet.model_validate(body) + return RutPricingUnitGet.model_validate(body) async def sum_total_available_credits_in_the_wallet( diff --git a/services/web/server/src/simcore_service_webserver/resource_usage/_observer.py b/services/web/server/src/simcore_service_webserver/resource_usage/_observer.py index 4362247f3ec..114bc1df298 100644 --- a/services/web/server/src/simcore_service_webserver/resource_usage/_observer.py +++ b/services/web/server/src/simcore_service_webserver/resource_usage/_observer.py @@ -1,7 +1,4 @@ -""" Handlers to events registered in servicelib.observer.event_registry - -""" - +"""Handlers to events registered in servicelib.observer.event_registry""" import logging @@ -15,7 +12,7 @@ from servicelib.utils import logged_gather from ..notifications import wallet_osparc_credits -from ..wallets import api as wallets_api +from ..wallets import api as wallets_service _logger = logging.getLogger(__name__) @@ -30,7 +27,7 @@ async def _on_user_disconnected( assert client_session_id # nosec # Get all user wallets and unsubscribe - user_wallet = await wallets_api.list_wallets_for_user( + user_wallet = await wallets_service.list_wallets_for_user( app, user_id=user_id, product_name=product_name ) disconnect_tasks = [ @@ -44,7 +41,7 @@ async def _on_user_connected( user_id: int, app: web.Application, product_name: str ) -> None: # Get all user wallets and subscribe - user_wallet = await wallets_api.list_wallets_for_user( + user_wallet = await wallets_service.list_wallets_for_user( app, user_id=user_id, product_name=product_name ) _logger.debug("Connecting user %s to wallets %s", f"{user_id}", f"{user_wallet}") diff --git a/services/web/server/src/simcore_service_webserver/resource_usage/_pricing_plans_admin_rest.py b/services/web/server/src/simcore_service_webserver/resource_usage/_pricing_plans_admin_rest.py index 93386ff05a8..338eed936a5 100644 --- a/services/web/server/src/simcore_service_webserver/resource_usage/_pricing_plans_admin_rest.py +++ b/services/web/server/src/simcore_service_webserver/resource_usage/_pricing_plans_admin_rest.py @@ -2,7 +2,7 @@ from aiohttp import web from models_library.api_schemas_resource_usage_tracker.pricing_plans import ( - PricingPlanGet, + RutPricingPlanGet, ) from 
models_library.api_schemas_webserver.resource_usage import ( ConnectServiceToPricingPlanBodyParams, @@ -33,6 +33,9 @@ from servicelib.aiohttp.typing_extension import Handler from servicelib.mimetype_constants import MIMETYPE_APPLICATION_JSON from servicelib.rabbitmq._errors import RPCServerError +from servicelib.rabbitmq.rpc_interfaces.resource_usage_tracker.errors import ( + PricingUnitDuplicationError, +) from servicelib.rest_constants import RESPONSE_MODEL_POLICY from .._meta import API_VTAG as VTAG @@ -54,6 +57,9 @@ async def wrapper(request: web.Request) -> web.StreamResponse: try: return await handler(request) + except (ValueError, PricingUnitDuplicationError) as exc: + raise web.HTTPBadRequest(reason=f"{exc}") from exc + except RPCServerError as exc: # NOTE: This will be improved; we will add a mapping between # RPC errors and user-friendly frontend errors to pass to the frontend. @@ -85,12 +91,14 @@ async def list_pricing_plans_for_admin_user(request: web.Request): PageQueryParameters, request ) - pricing_plan_page = await pricing_plans_admin_service.list_pricing_plans( - app=request.app, - product_name=req_ctx.product_name, - exclude_inactive=False, - offset=query_params.offset, - limit=query_params.limit, + pricing_plan_page = ( + await pricing_plans_admin_service.list_pricing_plans_without_pricing_units( + app=request.app, + product_name=req_ctx.product_name, + exclude_inactive=False, + offset=query_params.offset, + limit=query_params.limit, + ) ) webserver_pricing_plans = [ PricingPlanAdminGet( @@ -121,7 +129,9 @@ async def list_pricing_plans_for_admin_user(request: web.Request): ) -def pricing_plan_get_to_admin(pricing_plan_get: PricingPlanGet) -> PricingPlanAdminGet: +def pricing_plan_get_to_admin( + pricing_plan_get: RutPricingPlanGet, +) -> PricingPlanAdminGet: """ Convert a PricingPlanGet object into a PricingPlanAdminGet object. 
""" diff --git a/services/web/server/src/simcore_service_webserver/resource_usage/_pricing_plans_admin_service.py b/services/web/server/src/simcore_service_webserver/resource_usage/_pricing_plans_admin_service.py index 7f574b69d5d..d74bdee870a 100644 --- a/services/web/server/src/simcore_service_webserver/resource_usage/_pricing_plans_admin_service.py +++ b/services/web/server/src/simcore_service_webserver/resource_usage/_pricing_plans_admin_service.py @@ -1,18 +1,21 @@ from aiohttp import web from models_library.api_schemas_resource_usage_tracker.pricing_plans import ( - PricingPlanGet, - PricingPlanPage, PricingPlanToServiceGet, - PricingUnitGet, + RutPricingPlanGet, + RutPricingPlanPage, + RutPricingUnitGet, ) from models_library.products import ProductName from models_library.resource_tracker import ( + PricingPlanClassification, PricingPlanCreate, PricingPlanId, PricingPlanUpdate, PricingUnitId, PricingUnitWithCostCreate, PricingUnitWithCostUpdate, + UnitExtraInfoLicense, + UnitExtraInfoTier, ) from models_library.services import ServiceKey, ServiceVersion from models_library.users import UserID @@ -21,27 +24,29 @@ pricing_units, ) -from ..catalog import client as catalog_client +from ..catalog import catalog_service from ..rabbitmq import get_rabbitmq_rpc_client ## Pricing Plans -async def list_pricing_plans( +async def list_pricing_plans_without_pricing_units( app: web.Application, *, product_name: ProductName, exclude_inactive: bool, offset: int, limit: int, -) -> PricingPlanPage: +) -> RutPricingPlanPage: rpc_client = get_rabbitmq_rpc_client(app) - output: PricingPlanPage = await pricing_plans.list_pricing_plans( - rpc_client, - product_name=product_name, - exclude_inactive=exclude_inactive, - offset=offset, - limit=limit, + output: RutPricingPlanPage = ( + await pricing_plans.list_pricing_plans_without_pricing_units( + rpc_client, + product_name=product_name, + exclude_inactive=exclude_inactive, + offset=offset, + limit=limit, + ) ) return output @@ -50,7 +55,7 @@ async def get_pricing_plan( app: web.Application, product_name: ProductName, pricing_plan_id: PricingPlanId, -) -> PricingPlanGet: +) -> RutPricingPlanGet: rpc_client = get_rabbitmq_rpc_client(app) return await pricing_plans.get_pricing_plan( rpc_client, @@ -62,14 +67,14 @@ async def get_pricing_plan( async def create_pricing_plan( app: web.Application, data: PricingPlanCreate, -) -> PricingPlanGet: +) -> RutPricingPlanGet: rpc_client = get_rabbitmq_rpc_client(app) return await pricing_plans.create_pricing_plan(rpc_client, data=data) async def update_pricing_plan( app: web.Application, product_name: ProductName, data: PricingPlanUpdate -) -> PricingPlanGet: +) -> RutPricingPlanGet: rpc_client = get_rabbitmq_rpc_client(app) return await pricing_plans.update_pricing_plan( rpc_client, product_name=product_name, data=data @@ -84,7 +89,7 @@ async def get_pricing_unit( product_name: ProductName, pricing_plan_id: PricingPlanId, pricing_unit_id: PricingUnitId, -) -> PricingUnitGet: +) -> RutPricingUnitGet: rpc_client = get_rabbitmq_rpc_client(app) return await pricing_units.get_pricing_unit( rpc_client, @@ -96,8 +101,12 @@ async def get_pricing_unit( async def create_pricing_unit( app: web.Application, product_name: ProductName, data: PricingUnitWithCostCreate -) -> PricingUnitGet: +) -> RutPricingUnitGet: rpc_client = get_rabbitmq_rpc_client(app) + pricing_plan = await pricing_plans.get_pricing_plan( + rpc_client, product_name=product_name, pricing_plan_id=data.pricing_plan_id + ) + 
_validate_pricing_unit(pricing_plan.classification, data.unit_extra_info) return await pricing_units.create_pricing_unit( rpc_client, product_name=product_name, data=data ) @@ -105,13 +114,31 @@ async def update_pricing_unit( async def update_pricing_unit( app: web.Application, product_name: ProductName, data: PricingUnitWithCostUpdate -) -> PricingUnitGet: +) -> RutPricingUnitGet: rpc_client = get_rabbitmq_rpc_client(app) + pricing_plan = await pricing_plans.get_pricing_plan( + rpc_client, product_name=product_name, pricing_plan_id=data.pricing_plan_id + ) + _validate_pricing_unit(pricing_plan.classification, data.unit_extra_info) return await pricing_units.update_pricing_unit( rpc_client, product_name=product_name, data=data ) +def _validate_pricing_unit(classification: PricingPlanClassification, unit_extra_info): + if classification == PricingPlanClassification.LICENSE: + if not isinstance(unit_extra_info, UnitExtraInfoLicense): + msg = "Expected UnitExtraInfoLicense (num_of_seats) for LICENSE classification" + raise ValueError(msg) + elif classification == PricingPlanClassification.TIER: + if not isinstance(unit_extra_info, UnitExtraInfoTier): + msg = "Expected UnitExtraInfoTier (CPU, RAM, VRAM) for TIER classification" + raise ValueError(msg) + else: + msg = "Unknown pricing plan classification" + raise ValueError(msg) + + ## Pricing Plans to Service @@ -119,10 +146,10 @@ async def list_connected_services_to_pricing_plan( app: web.Application, product_name: ProductName, pricing_plan_id: PricingPlanId ) -> list[PricingPlanToServiceGet]: rpc_client = get_rabbitmq_rpc_client(app) - output: list[ - PricingPlanToServiceGet - ] = await pricing_plans.list_connected_services_to_pricing_plan_by_pricing_plan( - rpc_client, product_name=product_name, pricing_plan_id=pricing_plan_id + output: list[PricingPlanToServiceGet] = ( + await pricing_plans.list_connected_services_to_pricing_plan_by_pricing_plan( + rpc_client, product_name=product_name, pricing_plan_id=pricing_plan_id + ) ) return output @@ -136,7 +163,7 @@ async def connect_service_to_pricing_plan( service_version: ServiceVersion, ) -> PricingPlanToServiceGet: # Check whether service key and version exist - await catalog_client.get_service( + await catalog_service.get_service( app, user_id, service_key, service_version, product_name ) diff --git a/services/web/server/src/simcore_service_webserver/resource_usage/_pricing_plans_rest.py b/services/web/server/src/simcore_service_webserver/resource_usage/_pricing_plans_rest.py index e97446c2d88..5bd826a24a5 100644 --- a/services/web/server/src/simcore_service_webserver/resource_usage/_pricing_plans_rest.py +++ b/services/web/server/src/simcore_service_webserver/resource_usage/_pricing_plans_rest.py @@ -96,12 +96,14 @@ async def list_pricing_plans(request: web.Request): PageQueryParameters, request ) - pricing_plan_page = await pricing_plans_admin_service.list_pricing_plans( - app=request.app, - product_name=req_ctx.product_name, - exclude_inactive=True, - offset=query_params.offset, - limit=query_params.limit, + pricing_plan_page = ( + await pricing_plans_admin_service.list_pricing_plans_without_pricing_units( + app=request.app, + product_name=req_ctx.product_name, + exclude_inactive=True, + offset=query_params.offset, + limit=query_params.limit, + ) ) webserver_pricing_plans = [ PricingPlanGet( diff --git a/services/web/server/src/simcore_service_webserver/resource_usage/_pricing_plans_service.py b/services/web/server/src/simcore_service_webserver/resource_usage/_pricing_plans_service.py
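The `_validate_pricing_unit` helper above pairs each `PricingPlanClassification` with the extra-info model it requires. Below is a self-contained sketch of the same dispatch pattern, using stand-in enum and dataclasses rather than the `models_library` types:

```python
# Stand-in types mirroring PricingPlanClassification / UnitExtraInfo* from
# models_library; this is an illustrative sketch, not the real models.
import enum
from dataclasses import dataclass


class Classification(enum.Enum):
    TIER = "TIER"
    LICENSE = "LICENSE"


@dataclass
class TierInfo:  # mirrors UnitExtraInfoTier (CPU, RAM, VRAM)
    CPU: int
    RAM: int
    VRAM: int


@dataclass
class LicenseInfo:  # mirrors UnitExtraInfoLicense (num_of_seats)
    num_of_seats: int


_EXPECTED = {Classification.LICENSE: LicenseInfo, Classification.TIER: TierInfo}


def validate(classification: Classification, extra_info: object) -> None:
    # Reject any classification without a registered extra-info model,
    # then check the payload is an instance of the expected model.
    expected = _EXPECTED.get(classification)
    if expected is None:
        raise ValueError("Unknown pricing plan classification")
    if not isinstance(extra_info, expected):
        raise ValueError(f"Expected {expected.__name__} for {classification.name}")


validate(Classification.LICENSE, LicenseInfo(num_of_seats=5))  # passes
```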
index a29ffa32632..6c5b07714a8 100644 --- a/services/web/server/src/simcore_service_webserver/resource_usage/_pricing_plans_service.py +++ b/services/web/server/src/simcore_service_webserver/resource_usage/_pricing_plans_service.py @@ -1,7 +1,7 @@ from aiohttp import web from models_library.api_schemas_resource_usage_tracker.pricing_plans import ( - PricingPlanGet, - PricingUnitGet, + RutPricingPlanGet, + RutPricingUnitGet, ) from models_library.products import ProductName from models_library.resource_tracker import PricingPlanId, PricingUnitId @@ -15,8 +15,8 @@ async def get_default_service_pricing_plan( product_name: ProductName, service_key: ServiceKey, service_version: ServiceVersion, -) -> PricingPlanGet: - data: PricingPlanGet = ( +) -> RutPricingPlanGet: + data: RutPricingPlanGet = ( await resource_tracker_client.get_default_service_pricing_plan( app=app, product_name=product_name, @@ -33,8 +33,8 @@ async def get_pricing_plan_unit( product_name: ProductName, pricing_plan_id: PricingPlanId, pricing_unit_id: PricingUnitId, -) -> PricingUnitGet: - data: PricingUnitGet = await resource_tracker_client.get_pricing_plan_unit( +) -> RutPricingUnitGet: + data: RutPricingUnitGet = await resource_tracker_client.get_pricing_plan_unit( app=app, product_name=product_name, pricing_plan_id=pricing_plan_id, diff --git a/services/web/server/src/simcore_service_webserver/resource_usage/service.py b/services/web/server/src/simcore_service_webserver/resource_usage/service.py index 05992fe36e4..39ddc24a2ef 100644 --- a/services/web/server/src/simcore_service_webserver/resource_usage/service.py +++ b/services/web/server/src/simcore_service_webserver/resource_usage/service.py @@ -11,6 +11,7 @@ from models_library.wallets import WalletID from . import _client +from ._pricing_plans_admin_service import get_pricing_plan from ._pricing_plans_service import ( get_default_service_pricing_plan, get_pricing_plan_unit, @@ -56,4 +57,5 @@ async def add_credits_to_wallet( __all__ = ( "get_default_service_pricing_plan", "get_pricing_plan_unit", + "get_pricing_plan", ) diff --git a/services/web/server/src/simcore_service_webserver/resource_usage/settings.py b/services/web/server/src/simcore_service_webserver/resource_usage/settings.py index 70687177fcb..db1df4b8bca 100644 --- a/services/web/server/src/simcore_service_webserver/resource_usage/settings.py +++ b/services/web/server/src/simcore_service_webserver/resource_usage/settings.py @@ -7,7 +7,7 @@ from aiohttp import web from settings_library.resource_usage_tracker import ResourceUsageTrackerSettings -from .._constants import APP_SETTINGS_KEY +from ..constants import APP_SETTINGS_KEY def get_plugin_settings(app: web.Application) -> ResourceUsageTrackerSettings: diff --git a/services/web/server/src/simcore_service_webserver/rest/_handlers.py b/services/web/server/src/simcore_service_webserver/rest/_handlers.py index b874d441db0..5425d7341e4 100644 --- a/services/web/server/src/simcore_service_webserver/rest/_handlers.py +++ b/services/web/server/src/simcore_service_webserver/rest/_handlers.py @@ -11,10 +11,10 @@ from pydantic import BaseModel from servicelib.aiohttp import status -from .._constants import APP_PUBLIC_CONFIG_PER_PRODUCT, APP_SETTINGS_KEY from .._meta import API_VTAG +from ..constants import APP_PUBLIC_CONFIG_PER_PRODUCT, APP_SETTINGS_KEY from ..login.decorators import login_required -from ..products.api import get_product_name +from ..products import products_web from ..redis import get_redis_scheduled_maintenance_client from ..utils_aiohttp import 
envelope_json_response from .healthcheck import HealthCheck, HealthCheckError @@ -76,7 +76,7 @@ async def get_config(request: web.Request): """ app_public_config: dict[str, Any] = request.app[APP_SETTINGS_KEY].public_dict() - product_name = get_product_name(request=request) + product_name = products_web.get_product_name(request=request) product_public_config = request.app.get(APP_PUBLIC_CONFIG_PER_PRODUCT, {}).get( product_name, {} ) diff --git a/services/web/server/src/simcore_service_webserver/rest/healthcheck.py b/services/web/server/src/simcore_service_webserver/rest/healthcheck.py index fd4b5045215..dc31678c3ef 100644 --- a/services/web/server/src/simcore_service_webserver/rest/healthcheck.py +++ b/services/web/server/src/simcore_service_webserver/rest/healthcheck.py @@ -55,7 +55,7 @@ TypedDict, ) -from .._constants import APP_SETTINGS_KEY +from ..constants import APP_SETTINGS_KEY _HealthCheckSlot = Callable[[web.Application], Awaitable[None]] diff --git a/services/web/server/src/simcore_service_webserver/rest/settings.py b/services/web/server/src/simcore_service_webserver/rest/settings.py index d061af13d5c..3f3047d7fb0 100644 --- a/services/web/server/src/simcore_service_webserver/rest/settings.py +++ b/services/web/server/src/simcore_service_webserver/rest/settings.py @@ -1,7 +1,7 @@ from aiohttp import web from settings_library.base import BaseCustomSettings -from .._constants import APP_SETTINGS_KEY +from ..constants import APP_SETTINGS_KEY class RestSettings(BaseCustomSettings): diff --git a/services/web/server/src/simcore_service_webserver/scicrunch/settings.py b/services/web/server/src/simcore_service_webserver/scicrunch/settings.py index 0bf88e69b05..e265f4b5323 100644 --- a/services/web/server/src/simcore_service_webserver/scicrunch/settings.py +++ b/services/web/server/src/simcore_service_webserver/scicrunch/settings.py @@ -2,7 +2,7 @@ from pydantic import Field, HttpUrl, SecretStr, TypeAdapter from settings_library.base import BaseCustomSettings -from .._constants import APP_SETTINGS_KEY +from ..constants import APP_SETTINGS_KEY # TODO: read https://www.force11.org/group/resource-identification-initiative SCICRUNCH_DEFAULT_URL = "https://scicrunch.org" diff --git a/services/web/server/src/simcore_service_webserver/security/_authz_access_roles.py b/services/web/server/src/simcore_service_webserver/security/_authz_access_roles.py index 0bd7e6a75eb..e0cc216e22b 100644 --- a/services/web/server/src/simcore_service_webserver/security/_authz_access_roles.py +++ b/services/web/server/src/simcore_service_webserver/security/_authz_access_roles.py @@ -61,6 +61,7 @@ class PermissionDict(TypedDict, total=False): "groups.*", "catalog/licensed-items.*", "product.price.read", + "product.ui.read", "project.folders.*", "project.access_rights.update", "project.classifier.*", @@ -111,8 +112,8 @@ class PermissionDict(TypedDict, total=False): UserRole.ADMIN: PermissionDict( can=[ "admin.*", - "storage.files.sync", "resource-usage.write", + "storage.files.sync", ], inherits=[UserRole.TESTER], ), diff --git a/services/web/server/src/simcore_service_webserver/security/_authz_policy.py b/services/web/server/src/simcore_service_webserver/security/_authz_policy.py index 612c1e64975..3bd5408f4d3 100644 --- a/services/web/server/src/simcore_service_webserver/security/_authz_policy.py +++ b/services/web/server/src/simcore_service_webserver/security/_authz_policy.py @@ -1,6 +1,4 @@ -""" AUTHoriZation (auth) policy - -""" +"""AUTHoriZation (auth) policy""" import contextlib import logging @@ -14,7 +12,7 
@@ ) from models_library.products import ProductName from models_library.users import UserID -from simcore_postgres_database.errors import DatabaseError +from simcore_postgres_database.aiopg_errors import DatabaseError from ..db.plugin import get_database_engine from ._authz_access_model import ( diff --git a/services/web/server/src/simcore_service_webserver/session/settings.py b/services/web/server/src/simcore_service_webserver/session/settings.py index 74a7f18f2e9..4e1c99dac68 100644 --- a/services/web/server/src/simcore_service_webserver/session/settings.py +++ b/services/web/server/src/simcore_service_webserver/session/settings.py @@ -7,7 +7,7 @@ from settings_library.base import BaseCustomSettings from settings_library.utils_session import MixinSessionSettings -from .._constants import APP_SETTINGS_KEY +from ..constants import APP_SETTINGS_KEY _MINUTE: Final[int] = 60 # secs diff --git a/services/web/server/src/simcore_service_webserver/socketio/_handlers.py b/services/web/server/src/simcore_service_webserver/socketio/_handlers.py index 078c22e8cf7..85a618f15d1 100644 --- a/services/web/server/src/simcore_service_webserver/socketio/_handlers.py +++ b/services/web/server/src/simcore_service_webserver/socketio/_handlers.py @@ -19,7 +19,8 @@ from ..groups.api import list_user_groups_ids_with_read_access from ..login.decorators import login_required -from ..products.api import Product, get_current_product +from ..products import products_web +from ..products.models import Product from ..resource_manager.user_sessions import managed_resource from ._utils import EnvironDict, SocketID, get_socket_server, register_socketio_handler from .messages import SOCKET_IO_HEARTBEAT_EVENT, send_message_to_user @@ -51,7 +52,7 @@ async def _handler(request: web.Request) -> tuple[UserID, ProductName]: app = request.app user_id = UserID(request.get(RQT_USERID_KEY, _ANONYMOUS_USER_ID)) client_session_id = request.query.get("client_session_id", None) - product: Product = get_current_product(request) + product: Product = products_web.get_current_product(request) _logger.debug( "client %s,%s authenticated", f"{user_id=}", f"{client_session_id=}" diff --git a/services/web/server/src/simcore_service_webserver/socketio/plugin.py b/services/web/server/src/simcore_service_webserver/socketio/plugin.py index 20ceef31053..86d19aceeac 100644 --- a/services/web/server/src/simcore_service_webserver/socketio/plugin.py +++ b/services/web/server/src/simcore_service_webserver/socketio/plugin.py @@ -9,7 +9,7 @@ from aiohttp import web from servicelib.aiohttp.application_setup import ModuleCategory, app_module_setup -from .._constants import APP_SETTINGS_KEY +from ..constants import APP_SETTINGS_KEY from ..rabbitmq import setup_rabbitmq from ._observer import setup_socketio_observer_events from .server import setup_socketio_server diff --git a/services/web/server/src/simcore_service_webserver/statics/_constants.py b/services/web/server/src/simcore_service_webserver/statics/_constants.py index ec85d8114a5..72f4b298276 100644 --- a/services/web/server/src/simcore_service_webserver/statics/_constants.py +++ b/services/web/server/src/simcore_service_webserver/statics/_constants.py @@ -1,22 +1,4 @@ -# these are the apps built right now by static-webserver/client - -FRONTEND_APPS_AVAILABLE = frozenset( - { - "osparc", - "tis", - "tiplite", - "s4l", - "s4llite", - "s4lacad", - "s4lengine", - "s4ldesktop", - "s4ldesktopacad", - } -) -FRONTEND_APP_DEFAULT = "osparc" - -assert FRONTEND_APP_DEFAULT in FRONTEND_APPS_AVAILABLE # nosec - 
+from ..constants import FRONTEND_APP_DEFAULT, FRONTEND_APPS_AVAILABLE STATIC_DIRNAMES = FRONTEND_APPS_AVAILABLE | {"resource", "transpiled"} @@ -24,3 +6,11 @@ APP_FRONTEND_CACHED_STATICS_JSON_KEY = f"{__name__}.cached_statics_json" APP_CLIENTAPPS_SETTINGS_KEY = f"{__file__}.client_apps_settings" + + +__all__: tuple[str, ...] = ( + "FRONTEND_APPS_AVAILABLE", + "FRONTEND_APP_DEFAULT", +) + +# nopycln: file diff --git a/services/web/server/src/simcore_service_webserver/statics/_events.py b/services/web/server/src/simcore_service_webserver/statics/_events.py index b34f7e8948a..1d1e1912004 100644 --- a/services/web/server/src/simcore_service_webserver/statics/_events.py +++ b/services/web/server/src/simcore_service_webserver/statics/_events.py @@ -7,6 +7,7 @@ from aiohttp.client import ClientSession from aiohttp.client_exceptions import ClientConnectionError, ClientError from common_library.json_serialization import json_dumps +from packaging.version import Version from servicelib.aiohttp.client_session import get_client_session from tenacity.asyncio import AsyncRetrying from tenacity.before import before_log @@ -15,9 +16,9 @@ from tenacity.wait import wait_fixed from yarl import URL -from .._constants import APP_PRODUCTS_KEY from ..application_settings import ApplicationSettings, get_application_settings -from ..products.api import Product +from ..constants import APP_PRODUCTS_KEY +from ..products.models import Product from ._constants import ( APP_FRONTEND_CACHED_INDEXES_KEY, APP_FRONTEND_CACHED_STATICS_JSON_KEY, @@ -93,6 +94,11 @@ async def create_cached_indexes(app: web.Application) -> None: app[APP_FRONTEND_CACHED_INDEXES_KEY] = cached_indexes +def _get_release_notes_vtag(vtag: str) -> str: + version = Version(vtag) + return f"v{version.major}.{version.minor}.0" + + async def create_and_cache_statics_json(app: web.Application) -> None: # NOTE: in devel mode, the folder might be under construction # (qx-compile takes time), therefore we create statics.json @@ -132,7 +138,8 @@ async def create_and_cache_statics_json(app: web.Application) -> None: ): # template URL should be something like: # https://github.com/ITISFoundation/osparc-issues/blob/master/release-notes/osparc/{vtag}.md - data["vcsReleaseUrl"] = template_url.format(vtag=vtag) + release_vtag = _get_release_notes_vtag(vtag) + data["vcsReleaseUrl"] = template_url.format(vtag=release_vtag) data_json = json_dumps(data) _logger.debug("Front-end statics.json: %s", data_json) diff --git a/services/web/server/src/simcore_service_webserver/statics/_handlers.py b/services/web/server/src/simcore_service_webserver/statics/_handlers.py index ecda8a0a83e..0f37438e69c 100644 --- a/services/web/server/src/simcore_service_webserver/statics/_handlers.py +++ b/services/web/server/src/simcore_service_webserver/statics/_handlers.py @@ -3,7 +3,7 @@ from aiohttp import web from servicelib.mimetype_constants import MIMETYPE_TEXT_HTML -from ..products.api import get_product_name +from ..products import products_web from ._constants import ( APP_FRONTEND_CACHED_INDEXES_KEY, APP_FRONTEND_CACHED_STATICS_JSON_KEY, @@ -14,7 +14,7 @@ async def get_cached_frontend_index(request: web.Request): - product_name = get_product_name(request) + product_name = products_web.get_product_name(request) assert ( # nosec product_name in FRONTEND_APPS_AVAILABLE @@ -38,7 +38,7 @@ async def get_cached_frontend_index(request: web.Request): async def get_statics_json(request: web.Request): - product_name = get_product_name(request) + product_name =
products_web.get_product_name(request) return web.Response( body=request.app[APP_FRONTEND_CACHED_STATICS_JSON_KEY].get(product_name, None), diff --git a/services/web/server/src/simcore_service_webserver/statics/plugin.py b/services/web/server/src/simcore_service_webserver/statics/plugin.py index 4178325851f..07c30033fe8 100644 --- a/services/web/server/src/simcore_service_webserver/statics/plugin.py +++ b/services/web/server/src/simcore_service_webserver/statics/plugin.py @@ -11,7 +11,7 @@ from aiohttp import web from servicelib.aiohttp.application_setup import ModuleCategory, app_module_setup -from .._constants import INDEX_RESOURCE_NAME +from ..constants import INDEX_RESOURCE_NAME from ..products.plugin import setup_products from ._events import create_and_cache_statics_json, create_cached_indexes from ._handlers import get_cached_frontend_index, get_statics_json diff --git a/services/web/server/src/simcore_service_webserver/statics/settings.py b/services/web/server/src/simcore_service_webserver/statics/settings.py index 32c3b740220..3915c59a156 100644 --- a/services/web/server/src/simcore_service_webserver/statics/settings.py +++ b/services/web/server/src/simcore_service_webserver/statics/settings.py @@ -13,7 +13,7 @@ TypedDict, ) -from .._constants import APP_SETTINGS_KEY +from ..constants import APP_SETTINGS_KEY class ThirdPartyInfoDict(TypedDict): diff --git a/services/web/server/src/simcore_service_webserver/storage/_handlers.py b/services/web/server/src/simcore_service_webserver/storage/_handlers.py deleted file mode 100644 index 83372296dd2..00000000000 --- a/services/web/server/src/simcore_service_webserver/storage/_handlers.py +++ /dev/null @@ -1,346 +0,0 @@ -""" Handlers exposed by storage subsystem - - Mostly resolves and redirect to storage API -""" -import logging -from typing import Any, Final, NamedTuple - -from aiohttp import ClientTimeout, web -from models_library.api_schemas_storage import ( - FileUploadCompleteResponse, - FileUploadCompletionBody, - FileUploadSchema, - LinkType, -) -from models_library.projects_nodes_io import LocationID -from models_library.utils.fastapi_encoders import jsonable_encoder -from pydantic import AnyUrl, BaseModel, ByteSize, TypeAdapter -from servicelib.aiohttp.client_session import get_client_session -from servicelib.aiohttp.requests_validation import ( - parse_request_body_as, - parse_request_path_parameters_as, - parse_request_query_parameters_as, -) -from servicelib.aiohttp.rest_responses import create_data_response, unwrap_envelope -from servicelib.common_headers import X_FORWARDED_PROTO -from servicelib.request_keys import RQT_USERID_KEY -from yarl import URL - -from .._meta import API_VTAG -from ..login.decorators import login_required -from ..security.decorators import permission_required -from .schemas import StorageFileIDStr -from .settings import StorageSettings, get_plugin_settings - -log = logging.getLogger(__name__) - - -def _get_base_storage_url(app: web.Application) -> URL: - settings: StorageSettings = get_plugin_settings(app) - return URL(settings.base_url, encoded=True) - - -def _get_storage_vtag(app: web.Application) -> str: - settings: StorageSettings = get_plugin_settings(app) - storage_prefix: str = settings.STORAGE_VTAG - return storage_prefix - - -def _to_storage_url(request: web.Request) -> URL: - """Converts web-api url to storage-api url""" - userid = request[RQT_USERID_KEY] - - # storage service API endpoint - url = _get_base_storage_url(request.app) - - basepath_index = 3 - # strip basepath from webserver API 
path (i.e. webserver api version) - # >>> URL('http://storage:1234/v5/storage/asdf/').raw_parts[3:] - suffix = "/".join(request.url.raw_parts[basepath_index:]) - - return ( - url.joinpath(suffix, encoded=True) - .with_query(request.query) - .update_query(user_id=userid) - ) - - -def _from_storage_url(request: web.Request, storage_url: AnyUrl) -> AnyUrl: - """Converts storage-api url to web-api url""" - assert storage_url.path # nosec - - prefix = f"/{_get_storage_vtag(request.app)}" - converted_url = request.url.with_path( - f"/v0/storage{storage_url.path.removeprefix(prefix)}", encoded=True - ).with_scheme(request.headers.get(X_FORWARDED_PROTO, request.url.scheme)) - - webserver_url: AnyUrl = TypeAdapter(AnyUrl).validate_python(f"{converted_url}") - return webserver_url - - -class _ResponseTuple(NamedTuple): - payload: Any - status_code: int - - -async def _forward_request_to_storage( - request: web.Request, method: str, body: dict[str, Any] | None = None, **kwargs -) -> _ResponseTuple: - url = _to_storage_url(request) - session = get_client_session(request.app) - - async with session.request( - method.upper(), url, ssl=False, json=body, **kwargs - ) as resp: - payload = await resp.json() - return _ResponseTuple(payload=payload, status_code=resp.status) - - -# --------------------------------------------------------------------- - -routes = web.RouteTableDef() -_path_prefix = f"/{API_VTAG}/storage/locations" - - -@routes.get(_path_prefix, name="get_storage_locations") -@login_required -@permission_required("storage.files.*") -async def get_storage_locations(request: web.Request) -> web.Response: - payload, status = await _forward_request_to_storage(request, "GET", body=None) - return create_data_response(payload, status=status) - - -@routes.get(_path_prefix + "/{location_id}/datasets", name="get_datasets_metadata") -@login_required -@permission_required("storage.files.*") -async def get_datasets_metadata(request: web.Request) -> web.Response: - class _PathParams(BaseModel): - location_id: LocationID - - parse_request_path_parameters_as(_PathParams, request) - - payload, status = await _forward_request_to_storage(request, "GET", body=None) - return create_data_response(payload, status=status) - - -@routes.get( - _path_prefix + "/{location_id}/files/metadata", - name="get_files_metadata", -) -@login_required -@permission_required("storage.files.*") -async def get_files_metadata(request: web.Request) -> web.Response: - class _PathParams(BaseModel): - location_id: LocationID - - parse_request_path_parameters_as(_PathParams, request) - - class _QueryParams(BaseModel): - uuid_filter: str = "" - expand_dirs: bool = True - - parse_request_query_parameters_as(_QueryParams, request) - - payload, status = await _forward_request_to_storage(request, "GET", body=None) - return create_data_response(payload, status=status) - - -_LIST_ALL_DATASETS_TIMEOUT_S: Final[int] = 60 - - -@routes.get( - _path_prefix + "/{location_id}/datasets/{dataset_id}/metadata", - name="get_files_metadata_dataset", -) -@login_required -@permission_required("storage.files.*") -async def get_files_metadata_dataset(request: web.Request) -> web.Response: - class _PathParams(BaseModel): - location_id: LocationID - dataset_id: str - - parse_request_path_parameters_as(_PathParams, request) - - class _QueryParams(BaseModel): - uuid_filter: str = "" - expand_dirs: bool = True - - parse_request_query_parameters_as(_QueryParams, request) - - payload, status = await _forward_request_to_storage( - request, - "GET", - body=None, - 
timeout=ClientTimeout(total=_LIST_ALL_DATASETS_TIMEOUT_S), - ) - return create_data_response(payload, status=status) - - -@routes.get( - _path_prefix + "/{location_id}/files/{file_id}/metadata", - name="get_file_metadata", -) -@login_required -@permission_required("storage.files.*") -async def get_file_metadata(request: web.Request) -> web.Response: - class _PathParams(BaseModel): - location_id: LocationID - file_id: StorageFileIDStr - - parse_request_path_parameters_as(_PathParams, request) - - payload, status = await _forward_request_to_storage(request, "GET") - return create_data_response(payload, status=status) - - -@routes.get( - _path_prefix + "/{location_id}/files/{file_id}", - name="download_file", -) -@login_required -@permission_required("storage.files.*") -async def download_file(request: web.Request) -> web.Response: - class _PathParams(BaseModel): - location_id: LocationID - file_id: StorageFileIDStr - - parse_request_path_parameters_as(_PathParams, request) - - class _QueryParams(BaseModel): - link_type: LinkType = LinkType.PRESIGNED - - parse_request_query_parameters_as(_QueryParams, request) - - payload, status = await _forward_request_to_storage(request, "GET", body=None) - return create_data_response(payload, status=status) - - -@routes.put( - _path_prefix + "/{location_id}/files/{file_id}", - name="upload_file", -) -@login_required -@permission_required("storage.files.*") -async def upload_file(request: web.Request) -> web.Response: - class _PathParams(BaseModel): - location_id: LocationID - file_id: StorageFileIDStr - - parse_request_path_parameters_as(_PathParams, request) - - class _QueryParams(BaseModel): - file_size: ByteSize | None = None - link_type: LinkType = LinkType.PRESIGNED - is_directory: bool = False - - parse_request_query_parameters_as(_QueryParams, request) - - payload, status = await _forward_request_to_storage(request, "PUT", body=None) - data, _ = unwrap_envelope(payload) - file_upload_schema = FileUploadSchema.model_validate(data) - file_upload_schema.links.complete_upload = _from_storage_url( - request, file_upload_schema.links.complete_upload - ) - file_upload_schema.links.abort_upload = _from_storage_url( - request, file_upload_schema.links.abort_upload - ) - return create_data_response(jsonable_encoder(file_upload_schema), status=status) - - -@routes.post( - _path_prefix + "/{location_id}/files/{file_id}:complete", - name="complete_upload_file", -) -@login_required -@permission_required("storage.files.*") -async def complete_upload_file(request: web.Request) -> web.Response: - class _PathParams(BaseModel): - location_id: LocationID - file_id: StorageFileIDStr - - parse_request_path_parameters_as(_PathParams, request) - body_item = await parse_request_body_as(FileUploadCompletionBody, request) - - payload, status = await _forward_request_to_storage( - request, "POST", body=body_item.model_dump() - ) - data, _ = unwrap_envelope(payload) - file_upload_complete = FileUploadCompleteResponse.model_validate(data) - file_upload_complete.links.state = _from_storage_url( - request, file_upload_complete.links.state - ) - return create_data_response(jsonable_encoder(file_upload_complete), status=status) - - -@routes.post( - _path_prefix + "/{location_id}/files/{file_id}:abort", - name="abort_upload_file", -) -@login_required -@permission_required("storage.files.*") -async def abort_upload_file(request: web.Request) -> web.Response: - class _PathParams(BaseModel): - location_id: LocationID - file_id: StorageFileIDStr - - 
parse_request_path_parameters_as(_PathParams, request) - - payload, status = await _forward_request_to_storage(request, "POST", body=None) - return create_data_response(payload, status=status) - - -@routes.post( - _path_prefix + "/{location_id}/files/{file_id}:complete/futures/{future_id}", - name="is_completed_upload_file", -) -@login_required -@permission_required("storage.files.*") -async def is_completed_upload_file(request: web.Request) -> web.Response: - class _PathParams(BaseModel): - location_id: LocationID - file_id: StorageFileIDStr - future_id: str - - parse_request_path_parameters_as(_PathParams, request) - - payload, status = await _forward_request_to_storage(request, "POST", body=None) - return create_data_response(payload, status=status) - - -@routes.delete( - _path_prefix + "/{location_id}/files/{file_id}", - name="delete_file", -) -@login_required -@permission_required("storage.files.*") -async def delete_file(request: web.Request) -> web.Response: - class _PathParams(BaseModel): - location_id: LocationID - file_id: StorageFileIDStr - - parse_request_path_parameters_as(_PathParams, request) - - payload, status = await _forward_request_to_storage(request, "DELETE", body=None) - return create_data_response(payload, status=status) - - -@routes.post( - _path_prefix + "/{location_id}:sync", - name="synchronise_meta_data_table", -) -@login_required -@permission_required("storage.files.sync") -async def synchronise_meta_data_table(request: web.Request) -> web.Response: - class _PathParams(BaseModel): - location_id: LocationID - - parse_request_path_parameters_as(_PathParams, request) - - class _QueryParams(BaseModel): - dry_run: bool = False - fire_and_forget: bool = False - - parse_request_query_parameters_as(_QueryParams, request) - - payload, status = await _forward_request_to_storage(request, "POST", body=None) - return create_data_response(payload, status=status) diff --git a/services/web/server/src/simcore_service_webserver/storage/_rest.py b/services/web/server/src/simcore_service_webserver/storage/_rest.py new file mode 100644 index 00000000000..fbc419d9015 --- /dev/null +++ b/services/web/server/src/simcore_service_webserver/storage/_rest.py @@ -0,0 +1,475 @@ +"""Handlers exposed by storage subsystem + +Mostly resolves and redirect to storage API +""" + +import logging +import urllib.parse +from typing import Any, Final, NamedTuple +from urllib.parse import quote, unquote + +from aiohttp import ClientTimeout, web +from models_library.api_schemas_long_running_tasks.tasks import ( + TaskGet, +) +from models_library.api_schemas_rpc_async_jobs.async_jobs import ( + AsyncJobNameData, +) +from models_library.api_schemas_storage.storage_schemas import ( + FileUploadCompleteResponse, + FileUploadCompletionBody, + FileUploadSchema, + LinkType, +) +from models_library.api_schemas_webserver.storage import ( + DataExportPost, + StoragePathComputeSizeParams, +) +from models_library.projects_nodes_io import LocationID +from models_library.utils.change_case import camel_to_snake +from models_library.utils.fastapi_encoders import jsonable_encoder +from pydantic import AnyUrl, BaseModel, ByteSize, TypeAdapter +from servicelib.aiohttp import status +from servicelib.aiohttp.client_session import get_client_session +from servicelib.aiohttp.requests_validation import ( + parse_request_body_as, + parse_request_path_parameters_as, + parse_request_query_parameters_as, +) +from servicelib.aiohttp.rest_responses import create_data_response +from servicelib.common_headers import X_FORWARDED_PROTO 
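A note on the path handling in this module: storage's FastAPI routes use `:action` suffixes (e.g. `.../files/{file_id}:complete`), so when proxying, only the part before the colon may be percent-encoded while the colon and action verb are kept verbatim. A minimal sketch of that rule, mirroring the logic in `_to_storage_url` further down (the helper name and sample path are hypothetical):

```python
# Sketch of the quoting rule used when proxying to the FastAPI-based storage
# service: percent-encode the path segments, but keep the trailing ":action"
# verb intact. Assumes the suffix layout used by _to_storage_url below.
import urllib.parse


def encode_storage_suffix(suffix: str) -> str:
    colon_index = suffix.find(":")
    if colon_index > 0:
        # quote everything before the colon; pass the ":action" tail through
        return urllib.parse.quote(suffix[:colon_index], safe="/") + suffix[colon_index:]
    return urllib.parse.quote(suffix, safe="/")


assert (
    encode_storage_suffix("locations/0/files/api/1 2/x:complete")
    == "locations/0/files/api/1%202/x:complete"
)
```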
+from servicelib.rabbitmq.rpc_interfaces.storage.data_export import start_data_export +from servicelib.rabbitmq.rpc_interfaces.storage.paths import ( + compute_path_size as remote_compute_path_size, +) +from servicelib.request_keys import RQT_USERID_KEY +from servicelib.rest_responses import unwrap_envelope +from yarl import URL + +from .._meta import API_VTAG +from ..login.decorators import login_required +from ..models import RequestContext +from ..rabbitmq import get_rabbitmq_rpc_client +from ..security.decorators import permission_required +from ..tasks._exception_handlers import handle_data_export_exceptions +from .schemas import StorageFileIDStr +from .settings import StorageSettings, get_plugin_settings + +log = logging.getLogger(__name__) + + +def _get_base_storage_url(app: web.Application) -> URL: + settings: StorageSettings = get_plugin_settings(app) + return URL(settings.base_url, encoded=True) + + +def _get_storage_vtag(app: web.Application) -> str: + settings: StorageSettings = get_plugin_settings(app) + storage_prefix: str = settings.STORAGE_VTAG + return storage_prefix + + +def _to_storage_url(request: web.Request) -> URL: + """Converts web-api url to storage-api url""" + userid = request[RQT_USERID_KEY] + + # storage service API endpoint + url = _get_base_storage_url(request.app) + + basepath_index = 3 + # strip basepath from webserver API path (i.e. webserver api version) + # >>> URL('http://storage:1234/v5/storage/asdf/').parts[3:] + suffix = "/".join(request.url.parts[basepath_index:]) + # we need to quote anything before the colon, but not the colon itself + if (colon_index := suffix.find(":")) > 0: + fastapi_encoded_suffix = ( + urllib.parse.quote(suffix[:colon_index], safe="/") + suffix[colon_index:] + ) + else: + fastapi_encoded_suffix = urllib.parse.quote(suffix, safe="/") + + return ( + url.joinpath(fastapi_encoded_suffix, encoded=True) + .with_query({camel_to_snake(k): v for k, v in request.query.items()}) + .update_query(user_id=userid) + ) + + +def _from_storage_url( + request: web.Request, storage_url: AnyUrl, url_encode: str | None +) -> AnyUrl: + """Converts storage-api url to web-api url""" + assert storage_url.path # nosec + + prefix = f"/{_get_storage_vtag(request.app)}" + converted_url = str( + request.url.with_path( + f"/v0/storage{storage_url.path.removeprefix(prefix)}", encoded=True + ).with_scheme(request.headers.get(X_FORWARDED_PROTO, request.url.scheme)) + ) + if url_encode: + converted_url = converted_url.replace( + url_encode, quote(unquote(url_encode), safe="") + ) + + webserver_url: AnyUrl = TypeAdapter(AnyUrl).validate_python(f"{converted_url}") + return webserver_url + + +class _ResponseTuple(NamedTuple): + payload: Any + status_code: int + + +async def _forward_request_to_storage( + request: web.Request, + method: str, + body: dict[str, Any] | None = None, + **kwargs, +) -> _ResponseTuple: + url = _to_storage_url(request) + session = get_client_session(request.app) + + async with session.request( + method.upper(), url, ssl=False, json=body, **kwargs + ) as resp: + match resp.status: + case status.HTTP_422_UNPROCESSABLE_ENTITY: + raise web.HTTPUnprocessableEntity( + reason=await resp.text(), content_type=resp.content_type + ) + case status.HTTP_404_NOT_FOUND: + raise web.HTTPNotFound(reason=await resp.text()) + case _ if resp.status >= status.HTTP_400_BAD_REQUEST: + raise web.HTTPError(reason=await resp.text()) + case _: + payload = await resp.json() + return _ResponseTuple(payload=payload, status_code=resp.status) + + #
--------------------------------------------------------------------- + +routes = web.RouteTableDef() +_storage_prefix = f"/{API_VTAG}/storage" +_storage_locations_prefix = f"{_storage_prefix}/locations" + + +@routes.get(_storage_locations_prefix, name="list_storage_locations") +@login_required +@permission_required("storage.files.*") +async def list_storage_locations(request: web.Request) -> web.Response: + payload, resp_status = await _forward_request_to_storage(request, "GET", body=None) + return create_data_response(payload, status=resp_status) + + +@routes.get( + f"{_storage_locations_prefix}/{{location_id}}/paths", name="list_storage_paths" +) +@login_required +@permission_required("storage.files.*") +async def list_paths(request: web.Request) -> web.Response: + payload, resp_status = await _forward_request_to_storage(request, "GET", body=None) + return create_data_response(payload, status=resp_status) + + +@routes.post( + f"{_storage_locations_prefix}/{{location_id}}/paths/{{path}}:size", + name="compute_path_size", +) +@login_required +@permission_required("storage.files.*") +async def compute_path_size(request: web.Request) -> web.Response: + req_ctx = RequestContext.model_validate(request) + path_params = parse_request_path_parameters_as( + StoragePathComputeSizeParams, request + ) + + rabbitmq_rpc_client = get_rabbitmq_rpc_client(request.app) + async_job, _ = await remote_compute_path_size( + rabbitmq_rpc_client, + user_id=req_ctx.user_id, + product_name=req_ctx.product_name, + location_id=path_params.location_id, + path=path_params.path, + ) + + _job_id = f"{async_job.job_id}" + return create_data_response( + TaskGet( + task_id=_job_id, + task_name=_job_id, + status_href=f"{request.url.with_path(str(request.app.router['get_async_job_status'].url_for(task_id=_job_id)))}", + abort_href=f"{request.url.with_path(str(request.app.router['abort_async_job'].url_for(task_id=_job_id)))}", + result_href=f"{request.url.with_path(str(request.app.router['get_async_job_result'].url_for(task_id=_job_id)))}", + ), + status=status.HTTP_202_ACCEPTED, + ) + + +@routes.get( + _storage_locations_prefix + "/{location_id}/datasets", name="list_datasets_metadata" +) +@login_required +@permission_required("storage.files.*") +async def list_datasets_metadata(request: web.Request) -> web.Response: + class _PathParams(BaseModel): + location_id: LocationID + + parse_request_path_parameters_as(_PathParams, request) + + payload, resp_status = await _forward_request_to_storage(request, "GET", body=None) + return create_data_response(payload, status=resp_status) + + +@routes.get( + _storage_locations_prefix + "/{location_id}/files/metadata", + name="get_files_metadata", +) +@login_required +@permission_required("storage.files.*") +async def get_files_metadata(request: web.Request) -> web.Response: + class _PathParams(BaseModel): + location_id: LocationID + + parse_request_path_parameters_as(_PathParams, request) + + class _QueryParams(BaseModel): + uuid_filter: str = "" + expand_dirs: bool = True + + parse_request_query_parameters_as(_QueryParams, request) + + payload, resp_status = await _forward_request_to_storage(request, "GET", body=None) + return create_data_response(payload, status=resp_status) + + +_LIST_ALL_DATASETS_TIMEOUT_S: Final[int] = 60 + + +@routes.get( + _storage_locations_prefix + "/{location_id}/datasets/{dataset_id}/metadata", + name="list_dataset_files_metadata", +) +@login_required +@permission_required("storage.files.*") +async def list_dataset_files_metadata(request: web.Request) -> 
web.Response: + class _PathParams(BaseModel): + location_id: LocationID + dataset_id: str + + parse_request_path_parameters_as(_PathParams, request) + + class _QueryParams(BaseModel): + uuid_filter: str = "" + expand_dirs: bool = True + + parse_request_query_parameters_as(_QueryParams, request) + + payload, resp_status = await _forward_request_to_storage( + request, + "GET", + body=None, + timeout=ClientTimeout(total=_LIST_ALL_DATASETS_TIMEOUT_S), + ) + return create_data_response(payload, status=resp_status) + + +@routes.get( + _storage_locations_prefix + "/{location_id}/files/{file_id}/metadata", + name="get_file_metadata", +) +@login_required +@permission_required("storage.files.*") +async def get_file_metadata(request: web.Request) -> web.Response: + class _PathParams(BaseModel): + location_id: LocationID + file_id: StorageFileIDStr + + parse_request_path_parameters_as(_PathParams, request) + + payload, resp_status = await _forward_request_to_storage(request, "GET") + return create_data_response(payload, status=resp_status) + + +@routes.get( + _storage_locations_prefix + "/{location_id}/files/{file_id}", + name="download_file", +) +@login_required +@permission_required("storage.files.*") +async def download_file(request: web.Request) -> web.Response: + class _PathParams(BaseModel): + location_id: LocationID + file_id: StorageFileIDStr + + parse_request_path_parameters_as(_PathParams, request) + + class _QueryParams(BaseModel): + link_type: LinkType = LinkType.PRESIGNED + + parse_request_query_parameters_as(_QueryParams, request) + + payload, resp_status = await _forward_request_to_storage(request, "GET", body=None) + return create_data_response(payload, status=resp_status) + + +@routes.put( + _storage_locations_prefix + "/{location_id}/files/{file_id}", + name="upload_file", +) +@login_required +@permission_required("storage.files.*") +async def upload_file(request: web.Request) -> web.Response: + class _PathParams(BaseModel): + location_id: LocationID + file_id: StorageFileIDStr + + path_params = parse_request_path_parameters_as(_PathParams, request) + + class _QueryParams(BaseModel): + file_size: ByteSize | None = None + link_type: LinkType = LinkType.PRESIGNED + is_directory: bool = False + + parse_request_query_parameters_as(_QueryParams, request) + + payload, resp_status = await _forward_request_to_storage(request, "PUT", body=None) + data, _ = unwrap_envelope(payload) + file_upload_schema = FileUploadSchema.model_validate(data) + # NOTE: since storage is fastapi-based it returns file_id not url encoded and aiohttp does not like it + # /v0/locations/{location_id}/files/{file_id:non-encoded-containing-slashes}:complete --> /v0/storage/locations/{location_id}/files/{file_id:non-encode}:complete + storage_encoded_file_id = quote(path_params.file_id, safe="/") + file_upload_schema.links.complete_upload = _from_storage_url( + request, + file_upload_schema.links.complete_upload, + url_encode=storage_encoded_file_id, + ) + file_upload_schema.links.abort_upload = _from_storage_url( + request, + file_upload_schema.links.abort_upload, + url_encode=storage_encoded_file_id, + ) + return create_data_response( + jsonable_encoder(file_upload_schema), status=resp_status + ) + + +@routes.post( + _storage_locations_prefix + "/{location_id}/files/{file_id}:complete", + name="complete_upload_file", +) +@login_required +@permission_required("storage.files.*") +async def complete_upload_file(request: web.Request) -> web.Response: + class _PathParams(BaseModel): + location_id: LocationID + file_id: 
StorageFileIDStr + + path_params = parse_request_path_parameters_as(_PathParams, request) + body_item = await parse_request_body_as(FileUploadCompletionBody, request) + + payload, resp_status = await _forward_request_to_storage( + request, "POST", body=body_item.model_dump() + ) + data, _ = unwrap_envelope(payload) + storage_encoded_file_id = quote(path_params.file_id, safe="/") + file_upload_complete = FileUploadCompleteResponse.model_validate(data) + file_upload_complete.links.state = _from_storage_url( + request, file_upload_complete.links.state, url_encode=storage_encoded_file_id + ) + return create_data_response( + jsonable_encoder(file_upload_complete), status=resp_status + ) + + +@routes.post( + _storage_locations_prefix + "/{location_id}/files/{file_id}:abort", + name="abort_upload_file", +) +@login_required +@permission_required("storage.files.*") +async def abort_upload_file(request: web.Request) -> web.Response: + class _PathParams(BaseModel): + location_id: LocationID + file_id: StorageFileIDStr + + parse_request_path_parameters_as(_PathParams, request) + + payload, resp_status = await _forward_request_to_storage(request, "POST", body=None) + return create_data_response(payload, status=resp_status) + + +@routes.post( + _storage_locations_prefix + + "/{location_id}/files/{file_id}:complete/futures/{future_id}", + name="is_completed_upload_file", +) +@login_required +@permission_required("storage.files.*") +async def is_completed_upload_file(request: web.Request) -> web.Response: + class _PathParams(BaseModel): + location_id: LocationID + file_id: StorageFileIDStr + future_id: str + + parse_request_path_parameters_as(_PathParams, request) + + payload, resp_status = await _forward_request_to_storage(request, "POST", body=None) + return create_data_response(payload, status=resp_status) + + +@routes.delete( + _storage_locations_prefix + "/{location_id}/files/{file_id}", + name="delete_file", +) +@login_required +@permission_required("storage.files.*") +async def delete_file(request: web.Request) -> web.Response: + class _PathParams(BaseModel): + location_id: LocationID + file_id: StorageFileIDStr + + parse_request_path_parameters_as(_PathParams, request) + + payload, resp_status = await _forward_request_to_storage( + request, "DELETE", body=None + ) + return create_data_response(payload, status=resp_status) + + +@routes.post( + _storage_locations_prefix + "/{location_id}/export-data", name="export_data" +) +@login_required +@permission_required("storage.files.*") +@handle_data_export_exceptions +async def export_data(request: web.Request) -> web.Response: + class _PathParams(BaseModel): + location_id: LocationID + + rabbitmq_rpc_client = get_rabbitmq_rpc_client(request.app) + _req_ctx = RequestContext.model_validate(request) + _path_params = parse_request_path_parameters_as(_PathParams, request) + data_export_post = await parse_request_body_as( + model_schema_cls=DataExportPost, request=request + ) + async_job_rpc_get = await start_data_export( + rabbitmq_rpc_client=rabbitmq_rpc_client, + job_id_data=AsyncJobNameData( + user_id=_req_ctx.user_id, product_name=_req_ctx.product_name + ), + data_export_start=data_export_post.to_rpc_schema( + location_id=_path_params.location_id, + ), + ) + _job_id = f"{async_job_rpc_get.job_id}" + return create_data_response( + TaskGet( + task_id=_job_id, + task_name=_job_id, + status_href=f"{request.url.with_path(str(request.app.router['get_async_job_status'].url_for(task_id=_job_id)))}", + 
abort_href=f"{request.url.with_path(str(request.app.router['abort_async_job'].url_for(task_id=_job_id)))}", + result_href=f"{request.url.with_path(str(request.app.router['get_async_job_result'].url_for(task_id=_job_id)))}", + ), + status=status.HTTP_202_ACCEPTED, + ) diff --git a/services/web/server/src/simcore_service_webserver/storage/api.py b/services/web/server/src/simcore_service_webserver/storage/api.py index 8e1ad334beb..9d65ac3faf3 100644 --- a/services/web/server/src/simcore_service_webserver/storage/api.py +++ b/services/web/server/src/simcore_service_webserver/storage/api.py @@ -1,6 +1,4 @@ -""" Storage subsystem's API: responsible of communication with storage service - -""" +"""Storage subsystem's API: responsible of communication with storage service""" import asyncio import logging @@ -9,7 +7,7 @@ from typing import Any, Final from aiohttp import ClientError, ClientSession, ClientTimeout, web -from models_library.api_schemas_storage import ( +from models_library.api_schemas_storage.storage_schemas import ( FileLocation, FileLocationArray, FileMetaDataGet, @@ -48,7 +46,7 @@ def _get_storage_client(app: web.Application) -> tuple[ClientSession, URL]: return session, endpoint -async def get_storage_locations( +async def list_storage_locations( app: web.Application, user_id: UserID ) -> FileLocationArray: _logger.debug("getting %s accessible locations...", f"{user_id=}") diff --git a/services/web/server/src/simcore_service_webserver/storage/plugin.py b/services/web/server/src/simcore_service_webserver/storage/plugin.py index 104a9c37319..e0c17eb8b0f 100644 --- a/services/web/server/src/simcore_service_webserver/storage/plugin.py +++ b/services/web/server/src/simcore_service_webserver/storage/plugin.py @@ -7,9 +7,9 @@ from aiohttp import web from servicelib.aiohttp.application_setup import ModuleCategory, app_module_setup -from .._constants import APP_SETTINGS_KEY +from ..constants import APP_SETTINGS_KEY from ..rest.plugin import setup_rest -from . import _handlers +from . 
import _rest _logger = logging.getLogger(__name__) @@ -21,4 +21,4 @@ def setup_storage(app: web.Application): assert app[APP_SETTINGS_KEY].WEBSERVER_STORAGE # nosec setup_rest(app) - app.router.add_routes(_handlers.routes) + app.router.add_routes(_rest.routes) diff --git a/services/web/server/src/simcore_service_webserver/storage/schemas.py b/services/web/server/src/simcore_service_webserver/storage/schemas.py index 26381218c0e..9840e89e225 100644 --- a/services/web/server/src/simcore_service_webserver/storage/schemas.py +++ b/services/web/server/src/simcore_service_webserver/storage/schemas.py @@ -1,7 +1,6 @@ from enum import Enum from typing import Any, TypeAlias -from models_library.api_schemas_storage import TableSynchronisation from pydantic import BaseModel, ConfigDict, Field, RootModel # NOTE: storage generates URLs that contain double encoded @@ -79,11 +78,6 @@ class FileLocationEnveloped(BaseModel): error: Any | None = None -class TableSynchronisationEnveloped(BaseModel): - data: TableSynchronisation - error: Any - - class FileUploadEnveloped(BaseModel): data: FileUploadSchema error: Any diff --git a/services/web/server/src/simcore_service_webserver/storage/settings.py b/services/web/server/src/simcore_service_webserver/storage/settings.py index 04ac00f61c3..38d9befd914 100644 --- a/services/web/server/src/simcore_service_webserver/storage/settings.py +++ b/services/web/server/src/simcore_service_webserver/storage/settings.py @@ -6,7 +6,7 @@ from settings_library.utils_service import DEFAULT_AIOHTTP_PORT, MixinServiceSettings from yarl import URL -from .._constants import APP_SETTINGS_KEY +from ..constants import APP_SETTINGS_KEY class StorageSettings(BaseCustomSettings, MixinServiceSettings): diff --git a/services/web/server/src/simcore_service_webserver/studies_dispatcher/_projects.py b/services/web/server/src/simcore_service_webserver/studies_dispatcher/_projects.py index 788ca886593..a4adbf8e576 100644 --- a/services/web/server/src/simcore_service_webserver/studies_dispatcher/_projects.py +++ b/services/web/server/src/simcore_service_webserver/studies_dispatcher/_projects.py @@ -1,8 +1,8 @@ -""" Projects management +"""Projects management - Keeps functionality that couples with the following app modules - - projects - - TMP: add_new_project includes to projects and director_v2 app modules! +Keeps functionality that couples with the following app modules + - projects + - TMP: add_new_project includes to projects and director_v2 app modules! 
""" @@ -12,18 +12,18 @@ from typing import NamedTuple from aiohttp import web +from models_library.api_schemas_webserver.projects_ui import StudyUI from models_library.projects import DateTimeStr, Project, ProjectID from models_library.projects_access import AccessRights, GroupIDStr from models_library.projects_nodes import Node from models_library.projects_nodes_io import DownloadLink, NodeID, PortLink -from models_library.projects_ui import StudyUI from models_library.services import ServiceKey, ServiceVersion from pydantic import AnyUrl, HttpUrl, TypeAdapter from servicelib.logging_utils import log_decorator -from ..projects.db import ProjectDBAPI +from ..projects._projects_repository_legacy import ProjectDBAPI +from ..projects._projects_service import get_project_for_user from ..projects.exceptions import ProjectInvalidRightsError, ProjectNotFoundError -from ..projects.projects_service import get_project_for_user from ..utils import now_str from ._core import compose_uuid_from from ._models import FileParams, ServiceInfo, ViewerInfo @@ -97,12 +97,16 @@ def _create_project( name=name, description=description, thumbnail=thumbnail, - prjOwner=owner.email, - accessRights={GroupIDStr(owner.primary_gid): access_rights}, - creationDate=DateTimeStr(now_str()), - lastChangeDate=DateTimeStr(now_str()), + prj_owner=owner.email, + access_rights={GroupIDStr(owner.primary_gid): access_rights}, + creation_date=DateTimeStr(now_str()), + last_change_date=DateTimeStr(now_str()), workbench=workbench, - ui=StudyUI(workbench=workbench_ui), # type: ignore[arg-type] + ui=StudyUI.model_validate( + { + "workbench": workbench_ui, + } + ).model_dump(mode="json", exclude_unset=True), ) @@ -184,8 +188,8 @@ async def _add_new_project( # TODO: move this to projects_api # TODO: this piece was taken from the end of projects.projects_handlers.create_projects - from ..director_v2.api import create_or_update_pipeline - from ..projects.db import APP_PROJECT_DBAPI + from ..director_v2.director_v2_service import create_or_update_pipeline + from ..projects._projects_repository_legacy import APP_PROJECT_DBAPI db: ProjectDBAPI = app[APP_PROJECT_DBAPI] diff --git a/services/web/server/src/simcore_service_webserver/studies_dispatcher/_projects_permalinks.py b/services/web/server/src/simcore_service_webserver/studies_dispatcher/_projects_permalinks.py index 406982190ec..362bb7509b8 100644 --- a/services/web/server/src/simcore_service_webserver/studies_dispatcher/_projects_permalinks.py +++ b/services/web/server/src/simcore_service_webserver/studies_dispatcher/_projects_permalinks.py @@ -11,8 +11,11 @@ ) from ..db.plugin import get_database_engine -from ..projects.api import ProjectPermalink, register_permalink_factory from ..projects.exceptions import PermalinkNotAllowedError, ProjectNotFoundError +from ..projects.projects_permalink_service import ( + ProjectPermalink, + register_permalink_factory, +) from ..utils_aiohttp import create_url_for_function from .settings import StudiesDispatcherSettings diff --git a/services/web/server/src/simcore_service_webserver/studies_dispatcher/_redirects_handlers.py b/services/web/server/src/simcore_service_webserver/studies_dispatcher/_redirects_handlers.py index 060cea75a4d..0a0d37ef17b 100644 --- a/services/web/server/src/simcore_service_webserver/studies_dispatcher/_redirects_handlers.py +++ b/services/web/server/src/simcore_service_webserver/studies_dispatcher/_redirects_handlers.py @@ -1,6 +1,4 @@ -""" Handles request to the viewers redirection entrypoints - -""" +"""Handles request to 
the viewers redirection entrypoints""" import functools import logging @@ -18,8 +16,8 @@ from servicelib.aiohttp.typing_extension import Handler from servicelib.logging_errors import create_troubleshotting_log_kwargs -from ..dynamic_scheduler import api as dynamic_scheduler_api -from ..products.api import get_product_name +from ..dynamic_scheduler import api as dynamic_scheduler_service +from ..products import products_web from ..utils import compose_support_error_msg from ..utils_aiohttp import create_redirect_to_page_response from ._catalog import ValidService, validate_requested_service @@ -169,8 +167,7 @@ def ensure_extension_upper_and_dotless(cls, v): return v -class ServiceAndFileParams(FileQueryParams, ServiceParams): - ... +class ServiceAndFileParams(FileQueryParams, ServiceParams): ... class ViewerQueryParams(BaseModel): @@ -205,6 +202,7 @@ def ensure_extension_upper_and_dotless(cls, v): | ServiceQueryParams ) + # # API HANDLERS # @@ -250,9 +248,9 @@ async def get_redirection_to_viewer(request: web.Request): user, viewer, file_params.download_link, - product_name=get_product_name(request), + product_name=products_web.get_product_name(request), ) - await dynamic_scheduler_api.update_projects_networks( + await dynamic_scheduler_service.update_projects_networks( request.app, project_id=project_id ) @@ -281,9 +279,9 @@ async def get_redirection_to_viewer(request: web.Request): request.app, user, service_info=_create_service_info_from(valid_service), - product_name=get_product_name(request), + product_name=products_web.get_product_name(request), ) - await dynamic_scheduler_api.update_projects_networks( + await dynamic_scheduler_service.update_projects_networks( request.app, project_id=project_id ) @@ -319,9 +317,9 @@ async def get_redirection_to_viewer(request: web.Request): project_thumbnail=get_plugin_settings( app=request.app ).STUDIES_DEFAULT_FILE_THUMBNAIL, - product_name=get_product_name(request), + product_name=products_web.get_product_name(request), ) - await dynamic_scheduler_api.update_projects_networks( + await dynamic_scheduler_service.update_projects_networks( request.app, project_id=project_id ) diff --git a/services/web/server/src/simcore_service_webserver/studies_dispatcher/_rest_handlers.py b/services/web/server/src/simcore_service_webserver/studies_dispatcher/_rest_handlers.py index b003ad55963..943893972fe 100644 --- a/services/web/server/src/simcore_service_webserver/studies_dispatcher/_rest_handlers.py +++ b/services/web/server/src/simcore_service_webserver/studies_dispatcher/_rest_handlers.py @@ -19,7 +19,7 @@ from pydantic.networks import HttpUrl from .._meta import API_VTAG -from ..products.api import get_product_name +from ..products import products_web from ..utils_aiohttp import envelope_json_response from ._catalog import ServiceMetaData, iter_latest_product_services from ._core import list_viewers_info @@ -163,7 +163,7 @@ def remove_dot_prefix_from_extension(cls, v): @routes.get(f"/{API_VTAG}/services", name="list_latest_services") async def list_latest_services(request: Request): """Returns a list latest version of services""" - product_name = get_product_name(request) + product_name = products_web.get_product_name(request) services = [] async for service_data in iter_latest_product_services( diff --git a/services/web/server/src/simcore_service_webserver/studies_dispatcher/_studies_access.py b/services/web/server/src/simcore_service_webserver/studies_dispatcher/_studies_access.py index 1dec4c84956..691f6c4df69 100644 --- 
a/services/web/server/src/simcore_service_webserver/studies_dispatcher/_studies_access.py +++ b/services/web/server/src/simcore_service_webserver/studies_dispatcher/_studies_access.py @@ -1,4 +1,4 @@ -""" handles access to *public* studies +"""handles access to *public* studies Handles a request to share a given sharable study via '/study/{id}' @@ -26,13 +26,13 @@ from servicelib.aiohttp.typing_extension import Handler from servicelib.logging_errors import create_troubleshotting_log_kwargs -from .._constants import INDEX_RESOURCE_NAME +from ..constants import INDEX_RESOURCE_NAME from ..director_v2._core_computations import create_or_update_pipeline -from ..dynamic_scheduler import api as dynamic_scheduler_api -from ..products.api import get_current_product, get_product_name -from ..projects._groups_db import get_project_group +from ..dynamic_scheduler import api as dynamic_scheduler_service +from ..products import products_web +from ..projects._groups_repository import get_project_group +from ..projects._projects_repository_legacy import ProjectDBAPI from ..projects.api import check_user_project_permission -from ..projects.db import ProjectDBAPI from ..projects.exceptions import ( ProjectGroupNotFoundError, ProjectInvalidRightsError, @@ -117,7 +117,7 @@ async def _get_published_template_project( err.debug_message(), ) - support_email = get_current_product(request).support_email + support_email = products_web.get_current_product(request).support_email if only_public_projects: raise RedirectToFrontEndPageError( MSG_PUBLIC_PROJECT_NOT_PUBLISHED.format(support_email=support_email), @@ -141,7 +141,7 @@ async def copy_study_to_account( - Replaces template parameters by values passed in query - Avoids multiple copies of the same template on each account """ - from ..projects.db import APP_PROJECT_DBAPI + from ..projects._projects_repository_legacy import APP_PROJECT_DBAPI from ..projects.utils import clone_project_document, substitute_parameterized_inputs db: ProjectDBAPI = request.config_dict[APP_PROJECT_DBAPI] @@ -185,7 +185,7 @@ async def copy_study_to_account( substitute_parameterized_inputs(project, template_parameters) or project ) # add project model + copy data TODO: guarantee order and atomicity - product_name = get_product_name(request) + product_name = products_web.get_product_name(request) await db.insert_project( project, user["id"], @@ -212,7 +212,7 @@ async def copy_study_to_account( await create_or_update_pipeline( request.app, user["id"], project["uuid"], product_name ) - await dynamic_scheduler_api.update_projects_networks( + await dynamic_scheduler_service.update_projects_networks( request.app, project_id=ProjectID(project["uuid"]) ) diff --git a/services/web/server/src/simcore_service_webserver/studies_dispatcher/_users.py b/services/web/server/src/simcore_service_webserver/studies_dispatcher/_users.py index 531759b062f..ea7b8fecf6c 100644 --- a/services/web/server/src/simcore_service_webserver/studies_dispatcher/_users.py +++ b/services/web/server/src/simcore_service_webserver/studies_dispatcher/_users.py @@ -29,7 +29,7 @@ from ..groups.api import auto_add_user_to_product_group from ..login.storage import AsyncpgStorage, get_plugin_storage from ..login.utils import ACTIVE, GUEST -from ..products.api import get_product_name +from ..products import products_web from ..redis import get_redis_lock_manager_client from ..security.api import ( check_user_authorized, @@ -103,7 +103,7 @@ async def create_temporary_guest_user(request: web.Request): db: AsyncpgStorage = 
get_plugin_storage(request.app) redis_locks_client: aioredis.Redis = get_redis_lock_manager_client(request.app) settings: StudiesDispatcherSettings = get_plugin_settings(app=request.app) - product_name = get_product_name(request) + product_name = products_web.get_product_name(request) random_user_name = "".join( secrets.choice(string.ascii_lowercase) for _ in range(10) diff --git a/services/web/server/src/simcore_service_webserver/tags/_rest.py b/services/web/server/src/simcore_service_webserver/tags/_rest.py index 7550c8343ed..ea39edd6c2a 100644 --- a/services/web/server/src/simcore_service_webserver/tags/_rest.py +++ b/services/web/server/src/simcore_service_webserver/tags/_rest.py @@ -8,11 +8,6 @@ TagNotFoundError, TagOperationNotAllowedError, ) -from simcore_service_webserver.tags.errors import ( - InsufficientTagShareAccessError, - ShareTagWithEveryoneNotAllowedError, - ShareTagWithProductGroupNotAllowedError, -) from .._meta import API_VTAG as VTAG from ..exception_handling import ( @@ -25,6 +20,11 @@ from ..security.decorators import permission_required from ..utils_aiohttp import envelope_json_response from . import _service +from .errors import ( + InsufficientTagShareAccessError, + ShareTagWithEveryoneNotAllowedError, + ShareTagWithProductGroupNotAllowedError, +) from .schemas import ( TagCreate, TagGroupCreate, diff --git a/services/web/server/src/simcore_service_webserver/tags/_service.py b/services/web/server/src/simcore_service_webserver/tags/_service.py index be73441f224..0c28c2a462f 100644 --- a/services/web/server/src/simcore_service_webserver/tags/_service.py +++ b/services/web/server/src/simcore_service_webserver/tags/_service.py @@ -11,7 +11,7 @@ from simcore_postgres_database.utils_tags import TagAccessRightsDict, TagsRepo from sqlalchemy.ext.asyncio import AsyncEngine -from ..products.api import list_products +from ..products import products_service from ..users.api import get_user_role from .errors import ( InsufficientTagShareAccessError, @@ -70,7 +70,7 @@ async def delete_tag(app: web.Application, user_id: UserID, tag_id: IdInt): def _is_product_group(app: web.Application, group_id: GroupID): - products = list_products(app) + products = products_service.list_products(app) return any(group_id == p.group_id for p in products) diff --git a/services/web/server/src/simcore_service_webserver/tags/errors.py b/services/web/server/src/simcore_service_webserver/tags/errors.py index 95fa3185972..579ed5ef125 100644 --- a/services/web/server/src/simcore_service_webserver/tags/errors.py +++ b/services/web/server/src/simcore_service_webserver/tags/errors.py @@ -1,8 +1,9 @@ +# pylint: disable=too-many-ancestors + from ..errors import WebServerBaseError -class TagsPermissionError(WebServerBaseError, PermissionError): - ... +class TagsPermissionError(WebServerBaseError, PermissionError): ... 
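# [editor's note] PermissionError subclasses like the ones in this file are
# converted to HTTP responses in tags/_rest.py through the webserver's
# exception_handling helpers (the same pattern appears for real in
# tasks/_exception_handlers.py further down). A hedged sketch of such a
# mapping -- status code and message here are illustrative only:

from servicelib.aiohttp import status

from ..exception_handling import (
    ExceptionToHttpErrorMap,
    HttpErrorInfo,
    exception_handling_decorator,
    to_exceptions_handlers_map,
)

_TO_HTTP_ERROR_MAP: ExceptionToHttpErrorMap = {
    TagsPermissionError: HttpErrorInfo(
        status.HTTP_403_FORBIDDEN,
        "You do not have enough rights for this tag operation",
    ),
}

_handle_tags_exceptions = exception_handling_decorator(
    to_exceptions_handlers_map(_TO_HTTP_ERROR_MAP)
)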
class ShareTagWithEveryoneNotAllowedError(TagsPermissionError): diff --git a/services/web/server/src/simcore_service_webserver/tasks/__init__.py b/services/web/server/src/simcore_service_webserver/tasks/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/services/web/server/src/simcore_service_webserver/tasks/_exception_handlers.py b/services/web/server/src/simcore_service_webserver/tasks/_exception_handlers.py new file mode 100644 index 00000000000..8e4f467cf47 --- /dev/null +++ b/services/web/server/src/simcore_service_webserver/tasks/_exception_handlers.py @@ -0,0 +1,60 @@ +from models_library.api_schemas_rpc_async_jobs.exceptions import ( + JobAbortedError, + JobError, + JobMissingError, + JobNotDoneError, + JobSchedulerError, + JobStatusError, +) +from models_library.api_schemas_storage.data_export_async_jobs import ( + AccessRightError, + InvalidFileIdentifierError, +) +from servicelib.aiohttp import status + +from ..exception_handling import ( + ExceptionToHttpErrorMap, + HttpErrorInfo, + exception_handling_decorator, + to_exceptions_handlers_map, +) + +_TO_HTTP_ERROR_MAP: ExceptionToHttpErrorMap = { + InvalidFileIdentifierError: HttpErrorInfo( + status.HTTP_404_NOT_FOUND, + "Could not find file {file_id}", + ), + AccessRightError: HttpErrorInfo( + status.HTTP_403_FORBIDDEN, + "Accessright error: user {user_id} does not have access to file {file_id} with location {location_id}", + ), + JobAbortedError: HttpErrorInfo( + status.HTTP_410_GONE, + "Task {job_id} is aborted", + ), + JobError: HttpErrorInfo( + status.HTTP_500_INTERNAL_SERVER_ERROR, + "Task {job_id} failed with exception type {exc_type} and message {exc_msg}", + ), + JobNotDoneError: HttpErrorInfo( + status.HTTP_404_NOT_FOUND, + "task {job_id} is not done yet", + ), + JobMissingError: HttpErrorInfo( + status.HTTP_404_NOT_FOUND, + "No task with id: {job_id}", + ), + JobSchedulerError: HttpErrorInfo( + status.HTTP_500_INTERNAL_SERVER_ERROR, + "Encountered an error with the task scheduling system", + ), + JobStatusError: HttpErrorInfo( + status.HTTP_500_INTERNAL_SERVER_ERROR, + "Encountered an error while getting the status of task {job_id}", + ), +} + + +handle_data_export_exceptions = exception_handling_decorator( + to_exceptions_handlers_map(_TO_HTTP_ERROR_MAP) +) diff --git a/services/web/server/src/simcore_service_webserver/tasks/_rest.py b/services/web/server/src/simcore_service_webserver/tasks/_rest.py new file mode 100644 index 00000000000..d1c74ce6da7 --- /dev/null +++ b/services/web/server/src/simcore_service_webserver/tasks/_rest.py @@ -0,0 +1,188 @@ +"""Handlers exposed by storage subsystem + +Mostly resolves and redirect to storage API +""" + +import logging +from typing import Final +from uuid import UUID + +from aiohttp import web +from models_library.api_schemas_long_running_tasks.base import TaskProgress +from models_library.api_schemas_long_running_tasks.tasks import ( + TaskGet, + TaskResult, + TaskStatus, +) +from models_library.api_schemas_rpc_async_jobs.async_jobs import ( + AsyncJobId, + AsyncJobNameData, +) +from models_library.api_schemas_storage import STORAGE_RPC_NAMESPACE +from models_library.generics import Envelope +from pydantic import BaseModel +from servicelib.aiohttp import status +from servicelib.aiohttp.client_session import get_client_session +from servicelib.aiohttp.requests_validation import ( + parse_request_path_parameters_as, +) +from servicelib.aiohttp.rest_responses import create_data_response +from servicelib.rabbitmq.rpc_interfaces.async_jobs import 
async_jobs + +from .._meta import API_VTAG +from ..login.decorators import login_required +from ..models import RequestContext +from ..rabbitmq import get_rabbitmq_rpc_client +from ..security.decorators import permission_required +from ._exception_handlers import handle_data_export_exceptions + +log = logging.getLogger(__name__) + + +routes = web.RouteTableDef() + +_task_prefix: Final[str] = f"/{API_VTAG}/tasks" + + +@routes.get( + _task_prefix, + name="get_async_jobs", +) +@login_required +@permission_required("storage.files.*") +@handle_data_export_exceptions +async def get_async_jobs(request: web.Request) -> web.Response: + session = get_client_session(request.app) + async with session.request( + "GET", + request.url.with_path(str(request.app.router["list_tasks"].url_for())), + cookies=request.cookies, + ) as resp: + if resp.status != status.HTTP_200_OK: + return web.Response( + status=resp.status, + body=await resp.read(), + content_type=resp.content_type, + ) + inprocess_tasks = ( + Envelope[list[TaskGet]].model_validate_json(await resp.text()).data + ) + assert inprocess_tasks is not None # nosec + + _req_ctx = RequestContext.model_validate(request) + + rabbitmq_rpc_client = get_rabbitmq_rpc_client(request.app) + + user_async_jobs = await async_jobs.list_jobs( + rabbitmq_rpc_client=rabbitmq_rpc_client, + rpc_namespace=STORAGE_RPC_NAMESPACE, + job_id_data=AsyncJobNameData( + user_id=_req_ctx.user_id, product_name=_req_ctx.product_name + ), + filter_="", + ) + return create_data_response( + [ + TaskGet( + task_id=f"{job.job_id}", + task_name=f"{job.job_id}", + status_href=f"{request.url.with_path(str(request.app.router['get_async_job_status'].url_for(task_id=str(job.job_id))))}", + abort_href=f"{request.url.with_path(str(request.app.router['abort_async_job'].url_for(task_id=str(job.job_id))))}", + result_href=f"{request.url.with_path(str(request.app.router['get_async_job_result'].url_for(task_id=str(job.job_id))))}", + ) + for job in user_async_jobs + ] + + inprocess_tasks, + status=status.HTTP_200_OK, + ) + + +class _StorageAsyncJobId(BaseModel): + task_id: AsyncJobId + + +@routes.get( + _task_prefix + "/{task_id}", + name="get_async_job_status", +) +@login_required +@handle_data_export_exceptions +async def get_async_job_status(request: web.Request) -> web.Response: + + _req_ctx = RequestContext.model_validate(request) + rabbitmq_rpc_client = get_rabbitmq_rpc_client(request.app) + + async_job_get = parse_request_path_parameters_as(_StorageAsyncJobId, request) + async_job_rpc_status = await async_jobs.status( + rabbitmq_rpc_client=rabbitmq_rpc_client, + rpc_namespace=STORAGE_RPC_NAMESPACE, + job_id=async_job_get.task_id, + job_id_data=AsyncJobNameData( + user_id=_req_ctx.user_id, product_name=_req_ctx.product_name + ), + ) + _task_id = f"{async_job_rpc_status.job_id}" + return create_data_response( + TaskStatus( + task_progress=TaskProgress( + task_id=_task_id, percent=async_job_rpc_status.progress.percent_value + ), + done=async_job_rpc_status.done, + started=None, + ), + status=status.HTTP_200_OK, + ) + + +@routes.delete( + _task_prefix + "/{task_id}", + name="abort_async_job", +) +@login_required +@permission_required("storage.files.*") +@handle_data_export_exceptions +async def abort_async_job(request: web.Request) -> web.Response: + + _req_ctx = RequestContext.model_validate(request) + + rabbitmq_rpc_client = get_rabbitmq_rpc_client(request.app) + async_job_get = parse_request_path_parameters_as(_StorageAsyncJobId, request) + await async_jobs.cancel( + 
rabbitmq_rpc_client=rabbitmq_rpc_client, + rpc_namespace=STORAGE_RPC_NAMESPACE, + job_id=async_job_get.task_id, + job_id_data=AsyncJobNameData( + user_id=_req_ctx.user_id, product_name=_req_ctx.product_name + ), + ) + return web.Response(status=status.HTTP_204_NO_CONTENT) + + +@routes.get( + _task_prefix + "/{task_id}/result", + name="get_async_job_result", +) +@login_required +@permission_required("storage.files.*") +@handle_data_export_exceptions +async def get_async_job_result(request: web.Request) -> web.Response: + class _PathParams(BaseModel): + task_id: UUID + + _req_ctx = RequestContext.model_validate(request) + + rabbitmq_rpc_client = get_rabbitmq_rpc_client(request.app) + async_job_get = parse_request_path_parameters_as(_PathParams, request) + async_job_rpc_result = await async_jobs.result( + rabbitmq_rpc_client=rabbitmq_rpc_client, + rpc_namespace=STORAGE_RPC_NAMESPACE, + job_id=async_job_get.task_id, + job_id_data=AsyncJobNameData( + user_id=_req_ctx.user_id, product_name=_req_ctx.product_name + ), + ) + + return create_data_response( + TaskResult(result=async_job_rpc_result.result, error=None), + status=status.HTTP_200_OK, + ) diff --git a/services/web/server/src/simcore_service_webserver/tasks/plugin.py b/services/web/server/src/simcore_service_webserver/tasks/plugin.py new file mode 100644 index 00000000000..e9bfdeea222 --- /dev/null +++ b/services/web/server/src/simcore_service_webserver/tasks/plugin.py @@ -0,0 +1,9 @@ +from aiohttp import web + +from ..rest.plugin import setup_rest +from . import _rest + + +def setup_tasks(app: web.Application): + setup_rest(app) + app.router.add_routes(_rest.routes) diff --git a/services/web/server/src/simcore_service_webserver/templates/common/reset_password_email_failed.jinja2 b/services/web/server/src/simcore_service_webserver/templates/common/reset_password_email_failed.jinja2 deleted file mode 100644 index 86a984dff35..00000000000 --- a/services/web/server/src/simcore_service_webserver/templates/common/reset_password_email_failed.jinja2 +++ /dev/null @@ -1,14 +0,0 @@ -Reset Password on {{ host }} - -

Dear {{ name }},
-
-A request to reset your {{ host }} password has been made.
-
-It could not be completed due to the following reason:
-
-{{ reason }}
-
-If you did not request this, please contact us immediately at {{ product.support_email }} for security reasons.
-
-Best Regards,
-
-The {{ product.display_name }} Team
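
For context, bodies like the removed template above are rendered with jinja2, substituting variables such as {{ host }}, {{ reason }} and {{ product.support_email }}. A minimal, self-contained sketch of that rendering step (names and values below are illustrative only):

    from types import SimpleNamespace

    from jinja2 import Template

    body = Template(
        "Dear {{ name }},\n\n"
        "A request to reset your {{ host }} password has been made.\n\n"
        "It could not be completed due to the following reason:\n\n"
        "{{ reason }}\n\n"
        "If in doubt, contact {{ product.support_email }}."
    ).render(
        name="Jane Doe",
        host="example.osparc.io",
        reason="this account is not active",
        product=SimpleNamespace(support_email="support@example.io"),
    )
    print(body)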

diff --git a/services/web/server/src/simcore_service_webserver/tracing.py b/services/web/server/src/simcore_service_webserver/tracing.py index 23041d95238..d07757106e8 100644 --- a/services/web/server/src/simcore_service_webserver/tracing.py +++ b/services/web/server/src/simcore_service_webserver/tracing.py @@ -5,8 +5,8 @@ from servicelib.aiohttp.tracing import setup_tracing from settings_library.tracing import TracingSettings -from ._constants import APP_SETTINGS_KEY from ._meta import APP_NAME +from .constants import APP_SETTINGS_KEY log = logging.getLogger(__name__) diff --git a/services/web/server/src/simcore_service_webserver/trash/_rest.py b/services/web/server/src/simcore_service_webserver/trash/_rest.py index f5912b042fe..d6971984086 100644 --- a/services/web/server/src/simcore_service_webserver/trash/_rest.py +++ b/services/web/server/src/simcore_service_webserver/trash/_rest.py @@ -1,7 +1,10 @@ +import asyncio import logging from aiohttp import web from servicelib.aiohttp import status +from servicelib.aiohttp.application_keys import APP_FIRE_AND_FORGET_TASKS_KEY +from servicelib.utils import fire_and_forget_task from .._meta import API_VTAG as VTAG from ..exception_handling import ( @@ -11,17 +14,13 @@ to_exceptions_handlers_map, ) from ..login.decorators import get_user_id, login_required -from ..products.api import get_product_name +from ..products import products_web from ..projects.exceptions import ProjectRunningConflictError, ProjectStoppingError from ..security.decorators import permission_required from . import _service _logger = logging.getLogger(__name__) -# -# EXCEPTIONS HANDLING -# - _TO_HTTP_ERROR_MAP: ExceptionToHttpErrorMap = { ProjectRunningConflictError: HttpErrorInfo( @@ -40,21 +39,34 @@ ) -# -# ROUTES -# - routes = web.RouteTableDef() -@routes.delete(f"/{VTAG}/trash", name="empty_trash") +@routes.post(f"/{VTAG}/trash:empty", name="empty_trash") @login_required @permission_required("project.delete") @_handle_exceptions async def empty_trash(request: web.Request): user_id = get_user_id(request) - product_name = get_product_name(request) + product_name = products_web.get_product_name(request) + + explicitly_trashed_project_deleted = asyncio.Event() + + fire_and_forget_task( + _service.safe_empty_trash( + request.app, + product_name=product_name, + user_id=user_id, + on_explicitly_trashed_projects_deleted=explicitly_trashed_project_deleted, + ), + task_suffix_name="rest.empty_trash", + fire_and_forget_tasks_collection=request.app[APP_FIRE_AND_FORGET_TASKS_KEY], + ) - await _service.empty_trash(request.app, product_name=product_name, user_id=user_id) + # NOTE: Ensures `fire_and_forget_task` is triggered and deletes explicit projects; + # otherwise, when the front-end requests the trash item list, + # it may still display items, misleading the user into + # thinking the `empty trash` operation failed. 
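    # [editor's note] The handshake around this point, reduced to a toy
    # asyncio.Event pattern (all names below are illustrative):
    #
    #     import asyncio
    #
    #     async def handler() -> None:
    #         visible_part_done = asyncio.Event()
    #
    #         async def background() -> None:
    #             ...  # 1) delete explicitly trashed items (user-visible)
    #             visible_part_done.set()
    #             ...  # 2) slower implicit cleanup continues unobserved
    #
    #         asyncio.create_task(background())  # fire-and-forget
    #         await visible_part_done.wait()  # respond only after step 1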
+ await explicitly_trashed_project_deleted.wait() return web.json_response(status=status.HTTP_204_NO_CONTENT) diff --git a/services/web/server/src/simcore_service_webserver/trash/_service.py b/services/web/server/src/simcore_service_webserver/trash/_service.py index cc94d680d64..1f38043faf5 100644 --- a/services/web/server/src/simcore_service_webserver/trash/_service.py +++ b/services/web/server/src/simcore_service_webserver/trash/_service.py @@ -1,36 +1,179 @@ import asyncio import logging from datetime import timedelta +from typing import Final +import arrow from aiohttp import web from models_library.products import ProductName from models_library.users import UserID +from servicelib.logging_errors import create_troubleshotting_log_kwargs +from servicelib.logging_utils import log_context +from ..folders import folders_trash_service +from ..products import products_service +from ..projects import projects_trash_service from .settings import get_plugin_settings _logger = logging.getLogger(__name__) +_TIP: Final[str] = ( + "`empty_trash_safe` is set `fail_fast=False`." + "\nErrors while deletion are ignored." + "\nNew runs might resolve them" +) -async def empty_trash(app: web.Application, product_name: ProductName, user_id: UserID): - assert app # nosec - # filter trashed=True and set them to False - _logger.debug( - "CODE PLACEHOLDER: all projects marked as trashed of %s in %s are deleted", - f"{user_id=}", - f"{product_name=}", + +async def _empty_explicitly_trashed_projects( + app: web.Application, product_name: ProductName, user_id: UserID +): + trashed_projects_ids = ( + await projects_trash_service.list_explicitly_trashed_projects( + app=app, product_name=product_name, user_id=user_id + ) ) - raise NotImplementedError + with log_context( + _logger, + logging.DEBUG, + "Deleting %s explicitly trashed projects", + len(trashed_projects_ids), + ): + for project_id in trashed_projects_ids: + try: + + await projects_trash_service.delete_explicitly_trashed_project( + app, + user_id=user_id, + project_id=project_id, + ) + + except Exception as exc: # pylint: disable=broad-exception-caught + _logger.warning( + **create_troubleshotting_log_kwargs( + "Error deleting a trashed project while emptying trash.", + error=exc, + error_context={ + "project_id": project_id, + "product_name": product_name, + "user_id": user_id, + }, + tip=_TIP, + ) + ) + + +async def _empty_explicitly_trashed_folders_and_content( + app: web.Application, product_name: ProductName, user_id: UserID +): + trashed_folders_ids = await folders_trash_service.list_explicitly_trashed_folders( + app=app, product_name=product_name, user_id=user_id + ) + + with log_context( + _logger, + logging.DEBUG, + "Deleting %s trashed folders (and all its content)", + len(trashed_folders_ids), + ): + for folder_id in trashed_folders_ids: + try: + await folders_trash_service.delete_trashed_folder( + app, + product_name=product_name, + user_id=user_id, + folder_id=folder_id, + ) + + except Exception as exc: # pylint: disable=broad-exception-caught + _logger.warning( + **create_troubleshotting_log_kwargs( + "Error deleting a trashed folders (and content) while emptying trash.", + error=exc, + error_context={ + "folder_id": folder_id, + "product_name": product_name, + "user_id": user_id, + }, + tip=_TIP, + ) + ) + + +async def safe_empty_trash( + app: web.Application, + *, + product_name: ProductName, + user_id: UserID, + on_explicitly_trashed_projects_deleted: asyncio.Event | None = None +): + # Delete explicitly trashed projects & notify + await 
_empty_explicitly_trashed_projects(app, product_name, user_id) + if on_explicitly_trashed_projects_deleted: + on_explicitly_trashed_projects_deleted.set() + + # Delete explicitly trashed folders (and all implicitly trashed sub-folders and projects) + await _empty_explicitly_trashed_folders_and_content(app, product_name, user_id) -async def prune_trash(app: web.Application) -> list[str]: - """Deletes expired items in the trash""" + +async def safe_delete_expired_trash_as_admin(app: web.Application) -> None: settings = get_plugin_settings(app) retention = timedelta(days=settings.TRASH_RETENTION_DAYS) + delete_until = arrow.now().datetime - retention - _logger.debug( - "CODE PLACEHOLDER: **ALL** projects marked as trashed during %s days are deleted", - retention, - ) - await asyncio.sleep(5) + app_products_names = await products_service.list_products_names(app) + + for product_name in app_products_names: + + ctx = { + "delete_until": delete_until, + "retention": retention, + "product_name": product_name, + } + + with log_context( + _logger, + logging.DEBUG, + "Deleting items marked as trashed before %s in %s [trashed_at < %s will be deleted]", + retention, + product_name, + delete_until, + ): + try: + + await folders_trash_service.batch_delete_trashed_folders_as_admin( + app, + trashed_before=delete_until, + product_name=product_name, + fail_fast=False, + ) + + except Exception as exc: # pylint: disable=broad-exception-caught + _logger.warning( + **create_troubleshotting_log_kwargs( + "Error batch deleting expired trashed folders as admin.", + error=exc, + error_context=ctx, + ) + ) + + try: + + deleted_project_ids = ( + await projects_trash_service.batch_delete_trashed_projects_as_admin( + app, + trashed_before=delete_until, + fail_fast=False, + ) + ) + + _logger.info("Deleted %d trashed projects", len(deleted_project_ids)) - return [] + except Exception as exc: # pylint: disable=broad-exception-caught + _logger.warning( + **create_troubleshotting_log_kwargs( + "Error batch deleting expired projects as admin.", + error=exc, + error_context=ctx, + ) + ) diff --git a/services/web/server/src/simcore_service_webserver/trash/plugin.py b/services/web/server/src/simcore_service_webserver/trash/plugin.py index a4cde641596..977a1c74884 100644 --- a/services/web/server/src/simcore_service_webserver/trash/plugin.py +++ b/services/web/server/src/simcore_service_webserver/trash/plugin.py @@ -8,7 +8,7 @@ from aiohttp import web from servicelib.aiohttp.application_setup import ModuleCategory, app_module_setup -from .._constants import APP_SETTINGS_KEY +from ..constants import APP_SETTINGS_KEY from ..folders.plugin import setup_folders from ..projects.plugin import setup_projects from ..workspaces.plugin import setup_workspaces diff --git a/services/web/server/src/simcore_service_webserver/trash/settings.py b/services/web/server/src/simcore_service_webserver/trash/settings.py index 38d4f91fdcb..f51832b9aa7 100644 --- a/services/web/server/src/simcore_service_webserver/trash/settings.py +++ b/services/web/server/src/simcore_service_webserver/trash/settings.py @@ -2,7 +2,7 @@ from pydantic import Field, NonNegativeInt from settings_library.base import BaseCustomSettings -from .._constants import APP_SETTINGS_KEY +from ..constants import APP_SETTINGS_KEY class TrashSettings(BaseCustomSettings): diff --git a/services/web/server/src/simcore_service_webserver/trash/trash_service.py b/services/web/server/src/simcore_service_webserver/trash/trash_service.py new file mode 100644 index 00000000000..3cd438f4e1c --- 
/dev/null +++ b/services/web/server/src/simcore_service_webserver/trash/trash_service.py @@ -0,0 +1,4 @@ +from ._service import safe_delete_expired_trash_as_admin + +__all__: tuple[str, ...] = ("safe_delete_expired_trash_as_admin",) +# nopycln: file diff --git a/services/web/server/src/simcore_service_webserver/users/_common/models.py b/services/web/server/src/simcore_service_webserver/users/_common/models.py index 513d8bed102..967f010d0b0 100644 --- a/services/web/server/src/simcore_service_webserver/users/_common/models.py +++ b/services/web/server/src/simcore_service_webserver/users/_common/models.py @@ -55,6 +55,7 @@ class ToUserUpdateDB(BaseModel): first_name: str | None = None last_name: str | None = None + privacy_hide_username: bool | None = None privacy_hide_fullname: bool | None = None privacy_hide_email: bool | None = None diff --git a/services/web/server/src/simcore_service_webserver/users/_common/schemas.py b/services/web/server/src/simcore_service_webserver/users/_common/schemas.py index 04946e21fcc..a76326182ae 100644 --- a/services/web/server/src/simcore_service_webserver/users/_common/schemas.py +++ b/services/web/server/src/simcore_service_webserver/users/_common/schemas.py @@ -18,7 +18,7 @@ from pydantic import BaseModel, ConfigDict, Field, field_validator, model_validator from servicelib.request_keys import RQT_USERID_KEY -from ..._constants import RQ_PRODUCT_KEY +from ...constants import RQ_PRODUCT_KEY class UsersRequestContext(BaseModel): diff --git a/services/web/server/src/simcore_service_webserver/users/_notifications_rest.py b/services/web/server/src/simcore_service_webserver/users/_notifications_rest.py index 2e243d4da90..65c427bf7b0 100644 --- a/services/web/server/src/simcore_service_webserver/users/_notifications_rest.py +++ b/services/web/server/src/simcore_service_webserver/users/_notifications_rest.py @@ -15,7 +15,7 @@ from .._meta import API_VTAG from ..login.decorators import login_required -from ..products.api import get_product_name +from ..products import products_web from ..redis import get_redis_user_notifications_client from ..security.decorators import permission_required from ..utils_aiohttp import envelope_json_response @@ -62,7 +62,7 @@ async def _get_user_notifications( async def list_user_notifications(request: web.Request) -> web.Response: redis_client = get_redis_user_notifications_client(request.app) req_ctx = UsersRequestContext.model_validate(request) - product_name = get_product_name(request) + product_name = products_web.get_product_name(request) notifications = await _get_user_notifications( redis_client, req_ctx.user_id, product_name ) diff --git a/services/web/server/src/simcore_service_webserver/users/_users_repository.py b/services/web/server/src/simcore_service_webserver/users/_users_repository.py index 16730437394..8f13169e147 100644 --- a/services/web/server/src/simcore_service_webserver/users/_users_repository.py +++ b/services/web/server/src/simcore_service_webserver/users/_users_repository.py @@ -5,6 +5,7 @@ from aiohttp import web from common_library.users_enums import UserRole from models_library.groups import GroupID +from models_library.products import ProductName from models_library.users import ( MyProfile, UserBillingDetails, @@ -58,8 +59,7 @@ def _public_user_cols(caller_id: int): return ( # Fits PublicUser model users.c.id.label("user_id"), - users.c.name.label("user_name"), - *visible_user_profile_cols(caller_id), + *visible_user_profile_cols(caller_id, username_label="user_name"), 
users.c.primary_gid.label("group_id"), ) @@ -102,7 +102,10 @@ async def search_public_user( query = ( sa.select(*_public_user_cols(caller_id=caller_id)) .where( - users.c.name.ilike(_pattern) + ( + is_public(users.c.privacy_hide_username, caller_id) + & users.c.name.ilike(_pattern) + ) | ( is_public(users.c.privacy_hide_email, caller_id) & users.c.email.ilike(_pattern) @@ -151,7 +154,10 @@ async def get_user_or_raise( async def get_user_primary_group_id( - engine: AsyncEngine, connection: AsyncConnection | None = None, *, user_id: UserID + engine: AsyncEngine, + connection: AsyncConnection | None = None, + *, + user_id: UserID, ) -> GroupID: async with pass_or_acquire_connection(engine, connection) as conn: primary_gid: GroupID | None = await conn.scalar( @@ -179,7 +185,7 @@ async def get_users_ids_in_group( return {row.uid async for row in result} -async def get_user_id_from_pgid(app: web.Application, primary_gid: int) -> UserID: +async def get_user_id_from_pgid(app: web.Application, *, primary_gid: int) -> UserID: async with pass_or_acquire_connection(engine=get_asyncpg_engine(app)) as conn: user_id: UserID = await conn.scalar( sa.select( @@ -386,13 +392,9 @@ async def get_user_products( .where(products.c.group_id == groups.c.gid) .label("product_name") ) - products_gis_subq = ( - sa.select( - products.c.group_id, - ) - .distinct() - .subquery() - ) + products_group_ids_subq = sa.select( + products.c.group_id, + ).distinct() query = ( sa.select( groups.c.gid, @@ -402,7 +404,7 @@ async def get_user_products( users.join(user_to_groups, user_to_groups.c.uid == users.c.id).join( groups, (groups.c.gid == user_to_groups.c.gid) - & groups.c.gid.in_(products_gis_subq), + & groups.c.gid.in_(products_group_ids_subq), ) ) .where(users.c.id == user_id) @@ -461,6 +463,32 @@ async def delete_user_by_id( return bool(deleted_user) +async def is_user_in_product_name( + engine: AsyncEngine, + connection: AsyncConnection | None = None, + *, + user_id: UserID, + product_name: ProductName, +) -> bool: + query = ( + sa.select(users.c.id) + .select_from( + users.join( + user_to_groups, + user_to_groups.c.uid == users.c.id, + ).join( + products, + products.c.group_id == user_to_groups.c.gid, + ) + ) + .where((users.c.id == user_id) & (products.c.name == product_name)) + ) + async with pass_or_acquire_connection(engine, connection) as conn: + value = await conn.scalar(query) + assert value is None or value == user_id # nosec + return value is not None + + # # USER PROFILE # @@ -480,6 +508,8 @@ async def get_my_profile(app: web.Application, *, user_id: UserID) -> MyProfile: users.c.email, users.c.role, sa.func.json_build_object( + "hide_username", + users.c.privacy_hide_username, "hide_fullname", users.c.privacy_hide_fullname, "hide_email", @@ -530,11 +560,12 @@ async def update_user_profile( ) except IntegrityError as err: - user_name = updated_values.get("name") - - raise UserNameDuplicateError( - user_name=user_name, - alternative_user_name=generate_alternative_username(user_name), - user_id=user_id, - updated_values=updated_values, - ) from err + if user_name := updated_values.get("name"): + raise UserNameDuplicateError( + user_name=user_name, + alternative_user_name=generate_alternative_username(user_name), + user_id=user_id, + updated_values=updated_values, + ) from err + + raise # not due to name duplication diff --git a/services/web/server/src/simcore_service_webserver/users/_users_rest.py b/services/web/server/src/simcore_service_webserver/users/_users_rest.py index d2ece688514..e89814e5e2d 100644 --- 
a/services/web/server/src/simcore_service_webserver/users/_users_rest.py +++ b/services/web/server/src/simcore_service_webserver/users/_users_rest.py @@ -15,8 +15,6 @@ parse_request_query_parameters_as, ) from servicelib.rest_constants import RESPONSE_MODEL_POLICY -from simcore_service_webserver.products._api import get_current_product -from simcore_service_webserver.products._model import Product from .._meta import API_VTAG from ..exception_handling import ( @@ -28,6 +26,8 @@ from ..groups import api as groups_api from ..groups.exceptions import GroupNotFoundError from ..login.decorators import login_required +from ..products import products_web +from ..products.models import Product from ..security.decorators import permission_required from ..utils_aiohttp import envelope_json_response from . import _users_service @@ -59,8 +59,7 @@ MissingGroupExtraPropertiesForProductError: HttpErrorInfo( status.HTTP_503_SERVICE_UNAVAILABLE, "The product is not ready for use until the configuration is fully completed. " - "Please wait and try again. " - "If this issue persists, contact support indicating this support code: {error_code}.", + "Please wait and try again. ", ), } @@ -81,7 +80,7 @@ @login_required @_handle_users_exceptions async def get_my_profile(request: web.Request) -> web.Response: - product: Product = get_current_product(request) + product: Product = products_web.get_current_product(request) req_ctx = UsersRequestContext.model_validate(request) groups_by_type = await groups_api.list_user_groups_with_read_access( @@ -114,9 +113,6 @@ async def get_my_profile(request: web.Request) -> web.Response: @routes.patch(f"/{API_VTAG}/me", name="update_my_profile") -@routes.put( - f"/{API_VTAG}/me", name="replace_my_profile" # deprecated. Use patch instead -) @login_required @permission_required("user.profile.update") @_handle_users_exceptions @@ -181,7 +177,10 @@ async def search_users_for_admin(request: web.Request) -> web.Response: ) return envelope_json_response( - [_.model_dump(**_RESPONSE_MODEL_MINIMAL_POLICY) for _ in found] + [ + user_for_admin.model_dump(**_RESPONSE_MODEL_MINIMAL_POLICY) + for user_for_admin in found + ] ) diff --git a/services/web/server/src/simcore_service_webserver/users/_users_service.py b/services/web/server/src/simcore_service_webserver/users/_users_service.py index 2bb52b85d57..5d71423646d 100644 --- a/services/web/server/src/simcore_service_webserver/users/_users_service.py +++ b/services/web/server/src/simcore_service_webserver/users/_users_service.py @@ -122,7 +122,7 @@ async def get_user_primary_group_id(app: web.Application, user_id: UserID) -> Gr async def get_user_id_from_gid(app: web.Application, primary_gid: GroupID) -> UserID: - return await _users_repository.get_user_id_from_pgid(app, primary_gid) + return await _users_repository.get_user_id_from_pgid(app, primary_gid=primary_gid) async def search_users( @@ -180,6 +180,14 @@ async def get_users_in_group(app: web.Application, *, gid: GroupID) -> set[UserI get_guest_user_ids_and_names = _users_repository.get_guest_user_ids_and_names +async def is_user_in_product( + app: web.Application, *, user_id: UserID, product_name: ProductName +) -> bool: + return await _users_repository.is_user_in_product_name( + get_asyncpg_engine(app), user_id=user_id, product_name=product_name + ) + + # # GET USER PROPERTIES # diff --git a/services/web/server/src/simcore_service_webserver/users/api.py b/services/web/server/src/simcore_service_webserver/users/api.py index 09ca7b757e6..0d00834c02d 100644 --- 
a/services/web/server/src/simcore_service_webserver/users/api.py +++ b/services/web/server/src/simcore_service_webserver/users/api.py @@ -14,13 +14,17 @@ get_user_primary_group_id, get_user_role, get_users_in_group, + is_user_in_product, set_user_as_deleted, update_expired_users, ) __all__: tuple[str, ...] = ( + "FullNameDict", + "UserDisplayAndIdNamesTuple", "delete_user_without_projects", "get_guest_user_ids_and_names", + "get_user", "get_user_credentials", "get_user_display_and_id_names", "get_user_fullname", @@ -29,11 +33,9 @@ "get_user_name_and_email", "get_user_primary_group_id", "get_user_role", - "get_user", "get_users_in_group", + "is_user_in_product", "set_user_as_deleted", "update_expired_users", - "FullNameDict", - "UserDisplayAndIdNamesTuple", ) # nopycln: file diff --git a/services/web/server/src/simcore_service_webserver/users/settings.py b/services/web/server/src/simcore_service_webserver/users/settings.py index 2b6b9f101ac..3800f55d635 100644 --- a/services/web/server/src/simcore_service_webserver/users/settings.py +++ b/services/web/server/src/simcore_service_webserver/users/settings.py @@ -3,7 +3,7 @@ from settings_library.base import BaseCustomSettings from settings_library.utils_service import MixinServiceSettings -from .._constants import APP_SETTINGS_KEY +from ..constants import APP_SETTINGS_KEY class UsersSettings(BaseCustomSettings, MixinServiceSettings): diff --git a/services/web/server/src/simcore_service_webserver/utils_aiohttp.py b/services/web/server/src/simcore_service_webserver/utils_aiohttp.py index bb60b8a1b8f..5a13e108201 100644 --- a/services/web/server/src/simcore_service_webserver/utils_aiohttp.py +++ b/services/web/server/src/simcore_service_webserver/utils_aiohttp.py @@ -14,7 +14,7 @@ from servicelib.rest_constants import RESPONSE_MODEL_POLICY from yarl import URL -from ._constants import INDEX_RESOURCE_NAME +from .constants import INDEX_RESOURCE_NAME _logger = logging.getLogger(__name__) diff --git a/services/web/server/src/simcore_service_webserver/utils_rate_limiting.py b/services/web/server/src/simcore_service_webserver/utils_rate_limiting.py index 8c1117300ca..2266170c5ac 100644 --- a/services/web/server/src/simcore_service_webserver/utils_rate_limiting.py +++ b/services/web/server/src/simcore_service_webserver/utils_rate_limiting.py @@ -1,11 +1,13 @@ +from collections.abc import Callable from dataclasses import dataclass -from datetime import datetime, timedelta +from datetime import UTC, datetime, timedelta from functools import wraps from math import ceil -from typing import Callable, NamedTuple +from typing import Final, NamedTuple from aiohttp.web_exceptions import HTTPTooManyRequests -from common_library.json_serialization import json_dumps +from models_library.rest_error import EnvelopedError, ErrorGet +from servicelib.aiohttp import status class RateLimitSetup(NamedTuple): @@ -13,7 +15,16 @@ class RateLimitSetup(NamedTuple): interval_seconds: float -def global_rate_limit_route(number_of_requests: int, interval_seconds: float): +MSG_TOO_MANY_REQUESTS: Final[str] = ( + "Requests are being made too frequently. Please wait a moment before trying again." +) + + +def global_rate_limit_route( + number_of_requests: int, + interval_seconds: float, + error_msg: str = MSG_TOO_MANY_REQUESTS, +): """ Limits the requests per given interval to this endpoint from all incoming sources. 
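As a usage sketch (the route, limits, and message below are illustrative, not part of this changeset), the extended factory decorates an aiohttp handler directly; all callers share one counter, so requests beyond the limit get a 429 whose body is the EnvelopedError shown above:

```python
# Hypothetical endpoint guarded by the refactored decorator: the 6th call
# within a 10s window receives HTTP 429 plus a Retry-After header.
from aiohttp import web

from simcore_service_webserver.utils_rate_limiting import global_rate_limit_route


@global_rate_limit_route(
    number_of_requests=5,
    interval_seconds=10,
    error_msg="Too many attempts. Please wait a moment before trying again.",
)
async def ping(request: web.Request) -> web.Response:
    return web.json_response({"data": "pong"})
```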
@@ -41,7 +52,7 @@ class _Context: @wraps(decorated_function) async def _wrapper(*args, **kwargs): - utc_now = datetime.utcnow() + utc_now = datetime.now(UTC) utc_now_timestamp = datetime.timestamp(utc_now) # reset counter & first time initialization @@ -61,16 +72,18 @@ async def _wrapper(*args, **kwargs): "Content-Type": "application/json", "Retry-After": f"{retry_after_sec}", }, - text=json_dumps( - { - "error": { - "logs": [{"message": "API rate limit exceeded."}], - "status": HTTPTooManyRequests.status_code, - } - } - ), + text=EnvelopedError( + error=ErrorGet( + message=error_msg, + status=status.HTTP_429_TOO_MANY_REQUESTS, + ) + ).model_dump_json(), ) + assert ( # nosec + HTTPTooManyRequests.status_code == status.HTTP_429_TOO_MANY_REQUESTS + ) + # increase counter and return original function call context.remaining -= 1 return await decorated_function(*args, **kwargs) diff --git a/services/web/server/src/simcore_service_webserver/version_control/_core.py b/services/web/server/src/simcore_service_webserver/version_control/_core.py deleted file mode 100644 index 860d124ce48..00000000000 --- a/services/web/server/src/simcore_service_webserver/version_control/_core.py +++ /dev/null @@ -1,155 +0,0 @@ -""" - A checkpoint is equivalent to a commit and can be tagged at the same time (*) - - Working copy - - HEAD revision - - (*) This is a concept introduced for the front-end to avoid using - more fine grained concepts as tags and commits directly -""" -import logging -from uuid import UUID - -from aiopg.sa.result import RowProxy -from pydantic import NonNegativeInt, PositiveInt, validate_call - -from .db import VersionControlRepository -from .errors import CleanRequiredError -from .models import Checkpoint, CommitLog, RefID, WorkbenchView - -_logger = logging.getLogger(__name__) - - -async def list_repos( - vc_repo: VersionControlRepository, - *, - offset: NonNegativeInt = 0, - limit: PositiveInt | None = None, -) -> tuple[list[RowProxy], PositiveInt]: - # NOTE: this layer does NOT add much .. why not use vc_repo directly? 
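For orientation, the removed core helpers all followed the same (items, total_count) pagination contract; a minimal sketch, assuming `vc_repo` is a connected VersionControlRepository (the module is deleted by this PR and imported here only for illustration):

```python
# Sketch of the pagination contract of the removed core layer: a page of
# rows plus the overall count, mirroring the assert inside list_repos.
from simcore_service_webserver.version_control._core import list_repos  # removed by this PR


async def show_first_page(vc_repo) -> None:
    repos_rows, total = await list_repos(vc_repo, offset=0, limit=20)
    assert len(repos_rows) <= 20  # a page never exceeds its limit
    assert len(repos_rows) <= total  # nor the total number of repos
```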
- repos_rows, total_number_of_repos = await vc_repo.list_repos(offset, limit) - - assert len(repos_rows) <= total_number_of_repos # nosec - return repos_rows, total_number_of_repos - - -async def list_checkpoints( - vc_repo: VersionControlRepository, - project_uuid: UUID, - *, - offset: NonNegativeInt = 0, - limit: PositiveInt | None = None, -) -> tuple[list[Checkpoint], PositiveInt]: - repo_id = await vc_repo.get_repo_id(project_uuid) - if not repo_id: - return [], 0 - - logs: list[CommitLog] - logs, total_number_of_commits = await vc_repo.log( - repo_id, offset=offset, limit=limit - ) - - checkpoints = [Checkpoint.from_commit_log(commit, tags) for commit, tags in logs] - assert len(checkpoints) <= limit if limit else True # nosec - assert total_number_of_commits > 0 # nosec - - return checkpoints, total_number_of_commits - - -async def create_checkpoint( - vc_repo: VersionControlRepository, - project_uuid: UUID, - *, - tag: str, - message: str | None = None, -) -> Checkpoint: - repo_id = await vc_repo.get_repo_id(project_uuid) - if repo_id is None: - repo_id = await vc_repo.init_repo(project_uuid) - - commit_id = await vc_repo.commit(repo_id, tag=tag, message=message) - commit, tags = await vc_repo.get_commit_log(commit_id) - assert commit # nosec - - return Checkpoint.from_commit_log(commit, tags) - - -async def get_checkpoint( - vc_repo: VersionControlRepository, - project_uuid: UUID, - ref_id: RefID, -) -> Checkpoint: - repo_id, commit_id = await vc_repo.as_repo_and_commit_ids(project_uuid, ref_id) - assert repo_id # nosec - - commit, tags = await vc_repo.get_commit_log(commit_id) - return Checkpoint.from_commit_log(commit, tags) - - -async def update_checkpoint( - vc_repo: VersionControlRepository, - project_uuid: UUID, - ref_id: RefID, - *, - message: str | None = None, - tag: str | None = None, -) -> Checkpoint: - repo_id, commit_id = await vc_repo.as_repo_and_commit_ids(project_uuid, ref_id) - - if message is None and tag is None: - _logger.warning( - "Nothing to update. Skipping updating ref %s of %s", ref_id, project_uuid - ) - else: - await vc_repo.update_annotations(repo_id, commit_id, message, tag) - - commit, tags = await vc_repo.get_commit_log(commit_id) - return Checkpoint.from_commit_log(commit, tags) - - -async def checkout_checkpoint( - vc_repo: VersionControlRepository, - project_uuid: UUID, - ref_id: RefID, -) -> Checkpoint: - repo_id, commit_id = await vc_repo.as_repo_and_commit_ids(project_uuid, ref_id) - - # check if working copy has changes, if so, auto commit it - try: - commit_id = await vc_repo.checkout(repo_id, commit_id) - except CleanRequiredError: - _logger.info("Local changes found. 
Auto-committing project %s", project_uuid) - await vc_repo.commit(repo_id, message="auto commit") - commit_id = await vc_repo.checkout(repo_id, commit_id) - - commit, tags = await vc_repo.get_commit_log(commit_id) - return Checkpoint.from_commit_log(commit, tags) - - -async def get_workbench( - vc_repo: VersionControlRepository, - project_uuid: UUID, - ref_id: RefID, -) -> WorkbenchView: - repo_id, commit_id = await vc_repo.as_repo_and_commit_ids(project_uuid, ref_id) - - # prefer actual project to snapshot - content = await vc_repo.get_workbench_view(repo_id, commit_id) - return WorkbenchView.model_validate(content) - - -# -# All above with validated arguments -# - -_CONFIG = {"arbitrary_types_allowed": True} - - -list_repos_safe = validate_call(list_repos, config=_CONFIG) # type: ignore -list_checkpoints_safe = validate_call(list_checkpoints, config=_CONFIG) # type: ignore -create_checkpoint_safe = validate_call(create_checkpoint, config=_CONFIG) # type: ignore -get_checkpoint_safe = validate_call(get_checkpoint, config=_CONFIG) # type: ignore -update_checkpoint_safe = validate_call(update_checkpoint, config=_CONFIG) # type: ignore -checkout_checkpoint_safe = validate_call(checkout_checkpoint, config=_CONFIG) # type: ignore -get_workbench_safe = validate_call(get_workbench, config=_CONFIG) # type: ignore diff --git a/services/web/server/src/simcore_service_webserver/version_control/_handlers.py b/services/web/server/src/simcore_service_webserver/version_control/_handlers.py deleted file mode 100644 index a0847ea34ea..00000000000 --- a/services/web/server/src/simcore_service_webserver/version_control/_handlers.py +++ /dev/null @@ -1,337 +0,0 @@ -import logging - -from aiohttp import web -from models_library.projects import ProjectID -from models_library.rest_pagination import Page, PageQueryParameters -from models_library.rest_pagination_utils import paginate_data -from pydantic import BaseModel, field_validator -from servicelib.aiohttp.requests_validation import ( - parse_request_body_as, - parse_request_path_parameters_as, - parse_request_query_parameters_as, -) -from servicelib.rest_constants import RESPONSE_MODEL_POLICY - -from .._meta import API_VTAG as VTAG -from ..login.decorators import login_required -from ..security.decorators import permission_required -from ..utils_aiohttp import create_url_for_function, envelope_json_response -from ._core import ( - checkout_checkpoint, - create_checkpoint, - get_checkpoint, - get_workbench, - list_checkpoints, - list_repos, - update_checkpoint, - ) -from ._handlers_base import handle_request_errors -from .db import VersionControlRepository -from .models import ( - HEAD, - Checkpoint, - CheckpointAnnotations, - CheckpointApiModel, - CheckpointNew, - RefID, - RepoApiModel, - WorkbenchView, - WorkbenchViewApiModel, -) - -_logger = logging.getLogger(__name__) - - -class _CheckpointsPathParam(BaseModel): - project_uuid: ProjectID - ref_id: RefID - - @field_validator("ref_id", mode="before") - @classmethod - def _normalize_refid(cls, v): - if v and v == "HEAD": - return HEAD - return v - - -class _ProjectPathParam(BaseModel): - project_uuid: ProjectID - - -routes = web.RouteTableDef() - - -@routes.get(f"/{VTAG}/repos/projects", name="list_repos") -@login_required -@permission_required("project.read") -@handle_request_errors -async def _list_repos_handler(request: web.Request): - url_for = create_url_for_function(request) - vc_repo = VersionControlRepository.create_from_request(request) - - query_params: PageQueryParameters = 
parse_request_query_parameters_as( - PageQueryParameters, request - ) - - repos_rows, total_number_of_repos = await list_repos( - vc_repo, offset=query_params.offset, limit=query_params.limit - ) - - assert len(repos_rows) <= query_params.limit # nosec - - # parse and validate - repos_list = [ - RepoApiModel.model_validate( - { - "url": url_for("list_repos"), - **dict(row.items()), - } - ) - for row in repos_rows - ] - - page = Page[RepoApiModel].model_validate( - paginate_data( - chunk=repos_list, - request_url=request.url, - total=total_number_of_repos, - limit=query_params.limit, - offset=query_params.offset, - ) - ) - return web.Response( - text=page.model_dump_json(**RESPONSE_MODEL_POLICY), - content_type="application/json", - ) - - -@routes.post( - f"/{VTAG}/repos/projects/{{project_uuid}}/checkpoints", name="create_checkpoint" -) -@login_required -@permission_required("project.create") -@handle_request_errors -async def _create_checkpoint_handler(request: web.Request): - url_for = create_url_for_function(request) - vc_repo = VersionControlRepository.create_from_request(request) - - path_params = parse_request_path_parameters_as(_ProjectPathParam, request) - _body = CheckpointNew.model_validate(await request.json()) - - checkpoint: Checkpoint = await create_checkpoint( - vc_repo, - project_uuid=path_params.project_uuid, - **_body.model_dump(include={"tag", "message"}), - ) - - data = CheckpointApiModel.model_validate( - { - "url": url_for( - "get_checkpoint", - project_uuid=path_params.project_uuid, - ref_id=checkpoint.id, - ), - **checkpoint.model_dump(), - } - ) - return envelope_json_response(data, status_cls=web.HTTPCreated) - - -@routes.get( - f"/{VTAG}/repos/projects/{{project_uuid}}/checkpoints", name="list_checkpoints" -) -@login_required -@permission_required("project.read") -@handle_request_errors -async def _list_checkpoints_handler(request: web.Request): - url_for = create_url_for_function(request) - vc_repo = VersionControlRepository.create_from_request(request) - - path_params = parse_request_path_parameters_as(_ProjectPathParam, request) - query_params: PageQueryParameters = parse_request_query_parameters_as( - PageQueryParameters, request - ) - - checkpoints: list[Checkpoint] - - checkpoints, total = await list_checkpoints( - vc_repo, - project_uuid=path_params.project_uuid, - offset=query_params.offset, - limit=query_params.limit, - ) - - # parse and validate - checkpoints_list = [ - CheckpointApiModel.model_validate( - { - "url": url_for( - "get_checkpoint", - project_uuid=path_params.project_uuid, - ref_id=checkpoint.id, - ), - **checkpoint.model_dump(), - } - ) - for checkpoint in checkpoints - ] - - page = Page[CheckpointApiModel].model_validate( - paginate_data( - chunk=checkpoints_list, - request_url=request.url, - total=total, - limit=query_params.limit, - offset=query_params.offset, - ) - ) - return web.Response( - text=page.model_dump_json(**RESPONSE_MODEL_POLICY), - content_type="application/json", - ) - - -# includes repos/projects/{project_uuid}/checkpoints/HEAD -@routes.get( - f"/{VTAG}/repos/projects/{{project_uuid}}/checkpoints/{{ref_id}}", - name="get_checkpoint", -) -@login_required -@permission_required("project.read") -@handle_request_errors -async def _get_checkpoint_handler(request: web.Request): - url_for = create_url_for_function(request) - vc_repo = VersionControlRepository.create_from_request(request) - - path_params = parse_request_path_parameters_as(_CheckpointsPathParam, request) - - checkpoint: Checkpoint = await get_checkpoint( - 
vc_repo, - project_uuid=path_params.project_uuid, - ref_id=path_params.ref_id, - ) - - data = CheckpointApiModel.model_validate( - { - "url": url_for( - "get_checkpoint", - project_uuid=path_params.project_uuid, - ref_id=checkpoint.id, - ), - **checkpoint.model_dump(**RESPONSE_MODEL_POLICY), - } - ) - return envelope_json_response(data) - - -@routes.patch( - f"/{VTAG}/repos/projects/{{project_uuid}}/checkpoints/{{ref_id}}", - name="update_checkpoint", -) -@login_required -@permission_required("project.update") -@handle_request_errors -async def _update_checkpoint_annotations_handler(request: web.Request): - url_for = create_url_for_function(request) - vc_repo = VersionControlRepository.create_from_request(request) - - path_params = parse_request_path_parameters_as(_CheckpointsPathParam, request) - update = await parse_request_body_as(CheckpointAnnotations, request) - - assert isinstance(path_params.ref_id, int) - - checkpoint: Checkpoint = await update_checkpoint( - vc_repo, - project_uuid=path_params.project_uuid, - ref_id=path_params.ref_id, - **update.model_dump(include={"tag", "message"}, exclude_none=True), - ) - - data = CheckpointApiModel.model_validate( - { - "url": url_for( - "get_checkpoint", - project_uuid=path_params.project_uuid, - ref_id=checkpoint.id, - ), - **checkpoint.model_dump(**RESPONSE_MODEL_POLICY), - } - ) - return envelope_json_response(data) - - -@routes.post( - f"/{VTAG}/repos/projects/{{project_uuid}}/checkpoints/{{ref_id}}:checkout", - name="checkout", -) -@login_required -@permission_required("project.create") -@handle_request_errors -async def _checkout_handler(request: web.Request): - url_for = create_url_for_function(request) - vc_repo = VersionControlRepository.create_from_request(request) - - path_params = parse_request_path_parameters_as(_CheckpointsPathParam, request) - - checkpoint: Checkpoint = await checkout_checkpoint( - vc_repo, - project_uuid=path_params.project_uuid, - ref_id=path_params.ref_id, - ) - - data = CheckpointApiModel.model_validate( - { - "url": url_for( - "get_checkpoint", - project_uuid=path_params.project_uuid, - ref_id=checkpoint.id, - ), - **checkpoint.model_dump(**RESPONSE_MODEL_POLICY), - } - ) - return envelope_json_response(data) - - -@routes.get( - f"/{VTAG}/repos/projects/{{project_uuid}}/checkpoints/{{ref_id}}/workbench/view", - name="view_project_workbench", -) -@login_required -@permission_required("project.read") -@handle_request_errors -async def _view_project_workbench_handler(request: web.Request): - url_for = create_url_for_function(request) - vc_repo = VersionControlRepository.create_from_request(request) - - path_params = parse_request_path_parameters_as(_CheckpointsPathParam, request) - - checkpoint: Checkpoint = await get_checkpoint( - vc_repo, - project_uuid=path_params.project_uuid, - ref_id=path_params.ref_id, - ) - - view: WorkbenchView = await get_workbench( - vc_repo, - project_uuid=path_params.project_uuid, - ref_id=checkpoint.id, - ) - - data = WorkbenchViewApiModel.model_validate( - { - # = request.url?? 
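Taken together, the deleted routes formed a small REST flow over checkpoints. A hypothetical client-side walk-through follows; the `/v0` base path (resolved from the API version tag) and the enveloped `data` key are assumptions:

```python
import aiohttp


async def checkout_head(session: aiohttp.ClientSession, project_uuid: str) -> dict:
    base = f"/v0/repos/projects/{project_uuid}/checkpoints"
    async with session.get(f"{base}/HEAD") as resp:  # resolve the HEAD checkpoint
        head = (await resp.json())["data"]
    async with session.post(f"{base}/{head['id']}:checkout") as resp:
        return (await resp.json())["data"]  # the checked-out checkpoint
```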
- "url": url_for( - "view_project_workbench", - project_uuid=path_params.project_uuid, - ref_id=checkpoint.id, - ), - "checkpoint_url": url_for( - "get_checkpoint", - project_uuid=path_params.project_uuid, - ref_id=checkpoint.id, - ), - **view.model_dump(**RESPONSE_MODEL_POLICY), - } - ) - - return envelope_json_response(data) diff --git a/services/web/server/src/simcore_service_webserver/version_control/_handlers_base.py b/services/web/server/src/simcore_service_webserver/version_control/_handlers_base.py deleted file mode 100644 index 3424788fafa..00000000000 --- a/services/web/server/src/simcore_service_webserver/version_control/_handlers_base.py +++ /dev/null @@ -1,52 +0,0 @@ -import logging -from functools import wraps -from typing import Any - -from aiohttp import web -from common_library.json_serialization import json_dumps -from pydantic import ValidationError -from servicelib.aiohttp.typing_extension import Handler - -from ..projects.exceptions import ProjectNotFoundError -from .errors import InvalidParameterError, NoCommitError, NotFoundError - -_logger = logging.getLogger(__name__) - - -def handle_request_errors(handler: Handler) -> Handler: - """ - - required and type validation of path and query parameters - """ - - @wraps(handler) - async def wrapped(request: web.Request): - try: - response: Any = await handler(request) - return response - - except KeyError as err: - # NOTE: handles required request.match_info[*] or request.query[*] - _logger.debug(err, exc_info=True) - raise web.HTTPBadRequest(reason=f"Expected parameter {err}") from err - - except ValidationError as err: - # NOTE: pydantic.validate_arguments parses and validates -> ValidationError - _logger.debug(err, exc_info=True) - raise web.HTTPUnprocessableEntity( - text=json_dumps({"error": err.errors()}), - content_type="application/json", - ) from err - - except (InvalidParameterError, NoCommitError) as err: - raise web.HTTPUnprocessableEntity(reason=str(err)) from err - - except NotFoundError as err: - raise web.HTTPNotFound(reason=str(err)) from err - - except ProjectNotFoundError as err: - _logger.debug(err, exc_info=True) - raise web.HTTPNotFound( - reason=f"Project not found {err.project_uuid} or not accessible. 
Skipping snapshot" - ) from err - - return wrapped diff --git a/services/web/server/src/simcore_service_webserver/version_control/db.py b/services/web/server/src/simcore_service_webserver/version_control/db.py deleted file mode 100644 index ee884df6e9c..00000000000 --- a/services/web/server/src/simcore_service_webserver/version_control/db.py +++ /dev/null @@ -1,551 +0,0 @@ -import json -import logging -from types import SimpleNamespace -from typing import Any, cast -from uuid import UUID - -import sqlalchemy as sa -from aiopg.sa import SAConnection -from aiopg.sa.result import RowProxy -from common_library.json_serialization import json_dumps -from models_library.basic_types import SHA1Str -from models_library.projects import ProjectIDStr -from pydantic.types import NonNegativeInt, PositiveInt -from simcore_postgres_database.models.projects import projects -from simcore_postgres_database.models.projects_version_control import ( - projects_vc_branches, - projects_vc_commits, - projects_vc_heads, - projects_vc_repos, - projects_vc_snapshots, - projects_vc_tags, -) -from simcore_postgres_database.utils_aiopg_orm import BaseOrm -from sqlalchemy.dialects.postgresql import insert as pg_insert - -from ..db.base_repository import BaseRepository -from ..projects.models import ProjectProxy -from .errors import ( - CleanRequiredError, - InvalidParameterError, - NoCommitError, - NotFoundError, -) -from .models import HEAD, CommitID, CommitLog, CommitProxy, RefID, RepoProxy, TagProxy -from .vc_changes import compute_workbench_checksum -from .vc_tags import parse_workcopy_project_tag_name - -_logger = logging.getLogger(__name__) - - -class VersionControlRepository(BaseRepository): - """ - db layer to access multiple tables within projects_version_control - """ - - class ReposOrm(BaseOrm[int]): - def __init__(self, connection: SAConnection): - super().__init__( - projects_vc_repos, - connection, - readonly={"id", "created", "modified"}, - ) - - class BranchesOrm(BaseOrm[int]): - def __init__(self, connection: SAConnection): - super().__init__( - projects_vc_branches, - connection, - readonly={"id", "created", "modified"}, - ) - - class CommitsOrm(BaseOrm[int]): - def __init__(self, connection: SAConnection): - super().__init__( - projects_vc_commits, - connection, - readonly={"id", "created", "modified"}, - # pylint: disable=no-member - writeonce={ - c for c in projects_vc_commits.columns.keys() if c != "message" - }, - ) - - class TagsOrm(BaseOrm[int]): - def __init__(self, connection: SAConnection): - super().__init__( - projects_vc_tags, - connection, - readonly={"id", "created", "modified"}, - ) - - class ProjectsOrm(BaseOrm[str]): - def __init__(self, connection: SAConnection): - super().__init__( - projects, - connection, - readonly={"id", "creation_date", "last_change_date"}, - writeonce={"uuid"}, - ) - - class SnapshotsOrm(BaseOrm[str]): - def __init__(self, connection: SAConnection): - super().__init__( - projects_vc_snapshots, - connection, - writeonce={"checksum"}, - ) - - class HeadsOrm(BaseOrm[int]): - def __init__(self, connection: SAConnection): - super().__init__( - projects_vc_heads, - connection, - writeonce={"repo_id"}, - ) - - # ------------ - - async def _get_head_branch( - self, repo_id: int, conn: SAConnection - ) -> RowProxy | None: - if h := await self.HeadsOrm(conn).fetch("head_branch_id", rowid=repo_id): - branch = ( - await self.BranchesOrm(conn) - .set_filter(id=h.head_branch_id) - .fetch("id name head_commit_id") - ) - return branch - return None - - async def 
_get_HEAD_commit( - self, repo_id: int, conn: SAConnection - ) -> CommitProxy | None: - if branch := await self._get_head_branch(repo_id, conn): - commit = ( - await self.CommitsOrm(conn).set_filter(id=branch.head_commit_id).fetch() - ) - return commit - return None - - async def _fetch_workcopy_project_id( - self, repo_id: int, commit_id: int, conn: SAConnection - ) -> ProjectIDStr: - # commit has a workcopy associated? - found = ( - await self.TagsOrm(conn).set_filter(commit_id=commit_id).fetch_all("name") - ) - for tag in found: - if workcopy_project_id := parse_workcopy_project_tag_name(tag.name): - return ProjectIDStr(workcopy_project_id) - - repo = await self.ReposOrm(conn).set_filter(id=repo_id).fetch("project_uuid") - assert repo # nosec - return cast(ProjectIDStr, repo.project_uuid) - - async def _update_state( - self, repo_id: int, conn: SAConnection - ) -> tuple[RepoProxy, CommitProxy | None, ProjectProxy]: - head_commit: CommitProxy | None = await self._get_HEAD_commit(repo_id, conn) - - # current repo - repo_orm = self.ReposOrm(conn).set_filter(id=repo_id) - returning_cols = "id project_uuid project_checksum modified" - repo = await repo_orm.fetch(returning_cols) - assert repo # nosec - - # fetch working copy - workcopy_project_id = await self._fetch_workcopy_project_id( - repo_id, head_commit.id if head_commit else -1, conn - ) - workcopy_project = ( - await self.ProjectsOrm(conn) - .set_filter(uuid=workcopy_project_id) - .fetch("last_change_date workbench ui uuid") - ) - assert workcopy_project # nosec - - # uses checksum cached in repo table to avoid re-computing checksum - checksum: SHA1Str | None = repo.project_checksum - if not checksum or ( - checksum and repo.modified < workcopy_project.last_change_date - ): - checksum = compute_workbench_checksum(workcopy_project.workbench) - - repo = await repo_orm.update(returning_cols, project_checksum=checksum) - assert repo - return repo, head_commit, workcopy_project - - @staticmethod - async def _upsert_snapshot( - project_checksum: str, - project: RowProxy | SimpleNamespace, - conn: SAConnection, - ): - # has changes wrt previous commit - assert project_checksum # nosec - insert_stmt = pg_insert(projects_vc_snapshots).values( - checksum=project_checksum, - content={ - "workbench": json.loads(json_dumps(project.workbench)), - "ui": json.loads(json_dumps(project.ui)), - }, - ) - upsert_snapshot = insert_stmt.on_conflict_do_update( - constraint=projects_vc_snapshots.primary_key, - set_=dict(content=insert_stmt.excluded.content), - ) - await conn.execute(upsert_snapshot) - - # PUBLIC - - async def list_repos( - self, - offset: NonNegativeInt = 0, - limit: PositiveInt | None = None, - ) -> tuple[list[RowProxy], NonNegativeInt]: - async with self.engine.acquire() as conn: - repo_orm = self.ReposOrm(conn) - - rows: list[RowProxy] - rows, total_count = await repo_orm.fetch_page( - "project_uuid", offset=offset, limit=limit - ) - - return rows, total_count - - async def get_repo_id(self, project_uuid: UUID) -> int | None: - async with self.engine.acquire() as conn: - repo_orm = self.ReposOrm(conn).set_filter(project_uuid=str(project_uuid)) - repo = await repo_orm.fetch("id") - return int(repo.id) if repo else None - - async def init_repo(self, project_uuid: UUID) -> int: - async with self.engine.acquire() as conn: - async with conn.begin(): - # create repo - repo_orm = self.ReposOrm(conn) - repo_id = await repo_orm.insert(project_uuid=str(project_uuid)) - assert repo_id is not None # nosec - assert isinstance(repo_id, int) # nosec - - 
repo = await repo_orm.fetch(rowid=repo_id) - assert repo # nosec - - # create main branch - branches_orm = self.BranchesOrm(conn) - branch_id = await branches_orm.insert(repo_id=repo.id) - assert branch_id is not None - assert isinstance(branch_id, int) # nosec - - main_branch: RowProxy | None = await branches_orm.fetch(rowid=branch_id) - assert main_branch # nosec - assert main_branch.name == "main" # nosec - - # assign head branch - heads_orm = self.HeadsOrm(conn) - await heads_orm.insert(repo_id=repo.id, head_branch_id=branch_id) - - return repo_id - - async def commit( - self, repo_id: int, tag: str | None = None, message: str | None = None - ) -> int: - """add changes, commits and tags (if tag is not None) - - Message is added to tag if set otherwise to commit - """ - if tag in ["HEAD", HEAD]: - raise InvalidParameterError(name="tag", reason="is a reserved word") - - async with self.engine.acquire() as conn: - # get head branch - branch = await self._get_head_branch(repo_id, conn) - if not branch: - raise NotImplementedError("Detached heads still not implemented") - - _logger.info("On branch %s", branch.name) - - # get head commit - repo, head_commit, workcopy_project = await self._update_state( - repo_id, conn - ) - - if head_commit is None: - previous_checksum = None - commit_id = None - else: - previous_checksum = head_commit.snapshot_checksum - commit_id = head_commit.id - - async with conn.begin(): - # take a snapshot if changes - if repo.project_checksum != previous_checksum: - await self._upsert_snapshot( - repo.project_checksum, workcopy_project, conn - ) - - # commit new snapshot in history - commit_id = await self.CommitsOrm(conn).insert( - repo_id=repo_id, - parent_commit_id=commit_id, - message=message, - snapshot_checksum=repo.project_checksum, - ) - assert commit_id # nosec - - # updates head/branch to this commit - await self.BranchesOrm(conn).set_filter(id=branch.id).update( - head_commit_id=commit_id - ) - - # tag it (again) - if tag: - insert_stmt = pg_insert(projects_vc_tags).values( - repo_id=repo_id, - commit_id=commit_id, - name=tag, - message=message, - hidden=False, - ) - upsert_tag = insert_stmt.on_conflict_do_update( - constraint="repo_tag_uniqueness", - set_=dict(name=insert_stmt.excluded.name), - ) - await conn.execute(upsert_tag) - else: - _logger.info("Nothing to commit, working tree clean") - - assert isinstance(commit_id, int) # nosec - return commit_id - - async def get_commit_log(self, commit_id: int) -> CommitLog: - async with self.engine.acquire() as conn: - commit = await self.CommitsOrm(conn).fetch(rowid=commit_id) - if commit: - assert isinstance(commit, RowProxy) # nosec - - tags: list[TagProxy] = ( - await self.TagsOrm(conn) - .set_filter(commit_id=commit.id, hidden=False) - .fetch_all("name message") - ) - return commit, tags - raise NotFoundError(name="commit", value=commit_id) - - async def log( - self, - repo_id: int, - offset: NonNegativeInt = 0, - limit: PositiveInt | None = None, - ) -> tuple[list[CommitLog], NonNegativeInt]: - async with self.engine.acquire() as conn: - commits_orm = self.CommitsOrm(conn).set_filter(repo_id=repo_id) - tags_orm = self.TagsOrm(conn) - - commits: list[CommitProxy] - commits, total_count = await commits_orm.fetch_page( - offset=offset, - limit=limit, - sort_by=sa.desc(commits_orm.columns["created"]), - ) - - logs = [] - for commit in commits: - tags: list[TagProxy] - tags = await tags_orm.set_filter(commit_id=commit.id).fetch_all() - logs.append((commit, tags)) - - return logs, total_count - - async def 
update_annotations( - self, - repo_id: int, - commit_id: CommitID, - message: str | None = None, - tag_name: str | None = None, - ): - async with self.engine.acquire() as conn: - async with conn.begin(): - if message: - await self.CommitsOrm(conn).set_filter(id=commit_id).update( - message=message - ) - - if tag_name: - tag = ( - await self.TagsOrm(conn) - .set_filter(repo_id=repo_id, commit_id=commit_id, hidden=False) - .fetch("id") - ) - - if tag: - await self.TagsOrm(conn).set_filter(rowid=tag.id).update( - name=tag_name - ) - - async def as_repo_and_commit_ids( - self, project_uuid: UUID, ref_id: RefID - ) -> tuple[int, CommitID]: - """Translates (project-uuid, ref-id) to (repo-id, commit-id) - - :return: tuple with repo and commit identifiers - """ - async with self.engine.acquire() as conn: - repo = ( - await self.ReposOrm(conn) - .set_filter(project_uuid=str(project_uuid)) - .fetch("id") - ) - commit_id = None - if repo: - if ref_id == HEAD: - commit = await self._get_HEAD_commit(repo.id, conn) - if commit: - commit_id = commit.id - elif isinstance(ref_id, CommitID): - commit_id = ref_id - else: - assert isinstance(ref_id, str) # nosec - # head branch or tag - raise NotImplementedError( - f"WIP: Tag or head branches as ref_id={ref_id}" - ) - - if not commit_id or not repo: - raise NotFoundError( - name="project {project_uuid} reference", value=ref_id - ) - - return repo.id, commit_id - - async def checkout(self, repo_id: int, commit_id: int) -> int: - """checks out working copy of project_uuid to commit ref_id - - :raises RuntimeError: if local copy has changes (i.e. dirty) - :return: commit id - :rtype: int - """ - async with self.engine.acquire() as conn: - repo, head_commit, workcopy_project = await self._update_state( - repo_id, conn - ) - - if head_commit is None: - raise NoCommitError( - details="Cannot checkout without commit changes first" - ) - - # check if working copy has changes, if so, fail - if repo.project_checksum != head_commit.snapshot_checksum: - raise CleanRequiredError( - details="Your local changes would be overwritten by checkout. " - "Cannot checkout without commit changes first." 
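This is the error that the (also removed) core layer converted into an auto-commit plus retry; condensed from the deleted checkout_checkpoint:

```python
# A dirty working copy raises CleanRequiredError, so the caller commits the
# local changes first and then retries the checkout once.
from simcore_service_webserver.version_control.errors import CleanRequiredError  # removed by this PR


async def safe_checkout(vc_repo, repo_id: int, commit_id: int) -> int:
    try:
        return await vc_repo.checkout(repo_id, commit_id)
    except CleanRequiredError:
        await vc_repo.commit(repo_id, message="auto commit")
        return await vc_repo.checkout(repo_id, commit_id)
```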
- ) - - # already in head commit - if head_commit.id == commit_id: - return commit_id - - async with conn.begin(): - commit = ( - await self.CommitsOrm(conn) - .set_filter(id=commit_id) - .fetch("snapshot_checksum") - ) - assert commit # nosec - - # restores project snapshot ONLY if main workcopy project - if workcopy_project.uuid == repo.project_uuid: - snapshot = ( - await self.SnapshotsOrm(conn) - .set_filter(commit.snapshot_checksum) - .fetch("content") - ) - assert snapshot # nosec - - await self.ProjectsOrm(conn).set_filter( - uuid=repo.project_uuid - ).update(**snapshot.content) - - # create detached branch that points to (repo_id, commit_id) - # upsert "detached" branch - insert_stmt = ( - pg_insert(projects_vc_branches) - .values( - repo_id=repo_id, - head_commit_id=commit_id, - name=f"{commit_id}-DETACHED", - ) - .returning(projects_vc_branches.c.id) - ) - upsert_tag = insert_stmt.on_conflict_do_update( - constraint="repo_branch_uniqueness", - set_=dict(head_commit_id=insert_stmt.excluded.head_commit_id), - ) - branch_id = await conn.scalar(upsert_tag) - - # updates head - await self.HeadsOrm(conn).set_filter(repo_id=repo_id).update( - head_branch_id=branch_id - ) - - return commit_id - - async def get_snapshot_content( - self, repo_id: int, commit_id: int - ) -> dict[str, Any]: - async with self.engine.acquire() as conn: - if ( - commit := await self.CommitsOrm(conn) - .set_filter(repo_id=repo_id, id=commit_id) - .fetch("snapshot_checksum") - ): - if ( - snapshot := await self.SnapshotsOrm(conn) - .set_filter(checksum=commit.snapshot_checksum) - .fetch("content") - ): - content: dict[str, Any] = snapshot.content - return content - - raise NotFoundError(name="snapshot for commit", value=(repo_id, commit_id)) - - async def get_workbench_view(self, repo_id: int, commit_id: int) -> dict[str, Any]: - async with self.engine.acquire() as conn: - if ( - commit := await self.CommitsOrm(conn) - .set_filter(repo_id=repo_id, id=commit_id) - .fetch("snapshot_checksum") - ): - repo = ( - await self.ReposOrm(conn) - .set_filter(id=repo_id) - .fetch("project_uuid") - ) - assert repo # nosec - - # if snapshot differs from workcopy, then show working copy - workcopy_project_id = await self._fetch_workcopy_project_id( - repo_id, commit_id, conn - ) - - # NOTE: For the moment, all wcopies except for the repo's main workcopy - # (i.e. 
repo.project_uuid) are READ-ONLY - if workcopy_project_id != repo.project_uuid: - if project := ( - await self.ProjectsOrm(conn) - .set_filter(uuid=workcopy_project_id) - .fetch("workbench ui") - ): - return dict(project.items()) - else: - if ( - snapshot := await self.SnapshotsOrm(conn) - .set_filter(checksum=commit.snapshot_checksum) - .fetch("content") - ): - assert isinstance(snapshot.content, dict) # nosec - return snapshot.content - - raise NotFoundError(name="snapshot for commit", value=(repo_id, commit_id)) diff --git a/services/web/server/src/simcore_service_webserver/version_control/errors.py b/services/web/server/src/simcore_service_webserver/version_control/errors.py deleted file mode 100644 index e0b3dd3ba63..00000000000 --- a/services/web/server/src/simcore_service_webserver/version_control/errors.py +++ /dev/null @@ -1,29 +0,0 @@ -from ..errors import WebServerBaseError - - -class VersionControlValueError(WebServerBaseError, ValueError): - pass - - -class VersionControlRuntimeError(WebServerBaseError, RuntimeError): - pass - - -class NotFoundError(VersionControlValueError): - msg_template = "Could not find {name} '{value}'" - - -class InvalidParameterError(VersionControlValueError): - msg_template = "Invalid {name}: {reason}" - - -class NoCommitError(VersionControlRuntimeError): - msg_template = "No commit found: {details}" - - -class CleanRequiredError(VersionControlRuntimeError): - msg_template = "Working copy w/o changes (clean) is required: {details}" - - -class UserUndefinedError(VersionControlRuntimeError): - msg_template = "User required but undefined" diff --git a/services/web/server/src/simcore_service_webserver/version_control/models.py b/services/web/server/src/simcore_service_webserver/version_control/models.py deleted file mode 100644 index 505758d53d2..00000000000 --- a/services/web/server/src/simcore_service_webserver/version_control/models.py +++ /dev/null @@ -1,110 +0,0 @@ -from datetime import datetime -from typing import Annotated, Any, TypeAlias, Union - -from aiopg.sa.result import RowProxy -from models_library.basic_types import SHA1Str -from models_library.projects import ProjectID -from models_library.projects_nodes import Node -from pydantic import ( - BaseModel, - ConfigDict, - Field, - PositiveInt, - StrictBool, - StrictFloat, - StrictInt, -) -from pydantic.networks import HttpUrl - -BuiltinTypes: TypeAlias = Union[StrictBool, StrictInt, StrictFloat, str] - -# alias for readability -# SEE https://pydantic-docs.helpmanual.io/usage/models/#orm-mode-aka-arbitrary-class-instances - -BranchProxy: TypeAlias = RowProxy -CommitProxy: TypeAlias = RowProxy -RepoProxy: TypeAlias = RowProxy -TagProxy: TypeAlias = RowProxy -CommitLog: TypeAlias = tuple[CommitProxy, list[TagProxy]] - - -HEAD = f"{__file__}/ref/HEAD" - -CommitID: TypeAlias = int -BranchID: TypeAlias = int -RefID: TypeAlias = Annotated[CommitID | str, Field(union_mode="left_to_right")] - -CheckpointID: TypeAlias = PositiveInt - - -class Checkpoint(BaseModel): - id: CheckpointID - checksum: SHA1Str - created_at: datetime - tags: tuple[str, ...] - message: str | None = None - parents_ids: tuple[PositiveInt, ...] 
| None = Field(default=None) - - @classmethod - def from_commit_log(cls, commit: RowProxy, tags: list[RowProxy]) -> "Checkpoint": - return cls( - id=commit.id, - checksum=commit.snapshot_checksum, - tags=tuple(tag.name for tag in tags), - message=commit.message, - parents_ids=(commit.parent_commit_id,) if commit.parent_commit_id else None, - created_at=commit.created, - ) - - -class WorkbenchView(BaseModel): - """A view (i.e. read-only and visual) of the project's workbench""" - - model_config = ConfigDict(from_attributes=True) - - # NOTE: Tmp replacing UUIDS by str due to a problem serializing to json UUID keys - # in the response https://github.com/samuelcolvin/pydantic/issues/2096#issuecomment-814860206 - workbench: dict[str, Node] - ui: dict[str, Any] = {} - - -# API models --------------- - - -class RepoApiModel(BaseModel): - project_uuid: ProjectID - url: HttpUrl - - -class CheckpointApiModel(Checkpoint): - url: HttpUrl - - -class CheckpointNew(BaseModel): - tag: str - message: str | None = None - # new_branch: Optional[str] = None - - -class CheckpointAnnotations(BaseModel): - tag: str | None = None - message: str | None = None - - -class WorkbenchViewApiModel(WorkbenchView): - url: HttpUrl - checkpoint_url: HttpUrl - - -__all__: tuple[str, ...] = ( - "BranchID", - "BranchProxy", - "CheckpointID", - "CommitID", - "CommitLog", - "CommitProxy", - "HEAD", - "RefID", - "RepoProxy", - "TagProxy", -) diff --git a/services/web/server/src/simcore_service_webserver/version_control/plugin.py b/services/web/server/src/simcore_service_webserver/version_control/plugin.py deleted file mode 100644 index a1e31611f43..00000000000 --- a/services/web/server/src/simcore_service_webserver/version_control/plugin.py +++ /dev/null @@ -1,29 +0,0 @@ -""" An add-on on projects module - - Adds version control to projects - -""" -import logging - -from aiohttp import web -from servicelib.aiohttp.application_setup import ModuleCategory, app_module_setup - -from .._constants import APP_SETTINGS_KEY -from . import _handlers - -_logger = logging.getLogger(__name__) - - -@app_module_setup( - __name__, - ModuleCategory.ADDON, - settings_name="WEBSERVER_VERSION_CONTROL", - depends=[ - "simcore_service_webserver.projects", - ], - logger=_logger, -) -def setup_version_control(app: web.Application): - assert app[APP_SETTINGS_KEY].WEBSERVER_VERSION_CONTROL # nosec - - app.add_routes(_handlers.routes) diff --git a/services/web/server/src/simcore_service_webserver/version_control/vc_changes.py b/services/web/server/src/simcore_service_webserver/version_control/vc_changes.py deleted file mode 100644 index cc3559c118b..00000000000 --- a/services/web/server/src/simcore_service_webserver/version_control/vc_changes.py +++ /dev/null @@ -1,83 +0,0 @@ -""" - -- How to detect that a particular feature/characteristic in an entity has changed over time? 
- -Feature/characteristics of an entity at a given moment can be "snapshot" and given a hash value -- If the same feature at another moment results in a different hash value, it means that this feature -has changed - - -""" - -from typing import Any -from uuid import UUID, uuid3 - -from models_library.basic_types import SHA1Str -from models_library.projects import ProjectID, ProjectIDStr -from models_library.projects_nodes import Node - -from ..projects.models import ProjectProxy -from ..utils import compute_sha1_on_small_dataset - - -def compute_workbench_checksum(workbench: dict[str, Any]) -> SHA1Str: - # - # NOTE that UI is NOT accounted in the checksum - # - normalized = { - str(k): (Node(**v) if not isinstance(v, Node) else v) - for k, v in workbench.items() - } - - checksum = compute_sha1_on_small_dataset( - { - k: node.model_dump( - exclude_unset=True, - exclude_defaults=True, - exclude_none=True, - include={ - "key", - "version", - "inputs", - "input_nodes", - "outputs", - "output_nodes", - }, - ) - for k, node in normalized.items() - } - ) - return checksum - - -def _eval_checksum(repo, project: ProjectProxy) -> SHA1Str: - # cached checksum of project workcopy - checksum: SHA1Str | None = repo.project_checksum - is_invalid = not checksum or (checksum and repo.modified < project.last_change_date) - if is_invalid: - # invalid -> recompute - checksum = compute_workbench_checksum(project.workbench) - assert checksum # nosec - return checksum - - -def eval_workcopy_project_id( - repo_project_uuid: ProjectID | ProjectIDStr, snapshot_checksum: SHA1Str -) -> ProjectID: - """ - A working copy is a real project associated to a snapshot so it can be operated - as a project resource (e.g. run, save, etc). - - The uuid of the workcopy is a composition of the repo-project uuid and the snapshot-checksum - i.e. all identical snapshots (e.g. different iterations commits) map to the same project workcopy - can avoid re-run - - If a snapshot is identical but associated to two different repos, then it will still be - treated as a separate project to avoid collision between e.g. two users having coincidentally the same - workbench blueprint. Nonetheless, this could be refined in the future since we could use this - knowledge to reuse results. 
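The mapping described here relies only on uuid3 being a pure function of its (namespace, name) pair; a self-contained illustration with placeholder values:

```python
from uuid import UUID, uuid3

repo_uuid = UUID("12345678-1234-5678-1234-567812345678")  # placeholder repo uuid
checksum = "a" * 40  # placeholder SHA1 hex digest

assert uuid3(repo_uuid, checksum) == uuid3(repo_uuid, checksum)  # stable workcopy id
assert uuid3(repo_uuid, checksum) != uuid3(UUID(int=0), checksum)  # scoped per repo
```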
- """ - if isinstance(repo_project_uuid, str): - repo_project_uuid = UUID(repo_project_uuid) - - return uuid3(repo_project_uuid, snapshot_checksum) diff --git a/services/web/server/src/simcore_service_webserver/version_control/vc_tags.py b/services/web/server/src/simcore_service_webserver/version_control/vc_tags.py deleted file mode 100644 index 5b9d86df791..00000000000 --- a/services/web/server/src/simcore_service_webserver/version_control/vc_tags.py +++ /dev/null @@ -1,15 +0,0 @@ -import re - -from models_library.basic_regex import UUID_RE_BASE -from models_library.projects import ProjectID - - -def compose_workcopy_project_tag_name(workcopy_project_id: ProjectID) -> str: - return f"project:{workcopy_project_id}" - - -def parse_workcopy_project_tag_name(name: str) -> ProjectID | None: - if m := re.match(rf"^project:(?P{UUID_RE_BASE})$", name): - data = m.groupdict() - return ProjectID(data["workcopy_project_id"]) - return None diff --git a/services/web/server/src/simcore_service_webserver/wallets/_api.py b/services/web/server/src/simcore_service_webserver/wallets/_api.py index a9721f5dfe0..dd092e2a39f 100644 --- a/services/web/server/src/simcore_service_webserver/wallets/_api.py +++ b/services/web/server/src/simcore_service_webserver/wallets/_api.py @@ -16,7 +16,7 @@ from pydantic import TypeAdapter from ..resource_usage.service import get_wallet_total_available_credits -from ..users import api as users_api +from ..users import api as users_service from ..users import preferences_api as user_preferences_api from ..users.exceptions import UserDefaultWalletNotFoundError from . import _db as db @@ -33,7 +33,7 @@ async def create_wallet( thumbnail: str | None, product_name: ProductName, ) -> WalletGet: - user: dict = await users_api.get_user(app, user_id) + user: dict = await users_service.get_user(app, user_id) wallet_db: WalletDB = await db.create_wallet( app=app, owner=user["primary_gid"], diff --git a/services/web/server/src/simcore_service_webserver/wallets/_db.py b/services/web/server/src/simcore_service_webserver/wallets/_db.py index 98ec51a658c..4d17c742925 100644 --- a/services/web/server/src/simcore_service_webserver/wallets/_db.py +++ b/services/web/server/src/simcore_service_webserver/wallets/_db.py @@ -1,8 +1,3 @@ -""" Database API - - - Adds a layer to the postgres API with a focus on the projects comments - -""" import logging from aiohttp import web diff --git a/services/web/server/src/simcore_service_webserver/wallets/_events.py b/services/web/server/src/simcore_service_webserver/wallets/_events.py index 5e881ebdae5..3aea74cdb83 100644 --- a/services/web/server/src/simcore_service_webserver/wallets/_events.py +++ b/services/web/server/src/simcore_service_webserver/wallets/_events.py @@ -7,7 +7,7 @@ from pydantic import PositiveInt from servicelib.aiohttp.observer import register_observer, setup_observer_registry -from ..products.api import get_product +from ..products import products_service from ..resource_usage.service import add_credits_to_wallet from ..users import preferences_api from ..users.api import get_user_display_and_id_names @@ -27,7 +27,7 @@ async def _auto_add_default_wallet( app, user_id=user_id, product_name=product_name ): user = await get_user_display_and_id_names(app, user_id=user_id) - product = get_product(app, product_name) + product = products_service.get_product(app, product_name) wallet = await create_wallet( app, diff --git a/services/web/server/src/simcore_service_webserver/wallets/_groups_api.py 
b/services/web/server/src/simcore_service_webserver/wallets/_groups_api.py index 5a3dcc0a339..05b6625ae5e 100644 --- a/services/web/server/src/simcore_service_webserver/wallets/_groups_api.py +++ b/services/web/server/src/simcore_service_webserver/wallets/_groups_api.py @@ -8,7 +8,7 @@ from models_library.wallets import UserWalletDB, WalletID from pydantic import BaseModel, ConfigDict -from ..users import api as users_api +from ..users import api as users_service from . import _db as wallets_db from . import _groups_db as wallets_groups_db from ._groups_db import WalletGroupGetDB @@ -87,9 +87,9 @@ async def list_wallet_groups_by_user_and_wallet( ), ) - wallet_groups_db: list[ - WalletGroupGetDB - ] = await wallets_groups_db.list_wallet_groups(app=app, wallet_id=wallet_id) + wallet_groups_db: list[WalletGroupGetDB] = ( + await wallets_groups_db.list_wallet_groups(app=app, wallet_id=wallet_id) + ) wallet_groups_api: list[WalletGroupGet] = [ WalletGroupGet.model_validate(group) for group in wallet_groups_db @@ -103,9 +103,9 @@ async def list_wallet_groups_with_read_access_by_wallet( *, wallet_id: WalletID, ) -> list[WalletGroupGet]: - wallet_groups_db: list[ - WalletGroupGetDB - ] = await wallets_groups_db.list_wallet_groups(app=app, wallet_id=wallet_id) + wallet_groups_db: list[WalletGroupGetDB] = ( + await wallets_groups_db.list_wallet_groups(app=app, wallet_id=wallet_id) + ) wallet_groups_api: list[WalletGroupGet] = [ WalletGroupGet.model_validate(group) @@ -135,7 +135,7 @@ async def update_wallet_group( reason=f"User does not have write access to wallet {wallet_id}" ) if wallet.owner == group_id: - user: dict = await users_api.get_user(app, user_id) + user: dict = await users_service.get_user(app, user_id) if user["primary_gid"] != wallet.owner: # Only the owner of the wallet can modify the owner group raise WalletAccessForbiddenError( @@ -177,7 +177,7 @@ async def delete_wallet_group( reason=f"User does not have delete access to wallet {wallet_id}" ) if wallet.owner == group_id: - user: dict = await users_api.get_user(app, user_id) + user: dict = await users_service.get_user(app, user_id) if user["primary_gid"] != wallet.owner: # Only the owner of the wallet can delete the owner group raise WalletAccessForbiddenError( diff --git a/services/web/server/src/simcore_service_webserver/wallets/_groups_db.py b/services/web/server/src/simcore_service_webserver/wallets/_groups_db.py index 8c2148e05ce..c7e24fff4b8 100644 --- a/services/web/server/src/simcore_service_webserver/wallets/_groups_db.py +++ b/services/web/server/src/simcore_service_webserver/wallets/_groups_db.py @@ -1,8 +1,3 @@ -""" Database API - - - Adds a layer to the postgres API with a focus on the projects comments - -""" import logging from datetime import datetime diff --git a/services/web/server/src/simcore_service_webserver/wallets/_handlers.py b/services/web/server/src/simcore_service_webserver/wallets/_handlers.py index 9afcdb7c437..22d085b90f1 100644 --- a/services/web/server/src/simcore_service_webserver/wallets/_handlers.py +++ b/services/web/server/src/simcore_service_webserver/wallets/_handlers.py @@ -21,9 +21,9 @@ from servicelib.logging_errors import create_troubleshotting_log_kwargs from servicelib.request_keys import RQT_USERID_KEY -from .._constants import RQ_PRODUCT_KEY from .._meta import API_VTAG as VTAG from ..application_settings_utils import requires_dev_feature_enabled +from ..constants import RQ_PRODUCT_KEY from ..login.decorators import login_required from ..payments.errors import ( 
InvalidPaymentMethodError, @@ -97,7 +97,7 @@ async def wrapper(request: web.Request) -> web.StreamResponse: except BillingDetailsNotFoundError as exc: error_code = create_error_code(exc) - user_error_msg = f"{MSG_BILLING_DETAILS_NOT_DEFINED_ERROR} [{error_code}]" + user_error_msg = MSG_BILLING_DETAILS_NOT_DEFINED_ERROR _logger.exception( **create_troubleshotting_log_kwargs( @@ -155,10 +155,10 @@ async def create_wallet(request: web.Request): async def list_wallets(request: web.Request): req_ctx = WalletsRequestContext.model_validate(request) - wallets: list[ - WalletGetWithAvailableCredits - ] = await _api.list_wallets_with_available_credits_for_user( - app=request.app, user_id=req_ctx.user_id, product_name=req_ctx.product_name + wallets: list[WalletGetWithAvailableCredits] = ( + await _api.list_wallets_with_available_credits_for_user( + app=request.app, user_id=req_ctx.user_id, product_name=req_ctx.product_name + ) ) return envelope_json_response(wallets) diff --git a/services/web/server/src/simcore_service_webserver/wallets/_payments_handlers.py b/services/web/server/src/simcore_service_webserver/wallets/_payments_handlers.py index 66c73b5a293..2751abc457e 100644 --- a/services/web/server/src/simcore_service_webserver/wallets/_payments_handlers.py +++ b/services/web/server/src/simcore_service_webserver/wallets/_payments_handlers.py @@ -12,7 +12,6 @@ ReplaceWalletAutoRecharge, WalletPaymentInitiated, ) -from models_library.products import CreditResultGet from models_library.rest_pagination import Page, PageQueryParameters from models_library.rest_pagination_utils import paginate_data from servicelib.aiohttp import status @@ -24,6 +23,7 @@ ) from servicelib.logging_utils import get_log_record_extra, log_context from servicelib.utils import fire_and_forget_task +from simcore_service_webserver.products._models import CreditResult from .._meta import API_VTAG as VTAG from ..login.decorators import login_required @@ -42,7 +42,7 @@ pay_with_payment_method, replace_wallet_payment_autorecharge, ) -from ..products.api import get_credit_amount +from ..products import products_service from ..security.decorators import permission_required from ..utils_aiohttp import envelope_json_response from ._handlers import ( @@ -79,7 +79,7 @@ async def _create_payment(request: web.Request): log_duration=True, extra=get_log_record_extra(user_id=req_ctx.user_id), ): - credit_result: CreditResultGet = await get_credit_amount( + credit_result: CreditResult = await products_service.get_credit_amount( request.app, dollar_amount=body_params.price_dollars, product_name=req_ctx.product_name, @@ -351,7 +351,7 @@ async def _pay_with_payment_method(request: web.Request): log_duration=True, extra=get_log_record_extra(user_id=req_ctx.user_id), ): - credit_result: CreditResultGet = await get_credit_amount( + credit_result: CreditResult = await products_service.get_credit_amount( request.app, dollar_amount=body_params.price_dollars, product_name=req_ctx.product_name, @@ -420,7 +420,7 @@ async def _get_wallet_autorecharge(request: web.Request): ) # NOTE: just to check that top_up is under limit. 
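All three payment paths in this file now go through the same conversion call; a condensed sketch of the pattern (the `credit_amount` attribute on CreditResult is an assumption, it is not shown in this diff):

```python
from decimal import Decimal

from simcore_service_webserver.products import products_service


async def dollars_to_credits(app, dollars: Decimal, product_name: str) -> Decimal:
    credit_result = await products_service.get_credit_amount(
        app, dollar_amount=dollars, product_name=product_name
    )
    return credit_result.credit_amount  # assumed field on CreditResult
```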
Guaranteed by _validate_prices_in_product_settings - assert await get_credit_amount( # nosec + assert await products_service.get_credit_amount( # nosec request.app, dollar_amount=auto_recharge.top_up_amount_in_usd, product_name=req_ctx.product_name, @@ -441,7 +441,7 @@ async def _replace_wallet_autorecharge(request: web.Request): path_params = parse_request_path_parameters_as(WalletsPathParams, request) body_params = await parse_request_body_as(ReplaceWalletAutoRecharge, request) - await get_credit_amount( + await products_service.get_credit_amount( request.app, dollar_amount=body_params.top_up_amount_in_usd, product_name=req_ctx.product_name, diff --git a/services/web/server/src/simcore_service_webserver/workspaces/_common/models.py b/services/web/server/src/simcore_service_webserver/workspaces/_common/models.py index a94ec063f15..05d962a30d5 100644 --- a/services/web/server/src/simcore_service_webserver/workspaces/_common/models.py +++ b/services/web/server/src/simcore_service_webserver/workspaces/_common/models.py @@ -18,7 +18,7 @@ from pydantic import BaseModel, BeforeValidator, ConfigDict, Field from servicelib.request_keys import RQT_USERID_KEY -from ..._constants import RQ_PRODUCT_KEY +from ...constants import RQ_PRODUCT_KEY _logger = logging.getLogger(__name__) diff --git a/services/web/server/src/simcore_service_webserver/workspaces/_groups_repository.py b/services/web/server/src/simcore_service_webserver/workspaces/_groups_repository.py index d14127d5b37..5f3ab1963ba 100644 --- a/services/web/server/src/simcore_service_webserver/workspaces/_groups_repository.py +++ b/services/web/server/src/simcore_service_webserver/workspaces/_groups_repository.py @@ -1,9 +1,3 @@ -""" Database API - - - Adds a layer to the postgres API with a focus on the projects comments - -""" - import logging from datetime import datetime diff --git a/services/web/server/src/simcore_service_webserver/workspaces/_groups_service.py b/services/web/server/src/simcore_service_webserver/workspaces/_groups_service.py index 37737e73590..37c84b3682a 100644 --- a/services/web/server/src/simcore_service_webserver/workspaces/_groups_service.py +++ b/services/web/server/src/simcore_service_webserver/workspaces/_groups_service.py @@ -8,9 +8,9 @@ from models_library.workspaces import UserWorkspaceWithAccessRights, WorkspaceID from pydantic import BaseModel, ConfigDict -from ..users import api as users_api +from ..users import api as users_service from . import _groups_repository as workspaces_groups_db -from . import _workspaces_repository as workspaces_db +from . 
import _workspaces_repository as workspaces_workspaces_repository from ._groups_repository import WorkspaceGroupGetDB from ._workspaces_service import check_user_workspace_access from .errors import WorkspaceAccessForbiddenError @@ -80,10 +80,10 @@ async def list_workspace_groups_by_user_and_workspace( permission="read", ) - workspace_groups_db: list[ - WorkspaceGroupGetDB - ] = await workspaces_groups_db.list_workspace_groups( - app=app, workspace_id=workspace_id + workspace_groups_db: list[WorkspaceGroupGetDB] = ( + await workspaces_groups_db.list_workspace_groups( + app=app, workspace_id=workspace_id + ) ) workspace_groups_api: list[WorkspaceGroupGet] = [ @@ -98,10 +98,10 @@ async def list_workspace_groups_with_read_access_by_workspace( *, workspace_id: WorkspaceID, ) -> list[WorkspaceGroupGet]: - workspace_groups_db: list[ - WorkspaceGroupGetDB - ] = await workspaces_groups_db.list_workspace_groups( - app=app, workspace_id=workspace_id + workspace_groups_db: list[WorkspaceGroupGetDB] = ( + await workspaces_groups_db.list_workspace_groups( + app=app, workspace_id=workspace_id + ) ) workspace_groups_api: list[WorkspaceGroupGet] = [ @@ -125,7 +125,7 @@ async def update_workspace_group( product_name: ProductName, ) -> WorkspaceGroupGet: workspace: UserWorkspaceWithAccessRights = ( - await workspaces_db.get_workspace_for_user( + await workspaces_workspaces_repository.get_workspace_for_user( app=app, user_id=user_id, workspace_id=workspace_id, @@ -137,7 +137,7 @@ async def update_workspace_group( reason=f"User does not have write access to workspace {workspace_id}" ) if workspace.owner_primary_gid == group_id: - user: dict = await users_api.get_user(app, user_id) + user: dict = await users_service.get_user(app, user_id) if user["primary_gid"] != workspace.owner_primary_gid: # Only the owner of the workspace can modify the owner group raise WorkspaceAccessForbiddenError( @@ -169,9 +169,9 @@ async def delete_workspace_group( group_id: GroupID, product_name: ProductName, ) -> None: - user: dict = await users_api.get_user(app, user_id=user_id) + user: dict = await users_service.get_user(app, user_id=user_id) workspace: UserWorkspaceWithAccessRights = ( - await workspaces_db.get_workspace_for_user( + await workspaces_workspaces_repository.get_workspace_for_user( app=app, user_id=user_id, workspace_id=workspace_id, diff --git a/services/web/server/src/simcore_service_webserver/workspaces/_trash_rest.py b/services/web/server/src/simcore_service_webserver/workspaces/_trash_rest.py index fd7b708c1dd..41776bc57a6 100644 --- a/services/web/server/src/simcore_service_webserver/workspaces/_trash_rest.py +++ b/services/web/server/src/simcore_service_webserver/workspaces/_trash_rest.py @@ -9,7 +9,7 @@ from .._meta import API_VTAG as VTAG from ..login.decorators import get_user_id, login_required -from ..products.api import get_product_name +from ..products import products_web from ..security.decorators import permission_required from . 
import _trash_services from ._common.exceptions_handlers import handle_plugin_requests_exceptions @@ -27,7 +27,7 @@ @handle_plugin_requests_exceptions async def trash_workspace(request: web.Request): user_id = get_user_id(request) - product_name = get_product_name(request) + product_name = products_web.get_product_name(request) path_params = parse_request_path_parameters_as(WorkspacesPathParams, request) query_params: WorkspaceTrashQueryParams = parse_request_query_parameters_as( WorkspaceTrashQueryParams, request @@ -50,7 +50,7 @@ async def trash_workspace(request: web.Request): @handle_plugin_requests_exceptions async def untrash_workspace(request: web.Request): user_id = get_user_id(request) - product_name = get_product_name(request) + product_name = products_web.get_product_name(request) path_params = parse_request_path_parameters_as(WorkspacesPathParams, request) await _trash_services.untrash_workspace( diff --git a/services/web/server/src/simcore_service_webserver/workspaces/_workspaces_repository.py b/services/web/server/src/simcore_service_webserver/workspaces/_workspaces_repository.py index e62efae0a10..63f014bb85d 100644 --- a/services/web/server/src/simcore_service_webserver/workspaces/_workspaces_repository.py +++ b/services/web/server/src/simcore_service_webserver/workspaces/_workspaces_repository.py @@ -1,9 +1,3 @@ -""" Database API - - - Adds a layer to the postgres API with a focus on the projects comments - -""" - import logging from typing import cast diff --git a/services/web/server/src/simcore_service_webserver/workspaces/_workspaces_service.py b/services/web/server/src/simcore_service_webserver/workspaces/_workspaces_service.py index e87dc72d054..e96e7937eb4 100644 --- a/services/web/server/src/simcore_service_webserver/workspaces/_workspaces_service.py +++ b/services/web/server/src/simcore_service_webserver/workspaces/_workspaces_service.py @@ -13,7 +13,7 @@ ) from pydantic import NonNegativeInt -from ..projects._db_utils import PermissionStr +from ..projects._projects_repository_legacy_utils import PermissionStr from ..users.api import get_user from . 
import _workspaces_repository as db from .errors import WorkspaceAccessForbiddenError diff --git a/services/web/server/tests/conftest.py b/services/web/server/tests/conftest.py index 2ff0c5c2a0a..a833d401b4d 100644 --- a/services/web/server/tests/conftest.py +++ b/services/web/server/tests/conftest.py @@ -41,7 +41,9 @@ from simcore_service_webserver.projects._crud_api_create import ( OVERRIDABLE_DOCUMENT_KEYS, ) -from simcore_service_webserver.projects._groups_db import update_or_insert_project_group +from simcore_service_webserver.projects._groups_repository import ( + update_or_insert_project_group, +) from simcore_service_webserver.projects.models import ProjectDict from simcore_service_webserver.utils import to_datetime from tenacity.asyncio import AsyncRetrying diff --git a/services/web/server/tests/integration/01/test_garbage_collection.py b/services/web/server/tests/integration/01/test_garbage_collection.py index 62075ff6ba0..43b89e073b8 100644 --- a/services/web/server/tests/integration/01/test_garbage_collection.py +++ b/services/web/server/tests/integration/01/test_garbage_collection.py @@ -41,7 +41,9 @@ from simcore_service_webserver.groups.api import add_user_in_group from simcore_service_webserver.login.plugin import setup_login from simcore_service_webserver.projects._crud_api_delete import get_scheduled_tasks -from simcore_service_webserver.projects._groups_db import update_or_insert_project_group +from simcore_service_webserver.projects._groups_repository import ( + update_or_insert_project_group, +) from simcore_service_webserver.projects.models import ProjectDict from simcore_service_webserver.projects.plugin import setup_projects from simcore_service_webserver.resource_manager.plugin import setup_resource_manager @@ -198,7 +200,7 @@ async def _fake_background_task(app: web.Application): await asyncio.sleep(0.1) return mocker.patch( - "simcore_service_webserver.garbage_collector.plugin.run_background_task", + "simcore_service_webserver.garbage_collector.plugin._tasks_core.run_background_task", side_effect=_fake_background_task, ) diff --git a/services/web/server/tests/integration/conftest.py b/services/web/server/tests/integration/conftest.py index c6575d80e21..a66e1e4bec6 100644 --- a/services/web/server/tests/integration/conftest.py +++ b/services/web/server/tests/integration/conftest.py @@ -62,7 +62,7 @@ def webserver_environ( # version that loads only the subsystems under test. For that reason, # the test webserver is built up in the webserver_service fixture that runs # on the host.
- EXCLUDED_SERVICES = ["dask-scheduler", "director"] + EXCLUDED_SERVICES = ["dask-scheduler", "director", "sto-worker"] services_with_published_ports = [ name for name in core_services diff --git a/services/web/server/tests/unit/conftest.py b/services/web/server/tests/unit/conftest.py index b322655c20c..4c6dd952f46 100644 --- a/services/web/server/tests/unit/conftest.py +++ b/services/web/server/tests/unit/conftest.py @@ -10,10 +10,10 @@ from collections.abc import Callable, Iterable from pathlib import Path from typing import Any -from unittest.mock import MagicMock import pytest import yaml +from pytest_mock import MockFixture, MockType from pytest_simcore.helpers.webserver_projects import empty_project_data from simcore_service_webserver.application_settings_utils import AppConfigDict @@ -62,7 +62,7 @@ def activity_data(fake_data_dir: Path) -> Iterable[dict[str, Any]]: @pytest.fixture -def mock_orphaned_services(mocker) -> MagicMock: +def mock_orphaned_services(mocker: MockFixture) -> MockType: return mocker.patch( "simcore_service_webserver.garbage_collector._core.remove_orphaned_services", return_value="", @@ -70,9 +70,19 @@ def mock_orphaned_services(mocker) -> MagicMock: @pytest.fixture -def disable_gc_manual_guest_users(mocker): +def disable_gc_manual_guest_users(mocker: MockFixture) -> None: """Disable to avoid an almost instant cleanup of GUEST users with their projects""" mocker.patch( "simcore_service_webserver.garbage_collector._core.remove_users_manually_marked_as_guests", return_value=None, ) + + +@pytest.fixture +def disabled_setup_garbage_collector(mocker: MockFixture) -> MockType: + # WARNING: add it BEFORE `client` to have effect + return mocker.patch( + "simcore_service_webserver.application.setup_garbage_collector", + autospec=True, + return_value=False, + ) diff --git a/services/web/server/tests/unit/isolated/conftest.py b/services/web/server/tests/unit/isolated/conftest.py index 77a4b7ca567..eccad058e53 100644 --- a/services/web/server/tests/unit/isolated/conftest.py +++ b/services/web/server/tests/unit/isolated/conftest.py @@ -102,6 +102,8 @@ def mock_env_devel_environment( monkeypatch, envs={ "WEBSERVER_DEV_FEATURES_ENABLED": "1", + "TRACING_OPENTELEMETRY_COLLECTOR_ENDPOINT": "null", + "TRACING_OPENTELEMETRY_COLLECTOR_PORT": "null", }, ) @@ -251,7 +253,7 @@ def mocked_login_required(mocker: MockerFixture): ) mocker.patch( - "simcore_service_webserver.login.decorators.get_product_name", + "simcore_service_webserver.login.decorators.products_web.get_product_name", spec=True, return_value="osparc", ) diff --git a/services/web/server/tests/unit/isolated/notifications/test_rabbitmq_consumers.py b/services/web/server/tests/unit/isolated/notifications/test_rabbitmq_consumers.py index 4a9bc655df2..20d47155339 100644 --- a/services/web/server/tests/unit/isolated/notifications/test_rabbitmq_consumers.py +++ b/services/web/server/tests/unit/isolated/notifications/test_rabbitmq_consumers.py @@ -42,6 +42,7 @@ "progress_type": ProgressType.SERVICE_OUTPUTS_PULLING.value, "progress_report": { "actual_value": 0.4, + "attempt": 0, "total": 1.0, "unit": None, "message": None, @@ -65,6 +66,7 @@ "progress_type": ProgressType.PROJECT_CLOSING.value, "progress_report": { "actual_value": 0.4, + "attempt": 0, "total": 1.0, "unit": None, "message": None, diff --git a/services/web/server/tests/unit/isolated/products/conftest.py b/services/web/server/tests/unit/isolated/products/conftest.py new file mode 100644 index 00000000000..8fe754e9307 --- /dev/null +++ 
b/services/web/server/tests/unit/isolated/products/conftest.py @@ -0,0 +1,48 @@ +# pylint: disable=redefined-outer-name +# pylint: disable=unused-argument +# pylint: disable=unused-variable +# pylint: disable=too-many-arguments + + +import json +import re +from typing import Any + +import pytest +from faker import Faker +from models_library.products import ProductName +from pytest_simcore.helpers.faker_factories import random_product +from simcore_postgres_database.models.products import products as products_table +from simcore_service_webserver.constants import FRONTEND_APP_DEFAULT +from sqlalchemy import String +from sqlalchemy.dialects import postgresql + + +@pytest.fixture(scope="session") +def product_name() -> ProductName: + return ProductName(FRONTEND_APP_DEFAULT) + + +@pytest.fixture +def product_db_server_defaults() -> dict[str, Any]: + server_defaults = {} + for c in products_table.columns: + if c.server_default is not None: + if isinstance(c.type, String): + server_defaults[c.name] = c.server_default.arg + elif isinstance(c.type, postgresql.JSONB): + m = re.match(r"^'(.+)'::jsonb$", c.server_default.arg.text) + if m: + server_defaults[c.name] = json.loads(m.group(1)) + return server_defaults + + +@pytest.fixture +def fake_product_from_db( + faker: Faker, product_name: ProductName, product_db_server_defaults: dict[str, Any] +) -> dict[str, Any]: + return random_product( + name=product_name, + fake=faker, + **product_db_server_defaults, + ) diff --git a/services/web/server/tests/unit/isolated/test_products_middlewares.py b/services/web/server/tests/unit/isolated/products/test_products_middlewares.py similarity index 74% rename from services/web/server/tests/unit/isolated/test_products_middlewares.py rename to services/web/server/tests/unit/isolated/products/test_products_middlewares.py index 8dbf517492d..08dc4f6e013 100644 --- a/services/web/server/tests/unit/isolated/test_products_middlewares.py +++ b/services/web/server/tests/unit/isolated/products/test_products_middlewares.py @@ -8,43 +8,44 @@ import pytest from aiohttp import web from aiohttp.test_utils import make_mocked_request +from faker import Faker +from pytest_simcore.helpers.faker_factories import random_product from servicelib.aiohttp import status from servicelib.rest_constants import X_PRODUCT_NAME_HEADER -from simcore_postgres_database.models.products import LOGIN_SETTINGS_DEFAULT -from simcore_postgres_database.webserver_models import products -from simcore_service_webserver.products._events import _set_app_state -from simcore_service_webserver.products._middlewares import discover_product_middleware -from simcore_service_webserver.products._model import Product -from simcore_service_webserver.products.api import get_product_name +from simcore_service_webserver.products import products_web +from simcore_service_webserver.products._web_events import _set_app_state +from simcore_service_webserver.products._web_middlewares import ( + discover_product_middleware, +) +from simcore_service_webserver.products.models import Product from simcore_service_webserver.statics._constants import FRONTEND_APP_DEFAULT from yarl import URL -@pytest.fixture() -def mock_postgres_product_table(): - # NOTE: try here your product's host_regex before adding them in the database! 
- column_defaults: dict[str, Any] = { - c.name: f"{c.server_default.arg}" for c in products.columns if c.server_default - } - - column_defaults["login_settings"] = LOGIN_SETTINGS_DEFAULT +@pytest.fixture +def mock_product_db_get_data( + faker: Faker, product_db_server_defaults: dict[str, Any] +) -> list[dict[str, Any]]: _SUBDOMAIN_PREFIX = r"[\w-]+\." return [ - dict( + random_product( name="osparc", host_regex=rf"^({_SUBDOMAIN_PREFIX})*osparc[\.-]", - **column_defaults, + fake=faker, + **product_db_server_defaults, ), - dict( + random_product( name="s4l", host_regex=rf"^({_SUBDOMAIN_PREFIX})*(s4l|sim4life)[\.-]", - **column_defaults, + fake=faker, + **product_db_server_defaults, ), - dict( + random_product( name="tis", host_regex=rf"^({_SUBDOMAIN_PREFIX})*(tis|^ti-solutions)[\.-]", + fake=faker, vendor={ "name": "ACME", "address": "sesame street", @@ -52,18 +53,20 @@ def mock_postgres_product_table(): "url": "https://acme.com", "forum_url": "https://forum.acme.com", }, - **column_defaults, + **product_db_server_defaults, ), ] @pytest.fixture -def mock_app(mock_postgres_product_table: dict[str, Any]) -> web.Application: +def mock_app(mock_product_db_get_data: list[dict[str, Any]]) -> web.Application: app = web.Application() app_products: dict[str, Product] = { - entry["name"]: Product(**entry) for entry in mock_postgres_product_table + product_db_get["name"]: Product.model_validate(product_db_get) + for product_db_get in mock_product_db_get_data } + default_product_name = next(iter(app_products.keys())) _set_app_state(app, app_products, default_product_name) @@ -124,5 +127,5 @@ async def _mock_handler(_request: web.Request): response = await discover_product_middleware(mock_request, _mock_handler) # checks - assert get_product_name(mock_request) == expected_product + assert products_web.get_product_name(mock_request) == expected_product assert response.status == status.HTTP_200_OK diff --git a/services/web/server/tests/unit/isolated/products/test_products_model.py b/services/web/server/tests/unit/isolated/products/test_products_model.py new file mode 100644 index 00000000000..291383be932 --- /dev/null +++ b/services/web/server/tests/unit/isolated/products/test_products_model.py @@ -0,0 +1,187 @@ +# pylint: disable=redefined-outer-name +# pylint: disable=unused-argument +# pylint: disable=unused-variable +# pylint: disable=too-many-arguments + + +import re +from typing import Any + +import pytest +import simcore_service_webserver.products +import sqlalchemy as sa +from faker import Faker +from models_library.basic_regex import TWILIO_ALPHANUMERIC_SENDER_ID_RE +from models_library.products import ProductName +from pydantic import BaseModel, ValidationError +from pytest_simcore.helpers.faker_factories import random_product +from pytest_simcore.pydantic_models import ( + assert_validation_model, + walk_model_examples_in_package, +) +from simcore_postgres_database.models.products import products as products_table +from simcore_service_webserver.products.models import Product + + +@pytest.mark.parametrize( + "model_cls, example_name, example_data", + walk_model_examples_in_package(simcore_service_webserver.products), +) +def test_all_products_models_examples( + model_cls: type[BaseModel], example_name: str, example_data: Any +): + model_instance = assert_validation_model( + model_cls, example_name=example_name, example_data=example_data + ) + + # Some extra checks for Products + if isinstance(model_instance, Product): + assert model_instance.to_statics() + if "registration_email_template" in 
example_data: + assert model_instance.get_template_name_for("registration_email.jinja2") + + +def test_product_to_static(): + + product = Product.model_validate(Product.model_json_schema()["examples"][0]) + assert product.to_statics() == { + "displayName": "o²S²PARC", + "supportEmail": "support@osparc.io", + } + + product = Product.model_validate(Product.model_json_schema()["examples"][2]) + + assert product.to_statics() == { + "displayName": "o²S²PARC FOO", + "supportEmail": "foo@osparcf.io", + "vendor": { + "copyright": "© ACME correcaminos", + "name": "ACME", + "url": "https://acme.com", + "license_url": "https://acme.com/license", + "invitation_form": True, + }, + "issues": [ + { + "label": "github", + "login_url": "https://github.com/ITISFoundation/osparc-simcore", + "new_url": "https://github.com/ITISFoundation/osparc-simcore/issues/new/choose", + }, + { + "label": "fogbugz", + "login_url": "https://fogbugz.com/login", + "new_url": "https://fogbugz.com/new?project=123", + }, + ], + "manuals": [ + {"label": "main", "url": "doc.acme.com"}, + {"label": "z43", "url": "yet-another-manual.acme.com"}, + ], + "support": [ + {"kind": "forum", "label": "forum", "url": "forum.acme.com"}, + {"kind": "email", "label": "email", "email": "more-support@acme.com"}, + {"kind": "web", "label": "web-form", "url": "support.acme.com"}, + ], + "isPaymentEnabled": False, + } + + +def test_product_host_regex_with_spaces(): + data = Product.model_json_schema()["examples"][2] + + # with leading and trailing spaces and uppercase (tests anystr_strip_whitespace ) + data["support_email"] = " fOO@BaR.COM " + + # with leading trailing spaces (tests validator("host_regex", pre=True)) + expected = r"([\.-]{0,1}osparc[\.-])".strip() + data["host_regex"] = expected + " " + + # parsing should strip all whitespaces and normalize email + product = Product.model_validate(data) + + assert product.host_regex.pattern == expected + assert product.host_regex.search("osparc.bar.com") + + assert product.support_email == "foo@bar.com" + + +def test_safe_load_empty_blanks_on_string_cols_from_db( + fake_product_from_db: dict[str, Any] +): + nullable_strings_column_names = [ + c.name + for c in products_table.columns + if isinstance(c.type, sa.String) and c.nullable + ] + + fake_product_from_db.update( + {name: " " * len(name) for name in nullable_strings_column_names} + ) + + product = Product.model_validate(fake_product_from_db) + + assert product.model_dump(include=set(nullable_strings_column_names)) == { + name: None for name in nullable_strings_column_names + } + + +@pytest.mark.parametrize( + "expected_product_name", + [ + "osparc", + "s4l", + "s4lacad", + "s4ldesktop", + "s4ldesktopacad", + "s4lengine", + "s4llite", + "tiplite", + "tis", + ], +) +def test_product_name_needs_front_end( + faker: Faker, + expected_product_name: ProductName, + product_db_server_defaults: dict[str, Any], +): + product_from_db = random_product( + name=expected_product_name, + fake=faker, + **product_db_server_defaults, + ) + product = Product.model_validate(product_from_db) + assert product.name == expected_product_name + + +def test_product_name_invalid(fake_product_from_db: dict[str, Any]): + # Test with an invalid name + fake_product_from_db.update(name="invalid name") + with pytest.raises(ValidationError): + Product.model_validate(fake_product_from_db) + + +def test_twilio_sender_id_is_truncated(fake_product_from_db: dict[str, Any]): + fake_product_from_db.update(short_name=None, display_name="very long name" * 12) + product = 
Product.model_validate(fake_product_from_db) + + assert re.match( + TWILIO_ALPHANUMERIC_SENDER_ID_RE, product.twilio_alpha_numeric_sender_id + ) + + +def test_template_names_from_file(fake_product_from_db: dict[str, Any]): + fake_product_from_db.update(registration_email_template="some_template_name_id") + product = Product.model_validate(fake_product_from_db) + + assert ( + product.get_template_name_for(filename="registration_email.jinja2") + == "some_template_name_id" + ) + assert product.get_template_name_for(filename="other_template.jinja2") is None + + fake_product_from_db.update(registration_email_template=None) + product = Product.model_validate(fake_product_from_db) + assert ( + product.get_template_name_for(filename="registration_email_template.jinja2") + is None + ) diff --git a/services/web/server/tests/unit/isolated/test_application_settings.py b/services/web/server/tests/unit/isolated/test_application_settings.py index 5aae772d2d9..7ead40a3c27 100644 --- a/services/web/server/tests/unit/isolated/test_application_settings.py +++ b/services/web/server/tests/unit/isolated/test_application_settings.py @@ -3,13 +3,16 @@ # pylint:disable=no-name-in-module import json +from typing import Annotated import pytest from aiohttp import web from common_library.json_serialization import json_dumps -from pydantic import HttpUrl, TypeAdapter +from pydantic import Field, HttpUrl, TypeAdapter +from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict from pytest_simcore.helpers.typing_env import EnvVarsDict from simcore_service_webserver.application_settings import ( + _X_DEV_FEATURE_FLAG, APP_SETTINGS_KEY, ApplicationSettings, setup_settings, @@ -61,19 +64,24 @@ def test_settings_to_client_statics(app_settings: ApplicationSettings): # special alias assert statics["stackName"] == "master-simcore" - assert statics["pluginsDisabled"] == [] + assert statics["pluginsDisabled"] == [ + "WEBSERVER_META_MODELING", + "WEBSERVER_VERSION_CONTROL", + ] def test_settings_to_client_statics_plugins( mock_webserver_service_environment: EnvVarsDict, monkeypatch: pytest.MonkeyPatch ): - disable_plugins = {"WEBSERVER_EXPORTER", "WEBSERVER_SCICRUNCH"} + disable_plugins = { + "WEBSERVER_EXPORTER", + "WEBSERVER_SCICRUNCH", + "WEBSERVER_META_MODELING", + "WEBSERVER_VERSION_CONTROL", + } for name in disable_plugins: monkeypatch.setenv(name, "null") - monkeypatch.setenv("WEBSERVER_VERSION_CONTROL", "0") - disable_plugins.add("WEBSERVER_VERSION_CONTROL") - monkeypatch.setenv("WEBSERVER_FOLDERS", "0") disable_plugins.add("WEBSERVER_FOLDERS") @@ -84,7 +92,7 @@ def test_settings_to_client_statics_plugins( assert settings.WEBSERVER_LOGIN - assert statics["webserverLicenses"] == settings.WEBSERVER_LICENSES + assert "webserverLicenses" not in statics assert ( statics["webserverLogin"]["LOGIN_ACCOUNT_DELETION_RETENTION_DAYS"] @@ -106,29 +114,34 @@ def test_settings_to_client_statics_plugins( @pytest.mark.parametrize("is_dev_feature_enabled", [True, False]) -@pytest.mark.parametrize( - "plugin_name", - ["WEBSERVER_META_MODELING", "WEBSERVER_VERSION_CONTROL"], - # NOTE: this is the list in _enable_only_if_dev_features_allowed -) def test_disabled_plugins_settings_to_client_statics( is_dev_feature_enabled: bool, mock_webserver_service_environment: EnvVarsDict, monkeypatch: pytest.MonkeyPatch, - plugin_name: str, ): - monkeypatch.setenv( - "WEBSERVER_DEV_FEATURES_ENABLED", f"{is_dev_feature_enabled}".lower() + setenvs_from_dict( + monkeypatch, + { + "WEBSERVER_DEV_FEATURES_ENABLED": f"{is_dev_feature_enabled}".lower(), 
+ "TEST_FOO": "1", + "TEST_BAR": "42", + }, ) - settings = ApplicationSettings.create_from_envs() - statics = settings.to_client_statics() + class DevSettings(ApplicationSettings): + TEST_FOO: Annotated[bool, Field(json_schema_extra={_X_DEV_FEATURE_FLAG: True})] + TEST_BAR: Annotated[ + int | None, Field(json_schema_extra={_X_DEV_FEATURE_FLAG: True}) + ] + + settings = DevSettings.create_from_envs() - # checks whether it is shown to the front-end depending on the value of WEBSERVER_DEV_FEATURES_ENABLED if is_dev_feature_enabled: - assert plugin_name not in set(statics["pluginsDisabled"]) + assert settings.TEST_FOO is True + assert settings.TEST_BAR == 42 else: - assert plugin_name in set(statics["pluginsDisabled"]) + assert settings.TEST_FOO is False + assert settings.TEST_BAR is None @pytest.mark.filterwarnings("error") @@ -138,3 +151,37 @@ def test_avoid_sensitive_info_in_public(app_settings: ApplicationSettings): assert not any("token" in key for key in app_settings.public_dict()) assert not any("secret" in key for key in app_settings.public_dict()) assert not any("private" in key for key in app_settings.public_dict()) + + +def test_backwards_compatibility_with_bool_env_vars_turned_into_objects( + monkeypatch: pytest.MonkeyPatch, + mock_webserver_service_environment: EnvVarsDict, +): + # Sometimes we turn `WEBSERVER_VAR: bool` into `WEBSERVER_VAR: VarSettings` + with monkeypatch.context() as patch: + patch.setenv("WEBSERVER_LICENSES", "true") + + settings = ApplicationSettings.create_from_envs() + assert settings.WEBSERVER_LICENSES is True + + with monkeypatch.context() as patch: + patch.setenv("WEBSERVER_LICENSES", "{}") + patch.setenv("LICENSES_ITIS_VIP_SYNCER_ENABLED", "1") + patch.setenv("LICENSES_ITIS_VIP_API_URL", "https://some-api/{category}") + patch.setenv( + "LICENSES_ITIS_VIP_CATEGORIES", + '{"HumanWholeBody": "Humans", "HumanBodyRegion": "Humans (Region)", "AnimalWholeBody": "Animal"}', + ) + + settings = ApplicationSettings.create_from_envs() + assert settings.WEBSERVER_LICENSES is not None + assert not isinstance(settings.WEBSERVER_LICENSES, bool) + assert settings.WEBSERVER_LICENSES.LICENSES_ITIS_VIP + assert settings.WEBSERVER_LICENSES.LICENSES_ITIS_VIP.LICENSES_ITIS_VIP_API_URL + assert settings.WEBSERVER_LICENSES.LICENSES_ITIS_VIP_SYNCER_ENABLED + + with monkeypatch.context() as patch: + patch.setenv("WEBSERVER_LICENSES", "null") + + settings = ApplicationSettings.create_from_envs() + assert settings.WEBSERVER_LICENSES is None diff --git a/services/web/server/tests/unit/isolated/test_catalog_models.py b/services/web/server/tests/unit/isolated/test_catalog_models.py index ec82b0ab367..2a3fb5ad3d8 100644 --- a/services/web/server/tests/unit/isolated/test_catalog_models.py +++ b/services/web/server/tests/unit/isolated/test_catalog_models.py @@ -9,8 +9,10 @@ import pytest from pint import UnitRegistry from pytest_benchmark.fixture import BenchmarkFixture -from simcore_service_webserver.catalog._api_units import replace_service_input_outputs -from simcore_service_webserver.catalog._handlers import RESPONSE_MODEL_POLICY +from simcore_service_webserver.catalog._controller_rest import RESPONSE_MODEL_POLICY +from simcore_service_webserver.catalog._units_service import ( + replace_service_input_outputs, +) @pytest.fixture(params=["UnitRegistry", None]) diff --git a/services/web/server/tests/unit/isolated/test_catalog_setup.py b/services/web/server/tests/unit/isolated/test_catalog_setup.py index 2fdd2e336ef..f16efc1695e 100644 --- 
a/services/web/server/tests/unit/isolated/test_catalog_setup.py +++ b/services/web/server/tests/unit/isolated/test_catalog_setup.py @@ -9,7 +9,7 @@ from aiohttp.test_utils import TestClient from servicelib.aiohttp.application import create_safe_application from simcore_service_webserver._meta import api_version_prefix -from simcore_service_webserver.catalog.client import to_backend_service +from simcore_service_webserver.catalog import catalog_service from simcore_service_webserver.catalog.plugin import setup_catalog from yarl import URL @@ -35,6 +35,8 @@ def test_url_translation(): assert rel_url.path.startswith(f"/{api_version_prefix}/catalog") api_target_origin = URL("http://catalog:8000") - api_target_url = to_backend_service(rel_url, api_target_origin, "v5") + api_target_url = catalog_service.to_backend_service( + rel_url, api_target_origin, "v5" + ) assert str(api_target_url) == "http://catalog:8000/v5/dags/123?page_size=6" diff --git a/services/web/server/tests/unit/isolated/test_catalog_api_units.py b/services/web/server/tests/unit/isolated/test_catalog_units_service.py similarity index 98% rename from services/web/server/tests/unit/isolated/test_catalog_api_units.py rename to services/web/server/tests/unit/isolated/test_catalog_units_service.py index 39d1824a775..3fa04b3a933 100644 --- a/services/web/server/tests/unit/isolated/test_catalog_api_units.py +++ b/services/web/server/tests/unit/isolated/test_catalog_units_service.py @@ -9,7 +9,7 @@ from models_library.function_services_catalog.services import demo_units from models_library.services import ServiceInput, ServiceOutput from pint import UnitRegistry -from simcore_service_webserver.catalog._api_units import can_connect +from simcore_service_webserver.catalog._units_service import can_connect def _create_port_data(schema: dict[str, Any]): diff --git a/services/web/server/tests/unit/isolated/test_diagnostics_healthcheck.py b/services/web/server/tests/unit/isolated/test_diagnostics_healthcheck.py index 01626715b48..924f5d55575 100644 --- a/services/web/server/tests/unit/isolated/test_diagnostics_healthcheck.py +++ b/services/web/server/tests/unit/isolated/test_diagnostics_healthcheck.py @@ -19,8 +19,8 @@ from pytest_simcore.helpers.typing_env import EnvVarsDict from servicelib.aiohttp import status from servicelib.aiohttp.application import create_safe_application -from simcore_service_webserver._constants import APP_SETTINGS_KEY from simcore_service_webserver.application_settings import setup_settings +from simcore_service_webserver.constants import APP_SETTINGS_KEY from simcore_service_webserver.diagnostics._healthcheck import ( HEALTH_LATENCY_PROBE, HealthCheckError, diff --git a/services/web/server/tests/unit/isolated/test_garbage_collector_core.py b/services/web/server/tests/unit/isolated/test_garbage_collector_core.py index 5205f7fa4da..b944b0d93c1 100644 --- a/services/web/server/tests/unit/isolated/test_garbage_collector_core.py +++ b/services/web/server/tests/unit/isolated/test_garbage_collector_core.py @@ -24,9 +24,9 @@ from simcore_service_webserver.resource_manager.registry import UserSessionDict from simcore_service_webserver.users.exceptions import UserNotFoundError -MODULE_GC_CORE_ORPHANS: Final[ - str -] = "simcore_service_webserver.garbage_collector._core_orphans" +MODULE_GC_CORE_ORPHANS: Final[str] = ( + "simcore_service_webserver.garbage_collector._core_orphans" +) @pytest.fixture @@ -91,7 +91,7 @@ async def mock_is_node_id_present_in_any_project_workbench( @pytest.fixture async def 
mock_list_dynamic_services(mocker: MockerFixture) -> mock.AsyncMock: return mocker.patch( - f"{MODULE_GC_CORE_ORPHANS}.dynamic_scheduler_api.list_dynamic_services", + f"{MODULE_GC_CORE_ORPHANS}.dynamic_scheduler_service.list_dynamic_services", autospec=True, return_value=[], ) @@ -100,7 +100,7 @@ async def mock_list_dynamic_services(mocker: MockerFixture) -> mock.AsyncMock: @pytest.fixture async def mock_stop_dynamic_service(mocker: MockerFixture) -> mock.AsyncMock: return mocker.patch( - f"{MODULE_GC_CORE_ORPHANS}.dynamic_scheduler_api.stop_dynamic_service", + f"{MODULE_GC_CORE_ORPHANS}.dynamic_scheduler_service.stop_dynamic_service", autospec=True, ) diff --git a/services/web/server/tests/unit/isolated/test_licenses_settings.py b/services/web/server/tests/unit/isolated/test_licenses_settings.py new file mode 100644 index 00000000000..6205f16c05f --- /dev/null +++ b/services/web/server/tests/unit/isolated/test_licenses_settings.py @@ -0,0 +1,32 @@ +# pylint: disable=protected-access +# pylint: disable=redefined-outer-name +# pylint: disable=too-many-arguments +# pylint: disable=unused-argument +# pylint: disable=unused-variable + + +import datetime + +import pytest +from pytest_simcore.helpers.typing_env import EnvVarsDict +from simcore_service_webserver.licenses.settings import LicensesSettings + + +def test_itis_vip_syncer_settings( + mock_webserver_service_environment: EnvVarsDict, monkeypatch: pytest.MonkeyPatch +): + + assert "LICENSES_ITIS_VIP_SYNCER_ENABLED" in mock_webserver_service_environment + assert "LICENSES_ITIS_VIP_SYNCER_PERIODICITY" in mock_webserver_service_environment + + settings = LicensesSettings.create_from_envs() + assert settings + + with monkeypatch.context() as patch: + patch.setenv("LICENSES_ITIS_VIP_SYNCER_PERIODICITY", "1D02:03:04") + + settings: LicensesSettings = LicensesSettings.create_from_envs() + assert settings + assert settings.LICENSES_ITIS_VIP_SYNCER_PERIODICITY == datetime.timedelta( + days=1, hours=2, minutes=3, seconds=4 + ) diff --git a/services/web/server/tests/unit/isolated/test_products_model.py b/services/web/server/tests/unit/isolated/test_products_model.py deleted file mode 100644 index 147540adce6..00000000000 --- a/services/web/server/tests/unit/isolated/test_products_model.py +++ /dev/null @@ -1,100 +0,0 @@ -# pylint: disable=redefined-outer-name -# pylint: disable=unused-argument -# pylint: disable=unused-variable - - -from typing import Any - -import pytest -from common_library.json_serialization import json_dumps -from pydantic import BaseModel -from simcore_service_webserver.products._db import Product - - -@pytest.mark.parametrize( - "model_cls", - [ - Product, - ], -) -def test_product_examples( - model_cls: type[BaseModel], model_cls_examples: dict[str, dict[str, Any]] -): - for name, example in model_cls_examples.items(): - print(name, ":", json_dumps(example, indent=1)) - model_instance = model_cls(**example) - assert model_instance, f"Failed with {name}" - - if isinstance(model_instance, Product): - assert model_instance.to_statics() - - if "registration_email_template" in example: - assert model_instance.get_template_name_for("registration_email.jinja2") - - -def test_product_to_static(): - - product = Product.model_validate( - Product.model_config["json_schema_extra"]["examples"][0] - ) - assert product.to_statics() == { - "displayName": "o²S²PARC", - "supportEmail": "support@osparc.io", - } - - product = Product.model_validate( - Product.model_config["json_schema_extra"]["examples"][2] - ) - - assert product.to_statics() == 
{ - "displayName": "o²S²PARC FOO", - "supportEmail": "foo@osparcf.io", - "vendor": { - "copyright": "© ACME correcaminos", - "name": "ACME", - "url": "https://acme.com", - "license_url": "https://acme.com/license", - "invitation_form": True, - }, - "issues": [ - { - "label": "github", - "login_url": "https://github.com/ITISFoundation/osparc-simcore", - "new_url": "https://github.com/ITISFoundation/osparc-simcore/issues/new/choose", - }, - { - "label": "fogbugz", - "login_url": "https://fogbugz.com/login", - "new_url": "https://fogbugz.com/new?project=123", - }, - ], - "manuals": [ - {"label": "main", "url": "doc.acme.com"}, - {"label": "z43", "url": "yet-another-manual.acme.com"}, - ], - "support": [ - {"kind": "forum", "label": "forum", "url": "forum.acme.com"}, - {"kind": "email", "label": "email", "email": "more-support@acme.com"}, - {"kind": "web", "label": "web-form", "url": "support.acme.com"}, - ], - "isPaymentEnabled": False, - } - - -def test_product_host_regex_with_spaces(): - data = Product.model_config["json_schema_extra"]["examples"][2] - - # with leading and trailing spaces and uppercase (tests anystr_strip_whitespace ) - data["support_email"] = " fOO@BaR.COM " - - # with leading trailing spaces (tests validator("host_regex", pre=True)) - expected = r"([\.-]{0,1}osparc[\.-])".strip() - data["host_regex"] = expected + " " - - # parsing should strip all whitespaces and normalize email - product = Product.model_validate(data) - - assert product.host_regex.pattern == expected - assert product.host_regex.search("osparc.bar.com") - - assert product.support_email == "foo@bar.com" diff --git a/services/web/server/tests/unit/isolated/test_projects__db_utils.py b/services/web/server/tests/unit/isolated/test_projects__db_utils.py index 2a5203ae137..06631e73a4b 100644 --- a/services/web/server/tests/unit/isolated/test_projects__db_utils.py +++ b/services/web/server/tests/unit/isolated/test_projects__db_utils.py @@ -16,11 +16,7 @@ from models_library.projects_nodes import Node from models_library.services import ServiceKey from models_library.utils.fastapi_encoders import jsonable_encoder -from simcore_service_webserver.projects._db_utils import ( - DB_EXCLUSIVE_COLUMNS, - SCHEMA_NON_NULL_KEYS, -) -from simcore_service_webserver.projects.db import ( +from simcore_service_webserver.projects._projects_repository_legacy import ( ProjectAccessRights, assemble_array_groups, convert_to_db_names, @@ -29,6 +25,10 @@ patch_workbench, update_workbench, ) +from simcore_service_webserver.projects._projects_repository_legacy_utils import ( + DB_EXCLUSIVE_COLUMNS, + SCHEMA_NON_NULL_KEYS, +) from simcore_service_webserver.projects.exceptions import ( NodeNotFoundError, ProjectInvalidUsageError, diff --git a/services/web/server/tests/unit/isolated/test_projects__nodes_api.py b/services/web/server/tests/unit/isolated/test_projects__nodes_api.py index e7e4bd8a926..610575306e6 100644 --- a/services/web/server/tests/unit/isolated/test_projects__nodes_api.py +++ b/services/web/server/tests/unit/isolated/test_projects__nodes_api.py @@ -2,8 +2,8 @@ from uuid import uuid4 import pytest -from models_library.api_schemas_storage import FileMetaDataGet -from simcore_service_webserver.projects._nodes_api import ( +from models_library.api_schemas_storage.storage_schemas import FileMetaDataGet +from simcore_service_webserver.projects._nodes_service import ( _SUPPORTED_PREVIEW_FILE_EXTENSIONS, _FileWithThumbnail, _get_files_with_thumbnails, diff --git a/services/web/server/tests/unit/isolated/test_rest.py 
b/services/web/server/tests/unit/isolated/test_rest.py index 31fdba39eac..335350c5468 100644 --- a/services/web/server/tests/unit/isolated/test_rest.py +++ b/services/web/server/tests/unit/isolated/test_rest.py @@ -59,7 +59,7 @@ async def test_frontend_config( assert client.app # avoids having to start database etc... mocker.patch( - "simcore_service_webserver.rest._handlers.get_product_name", + "simcore_service_webserver.rest._handlers.products_web.get_product_name", spec=True, return_value="osparc", ) diff --git a/services/web/server/tests/unit/isolated/test_security_api.py b/services/web/server/tests/unit/isolated/test_security_api.py index b913d95e5a7..dd10eb4fee5 100644 --- a/services/web/server/tests/unit/isolated/test_security_api.py +++ b/services/web/server/tests/unit/isolated/test_security_api.py @@ -25,9 +25,11 @@ from simcore_postgres_database.models.users import UserRole from simcore_service_webserver._meta import API_VTAG from simcore_service_webserver.login.decorators import login_required -from simcore_service_webserver.products._events import _set_app_state -from simcore_service_webserver.products._middlewares import discover_product_middleware -from simcore_service_webserver.products._model import Product +from simcore_service_webserver.products._web_events import _set_app_state +from simcore_service_webserver.products._web_middlewares import ( + discover_product_middleware, +) +from simcore_service_webserver.products.models import Product from simcore_service_webserver.security.api import ( check_user_authorized, clean_auth_policy_cache, diff --git a/services/web/server/tests/unit/isolated/test_storage_schemas.py b/services/web/server/tests/unit/isolated/test_storage_schemas.py index 31ea4260bb4..e3a9174fce7 100644 --- a/services/web/server/tests/unit/isolated/test_storage_schemas.py +++ b/services/web/server/tests/unit/isolated/test_storage_schemas.py @@ -3,13 +3,15 @@ # pylint: disable=unused-variable # pylint: disable=too-many-arguments -import json from typing import Any import pytest import simcore_service_webserver.storage.schemas from pydantic import BaseModel -from pytest_simcore.pydantic_models import iter_model_examples_in_module +from pytest_simcore.pydantic_models import ( + assert_validation_model, + iter_model_examples_in_module, +) @pytest.mark.parametrize( @@ -17,7 +19,8 @@ iter_model_examples_in_module(simcore_service_webserver.storage.schemas), ) def test_model_examples( - model_cls: type[BaseModel], example_name: int, example_data: Any + model_cls: type[BaseModel], example_name: str, example_data: Any ): - print(example_name, ":", json.dumps(example_data)) - assert model_cls.model_validate(example_data) + assert_validation_model( + model_cls, example_name=example_name, example_data=example_data + ) diff --git a/services/web/server/tests/unit/isolated/test_studies_dispatcher_projects_permalinks.py b/services/web/server/tests/unit/isolated/test_studies_dispatcher_projects_permalinks.py index aa9f3c44e5b..9a182352efa 100644 --- a/services/web/server/tests/unit/isolated/test_studies_dispatcher_projects_permalinks.py +++ b/services/web/server/tests/unit/isolated/test_studies_dispatcher_projects_permalinks.py @@ -48,7 +48,6 @@ def app_environment( "WEBSERVER_GARBAGE_COLLECTOR": "null", "WEBSERVER_GROUPS": "1", "WEBSERVER_LOGIN": "null", - "WEBSERVER_META_MODELING": "0", "WEBSERVER_PAYMENTS": "null", "WEBSERVER_PRODUCTS": "1", "WEBSERVER_PUBLICATIONS": "0", @@ -59,7 +58,6 @@ def app_environment( "WEBSERVER_SOCKETIO": "0", "WEBSERVER_TAGS": "1", 
"WEBSERVER_TRACING": "null", - "WEBSERVER_VERSION_CONTROL": "0", "WEBSERVER_WALLETS": "0", "STUDIES_ACCESS_ANONYMOUS_ALLOWED": "1", }, diff --git a/services/web/server/tests/unit/isolated/test_users_models.py b/services/web/server/tests/unit/isolated/test_users_models.py index e568d0d2ddd..e61f543e211 100644 --- a/services/web/server/tests/unit/isolated/test_users_models.py +++ b/services/web/server/tests/unit/isolated/test_users_models.py @@ -1,11 +1,10 @@ +# pylint: disable=protected-access # pylint: disable=redefined-outer-name +# pylint: disable=too-many-arguments # pylint: disable=unused-argument # pylint: disable=unused-variable -# pylint: disable=too-many-arguments -from copy import deepcopy from datetime import UTC, datetime -from pprint import pformat from typing import Any import pytest @@ -16,40 +15,12 @@ MyProfilePrivacyGet, ) from models_library.generics import Envelope -from models_library.users import UserThirdPartyToken from models_library.utils.fastapi_encoders import jsonable_encoder -from pydantic import BaseModel from servicelib.rest_constants import RESPONSE_MODEL_POLICY -from simcore_postgres_database.models.users import UserRole +from simcore_postgres_database import utils_users from simcore_service_webserver.users._common.models import ToUserUpdateDB -@pytest.mark.parametrize( - "model_cls", - [MyProfileGet, UserThirdPartyToken], -) -def test_user_models_examples( - model_cls: type[BaseModel], model_cls_examples: dict[str, Any] -): - for name, example in model_cls_examples.items(): - print(name, ":", pformat(example)) - model_instance = model_cls(**example) - assert model_instance, f"Failed with {name}" - - model_enveloped = Envelope[model_cls].from_data( - model_instance.model_dump(by_alias=True) - ) - model_array_enveloped = Envelope[list[model_cls]].from_data( - [ - model_instance.model_dump(by_alias=True), - model_instance.model_dump(by_alias=True), - ] - ) - - assert model_enveloped.error is None - assert model_array_enveloped.error is None - - @pytest.fixture def fake_profile_get(faker: Faker) -> MyProfileGet: fake_profile: dict[str, Any] = faker.simple_profile() @@ -62,7 +33,9 @@ def fake_profile_get(faker: Faker) -> MyProfileGet: user_name=fake_profile["username"], login=fake_profile["mail"], role="USER", - privacy=MyProfilePrivacyGet(hide_fullname=True, hide_email=True), + privacy=MyProfilePrivacyGet( + hide_fullname=True, hide_email=True, hide_username=False + ), preferences={}, ) @@ -98,18 +71,6 @@ def test_auto_compute_gravatar__deprecated(fake_profile_get: MyProfileGet): assert data["preferences"] == profile.preferences -@pytest.mark.parametrize("user_role", [u.name for u in UserRole]) -def test_profile_get_role(user_role: str): - for example in MyProfileGet.model_json_schema()["examples"]: - data = deepcopy(example) - data["role"] = user_role - m1 = MyProfileGet(**data) - - data["role"] = UserRole(user_role) - m2 = MyProfileGet(**data) - assert m1 == m2 - - def test_parsing_output_of_get_user_profile(): result_from_db_query_and_composition = { "id": 1, @@ -119,7 +80,7 @@ def test_parsing_output_of_get_user_profile(): "last_name": "", "role": "Guest", "gravatar_id": "9d5e02c75fcd4bce1c8861f219f7f8a5", - "privacy": {"hide_email": True, "hide_fullname": False}, + "privacy": {"hide_email": True, "hide_fullname": False, "hide_username": False}, "groups": { "me": { "gid": 2, @@ -166,7 +127,7 @@ def test_mapping_update_models_from_rest_to_db(): { "first_name": "foo", "userName": "foo1234", - "privacy": {"hideFullname": False}, + "privacy": {"hideFullname": 
False, "hideUsername": True}, } ) @@ -178,4 +139,14 @@ def test_mapping_update_models_from_rest_to_db(): "first_name": "foo", "name": "foo1234", "privacy_hide_fullname": False, + "privacy_hide_username": True, } + + +def test_utils_user_generates_valid_myprofile_patch(): + username = utils_users._generate_username_from_email("xi@email.com") # noqa: SLF001 + + MyProfilePatch.model_validate({"userName": username}) + MyProfilePatch.model_validate( + {"userName": utils_users.generate_alternative_username(username)} + ) diff --git a/services/web/server/tests/unit/isolated/test_utils_rate_limiting.py b/services/web/server/tests/unit/isolated/test_utils_rate_limiting.py index c6da8adf736..1389e9a154d 100644 --- a/services/web/server/tests/unit/isolated/test_utils_rate_limiting.py +++ b/services/web/server/tests/unit/isolated/test_utils_rate_limiting.py @@ -81,13 +81,15 @@ async def test_global_rate_limit_route(requests_per_second: float, client: TestC msg = [] for i, task in enumerate(tasks): + while not task.done(): await asyncio.sleep(0.01) + assert not task.cancelled() assert not task.exception() msg.append( ( - "request # %2d" % i, + f"request # {i:2d}", f"status={task.result().status}", f"retry-after={task.result().headers.get('Retry-After')}", ) diff --git a/services/web/server/tests/unit/with_dbs/01/storage/conftest.py b/services/web/server/tests/unit/with_dbs/01/storage/conftest.py new file mode 100644 index 00000000000..051a2d23423 --- /dev/null +++ b/services/web/server/tests/unit/with_dbs/01/storage/conftest.py @@ -0,0 +1,320 @@ +# pylint: disable=redefined-outer-name +# pylint: disable=unused-argument +# pylint: disable=unused-variable +# pylint: disable=too-many-arguments + +import logging +import random +from collections.abc import Iterator +from pathlib import Path +from threading import Thread +from typing import Annotated +from urllib.parse import quote + +import pytest +import uvicorn +from faker import Faker +from fastapi import APIRouter, Depends, FastAPI, Request, status +from fastapi_pagination import add_pagination, create_page +from fastapi_pagination.cursor import CursorPage, CursorParams +from models_library.api_schemas_storage.storage_schemas import ( + DatasetMetaDataGet, + FileLocation, + FileMetaDataGet, + FileMetaDataGetv010, + FileUploadCompleteResponse, + FileUploadCompletionBody, + FileUploadSchema, + LinkType, + PathMetaDataGet, +) +from models_library.generics import Envelope +from models_library.projects import ProjectID +from models_library.projects_nodes_io import LocationID, StorageFileID +from models_library.users import UserID +from pydantic import AnyUrl, TypeAdapter +from pytest_simcore.helpers.logging_tools import log_context +from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict +from servicelib.utils import unused_port +from yarl import URL + + +@pytest.fixture(scope="session") +def storage_vtag() -> str: + return "v9" + + +@pytest.fixture(scope="module") +def fake_storage_app(storage_vtag: str) -> FastAPI: # noqa: C901 + app = FastAPI(debug=True) + add_pagination(app) + + router = APIRouter( + prefix=f"/{storage_vtag}", + ) + + @router.get("/") + async def _root(request: Request): + return {"message": "Hello World"} + + @router.get( + "/locations", + status_code=status.HTTP_200_OK, + response_model=Envelope[list[FileLocation]], + ) + async def _list_storage_locations(user_id: UserID, request: Request): + assert "json_schema_extra" in FileLocation.model_config + + return Envelope[list[FileLocation]]( + data=[ + 
FileLocation.model_validate(e) + for e in FileLocation.model_json_schema()["examples"] + ] + ) + + @router.get( + "/locations/{location_id}/paths", + response_model=CursorPage[PathMetaDataGet], + ) + async def _list_paths( + page_params: Annotated[CursorParams, Depends()], + # dsm: Annotated[BaseDataManager, Depends(get_data_manager)], + user_id: UserID, + file_filter: Path | None = None, + ): + assert user_id + assert "json_schema_extra" in PathMetaDataGet.model_config + + example_index = len(file_filter.parts) if file_filter else 0 + assert example_index < len( + PathMetaDataGet.model_json_schema()["examples"] + ), "fake server unable to serve this example" + chosen_example = PathMetaDataGet.model_json_schema()["examples"][example_index] + + return create_page( + random.randint(3, 15) + * [PathMetaDataGet.model_validate(chosen_example)], # noqa: S311 + params=page_params, + next_=None, + ) + + @router.get( + "/locations/{location_id}/files/metadata", + response_model=Envelope[list[FileMetaDataGet]], + ) + async def _list_files_metadata( + user_id: UserID, + request: Request, + uuid_filter: str = "", + project_id: ProjectID | None = None, + expand_dirs: bool = True, + ): + assert "json_schema_extra" in FileMetaDataGet.model_config + + if uuid_filter: + return Envelope[list[FileMetaDataGet]]( + data=random.sample( + [ + FileMetaDataGet.model_validate(e) + for e in FileMetaDataGet.model_json_schema()["examples"] + ], + 2, + ) + ) + return Envelope[list[FileMetaDataGet]]( + data=[ + FileMetaDataGet.model_validate(e) + for e in FileMetaDataGet.model_json_schema()["examples"] + ] + ) + + @router.get( + "/locations/{location_id}/files/{file_id:path}/metadata", + response_model=Envelope[FileMetaDataGet] + | Envelope[FileMetaDataGetv010] + | Envelope[dict], + ) + async def _get_file_metadata(user_id: UserID, request: Request): + assert "json_schema_extra" in FileMetaDataGet.model_config + + return Envelope[FileMetaDataGet]( + data=random.choice( # noqa: S311 + [ + FileMetaDataGet.model_validate(e) + for e in FileMetaDataGet.model_json_schema()["examples"] + ] + ) + ) + + @router.get( + "/locations/{location_id}/datasets", + response_model=Envelope[list[DatasetMetaDataGet]], + ) + async def _list_datasets_metadata(user_id: UserID, request: Request): + assert "json_schema_extra" in DatasetMetaDataGet.model_config + + return Envelope[list[DatasetMetaDataGet]]( + data=[ + DatasetMetaDataGet.model_validate(e) + for e in DatasetMetaDataGet.model_json_schema()["examples"] + ] + ) + + @router.get( + "/locations/{location_id}/datasets/{dataset_id}/metadata", + response_model=Envelope[list[FileMetaDataGet]], + ) + async def _list_dataset_files_metadata(user_id: UserID, request: Request): + assert "json_schema_extra" in FileMetaDataGet.model_config + + return Envelope[list[FileMetaDataGet]]( + data=[ + FileMetaDataGet.model_validate(e) + for e in FileMetaDataGet.model_json_schema()["examples"] + ] + ) + + @router.put( + "/locations/{location_id}/files/{file_id:path}", + response_model=Envelope[FileUploadSchema], + ) + async def upload_file( + user_id: UserID, + location_id: LocationID, + file_id: StorageFileID, + request: Request, + link_type: LinkType = LinkType.PRESIGNED, + ): + assert "json_schema_extra" in FileUploadSchema.model_config + + abort_url = ( + URL(f"{request.url}") + .with_path( + quote( + request.app.url_path_for( + "abort_upload_file", + location_id=f"{location_id}", + file_id=file_id, + ), + safe=":/", + ), + encoded=True, + ) + .with_query(user_id=user_id) + ) + + complete_url = (
URL(f"{request.url}") + .with_path( + quote( + request.app.url_path_for( + "complete_upload_file", + location_id=f"{location_id}", + file_id=file_id, + ), + safe=":/", + ), + encoded=True, + ) + .with_query(user_id=user_id) + ) + response = FileUploadSchema.model_validate( + random.choice( # noqa: S311 + FileUploadSchema.model_json_schema()["examples"] + ) + ) + response.links.abort_upload = TypeAdapter(AnyUrl).validate_python( + f"{abort_url}" + ) + response.links.complete_upload = TypeAdapter(AnyUrl).validate_python( + f"{complete_url}" + ) + + return Envelope[FileUploadSchema](data=response) + + @router.post( + "/locations/{location_id}/files/{file_id:path}:complete", + response_model=Envelope[FileUploadCompleteResponse], + status_code=status.HTTP_202_ACCEPTED, + ) + async def complete_upload_file( + user_id: UserID, + location_id: LocationID, + file_id: StorageFileID, + body: FileUploadCompletionBody, + request: Request, + ): ... + + @router.post( + "/locations/{location_id}/files/{file_id:path}:abort", + status_code=status.HTTP_204_NO_CONTENT, + ) + async def abort_upload_file( + user_id: UserID, + location_id: LocationID, + file_id: StorageFileID, + request: Request, + ): ... + + app.include_router(router) + + return app + + +@pytest.fixture(scope="module") +def fake_storage_server( + storage_vtag: str, + fake_storage_app: FastAPI, + # app_environment: EnvVarsDict, +) -> Iterator[URL]: + storage_port = unused_port() + with log_context( + logging.INFO, + msg=f"with fake storage server on 127.0.0.1:{storage_port}/{storage_vtag}", + ) as ctx: + config = uvicorn.Config( + fake_storage_app, + host="127.0.0.1", + port=storage_port, + log_level="error", + ) + server = uvicorn.Server(config) + + thread = Thread(target=server.run) + thread.daemon = True + thread.start() + + ctx.logger.info( + "health at : %s", + f"http://127.0.0.1:{storage_port}/{storage_vtag}", + ) + + yield URL(f"http://127.0.0.1:{storage_port}") + + server.should_exit = True + thread.join(timeout=10) + + +@pytest.fixture +def app_environment( + storage_vtag: str, + fake_storage_server: URL, + app_environment: dict[str, str], + monkeypatch: pytest.MonkeyPatch, +) -> dict[str, str]: + # NOTE: overrides app_environment + + return app_environment | setenvs_from_dict( + monkeypatch, + { + "STORAGE_PORT": f"{fake_storage_server.port}", + "STORAGE_VTAG": storage_vtag, + "WEBSERVER_DB_LISTENER": "0", + "WEBSERVER_GARBAGE_COLLECTOR": "null", + }, + ) + + +@pytest.fixture +def location_id(faker: Faker) -> LocationID: + return TypeAdapter(LocationID).validate_python(faker.pyint(min_value=0)) diff --git a/services/web/server/tests/unit/with_dbs/01/storage/test_storage.py b/services/web/server/tests/unit/with_dbs/01/storage/test_storage.py new file mode 100644 index 00000000000..3ea1ec40230 --- /dev/null +++ b/services/web/server/tests/unit/with_dbs/01/storage/test_storage.py @@ -0,0 +1,642 @@ +# pylint: disable=redefined-outer-name +# pylint: disable=unused-argument +# pylint: disable=unused-variable +# pylint: disable=too-many-arguments + +from collections.abc import Callable +from pathlib import Path +from typing import Any, Final +from urllib.parse import quote + +import pytest +from aiohttp.test_utils import TestClient +from faker import Faker +from fastapi_pagination.cursor import CursorPage +from models_library.api_schemas_long_running_tasks.tasks import ( + TaskGet, + TaskResult, + TaskStatus, +) +from models_library.api_schemas_rpc_async_jobs.async_jobs import ( + AsyncJobAbort, + AsyncJobGet, + AsyncJobId, + AsyncJobResult, + 
AsyncJobStatus, +) +from models_library.api_schemas_rpc_async_jobs.exceptions import ( + JobAbortedError, + JobError, + JobMissingError, + JobNotDoneError, + JobSchedulerError, +) +from models_library.api_schemas_storage.data_export_async_jobs import ( + AccessRightError, + InvalidFileIdentifierError, +) +from models_library.api_schemas_storage.storage_schemas import ( + DatasetMetaDataGet, + FileLocation, + FileMetaDataGet, + FileUploadSchema, + PathMetaDataGet, +) +from models_library.api_schemas_webserver._base import OutputSchema +from models_library.api_schemas_webserver.storage import ( + DataExportPost, +) +from models_library.generics import Envelope +from models_library.progress_bar import ProgressReport +from models_library.projects_nodes_io import LocationID, StorageFileID +from pydantic import TypeAdapter +from pytest_mock import MockerFixture +from pytest_simcore.helpers.assert_checks import assert_status +from pytest_simcore.helpers.webserver_login import UserInfoDict +from servicelib.aiohttp import status +from servicelib.rabbitmq.rpc_interfaces.async_jobs import async_jobs +from servicelib.rabbitmq.rpc_interfaces.async_jobs.async_jobs import ( + submit, +) +from servicelib.rabbitmq.rpc_interfaces.storage.data_export import start_data_export +from simcore_postgres_database.models.users import UserRole +from yarl import URL + +API_VERSION = "v0" + + +PREFIX = "/" + API_VERSION + "/storage" + +_faker = Faker() +_user_roles: Final[list[UserRole]] = [ + UserRole.GUEST, + UserRole.USER, + UserRole.TESTER, + UserRole.PRODUCT_OWNER, + UserRole.ADMIN, +] + + +@pytest.mark.parametrize( + "user_role,expected", + [ + (UserRole.ANONYMOUS, status.HTTP_401_UNAUTHORIZED), + (UserRole.GUEST, status.HTTP_200_OK), + (UserRole.USER, status.HTTP_200_OK), + (UserRole.TESTER, status.HTTP_200_OK), + ], +) +async def test_list_storage_locations( + client: TestClient, + logged_user: dict[str, Any], + expected: int, +): + url = "/v0/storage/locations" + assert url.startswith(PREFIX) + + resp = await client.get(url) + data, error = await assert_status(resp, expected) + + if not error: + assert "json_schema_extra" in FileLocation.model_config + + assert len(data) == len(FileLocation.model_json_schema()["examples"]) + assert data == FileLocation.model_json_schema()["examples"] + + +@pytest.mark.parametrize( + "user_role,expected", + [ + (UserRole.ANONYMOUS, status.HTTP_401_UNAUTHORIZED), + (UserRole.GUEST, status.HTTP_200_OK), + (UserRole.USER, status.HTTP_200_OK), + (UserRole.TESTER, status.HTTP_200_OK), + ], +) +async def test_list_storage_paths( + client: TestClient, + logged_user: dict[str, Any], + expected: int, + location_id: LocationID, +): + assert client.app + url = client.app.router["list_storage_paths"].url_for(location_id=f"{location_id}") + + resp = await client.get(f"{url}") + data, error = await assert_status(resp, expected) + if not error: + TypeAdapter(CursorPage[PathMetaDataGet]).validate_python(data) + + +_faker = Faker() + + +@pytest.fixture +def create_storage_paths_rpc_client_mock( + mocker: MockerFixture, +) -> Callable[[str, Any], None]: + def _(method: str, result_or_exception: Any): + def side_effect(*args, **kwargs): + if isinstance(result_or_exception, Exception): + raise result_or_exception + + return result_or_exception + + for fct in (f"servicelib.rabbitmq.rpc_interfaces.storage.paths.{method}",): + mocker.patch(fct, side_effect=side_effect) + + return _ + + +@pytest.mark.parametrize( + "user_role,expected", + [ + (UserRole.ANONYMOUS, status.HTTP_401_UNAUTHORIZED), + 
(UserRole.GUEST, status.HTTP_202_ACCEPTED), + (UserRole.USER, status.HTTP_202_ACCEPTED), + (UserRole.TESTER, status.HTTP_202_ACCEPTED), + ], +) +@pytest.mark.parametrize( + "backend_result_or_exception", + [ + AsyncJobGet(job_id=AsyncJobId(f"{_faker.uuid4()}")), + ], + ids=lambda x: type(x).__name__, +) +async def test_compute_path_size( + client: TestClient, + logged_user: dict[str, Any], + expected: int, + location_id: LocationID, + faker: Faker, + create_storage_paths_rpc_client_mock: Callable[[str, Any], None], + backend_result_or_exception: Any, +): + create_storage_paths_rpc_client_mock( + submit.__name__, + backend_result_or_exception, + ) + + assert client.app + url = client.app.router["compute_path_size"].url_for( + location_id=f"{location_id}", + path=quote(faker.file_path(absolute=False), safe=""), + ) + + resp = await client.post(f"{url}") + data, error = await assert_status(resp, expected) + if not error: + TypeAdapter(TaskGet).validate_python(data) + + +@pytest.mark.parametrize( + "user_role,expected", + [ + (UserRole.ANONYMOUS, status.HTTP_401_UNAUTHORIZED), + (UserRole.GUEST, status.HTTP_200_OK), + (UserRole.USER, status.HTTP_200_OK), + (UserRole.TESTER, status.HTTP_200_OK), + ], +) +async def test_list_datasets_metadata( + client: TestClient, + logged_user: dict[str, Any], + expected: int, +): + url = "/v0/storage/locations/0/datasets" + assert url.startswith(PREFIX) + assert client.app + _url = client.app.router["list_datasets_metadata"].url_for(location_id="0") + + assert url == str(_url) + + resp = await client.get(url) + data, error = await assert_status(resp, expected) + + if not error: + assert "json_schema_extra" in DatasetMetaDataGet.model_config + + assert len(data) == len(DatasetMetaDataGet.model_json_schema()["examples"]) + assert data == DatasetMetaDataGet.model_json_schema()["examples"] + + +@pytest.mark.parametrize( + "user_role,expected", + [ + (UserRole.ANONYMOUS, status.HTTP_401_UNAUTHORIZED), + (UserRole.GUEST, status.HTTP_200_OK), + (UserRole.USER, status.HTTP_200_OK), + (UserRole.TESTER, status.HTTP_200_OK), + ], +) +async def test_list_dataset_files_metadata( + client: TestClient, + logged_user: dict[str, Any], + expected: int, +): + url = "/v0/storage/locations/0/datasets/N:asdfsdf/metadata" + assert url.startswith(PREFIX) + assert client.app + _url = client.app.router["list_dataset_files_metadata"].url_for( + location_id="0", dataset_id="N:asdfsdf" + ) + + assert url == str(_url) + + resp = await client.get(url) + data, error = await assert_status(resp, expected) + + if not error: + assert "json_schema_extra" in FileMetaDataGet.model_config + + assert len(data) == len(FileMetaDataGet.model_json_schema()["examples"]) + assert data == [ + FileMetaDataGet.model_validate(e).model_dump(mode="json") + for e in FileMetaDataGet.model_json_schema()["examples"] + ] + + +@pytest.mark.parametrize( + "user_role,expected", + [ + (UserRole.ANONYMOUS, status.HTTP_401_UNAUTHORIZED), + (UserRole.GUEST, status.HTTP_200_OK), + (UserRole.USER, status.HTTP_200_OK), + (UserRole.TESTER, status.HTTP_200_OK), + ], +) +async def test_storage_file_meta( + client: TestClient, + logged_user: dict[str, Any], + expected: int, + faker: Faker, +): + # tests redirect of path with quotes in path + file_id = f"{faker.uuid4()}/{faker.uuid4()}/a/b/c/d/e/dat" + quoted_file_id = quote(file_id, safe="") + url = f"/v0/storage/locations/0/files/{quoted_file_id}/metadata" + + assert url.startswith(PREFIX) + + resp = await client.get(url) + data, error = await assert_status(resp, expected) + + 
if not error:
+        assert data
+        model = FileMetaDataGet.model_validate(data)
+        assert model
+
+
+@pytest.mark.parametrize(
+    "user_role,expected",
+    [
+        (UserRole.ANONYMOUS, status.HTTP_401_UNAUTHORIZED),
+        (UserRole.GUEST, status.HTTP_200_OK),
+        (UserRole.USER, status.HTTP_200_OK),
+        (UserRole.TESTER, status.HTTP_200_OK),
+    ],
+)
+async def test_storage_list_filter(
+    client: TestClient,
+    logged_user: dict[str, Any],
+    expected: int,
+):
+    # tests composition of 2 queries
+    file_id = "a/b/c/d/e/dat"
+    url = "/v0/storage/locations/0/files/metadata?uuid_filter={}".format(
+        quote(file_id, safe="")
+    )
+
+    assert url.startswith(PREFIX)
+
+    resp = await client.get(url)
+    data, error = await assert_status(resp, expected)
+
+    if not error:
+        assert len(data) == 2
+        for item in data:
+            model = FileMetaDataGet.model_validate(item)
+            assert model
+
+
+@pytest.fixture
+def file_id(faker: Faker) -> StorageFileID:
+    return TypeAdapter(StorageFileID).validate_python(
+        f"{faker.uuid4()}/{faker.uuid4()}/{faker.file_name()} with spaces().dat"
+    )
+
+
+@pytest.mark.parametrize(
+    "user_role,expected",
+    [
+        (UserRole.ANONYMOUS, status.HTTP_401_UNAUTHORIZED),
+        (UserRole.GUEST, status.HTTP_200_OK),
+        (UserRole.USER, status.HTTP_200_OK),
+        (UserRole.TESTER, status.HTTP_200_OK),
+    ],
+)
+async def test_upload_file(
+    client: TestClient,
+    logged_user: dict[str, Any],
+    expected: int,
+    file_id: StorageFileID,
+):
+    url = f"/v0/storage/locations/0/files/{quote(file_id, safe='')}"
+
+    assert url.startswith(PREFIX)
+
+    resp = await client.put(url)
+    data, error = await assert_status(resp, expected)
+    if not error:
+        assert data
+        file_upload_schema = FileUploadSchema.model_validate(data)
+
+        # let's abort
+        resp = await client.post(f"{file_upload_schema.links.abort_upload.path}")
+        data, error = await assert_status(resp, status.HTTP_204_NO_CONTENT)
+        assert not error
+        assert not data
+
+
+@pytest.fixture
+def create_storage_rpc_client_mock(
+    mocker: MockerFixture,
+) -> Callable[[str, str, Any], None]:
+    # patches `module.method` so that the call raises `result_or_exception`
+    # if it is an exception, else returns it as the RPC result
+    def _(module: str, method: str, result_or_exception: Any):
+        def side_effect(*args, **kwargs):
+            if isinstance(result_or_exception, Exception):
+                raise result_or_exception
+
+            return result_or_exception
+
+        for fct in (f"{module}.{method}",):
+            mocker.patch(fct, side_effect=side_effect)
+
+    return _
+
+
+@pytest.mark.parametrize("user_role", _user_roles)
+@pytest.mark.parametrize(
+    "backend_result_or_exception, expected_status",
+    [
+        (AsyncJobGet(job_id=AsyncJobId(f"{_faker.uuid4()}")), status.HTTP_202_ACCEPTED),
+        (
+            InvalidFileIdentifierError(file_id=Path("/my/file")),
+            status.HTTP_404_NOT_FOUND,
+        ),
+        (
+            AccessRightError(
+                user_id=_faker.pyint(min_value=0), file_id=Path("/my/file")
+            ),
+            status.HTTP_403_FORBIDDEN,
+        ),
+        (JobSchedulerError(exc=_faker.text()), status.HTTP_500_INTERNAL_SERVER_ERROR),
+    ],
+    ids=lambda x: type(x).__name__,
+)
+async def test_data_export(
+    user_role: UserRole,
+    logged_user: UserInfoDict,
+    client: TestClient,
+    create_storage_rpc_client_mock: Callable[[str, str, Any], None],
+    faker: Faker,
+    backend_result_or_exception: Any,
+    expected_status: int,
+):
+    create_storage_rpc_client_mock(
+        "simcore_service_webserver.storage._rest",
+        start_data_export.__name__,
+        backend_result_or_exception,
+    )
+
+    _body = DataExportPost(
+        paths=[f"{faker.uuid4()}/{faker.uuid4()}/{faker.file_name()}"]
+    )
+    response = await client.post(
+        f"/{API_VERSION}/storage/locations/0/export-data", data=_body.model_dump_json()
+    )
+    assert response.status == expected_status
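+    # a 202 means the export was accepted as a long-running task;
+    # the enveloped payload must then parse as a TaskGet handle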
+ if response.status == status.HTTP_202_ACCEPTED: + Envelope[TaskGet].model_validate(await response.json()) + + +@pytest.mark.parametrize("user_role", _user_roles) +@pytest.mark.parametrize( + "backend_result_or_exception, expected_status", + [ + ( + AsyncJobStatus( + job_id=AsyncJobId(f"{_faker.uuid4()}"), + progress=ProgressReport(actual_value=0.5, total=1.0), + done=False, + ), + status.HTTP_200_OK, + ), + (JobSchedulerError(exc=_faker.text()), status.HTTP_500_INTERNAL_SERVER_ERROR), + (JobMissingError(job_id=_faker.uuid4()), status.HTTP_404_NOT_FOUND), + ], + ids=lambda x: type(x).__name__, +) +async def test_get_async_jobs_status( + user_role: UserRole, + logged_user: UserInfoDict, + client: TestClient, + create_storage_rpc_client_mock: Callable[[str, str, Any], None], + backend_result_or_exception: Any, + expected_status: int, +): + _job_id = AsyncJobId(_faker.uuid4()) + create_storage_rpc_client_mock( + "simcore_service_webserver.tasks._rest", + f"async_jobs.{async_jobs.status.__name__}", + backend_result_or_exception, + ) + + response = await client.get(f"/{API_VERSION}/tasks/{_job_id}") + assert response.status == expected_status + if response.status == status.HTTP_200_OK: + response_body_data = ( + Envelope[TaskStatus].model_validate(await response.json()).data + ) + assert response_body_data is not None + + +@pytest.mark.parametrize("user_role", _user_roles) +@pytest.mark.parametrize( + "backend_result_or_exception, expected_status", + [ + ( + AsyncJobAbort(result=True, job_id=AsyncJobId(_faker.uuid4())), + status.HTTP_204_NO_CONTENT, + ), + (JobSchedulerError(exc=_faker.text()), status.HTTP_500_INTERNAL_SERVER_ERROR), + (JobMissingError(job_id=_faker.uuid4()), status.HTTP_404_NOT_FOUND), + ], + ids=lambda x: type(x).__name__, +) +async def test_abort_async_jobs( + user_role: UserRole, + logged_user: UserInfoDict, + client: TestClient, + create_storage_rpc_client_mock: Callable[[str, str, Any], None], + faker: Faker, + backend_result_or_exception: Any, + expected_status: int, +): + _job_id = AsyncJobId(faker.uuid4()) + create_storage_rpc_client_mock( + "simcore_service_webserver.tasks._rest", + f"async_jobs.{async_jobs.cancel.__name__}", + backend_result_or_exception, + ) + + response = await client.delete(f"/{API_VERSION}/tasks/{_job_id}") + assert response.status == expected_status + + +@pytest.mark.parametrize("user_role", _user_roles) +@pytest.mark.parametrize( + "backend_result_or_exception, expected_status", + [ + (JobNotDoneError(job_id=_faker.uuid4()), status.HTTP_404_NOT_FOUND), + (AsyncJobResult(result=None), status.HTTP_200_OK), + (JobError(job_id=_faker.uuid4()), status.HTTP_500_INTERNAL_SERVER_ERROR), + (JobAbortedError(job_id=_faker.uuid4()), status.HTTP_410_GONE), + (JobSchedulerError(exc=_faker.text()), status.HTTP_500_INTERNAL_SERVER_ERROR), + (JobMissingError(job_id=_faker.uuid4()), status.HTTP_404_NOT_FOUND), + ], + ids=lambda x: type(x).__name__, +) +async def test_get_async_job_result( + user_role: UserRole, + logged_user: UserInfoDict, + client: TestClient, + create_storage_rpc_client_mock: Callable[[str, str, Any], None], + faker: Faker, + backend_result_or_exception: Any, + expected_status: int, +): + _job_id = AsyncJobId(faker.uuid4()) + create_storage_rpc_client_mock( + "simcore_service_webserver.tasks._rest", + f"async_jobs.{async_jobs.result.__name__}", + backend_result_or_exception, + ) + + response = await client.get(f"/{API_VERSION}/tasks/{_job_id}/result") + assert response.status == expected_status + + +@pytest.mark.parametrize("user_role", 
_user_roles)
+@pytest.mark.parametrize(
+    "http_method, href, backend_method, backend_object, return_status, return_schema",
+    [
+        (
+            "GET",
+            "status_href",
+            async_jobs.status.__name__,
+            AsyncJobStatus(
+                job_id=AsyncJobId(_faker.uuid4()),
+                progress=ProgressReport(actual_value=0.5, total=1.0),
+                done=False,
+            ),
+            status.HTTP_200_OK,
+            TaskStatus,
+        ),
+        (
+            "DELETE",
+            "abort_href",
+            async_jobs.cancel.__name__,
+            AsyncJobAbort(result=True, job_id=AsyncJobId(_faker.uuid4())),
+            status.HTTP_204_NO_CONTENT,
+            None,
+        ),
+        (
+            "GET",
+            "result_href",
+            async_jobs.result.__name__,
+            AsyncJobResult(result=None),
+            status.HTTP_200_OK,
+            TaskResult,
+        ),
+    ],
+)
+async def test_get_async_job_links(
+    user_role: UserRole,
+    logged_user: UserInfoDict,
+    client: TestClient,
+    create_storage_rpc_client_mock: Callable[[str, str, Any], None],
+    faker: Faker,
+    http_method: str,
+    href: str,
+    backend_method: str,
+    backend_object: Any,
+    return_status: int,
+    return_schema: type[OutputSchema] | None,
+):
+    create_storage_rpc_client_mock(
+        "simcore_service_webserver.storage._rest",
+        start_data_export.__name__,
+        AsyncJobGet(job_id=AsyncJobId(f"{_faker.uuid4()}")),
+    )
+
+    _body = DataExportPost(
+        paths=[f"{faker.uuid4()}/{faker.uuid4()}/{faker.file_name()}"]
+    )
+    response = await client.post(
+        f"/{API_VERSION}/storage/locations/0/export-data", data=_body.model_dump_json()
+    )
+    assert response.status == status.HTTP_202_ACCEPTED
+    response_body_data = Envelope[TaskGet].model_validate(await response.json()).data
+    assert response_body_data is not None
+
+    # call each link in the TaskGet response and check that the expected
+    # status and model are returned
+    create_storage_rpc_client_mock(
+        "simcore_service_webserver.tasks._rest",
+        f"async_jobs.{backend_method}",
+        backend_object,
+    )
+    response = await client.request(
+        http_method, URL(getattr(response_body_data, href)).path
+    )
+    assert response.status == return_status
+    if return_schema:
+        Envelope[return_schema].model_validate(await response.json())
diff --git a/services/web/server/tests/unit/with_dbs/03/test_storage_handlers.py b/services/web/server/tests/unit/with_dbs/01/storage/test_storage_handlers.py
similarity index 67%
rename from services/web/server/tests/unit/with_dbs/03/test_storage_handlers.py
rename to services/web/server/tests/unit/with_dbs/01/storage/test_storage_handlers.py
index 694d49a998b..61909c8742d 100644
--- a/services/web/server/tests/unit/with_dbs/03/test_storage_handlers.py
+++ b/services/web/server/tests/unit/with_dbs/01/storage/test_storage_handlers.py
@@ -5,14 +5,11 @@
 import json
-import
urllib.parse from typing import Any import pytest -from aiohttp import web -from aiohttp.test_utils import TestClient, make_mocked_request -from faker import Faker -from models_library.api_schemas_storage import ( +from aiohttp.test_utils import TestClient +from models_library.api_schemas_storage.storage_schemas import ( FileUploadCompleteResponse, FileUploadLinks, FileUploadSchema, @@ -23,14 +20,7 @@ from pytest_simcore.helpers.typing_env import EnvVarsDict from pytest_simcore.helpers.webserver_login import UserInfoDict from servicelib.aiohttp.rest_responses import wrap_as_envelope -from servicelib.request_keys import RQT_USERID_KEY from simcore_postgres_database.models.users import UserRole -from simcore_service_webserver.application_settings import setup_settings -from simcore_service_webserver.storage._handlers import ( - _from_storage_url, - _to_storage_url, -) -from yarl import URL @pytest.fixture @@ -40,8 +30,6 @@ def app_environment( return app_environment | setenvs_from_dict( monkeypatch, { - "WEBSERVER_DB_LISTENER": "0", - "WEBSERVER_GARBAGE_COLLECTOR": "null", "STORAGE_HOST": "fake-storage", }, ) @@ -53,7 +41,7 @@ async def _resp(*args, **kwargs) -> tuple[Any, int]: return (wrap_as_envelope(data=expected_response), 200) mocker.patch( - "simcore_service_webserver.storage._handlers._forward_request_to_storage", + "simcore_service_webserver.storage._rest._forward_request_to_storage", autospec=True, side_effect=_resp, ) @@ -62,7 +50,7 @@ def _resolve(*args, **kwargs) -> AnyUrl: return TypeAdapter(AnyUrl).validate_python("http://private-url") mocker.patch( - "simcore_service_webserver.storage._handlers._from_storage_url", + "simcore_service_webserver.storage._rest._from_storage_url", autospec=True, side_effect=_resolve, ) @@ -175,48 +163,3 @@ async def test_openapi_regression_test( decoded_response = await response.json() assert decoded_response["error"] is None assert decoded_response["data"] is not None - - -def test_url_storage_resolver_helpers(faker: Faker, app_environment: EnvVarsDict): - - app = web.Application() - setup_settings(app) - - # NOTE: careful, first we need to encode the "/" in this file path. 
- # For that we need safe="" option - assert urllib.parse.quote("/") == "/" - assert urllib.parse.quote("/", safe="") == "%2F" - assert urllib.parse.quote("%2F", safe="") == "%252F" - - file_id = urllib.parse.quote(f"{faker.uuid4()}/{faker.uuid4()}/file.py", safe="") - assert "%2F" in file_id - assert "%252F" not in file_id - - url = URL(f"/v0/storage/locations/0/files/{file_id}:complete", encoded=True) - assert url.raw_parts[-1] == f"{file_id}:complete" - - web_request = make_mocked_request("GET", str(url), app=app) - web_request[RQT_USERID_KEY] = faker.pyint() - - # web -> storage - storage_url = _to_storage_url(web_request) - # Something like - # http://storage:123/v5/locations/0/files/e3e70...c07cd%2Ff7...55%2Ffile.py:complete?user_id=8376 - - assert storage_url.raw_parts[-1] == web_request.url.raw_parts[-1] - - assert storage_url.host == app_environment["STORAGE_HOST"] - assert storage_url.port == int(app_environment["STORAGE_PORT"]) - assert storage_url.query["user_id"] == str(web_request[RQT_USERID_KEY]) - - # storage -> web - web_url: AnyUrl = _from_storage_url( - web_request, TypeAdapter(AnyUrl).validate_python(f"{storage_url}") - ) - - assert storage_url.host != web_url.host - assert storage_url.port != web_url.port - - assert isinstance(storage_url, URL) # this is a bit inconvenient - assert isinstance(web_url, AnyUrl) - assert f"{web_url}" == f"{web_request.url}" diff --git a/services/web/server/tests/unit/with_dbs/01/test_api_keys.py b/services/web/server/tests/unit/with_dbs/01/test_api_keys.py index 85f63c42b96..cfd7b9f154a 100644 --- a/services/web/server/tests/unit/with_dbs/01/test_api_keys.py +++ b/services/web/server/tests/unit/with_dbs/01/test_api_keys.py @@ -10,19 +10,28 @@ from http.client import HTTPException import pytest +import tenacity from aiohttp.test_utils import TestClient from faker import Faker from models_library.products import ProductName +from pytest_mock import MockerFixture, MockType from pytest_simcore.helpers.assert_checks import assert_status +from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict +from pytest_simcore.helpers.typing_env import EnvVarsDict from pytest_simcore.helpers.webserver_login import NewUser, UserInfoDict from servicelib.aiohttp import status -from simcore_service_webserver.api_keys import _repository as repo -from simcore_service_webserver.api_keys._models import ApiKey -from simcore_service_webserver.api_keys._service import ( - get_or_create_api_key, - prune_expired_api_keys, +from simcore_service_webserver.api_keys import _repository, _service, api_keys_service +from simcore_service_webserver.api_keys.models import ApiKey +from simcore_service_webserver.application_settings import ( + ApplicationSettings, + get_application_settings, ) from simcore_service_webserver.db.models import UserRole +from tenacity import ( + retry_if_exception_type, + stop_after_delay, + wait_fixed, +) @pytest.fixture @@ -31,10 +40,11 @@ async def fake_user_api_keys( logged_user: UserInfoDict, osparc_product_name: ProductName, faker: Faker, -) -> AsyncIterable[list[int]]: +) -> AsyncIterable[list[ApiKey]]: assert client.app + api_keys: list[ApiKey] = [ - await repo.create_api_key( + await _repository.create_api_key( client.app, user_id=logged_user["id"], product_name=osparc_product_name, @@ -49,7 +59,7 @@ async def fake_user_api_keys( yield api_keys for api_key in api_keys: - await repo.delete_api_key( + await _repository.delete_api_key( client.app, api_key_id=api_key.id, user_id=logged_user["id"], @@ -74,11 +84,11 @@ def 
_get_user_access_parametrizations(expected_authed_status_code): _get_user_access_parametrizations(status.HTTP_200_OK), ) async def test_list_api_keys( + disabled_setup_garbage_collector: MockType, client: TestClient, logged_user: UserInfoDict, user_role: UserRole, expected: HTTPStatus, - disable_gc_manual_guest_users: None, ): resp = await client.get("/v0/auth/api-keys") data, errors = await assert_status(resp, expected) @@ -92,11 +102,11 @@ async def test_list_api_keys( _get_user_access_parametrizations(status.HTTP_200_OK), ) async def test_create_api_key( + disabled_setup_garbage_collector: MockType, client: TestClient, logged_user: UserInfoDict, user_role: UserRole, expected: HTTPStatus, - disable_gc_manual_guest_users: None, ): display_name = "foo" resp = await client.post("/v0/auth/api-keys", json={"displayName": display_name}) @@ -118,12 +128,12 @@ async def test_create_api_key( _get_user_access_parametrizations(status.HTTP_204_NO_CONTENT), ) async def test_delete_api_keys( + disabled_setup_garbage_collector: MockType, client: TestClient, fake_user_api_keys: list[ApiKey], logged_user: UserInfoDict, user_role: UserRole, expected: HTTPStatus, - disable_gc_manual_guest_users: None, ): resp = await client.delete("/v0/auth/api-keys/0") await assert_status(resp, expected) @@ -133,41 +143,58 @@ async def test_delete_api_keys( await assert_status(resp, expected) +EXPIRATION_WAIT_FACTOR = 1.2 + + @pytest.mark.parametrize( "user_role,expected", _get_user_access_parametrizations(status.HTTP_200_OK), ) async def test_create_api_key_with_expiration( + disabled_setup_garbage_collector: MockType, client: TestClient, logged_user: UserInfoDict, user_role: UserRole, expected: HTTPStatus, - disable_gc_manual_guest_users: None, + mocker: MockerFixture, ): assert client.app + # test gc is actually disabled + gc_prune_mock = mocker.patch( + "simcore_service_webserver.garbage_collector._tasks_api_keys.create_background_task_to_prune_api_keys", + spec=True, + ) + assert not gc_prune_mock.called + + expected_api_key = "foo" + # create api-keys with expiration interval expiration_interval = timedelta(seconds=1) resp = await client.post( "/v0/auth/api-keys", - json={"displayName": "foo", "expiration": expiration_interval.seconds}, + json={ + "displayName": expected_api_key, + "expiration": expiration_interval.seconds, + }, ) data, errors = await assert_status(resp, expected) if not errors: - assert data["displayName"] == "foo" + assert data["displayName"] == expected_api_key assert "apiKey" in data assert "apiSecret" in data # list created api-key resp = await client.get("/v0/auth/api-keys") data, _ = await assert_status(resp, expected) - assert [d["displayName"] for d in data] == ["foo"] + assert [d["displayName"] for d in data] == [expected_api_key] # wait for api-key for it to expire and force-run scheduled task - await asyncio.sleep(expiration_interval.seconds) - deleted = await prune_expired_api_keys(client.app) - assert deleted == ["foo"] + await asyncio.sleep(EXPIRATION_WAIT_FACTOR * expiration_interval.seconds) + + deleted = await api_keys_service.prune_expired_api_keys(client.app) + assert deleted == [expected_api_key] resp = await client.get("/v0/auth/api-keys") data, _ = await assert_status(resp, expected) @@ -175,6 +202,7 @@ async def test_create_api_key_with_expiration( async def test_get_or_create_api_key( + disabled_setup_garbage_collector: MockType, client: TestClient, ): async with NewUser( @@ -190,13 +218,15 @@ async def test_get_or_create_api_key( } # create once - created = await 
get_or_create_api_key(client.app, **options) + created = await _service.get_or_create_api_key(client.app, **options) assert created.display_name == "foo" assert created.api_key != created.api_secret # idempotent for _ in range(3): - assert await get_or_create_api_key(client.app, **options) == created + assert ( + await _service.get_or_create_api_key(client.app, **options) == created + ) @pytest.mark.parametrize( @@ -204,14 +234,58 @@ async def test_get_or_create_api_key( _get_user_access_parametrizations(status.HTTP_404_NOT_FOUND), ) async def test_get_not_existing_api_key( + disabled_setup_garbage_collector: MockType, client: TestClient, logged_user: UserInfoDict, user_role: UserRole, expected: HTTPException, - disable_gc_manual_guest_users: None, ): resp = await client.get("/v0/auth/api-keys/42") data, errors = await assert_status(resp, expected) if not errors: assert data is None + + +@pytest.fixture +async def app_environment( + app_environment: EnvVarsDict, + monkeypatch: pytest.MonkeyPatch, +) -> EnvVarsDict: + return app_environment | setenvs_from_dict( + monkeypatch, + { + "WEBSERVER_GARBAGE_COLLECTOR": '{"GARBAGE_COLLECTOR_INTERVAL_S": 30, "GARBAGE_COLLECTOR_PRUNE_APIKEYS_INTERVAL_S": 1}' + }, + ) + + +async def test_prune_expired_api_keys_task_is_triggered( + app_environment: EnvVarsDict, + mocker: MockerFixture, + client: TestClient, +): + assert app_environment["WEBSERVER_GARBAGE_COLLECTOR"] is not None + + delete_expired_spy = mocker.spy(_repository, "delete_expired_api_keys") + + assert client.app + + settings: ApplicationSettings = get_application_settings(client.app) + assert settings.WEBSERVER_GARBAGE_COLLECTOR + + assert not delete_expired_spy.called + + async for attempt in tenacity.AsyncRetrying( + stop=stop_after_delay( + timedelta( + seconds=EXPIRATION_WAIT_FACTOR + * settings.WEBSERVER_GARBAGE_COLLECTOR.GARBAGE_COLLECTOR_EXPIRED_USERS_CHECK_INTERVAL_S + ) + ), + wait=wait_fixed(1), + retry=retry_if_exception_type(AssertionError), + reraise=True, + ): + with attempt: + delete_expired_spy.assert_called() diff --git a/services/web/server/tests/unit/with_dbs/01/test_api_keys_rpc.py b/services/web/server/tests/unit/with_dbs/01/test_api_keys_rpc.py index aa45fd9fd2e..3053df38797 100644 --- a/services/web/server/tests/unit/with_dbs/01/test_api_keys_rpc.py +++ b/services/web/server/tests/unit/with_dbs/01/test_api_keys_rpc.py @@ -23,8 +23,8 @@ from settings_library.rabbit import RabbitSettings from simcore_postgres_database.models.users import UserRole from simcore_service_webserver.api_keys import _repository as repo -from simcore_service_webserver.api_keys._models import ApiKey from simcore_service_webserver.api_keys.errors import ApiKeyNotFoundError +from simcore_service_webserver.api_keys.models import ApiKey from simcore_service_webserver.application_settings import ApplicationSettings pytest_simcore_core_services_selection = [ diff --git a/services/web/server/tests/unit/with_dbs/01/test_catalog_handlers__pricing_plan.py b/services/web/server/tests/unit/with_dbs/01/test_catalog_handlers__pricing_plan.py index 35733d100e6..40440879da0 100644 --- a/services/web/server/tests/unit/with_dbs/01/test_catalog_handlers__pricing_plan.py +++ b/services/web/server/tests/unit/with_dbs/01/test_catalog_handlers__pricing_plan.py @@ -10,7 +10,7 @@ from aiohttp import web from aiohttp.test_utils import TestClient from models_library.api_schemas_resource_usage_tracker.pricing_plans import ( - PricingPlanGet, + RutPricingPlanGet, ) from models_library.utils.fastapi_encoders import 
jsonable_encoder from pytest_simcore.aioresponses_mocker import AioResponsesMock @@ -29,8 +29,8 @@ def mock_rut_api_responses( assert client.app settings: ResourceUsageTrackerSettings = get_plugin_settings(client.app) - service_pricing_plan_get = PricingPlanGet.model_validate( - PricingPlanGet.model_config["json_schema_extra"]["examples"][0], + service_pricing_plan_get = RutPricingPlanGet.model_validate( + RutPricingPlanGet.model_json_schema()["examples"][0], ) aioresponses_mocker.get( re.compile(f"^{settings.api_base_url}/services/+.+$"), diff --git a/services/web/server/tests/unit/with_dbs/01/test_catalog_handlers__services.py b/services/web/server/tests/unit/with_dbs/01/test_catalog_handlers__services.py index 96ada757900..e005192edae 100644 --- a/services/web/server/tests/unit/with_dbs/01/test_catalog_handlers__services.py +++ b/services/web/server/tests/unit/with_dbs/01/test_catalog_handlers__services.py @@ -2,13 +2,16 @@ # pylint:disable=unused-argument # pylint:disable=redefined-outer-name +import re import urllib.parse from unittest.mock import MagicMock import pytest from aiohttp import web from aiohttp.test_utils import TestClient -from models_library.api_schemas_catalog.services import ServiceGetV2 +from aioresponses import aioresponses as AioResponsesMock +from faker import Faker +from models_library.api_schemas_catalog.services import LatestServiceGet, ServiceGetV2 from models_library.api_schemas_webserver.catalog import ( CatalogServiceGet, CatalogServiceUpdate, @@ -22,10 +25,15 @@ from pydantic import NonNegativeInt, TypeAdapter from pytest_mock import MockerFixture from pytest_simcore.helpers.assert_checks import assert_status +from pytest_simcore.helpers.faker_factories import random_icon_url from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict from pytest_simcore.helpers.typing_env import EnvVarsDict from pytest_simcore.helpers.webserver_login import UserInfoDict from servicelib.aiohttp import status +from simcore_service_webserver.catalog._controller_rest_schemas import ( + ServiceInputGet, + ServiceOutputGet, +) from simcore_service_webserver.db.models import UserRole @@ -55,12 +63,12 @@ async def _list( assert product_name assert user_id - items = TypeAdapter(list[ServiceGetV2]).validate_python( - ServiceGetV2.model_config["json_schema_extra"]["examples"], + items = TypeAdapter(list[LatestServiceGet]).validate_python( + LatestServiceGet.model_json_schema()["examples"], ) total_count = len(items) - return PageRpc[ServiceGetV2].create( + return PageRpc[LatestServiceGet].create( items[offset : offset + limit], total=total_count, limit=limit, @@ -80,7 +88,7 @@ async def _get( assert user_id got = ServiceGetV2.model_validate( - ServiceGetV2.model_config["json_schema_extra"]["examples"][0] + ServiceGetV2.model_json_schema()["examples"][0] ) got.version = service_version got.key = service_key @@ -101,7 +109,7 @@ async def _update( assert user_id got = ServiceGetV2.model_validate( - ServiceGetV2.model_config["json_schema_extra"]["examples"][0] + ServiceGetV2.model_json_schema()["examples"][0] ) got.version = service_version got.key = service_key @@ -109,17 +117,17 @@ async def _update( return { "list_services_paginated": mocker.patch( - "simcore_service_webserver.catalog._api.catalog_rpc.list_services_paginated", + "simcore_service_webserver.catalog._service.catalog_rpc.list_services_paginated", autospec=True, side_effect=_list, ), "get_service": mocker.patch( - "simcore_service_webserver.catalog._api.catalog_rpc.get_service", + 
"simcore_service_webserver.catalog._service.catalog_rpc.get_service", autospec=True, side_effect=_get, ), "update_service": mocker.patch( - "simcore_service_webserver.catalog._api.catalog_rpc.update_service", + "simcore_service_webserver.catalog._service.catalog_rpc.update_service", autospec=True, side_effect=_update, ), @@ -154,6 +162,196 @@ async def test_list_services_latest( assert mocked_rpc_catalog_service_api["list_services_paginated"].call_count == 1 +@pytest.mark.parametrize( + "user_role", + [UserRole.USER], +) +async def test_list_inputs( + client: TestClient, logged_user: UserInfoDict, aioresponses_mocker: AioResponsesMock +): + + url_pattern = re.compile(r"http://catalog:8000/v0/services/.*") + service_payload = ServiceGetV2.model_json_schema()["examples"][0] + aioresponses_mocker.get( + url_pattern, + status=status.HTTP_200_OK, + payload=service_payload, + ) + + service_key = "simcore/services/comp/itis/sleeper" + service_version = "0.1.0" + assert client.app and client.app.router + url = client.app.router["list_service_inputs"].url_for( + service_key=urllib.parse.quote(service_key, safe=""), + service_version=service_version, + ) + + response = await client.get(f"{url}") + data, _ = await assert_status(response, status.HTTP_200_OK) + TypeAdapter(list[ServiceInputGet]).validate_python(data) + + +@pytest.mark.parametrize( + "user_role", + [UserRole.USER], +) +async def test_list_outputs( + client: TestClient, logged_user: UserInfoDict, aioresponses_mocker: AioResponsesMock +): + + url_pattern = re.compile(r"http://catalog:8000/v0/services/.*") + service_payload = ServiceGetV2.model_json_schema()["examples"][0] + aioresponses_mocker.get( + url_pattern, + status=status.HTTP_200_OK, + payload=service_payload, + ) + + service_key = "simcore/services/comp/itis/sleeper" + service_version = "0.1.0" + assert client.app and client.app.router + url = client.app.router["list_service_outputs"].url_for( + service_key=urllib.parse.quote(service_key, safe=""), + service_version=service_version, + ) + + response = await client.get(f"{url}") + data, _ = await assert_status(response, status.HTTP_200_OK) + TypeAdapter(list[ServiceOutputGet]).validate_python(data) + + +@pytest.mark.parametrize( + "user_role", + [UserRole.USER], +) +async def test_get_outputs( + client: TestClient, logged_user: UserInfoDict, aioresponses_mocker: AioResponsesMock +): + + url_pattern = re.compile(r"http://catalog:8000/v0/services/.*") + service_payload = ServiceGetV2.model_json_schema()["examples"][0] + aioresponses_mocker.get( + url_pattern, + status=status.HTTP_200_OK, + payload=service_payload, + ) + + service_key = "simcore/services/comp/itis/sleeper" + service_version = "0.1.0" + assert client.app and client.app.router + url = client.app.router["get_service_output"].url_for( + service_key=urllib.parse.quote(service_key, safe=""), + service_version=service_version, + output_key=next(iter(service_payload["outputs"].keys())), + ) + + response = await client.get(f"{url}") + data, _ = await assert_status(response, status.HTTP_200_OK) + ServiceOutputGet.model_validate(data) + + +@pytest.mark.parametrize( + "user_role", + [UserRole.USER], +) +async def test_get_inputs( + client: TestClient, logged_user: UserInfoDict, aioresponses_mocker: AioResponsesMock +): + url_pattern = re.compile(r"http://catalog:8000/v0/services/.*") + service_payload = ServiceGetV2.model_json_schema()["examples"][0] + aioresponses_mocker.get( + url_pattern, + status=status.HTTP_200_OK, + payload=service_payload, + ) + + service_key = 
"simcore/services/comp/itis/sleeper" + service_version = "0.1.0" + assert client.app and client.app.router + url = client.app.router["get_service_input"].url_for( + service_key=urllib.parse.quote(service_key, safe=""), + service_version=service_version, + input_key=next(iter(service_payload["inputs"].keys())), + ) + response = await client.get(f"{url}") + data, _ = await assert_status(response, status.HTTP_200_OK) + ServiceInputGet.model_validate(data) + + +@pytest.mark.parametrize( + "user_role", + [UserRole.USER], +) +async def test_get_compatible_inputs_given_source_outputs( + client: TestClient, logged_user: UserInfoDict, aioresponses_mocker: AioResponsesMock +): + url_pattern = re.compile(r"http://catalog:8000/v0/services/.*") + service_payload = ServiceGetV2.model_json_schema()["examples"][0] + for _ in range(2): + aioresponses_mocker.get( + url_pattern, + status=status.HTTP_200_OK, + payload=service_payload, + ) + + service_key = "simcore/services/comp/itis/sleeper" + service_version = "0.1.0" + assert client.app and client.app.router + url = ( + client.app.router["get_compatible_inputs_given_source_output"] + .url_for( + service_key=urllib.parse.quote(service_key, safe=""), + service_version=service_version, + ) + .with_query( + { + "fromService": "simcore/services/comp/itis/sleeper", + "fromVersion": "0.1.0", + "fromOutput": "output_1", + } + ) + ) + response = await client.get(f"{url}") + _, _ = await assert_status(response, status.HTTP_200_OK) + + +@pytest.mark.parametrize( + "user_role", + [UserRole.USER], +) +async def test_get_compatible_outputs_given_target_inptuts( + client: TestClient, logged_user: UserInfoDict, aioresponses_mocker: AioResponsesMock +): + url_pattern = re.compile(r"http://catalog:8000/v0/services/.*") + service_payload = ServiceGetV2.model_json_schema()["examples"][0] + for _ in range(2): + aioresponses_mocker.get( + url_pattern, + status=status.HTTP_200_OK, + payload=service_payload, + ) + + service_key = "simcore/services/comp/itis/sleeper" + service_version = "0.1.0" + assert client.app and client.app.router + url = ( + client.app.router["get_compatible_outputs_given_target_input"] + .url_for( + service_key=urllib.parse.quote(service_key, safe=""), + service_version=service_version, + ) + .with_query( + { + "toService": "simcore/services/comp/itis/sleeper", + "toVersion": "0.1.0", + "toInput": "input_1", + } + ) + ) + response = await client.get(f"{url}") + _, _ = await assert_status(response, status.HTTP_200_OK) + + @pytest.mark.parametrize( "user_role", [UserRole.USER], @@ -162,6 +360,7 @@ async def test_get_and_patch_service( client: TestClient, logged_user: UserInfoDict, mocked_rpc_catalog_service_api: dict[str, MagicMock], + faker: Faker, ): assert client.app assert client.app.router @@ -190,8 +389,8 @@ async def test_get_and_patch_service( # PATCH update = CatalogServiceUpdate( name="foo", - thumbnail=None, description="bar", + icon=random_icon_url(faker), classifiers=None, versionDisplay="Some nice name", descriptionUi=True, @@ -209,6 +408,7 @@ async def test_get_and_patch_service( assert model.key == service_key assert model.version == service_version assert model.name == update.name + assert model.icon == update.icon assert model.description == update.description assert model.description_ui == update.description_ui assert model.version_display == update.version_display diff --git a/services/web/server/tests/unit/with_dbs/01/test_catalog_rest_client.py b/services/web/server/tests/unit/with_dbs/01/test_catalog_rest_client.py new file mode 100644 
index 00000000000..452ecfd76ec
--- /dev/null
+++ b/services/web/server/tests/unit/with_dbs/01/test_catalog_rest_client.py
@@ -0,0 +1,112 @@
+# pylint:disable=unused-argument
+import re
+
+import pytest
+from aiohttp.test_utils import TestClient
+from aioresponses import aioresponses as AioResponsesMock
+from common_library.users_enums import UserRole
+from models_library.api_schemas_catalog.service_access_rights import (
+    ServiceAccessRightsGet,
+)
+from pytest_simcore.helpers.webserver_login import UserInfoDict
+from servicelib.aiohttp import status
+from simcore_service_webserver.catalog._controller_rest_exceptions import (
+    DefaultPricingUnitForServiceNotFoundError,
+)
+from simcore_service_webserver.catalog.catalog_service import (
+    get_service_access_rights,
+    get_services_for_user_in_product,
+    is_catalog_service_responsive,
+)
+
+
+@pytest.mark.parametrize(
+    "user_role",
+    [UserRole.USER],
+)
+@pytest.mark.parametrize(
+    "backend_status_code", [status.HTTP_200_OK, status.HTTP_500_INTERNAL_SERVER_ERROR]
+)
+async def test_server_responsive(
+    client: TestClient,
+    logged_user: UserInfoDict,
+    aioresponses_mocker: AioResponsesMock,
+    backend_status_code: int,
+):
+    aioresponses_mocker.get("http://catalog:8000", status=backend_status_code)
+
+    assert client.app
+    is_responsive = await is_catalog_service_responsive(app=client.app)
+    if backend_status_code == status.HTTP_200_OK:
+        assert is_responsive
+    else:
+        assert not is_responsive
+
+
+@pytest.mark.parametrize(
+    "user_role",
+    [UserRole.USER],
+)
+@pytest.mark.parametrize(
+    "backend_status_code", [status.HTTP_200_OK, status.HTTP_404_NOT_FOUND]
+)
+async def test_get_services_for_user_in_product(
+    client: TestClient,
+    logged_user: UserInfoDict,
+    aioresponses_mocker: AioResponsesMock,
+    backend_status_code: int,
+):
+    url_pattern = re.compile(r"http://catalog:8000/.*")
+    aioresponses_mocker.get(
+        url_pattern,
+        status=backend_status_code,
+    )
+    assert client.app
+    _ = await get_services_for_user_in_product(
+        app=client.app,
+        user_id=logged_user["id"],
+        product_name="osparc",
+        only_key_versions=False,
+    )
+
+
+@pytest.mark.parametrize(
+    "user_role",
+    [UserRole.USER],
+)
+async def test_get_service_access_rights(
+    client: TestClient,
+    logged_user: UserInfoDict,
+    aioresponses_mocker: AioResponsesMock,
+):
+    url_pattern = re.compile(r"http://catalog:8000/.*")
+    example = ServiceAccessRightsGet(
+        service_key="simcore/services/comp/itis/sleeper",
+        service_version="2.1.4",
+        gids_with_access_rights={
+            1: {"execute_access": True},
+            5: {"execute_access": True},
+        },
+    )
+    aioresponses_mocker.get(
+        url_pattern,
+        status=status.HTTP_200_OK,
+        payload=example.model_dump(),
+    )
+    assert client.app
+    access_rights = await get_service_access_rights(
+        app=client.app,
+        user_id=logged_user["id"],
+        service_key="simcore/services/comp/itis/sleeper",
+        service_version="2.1.4",
+        product_name="osparc",
+    )
+    assert isinstance(access_rights, ServiceAccessRightsGet)
+
+
+async def test_catalog_exceptions():
+
+    error = DefaultPricingUnitForServiceNotFoundError(
+        service_key="key", service_version="version"
+    )
+    assert isinstance(error.debug_message(), str)
diff --git a/services/web/server/tests/unit/with_dbs/01/test_director_v2.py b/services/web/server/tests/unit/with_dbs/01/test_director_v2.py
index f18bc9e1754..9c5914e3112 100644
--- a/services/web/server/tests/unit/with_dbs/01/test_director_v2.py
+++ b/services/web/server/tests/unit/with_dbs/01/test_director_v2.py
@@ -10,7 +10,7 @@ from
models_library.projects_pipeline import ComputationTask from models_library.projects_state import RunningState from models_library.users import UserID -from simcore_service_webserver.director_v2 import api +from simcore_service_webserver.director_v2 import director_v2_service @pytest.fixture() @@ -37,7 +37,7 @@ async def test_create_pipeline( project_id: ProjectID, osparc_product_name: str, ): - task_out = await api.create_or_update_pipeline( + task_out = await director_v2_service.create_or_update_pipeline( client.app, user_id, project_id, osparc_product_name ) assert task_out @@ -51,7 +51,9 @@ async def test_get_computation_task( user_id: UserID, project_id: ProjectID, ): - task_out = await api.get_computation_task(client.app, user_id, project_id) + task_out = await director_v2_service.get_computation_task( + client.app, user_id, project_id + ) assert task_out assert isinstance(task_out, ComputationTask) assert task_out.state == RunningState.NOT_STARTED @@ -60,4 +62,4 @@ async def test_get_computation_task( async def test_delete_pipeline( mocked_director_v2, client, user_id: UserID, project_id: ProjectID ): - await api.delete_pipeline(client.app, user_id, project_id) + await director_v2_service.delete_pipeline(client.app, user_id, project_id) diff --git a/services/web/server/tests/unit/with_dbs/01/test_director_v2_handlers.py b/services/web/server/tests/unit/with_dbs/01/test_director_v2_handlers.py index 613e32cee19..cb40a779378 100644 --- a/services/web/server/tests/unit/with_dbs/01/test_director_v2_handlers.py +++ b/services/web/server/tests/unit/with_dbs/01/test_director_v2_handlers.py @@ -58,12 +58,13 @@ async def test_start_partial_computation( project_id: ProjectID, user_role: UserRole, expected: ExpectedResponse, + faker: Faker, ): assert client.app url = client.app.router["start_computation"].url_for(project_id=f"{project_id}") rsp = await client.post( - f"{url}", json={"subgraph": ["node_id1", "node_id2", "node_id498"]} + f"{url}", json={"subgraph": [faker.uuid4(), faker.uuid4(), faker.uuid4()]} ) data, error = await assert_status( rsp, diff --git a/services/web/server/tests/unit/with_dbs/01/test_groups_classifiers.py b/services/web/server/tests/unit/with_dbs/01/test_groups_classifiers.py index 98fa573cd08..c77f1335015 100644 --- a/services/web/server/tests/unit/with_dbs/01/test_groups_classifiers.py +++ b/services/web/server/tests/unit/with_dbs/01/test_groups_classifiers.py @@ -7,7 +7,7 @@ import pytest import sqlalchemy as sa from servicelib.common_aiopg_utils import DataSourceName, create_pg_engine -from simcore_service_webserver._constants import APP_AIOPG_ENGINE_KEY +from simcore_service_webserver.constants import APP_AIOPG_ENGINE_KEY from simcore_service_webserver.groups._classifiers_service import ( GroupClassifierRepository, ) diff --git a/services/web/server/tests/unit/with_dbs/01/test_long_running_tasks.py b/services/web/server/tests/unit/with_dbs/01/test_long_running_tasks.py index 4e3f10a9c4d..c6f58f29ee1 100644 --- a/services/web/server/tests/unit/with_dbs/01/test_long_running_tasks.py +++ b/services/web/server/tests/unit/with_dbs/01/test_long_running_tasks.py @@ -2,11 +2,15 @@ # pylint: disable=too-many-arguments # pylint: disable=unused-argument # pylint: disable=unused-variable +# pylint: disable=no-self-use +# pylint: disable=no-self-argument from typing import Any import pytest from aiohttp.test_utils import TestClient +from faker import Faker +from pytest_mock import MockerFixture from pytest_simcore.helpers.assert_checks import assert_status from 
pytest_simcore.helpers.webserver_parametrizations import ( ExpectedResponse, @@ -63,3 +67,23 @@ async def test_listing_tasks_empty( assert not data return assert data == [] + + +@pytest.mark.parametrize("user_role", [UserRole.GUEST, UserRole.TESTER, UserRole.USER]) +async def test_listing_tasks_with_list_inprocess_tasks_error( + client: TestClient, logged_user, faker: Faker, mocker: MockerFixture +): + assert client.app + + class _DummyTaskManager: + def list_tasks(self, *args, **kwargs): + raise Exception() # pylint: disable=broad-exception-raised + + mocker.patch( + "servicelib.aiohttp.long_running_tasks._routes.get_tasks_manager", + return_value=_DummyTaskManager(), + ) + + _async_jobs_listing_path = client.app.router["get_async_jobs"].url_for() + resp = await client.request("GET", f"{_async_jobs_listing_path}") + assert resp.status == status.HTTP_500_INTERNAL_SERVER_ERROR diff --git a/services/web/server/tests/unit/with_dbs/01/test_statics.py b/services/web/server/tests/unit/with_dbs/01/test_statics.py index d3ba4448061..dfd32405f23 100644 --- a/services/web/server/tests/unit/with_dbs/01/test_statics.py +++ b/services/web/server/tests/unit/with_dbs/01/test_statics.py @@ -22,7 +22,10 @@ from simcore_service_webserver.statics._constants import ( APP_FRONTEND_CACHED_STATICS_JSON_KEY, ) -from simcore_service_webserver.statics._events import create_and_cache_statics_json +from simcore_service_webserver.statics._events import ( + _get_release_notes_vtag, + create_and_cache_statics_json, +) from simcore_service_webserver.statics.plugin import setup_statics @@ -154,3 +157,15 @@ async def test_create_and_cache_statics_json_vendor_vcs_overwrite( product_dict = json.loads(product_data) assert product_dict.get("vcsReleaseTag") == vcs_release_tag assert product_dict.get("vcsReleaseUrl") == expected_vcs_url + + +@pytest.mark.parametrize( + "vtag, expected_vtag", + [ + ("v1.11.34", "v1.11.0"), + ("v1.11.8", "v1.11.0"), + ("v1.11.0", "v1.11.0"), + ], +) +def test__get_release_notes_vtag(vtag: str, expected_vtag: str): + assert _get_release_notes_vtag(vtag) == expected_vtag diff --git a/services/web/server/tests/unit/with_dbs/01/test_storage.py b/services/web/server/tests/unit/with_dbs/01/test_storage.py deleted file mode 100644 index e03c838cd0a..00000000000 --- a/services/web/server/tests/unit/with_dbs/01/test_storage.py +++ /dev/null @@ -1,317 +0,0 @@ -# pylint: disable=redefined-outer-name -# pylint: disable=unused-argument -# pylint: disable=unused-variable -# pylint: disable=too-many-arguments - -import asyncio -from collections.abc import Awaitable, Callable -from urllib.parse import quote - -import pytest -from aiohttp import web -from aiohttp.test_utils import TestClient, TestServer -from faker import Faker -from pytest_simcore.helpers.assert_checks import assert_status -from pytest_simcore.helpers.typing_env import EnvVarsDict -from servicelib.aiohttp import status -from servicelib.aiohttp.application import create_safe_application -from simcore_postgres_database.models.users import UserRole - -API_VERSION = "v0" - - -# TODO: create a fake storage service here -@pytest.fixture() -def storage_server( - event_loop: asyncio.AbstractEventLoop, - aiohttp_server: Callable[..., Awaitable[TestServer]], - app_environment: EnvVarsDict, - storage_test_server_port: int, -) -> TestServer: - async def _get_locs(request: web.Request): - assert not request.can_read_body - - query = request.query - assert query - assert "user_id" in query - - assert query["user_id"], "Expected user id" - return 
web.json_response( - { - "data": [ - {"user_id": int(query["user_id"])}, - ] - } - ) - - async def _post_sync_meta_data(request: web.Request): - assert not request.can_read_body - - query = request.query - assert query - assert "dry_run" in query - - assert query["dry_run"] == "true" - return web.json_response( - { - "data": {"removed": []}, - } - ) - - async def _get_filemeta(request: web.Request): - assert not request.can_read_body - - query = request.query - assert query - assert "user_id" in query - - assert query["user_id"], "Expected user id" - - return web.json_response( - { - "data": [ - {"filemeta": 42}, - ] - } - ) - - async def _get_filtered_list(request: web.Request): - assert not request.can_read_body - - query = request.query - assert query - assert "user_id" in query - assert query["user_id"], "Expected user id" - assert query["uuid_filter"], "expected a filter" - - return web.json_response( - { - "data": [ - {"uuid_filter": query["uuid_filter"]}, - ] - } - ) - - async def _get_datasets(request: web.Request): - assert not request.can_read_body - - query = request.query - assert query - assert "user_id" in query - - assert query["user_id"], "Expected user id" - - return web.json_response( - { - "data": [ - {"dataset_id": "asdf", "display_name": "bbb"}, - ] - } - ) - - async def _get_datasets_meta(request: web.Request): - assert not request.can_read_body - - query = request.query - assert query - assert "user_id" in query - - assert query["user_id"], "Expected user id" - - return web.json_response( - { - "data": [ - {"dataset_id": "asdf", "display_name": "bbb"}, - ] - } - ) - - storage_api_version = app_environment["STORAGE_VTAG"] - storage_port = int(app_environment["STORAGE_PORT"]) - assert storage_port == storage_test_server_port - - assert ( - storage_api_version != API_VERSION - ), "backend service w/ different version as webserver entrypoint" - - app = create_safe_application() - app.router.add_get(f"/{storage_api_version}/locations", _get_locs) - app.router.add_post( - f"/{storage_api_version}/locations/0:sync", _post_sync_meta_data - ) - app.router.add_get( - f"/{storage_api_version}/locations/0/files/{{file_id}}/metadata", _get_filemeta - ) - app.router.add_get( - f"/{storage_api_version}/locations/0/files/metadata", _get_filtered_list - ) - app.router.add_get(f"/{storage_api_version}/locations/0/datasets", _get_datasets) - app.router.add_get( - f"/{storage_api_version}/locations/0/datasets/{{dataset_id}}/metadata", - _get_datasets_meta, - ) - - return event_loop.run_until_complete(aiohttp_server(app, port=storage_port)) - - -# -------------------------------------------------------------------------- -PREFIX = "/" + API_VERSION + "/storage" - - -@pytest.mark.parametrize( - "user_role,expected", - [ - (UserRole.ANONYMOUS, status.HTTP_401_UNAUTHORIZED), - (UserRole.GUEST, status.HTTP_200_OK), - (UserRole.USER, status.HTTP_200_OK), - (UserRole.TESTER, status.HTTP_200_OK), - ], -) -async def test_get_storage_locations( - client: TestClient, storage_server: TestServer, logged_user, expected -): - url = "/v0/storage/locations" - assert url.startswith(PREFIX) - - resp = await client.get(url, params={"user_id": logged_user["id"]}) - data, error = await assert_status(resp, expected) - - if not error: - assert len(data) == 1 - assert data[0]["user_id"] == logged_user["id"] - - -@pytest.mark.parametrize( - "user_role,expected", - [ - (UserRole.ANONYMOUS, status.HTTP_401_UNAUTHORIZED), - (UserRole.GUEST, status.HTTP_403_FORBIDDEN), - (UserRole.USER, status.HTTP_403_FORBIDDEN), - 
(UserRole.TESTER, status.HTTP_403_FORBIDDEN), - (UserRole.ADMIN, status.HTTP_200_OK), - ], -) -async def test_sync_file_meta_table( - client: TestClient, storage_server: TestServer, logged_user, expected -): - url = "/v0/storage/locations/0:sync" - assert url.startswith(PREFIX) - - resp = await client.post(url, params={"dry_run": "true"}) - data, error = await assert_status(resp, expected) - - if not error: - # the test of the functionality is already done in storage - assert "removed" in data - assert not data["removed"] - - -@pytest.mark.parametrize( - "user_role,expected", - [ - (UserRole.ANONYMOUS, status.HTTP_401_UNAUTHORIZED), - (UserRole.GUEST, status.HTTP_200_OK), - (UserRole.USER, status.HTTP_200_OK), - (UserRole.TESTER, status.HTTP_200_OK), - ], -) -async def test_get_datasets_metadata( - client: TestClient, storage_server: TestServer, logged_user, expected -): - url = "/v0/storage/locations/0/datasets" - assert url.startswith(PREFIX) - - _url = client.app.router["get_datasets_metadata"].url_for(location_id="0") - - assert url == str(_url) - - resp = await client.get(url, params={"user_id": logged_user["id"]}) - data, error = await assert_status(resp, expected) - - if not error: - assert len(data) == 1 - assert data[0]["dataset_id"] == "asdf" - - -@pytest.mark.parametrize( - "user_role,expected", - [ - (UserRole.ANONYMOUS, status.HTTP_401_UNAUTHORIZED), - (UserRole.GUEST, status.HTTP_200_OK), - (UserRole.USER, status.HTTP_200_OK), - (UserRole.TESTER, status.HTTP_200_OK), - ], -) -async def test_get_files_metadata_dataset( - client: TestClient, storage_server: TestServer, logged_user, expected -): - url = "/v0/storage/locations/0/datasets/N:asdfsdf/metadata" - assert url.startswith(PREFIX) - - _url = client.app.router["get_files_metadata_dataset"].url_for( - location_id="0", dataset_id="N:asdfsdf" - ) - - assert url == str(_url) - - resp = await client.get(url, params={"user_id": logged_user["id"]}) - data, error = await assert_status(resp, expected) - - if not error: - assert len(data) == 1 - assert data[0]["dataset_id"] == "asdf" - - -@pytest.mark.parametrize( - "user_role,expected", - [ - (UserRole.ANONYMOUS, status.HTTP_401_UNAUTHORIZED), - (UserRole.GUEST, status.HTTP_200_OK), - (UserRole.USER, status.HTTP_200_OK), - (UserRole.TESTER, status.HTTP_200_OK), - ], -) -async def test_storage_file_meta( - client: TestClient, storage_server: TestServer, logged_user, expected, faker: Faker -): - # tests redirect of path with quotes in path - file_id = f"{faker.uuid4()}/{faker.uuid4()}/a/b/c/d/e/dat" - quoted_file_id = quote(file_id, safe="") - url = f"/v0/storage/locations/0/files/{quoted_file_id}/metadata" - - assert url.startswith(PREFIX) - - resp = await client.get(url, params={"user_id": logged_user["id"]}) - data, error = await assert_status(resp, expected) - - if not error: - assert len(data) == 1 - assert data[0]["filemeta"] == 42 - - -@pytest.mark.parametrize( - "user_role,expected", - [ - (UserRole.ANONYMOUS, status.HTTP_401_UNAUTHORIZED), - (UserRole.GUEST, status.HTTP_200_OK), - (UserRole.USER, status.HTTP_200_OK), - (UserRole.TESTER, status.HTTP_200_OK), - ], -) -async def test_storage_list_filter( - client: TestClient, storage_server: TestServer, logged_user, expected -): - # tests composition of 2 queries - file_id = "a/b/c/d/e/dat" - url = "/v0/storage/locations/0/files/metadata?uuid_filter={}".format( - quote(file_id, safe="") - ) - - assert url.startswith(PREFIX) - - resp = await client.get(url, params={"user_id": logged_user["id"]}) - data, error = await 
assert_status(resp, expected) - - if not error: - assert len(data) == 1 - assert data[0]["uuid_filter"] == file_id diff --git a/services/web/server/tests/unit/with_dbs/02/conftest.py b/services/web/server/tests/unit/with_dbs/02/conftest.py index 25be7db87c8..cf8cbb179de 100644 --- a/services/web/server/tests/unit/with_dbs/02/conftest.py +++ b/services/web/server/tests/unit/with_dbs/02/conftest.py @@ -92,12 +92,12 @@ def mock_catalog_api( ) -> dict[str, mock.Mock]: return { "get_service_resources": mocker.patch( - "simcore_service_webserver.projects.projects_service.catalog_client.get_service_resources", + "simcore_service_webserver.projects._projects_service.catalog_service.get_service_resources", return_value=mock_service_resources, autospec=True, ), "get_service": mocker.patch( - "simcore_service_webserver.projects.projects_service.catalog_client.get_service", + "simcore_service_webserver.projects._projects_service.catalog_service.get_service", return_value=mock_service, autospec=True, ), @@ -107,8 +107,8 @@ def mock_catalog_api( @pytest.fixture async def user_project( client: TestClient, - fake_project, - logged_user, + fake_project: ProjectDict, + logged_user: UserInfoDict, tests_data_dir: Path, osparc_product_name: str, ) -> AsyncIterator[ProjectDict]: @@ -223,7 +223,7 @@ async def _creator(**prj_kwargs) -> ProjectDict: @pytest.fixture def fake_services( - create_dynamic_service_mock: Callable[..., Awaitable[DynamicServiceGet]] + create_dynamic_service_mock: Callable[..., Awaitable[DynamicServiceGet]], ) -> Callable[..., Awaitable[list[DynamicServiceGet]]]: async def create_fakes(number_services: int) -> list[DynamicServiceGet]: return [await create_dynamic_service_mock() for _ in range(number_services)] @@ -374,7 +374,7 @@ def mock_get_total_project_dynamic_nodes_creation_interval( ) -> None: _VERY_LONG_LOCK_TIMEOUT_S: Final[float] = 300 mocker.patch( - "simcore_service_webserver.projects.projects_service._nodes_api" + "simcore_service_webserver.projects._projects_service._nodes_service" ".get_total_project_dynamic_nodes_creation_interval", return_value=_VERY_LONG_LOCK_TIMEOUT_S, ) diff --git a/services/web/server/tests/unit/with_dbs/02/test_announcements.py b/services/web/server/tests/unit/with_dbs/02/test_announcements.py index 19ca7413827..11005e824e1 100644 --- a/services/web/server/tests/unit/with_dbs/02/test_announcements.py +++ b/services/web/server/tests/unit/with_dbs/02/test_announcements.py @@ -20,7 +20,10 @@ from pytest_simcore.helpers.assert_checks import assert_status from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict from pytest_simcore.helpers.typing_env import EnvVarsDict -from pytest_simcore.pydantic_models import iter_model_examples_in_module +from pytest_simcore.pydantic_models import ( + assert_validation_model, + iter_model_examples_in_module, +) from servicelib.aiohttp import status from settings_library.redis import RedisDatabase, RedisSettings from simcore_service_webserver.announcements._redis import ( @@ -185,9 +188,9 @@ async def test_list_announcements_filtered( def test_model_examples( model_cls: type[BaseModel], example_name: int, example_data: Any ): - assert model_cls.model_validate( - example_data - ), f"Failed {example_name} : {json.dumps(example_data)}" + assert_validation_model( + model_cls, example_name=example_name, example_data=example_data + ) def test_invalid_announcement(faker: Faker): diff --git a/services/web/server/tests/unit/with_dbs/02/test_projects__ports_api.py 
index 0ff4d0f28f8..ec4eabc9665 100644
--- a/services/web/server/tests/unit/with_dbs/02/test_projects__ports_api.py
+++ b/services/web/server/tests/unit/with_dbs/02/test_projects__ports_api.py
@@ -9,7 +9,7 @@
 import pytest
 from models_library.projects_nodes import Node, NodeID
 from models_library.utils.json_schema import jsonschema_validate_schema
-from simcore_service_webserver.projects._ports_api import (
+from simcore_service_webserver.projects._ports_service import (
     InvalidInputValue,
     _get_outputs_in_workbench,
     get_project_inputs,
diff --git a/services/web/server/tests/unit/with_dbs/02/test_projects_comments_handlers.py b/services/web/server/tests/unit/with_dbs/02/test_projects_comments_handlers.py
index 604ee40308c..9a187a1d081 100644
--- a/services/web/server/tests/unit/with_dbs/02/test_projects_comments_handlers.py
+++ b/services/web/server/tests/unit/with_dbs/02/test_projects_comments_handlers.py
@@ -16,7 +16,9 @@
 from servicelib.aiohttp import status
 from simcore_service_webserver._meta import api_version_prefix
 from simcore_service_webserver.db.models import UserRole
-from simcore_service_webserver.projects._groups_db import update_or_insert_project_group
+from simcore_service_webserver.projects._groups_repository import (
+    update_or_insert_project_group,
+)
 from simcore_service_webserver.projects.models import ProjectDict
 
 API_PREFIX = "/" + api_version_prefix
diff --git a/services/web/server/tests/unit/with_dbs/02/test_projects_crud_handlers.py b/services/web/server/tests/unit/with_dbs/02/test_projects_crud_handlers.py
index 5345d7e41b8..25d48013e13 100644
--- a/services/web/server/tests/unit/with_dbs/02/test_projects_crud_handlers.py
+++ b/services/web/server/tests/unit/with_dbs/02/test_projects_crud_handlers.py
@@ -39,9 +39,11 @@
 from simcore_service_webserver.groups._groups_service import get_product_group_for_user
 from simcore_service_webserver.groups.api import auto_add_user_to_product_group
 from simcore_service_webserver.groups.exceptions import GroupNotFoundError
-from simcore_service_webserver.products.api import get_product
-from simcore_service_webserver.projects._permalink_api import ProjectPermalink
+from simcore_service_webserver.products.products_service import get_product
 from simcore_service_webserver.projects.models import ProjectDict
+from simcore_service_webserver.projects.projects_permalink_service import (
+    ProjectPermalink,
+)
 from simcore_service_webserver.utils import to_datetime
 from yarl import URL
 
@@ -422,15 +424,68 @@ async def test_new_project(
     logged_user: UserInfoDict,
     primary_group,
     expected: ExpectedResponse,
+    request_create_project: Callable[..., Awaitable[ProjectDict]],
     storage_subsystem_mock,
     project_db_cleaner,
-    request_create_project: Callable[..., Awaitable[ProjectDict]],
 ):
     await request_create_project(
         client, expected.accepted, expected.created, logged_user, primary_group
     )
 
 
+@pytest.mark.parametrize(
+    "user_role",
+    [UserRole.USER],
+)
+async def test_create_get_and_patch_project_ui_field(
+    mock_dynamic_scheduler: None,
+    storage_subsystem_mock,
+    client: TestClient,
+    logged_user: UserInfoDict,
+    primary_group: dict[str, str],
+    request_create_project: Callable[..., Awaitable[ProjectDict]],
+    catalog_subsystem_mock: Callable[[list[ProjectDict]], None],
+    project_db_cleaner,
+):
+    assert client.app
+
+    gid = logged_user["primary_gid"]
+    assert primary_group["gid"] == gid
+
+    # Step 1: Create project (long running task)
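+    # NOTE: creation runs as a long-running task: the POST is first
+    # accepted (202) and the task result carries the created project (201)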
+    new_project = await request_create_project(
+        client,
+        status.HTTP_202_ACCEPTED,
+        status.HTTP_201_CREATED,
+        logged_user,
+        primary_group,
+    )
+    project_id = new_project["uuid"]
+
+    catalog_subsystem_mock([new_project])
+
+    # Step 2: Get the project and check the ui.icon
+    url = client.app.router["get_project"].url_for(project_id=project_id)
+    resp = await client.get(f"{url}")
+    got_project, _ = await assert_status(resp, status.HTTP_200_OK)
+    assert got_project["ui"] == {}
+
+    # Step 3: Patch the project to set ui.icon
+    patch_data = {"ui": {"icon": "http://example.com/icon.png"}}
+    url = client.app.router["patch_project"].url_for(project_id=project_id)
+    resp = await client.patch(f"{url}", json=patch_data)
+    await assert_status(resp, status.HTTP_204_NO_CONTENT)
+
+    # Step 4: Get the project again and check the ui.icon is now set
+    resp = await client.get(f"{url}")
+    got_project, _ = await assert_status(resp, status.HTTP_200_OK)
+    assert got_project["ui"]["icon"] == "http://example.com/icon.png"
+
+    # Step 5: Delete project
+    resp = await client.delete(f"{url}")
+    await assert_status(resp, status.HTTP_204_NO_CONTENT)
+
+
 @pytest.mark.parametrize(*standard_user_role_response())
 async def test_new_project_from_template(
     mock_dynamic_scheduler: None,
diff --git a/services/web/server/tests/unit/with_dbs/02/test_projects_crud_handlers__clone_in_workspace_and_folder.py b/services/web/server/tests/unit/with_dbs/02/test_projects_crud_handlers__clone_in_workspace_and_folder.py
index 15542cff620..fa7ed48abeb 100644
--- a/services/web/server/tests/unit/with_dbs/02/test_projects_crud_handlers__clone_in_workspace_and_folder.py
+++ b/services/web/server/tests/unit/with_dbs/02/test_projects_crud_handlers__clone_in_workspace_and_folder.py
@@ -21,7 +21,7 @@
 from simcore_postgres_database.models.workspaces import workspaces
 from simcore_service_webserver.db.models import UserRole
 from simcore_service_webserver.folders._folders_service import create_folder
-from simcore_service_webserver.projects._folders_api import move_project_into_folder
+from simcore_service_webserver.projects._folders_service import move_project_into_folder
 from simcore_service_webserver.projects.models import ProjectDict
 from simcore_service_webserver.workspaces._workspaces_service import create_workspace
 from yarl import URL
diff --git a/services/web/server/tests/unit/with_dbs/02/test_projects_crud_handlers__list_with_query_params.py b/services/web/server/tests/unit/with_dbs/02/test_projects_crud_handlers__list_with_query_params.py
index 8324aff33a2..b81d8b99dd3 100644
--- a/services/web/server/tests/unit/with_dbs/02/test_projects_crud_handlers__list_with_query_params.py
+++ b/services/web/server/tests/unit/with_dbs/02/test_projects_crud_handlers__list_with_query_params.py
@@ -42,7 +42,7 @@ def standard_user_role() -> tuple[str, tuple[UserRole, ExpectedResponse]]:
 @pytest.fixture
 def mock_catalog_api_get_services_for_user_in_product(mocker: MockerFixture):
     mocker.patch(
-        "simcore_service_webserver.projects._crud_api_read.get_services_for_user_in_product",
+        "simcore_service_webserver.projects._crud_api_read.catalog_service.get_services_for_user_in_product",
         spec=True,
         return_value=[],
     )
diff --git a/services/web/server/tests/unit/with_dbs/02/test_projects_crud_handlers__patch.py b/services/web/server/tests/unit/with_dbs/02/test_projects_crud_handlers__patch.py
index 04b4db5b7e8..dbb33ea5ecd 100644
--- a/services/web/server/tests/unit/with_dbs/02/test_projects_crud_handlers__patch.py
+++ b/services/web/server/tests/unit/with_dbs/02/test_projects_crud_handlers__patch.py
@@ -25,7 +25,7 @@
 @pytest.fixture
 def mock_catalog_api_get_services_for_user_in_product(mocker: MockerFixture):
     mocker.patch(
-        "simcore_service_webserver.projects._crud_handlers.get_services_for_user_in_product",
+        "simcore_service_webserver.projects._controller.projects_rest.catalog_service.get_services_for_user_in_product",
         spec=True,
         return_value=[],
     )
@@ -34,7 +34,7 @@ def mock_catalog_api_get_services_for_user_in_product(mocker: MockerFixture):
 @pytest.fixture
 def mock_project_uses_available_services(mocker: MockerFixture):
     mocker.patch(
-        "simcore_service_webserver.projects._crud_handlers.project_uses_available_services",
+        "simcore_service_webserver.projects._controller.projects_rest.project_uses_available_services",
         spec=True,
         return_value=True,
     )
diff --git a/services/web/server/tests/unit/with_dbs/02/test_projects_groups_handlers.py b/services/web/server/tests/unit/with_dbs/02/test_projects_groups_handlers.py
index 76569151068..5f89e4aa032 100644
--- a/services/web/server/tests/unit/with_dbs/02/test_projects_groups_handlers.py
+++ b/services/web/server/tests/unit/with_dbs/02/test_projects_groups_handlers.py
@@ -20,7 +20,7 @@
 @pytest.fixture
 def mock_catalog_api_get_services_for_user_in_product(mocker: MockerFixture):
     mocker.patch(
-        "simcore_service_webserver.projects._crud_handlers.get_services_for_user_in_product",
+        "simcore_service_webserver.projects._controller.projects_rest.catalog_service.get_services_for_user_in_product",
         spec=True,
         return_value=[],
     )
@@ -29,21 +29,12 @@ def mock_catalog_api_get_services_for_user_in_product(mocker: MockerFixture):
 @pytest.fixture
 def mock_project_uses_available_services(mocker: MockerFixture):
     mocker.patch(
-        "simcore_service_webserver.projects._crud_handlers.project_uses_available_services",
+        "simcore_service_webserver.projects._controller.projects_rest.project_uses_available_services",
         spec=True,
         return_value=True,
     )
 
 
-@pytest.fixture
-def mock_catalog_api_get_services_for_user_in_product_2(mocker: MockerFixture):
-    mocker.patch(
-        "simcore_service_webserver.projects._crud_api_read.get_services_for_user_in_product",
-        spec=True,
-        return_value=[],
-    )
-
-
 @pytest.mark.acceptance_test(
     "Driving test for https://github.com/ITISFoundation/osparc-issues/issues/1547"
 )
@@ -55,7 +46,6 @@ async def test_projects_groups_full_workflow(
     expected: HTTPStatus,
     mock_catalog_api_get_services_for_user_in_product,
     mock_project_uses_available_services,
-    mock_catalog_api_get_services_for_user_in_product_2,
 ):
     assert client.app
     # check the default project permissions
diff --git a/services/web/server/tests/unit/with_dbs/02/test_projects_metadata_handlers.py b/services/web/server/tests/unit/with_dbs/02/test_projects_metadata_handlers.py
index ce87970f75c..dae450a88fe 100644
--- a/services/web/server/tests/unit/with_dbs/02/test_projects_metadata_handlers.py
+++ b/services/web/server/tests/unit/with_dbs/02/test_projects_metadata_handlers.py
@@ -63,7 +63,7 @@ async def test_custom_metadata_handlers(
     response = await client.get(f"{url}")
     _, error = await assert_status(response, expected_status_code=expected.not_found)
 
-    error_message = error["errors"][0]["message"]
+    error_message = error["message"]
     assert invalid_project_id in error_message
     assert "project" in error_message.lower()
diff --git a/services/web/server/tests/unit/with_dbs/02/test_projects_nodes_handler.py b/services/web/server/tests/unit/with_dbs/02/test_projects_nodes_handler.py
index c5fa6330978..4db7e6f8a22 100644
--- a/services/web/server/tests/unit/with_dbs/02/test_projects_nodes_handler.py
+++ b/services/web/server/tests/unit/with_dbs/02/test_projects_nodes_handler.py
@@ -24,7 +24,10 @@
 from models_library.api_schemas_dynamic_scheduler.dynamic_services import (
     DynamicServiceStop,
 )
-from models_library.api_schemas_storage import FileMetaDataGet, PresignedLink
+from models_library.api_schemas_storage.storage_schemas import (
+    FileMetaDataGet,
+    PresignedLink,
+)
 from models_library.generics import Envelope
 from models_library.projects_nodes_io import NodeID
 from models_library.services_resources import (
@@ -45,7 +48,9 @@
 from servicelib.common_headers import UNDEFINED_DEFAULT_SIMCORE_USER_AGENT_VALUE
 from simcore_postgres_database.models.projects import projects as projects_db_model
 from simcore_service_webserver.db.models import UserRole
-from simcore_service_webserver.projects._nodes_handlers import _ProjectNodePreview
+from simcore_service_webserver.projects._controller.nodes_rest import (
+    _ProjectNodePreview,
+)
 from simcore_service_webserver.projects.models import ProjectDict
diff --git a/services/web/server/tests/unit/with_dbs/02/test_projects_nodes_handlers__patch.py b/services/web/server/tests/unit/with_dbs/02/test_projects_nodes_handlers__patch.py
index 1f6f18cc00a..3bba1eaf118 100644
--- a/services/web/server/tests/unit/with_dbs/02/test_projects_nodes_handlers__patch.py
+++ b/services/web/server/tests/unit/with_dbs/02/test_projects_nodes_handlers__patch.py
@@ -29,7 +29,7 @@
 @pytest.fixture
 def mock_catalog_api_get_services_for_user_in_product(mocker: MockerFixture):
     mocker.patch(
-        "simcore_service_webserver.projects._crud_handlers.get_services_for_user_in_product",
+        "simcore_service_webserver.projects._controller.projects_rest.catalog_service.get_services_for_user_in_product",
        spec=True,
         return_value=[],
     )
@@ -38,7 +38,7 @@ def mock_catalog_api_get_services_for_user_in_product(mocker: MockerFixture):
 @pytest.fixture
 def mock_project_uses_available_services(mocker: MockerFixture):
     mocker.patch(
-        "simcore_service_webserver.projects._crud_handlers.project_uses_available_services",
+        "simcore_service_webserver.projects._controller.projects_rest.project_uses_available_services",
         spec=True,
         return_value=True,
     )
@@ -47,7 +47,7 @@ def mock_project_uses_available_services(mocker: MockerFixture):
 @pytest.fixture
 def mock_catalog_rpc_check_for_service(mocker: MockerFixture):
     mocker.patch(
-        "simcore_service_webserver.projects.projects_service.catalog_rpc.check_for_service",
+        "simcore_service_webserver.projects._projects_service.catalog_rpc.check_for_service",
         spec=True,
         return_value=True,
     )
@@ -56,7 +56,7 @@ def mock_catalog_rpc_check_for_service(mocker: MockerFixture):
 @pytest.fixture
 def mocked_notify_project_node_update(mocker: MockerFixture):
     return mocker.patch(
-        "simcore_service_webserver.projects.projects_service.notify_project_node_update",
+        "simcore_service_webserver.projects._projects_service.notify_project_node_update",
     )
@@ -356,21 +356,21 @@ async def test_patch_project_node_service_key_with_error(
 ):
     node_id = next(iter(user_project["workbench"]))
     assert client.app
-    base_url = client.app.router["patch_project_node"].url_for(
+    url = client.app.router["patch_project_node"].url_for(
         project_id=user_project["uuid"], node_id=node_id
     )
     _patch_version = {"version": "2.0.9"}
     with mocker.patch(
-        "simcore_service_webserver.projects.projects_service.catalog_rpc.check_for_service",
+        "simcore_service_webserver.projects._projects_service.catalog_rpc.check_for_service",
         side_effect=CatalogForbiddenError(name="test"),
     ):
-        resp = await client.patch(f"{base_url}", json=_patch_version)
+        resp = await client.patch(f"{url}", json=_patch_version)
         assert resp.status == status.HTTP_403_FORBIDDEN
 
     with mocker.patch(
-        "simcore_service_webserver.projects.projects_service.catalog_rpc.check_for_service",
+        "simcore_service_webserver.projects._projects_service.catalog_rpc.check_for_service",
         side_effect=CatalogItemNotFoundError(name="test"),
     ):
-        resp = await client.patch(f"{base_url}", json=_patch_version)
+        resp = await client.patch(f"{url}", json=_patch_version)
         assert resp.status == status.HTTP_404_NOT_FOUND
diff --git a/services/web/server/tests/unit/with_dbs/02/test_projects_nodes_handlers__services_access.py b/services/web/server/tests/unit/with_dbs/02/test_projects_nodes_handlers__services_access.py
index 3e8a4d9e2b4..238cba62055 100644
--- a/services/web/server/tests/unit/with_dbs/02/test_projects_nodes_handlers__services_access.py
+++ b/services/web/server/tests/unit/with_dbs/02/test_projects_nodes_handlers__services_access.py
@@ -13,9 +13,13 @@
 from models_library.api_schemas_catalog.service_access_rights import (
     ServiceAccessRightsGet,
 )
+from models_library.api_schemas_catalog.services import MyServiceGet
+from models_library.services_history import ServiceRelease
 from pytest_mock import MockerFixture
 from pytest_simcore.helpers.assert_checks import assert_status
+from pytest_simcore.helpers.webserver_login import UserInfoDict
 from servicelib.aiohttp import status
+from servicelib.rabbitmq import RPCServerError
 from simcore_service_webserver.db.models import UserRole
 from simcore_service_webserver.projects.models import ProjectDict
 from yarl import URL
@@ -55,7 +59,7 @@ def fake_project(
 @pytest.fixture
 def mock_catalog_api_get_service_access_rights_response(mocker: MockerFixture):
     mocker.patch(
-        "simcore_service_webserver.projects._nodes_handlers.catalog_client.get_service_access_rights",
+        "simcore_service_webserver.projects._controller.nodes_rest.catalog_service.get_service_access_rights",
         spec=True,
         side_effect=[
             ServiceAccessRightsGet(
@@ -95,9 +99,9 @@ def mock_catalog_api_get_service_access_rights_response(mocker: MockerFixture):
 async def test_user_role_access(
     client: TestClient,
     user_project: ProjectDict,
-    logged_user: dict,
+    logged_user: UserInfoDict,
     expected: HTTPStatus,
-    mock_catalog_api_get_service_access_rights_response,
+    mock_catalog_api_get_service_access_rights_response: None,
 ):
     assert client.app
 
@@ -123,10 +127,10 @@ async def test_accessible_thanks_to_everyone_group_id(
     client: TestClient,
     user_project: ProjectDict,
     mocker: MockerFixture,
-    logged_user: dict,
+    logged_user: UserInfoDict,
 ):
     mocker.patch(
-        "simcore_service_webserver.projects._nodes_handlers.catalog_client.get_service_access_rights",
+        "simcore_service_webserver.projects._controller.nodes_rest.catalog_service.get_service_access_rights",
         spec=True,
         side_effect=[
             ServiceAccessRightsGet(
@@ -176,12 +180,12 @@ async def test_accessible_thanks_to_concrete_group_id(
     client: TestClient,
     user_project: ProjectDict,
     mocker: MockerFixture,
-    logged_user: dict,
+    logged_user: UserInfoDict,
 ):
     for_gid = logged_user["primary_gid"]
 
     mocker.patch(
-        "simcore_service_webserver.projects._nodes_handlers.catalog_client.get_service_access_rights",
+        "simcore_service_webserver.projects._controller.nodes_rest.catalog_service.get_service_access_rights",
         spec=True,
         side_effect=[
             ServiceAccessRightsGet(
@@ -229,12 +233,12 @@ async def test_accessible_through_product_group(
     client: TestClient,
     user_project: ProjectDict,
     mocker: MockerFixture,
-    logged_user: dict,
+    logged_user: UserInfoDict,
 ):
     for_gid = logged_user["primary_gid"]
 
     mocker.patch(
-        "simcore_service_webserver.projects._nodes_handlers.catalog_client.get_service_access_rights",
+        "simcore_service_webserver.projects._controller.nodes_rest.catalog_service.get_service_access_rights",
         spec=True,
         side_effect=[
             ServiceAccessRightsGet(
@@ -288,12 +292,12 @@ async def test_accessible_for_one_service(
     client: TestClient,
     user_project: ProjectDict,
     mocker: MockerFixture,
-    logged_user: dict,
+    logged_user: UserInfoDict,
 ):
     for_gid = logged_user["primary_gid"]
 
     mocker.patch(
-        "simcore_service_webserver.projects._nodes_handlers.catalog_client.get_service_access_rights",
+        "simcore_service_webserver.projects._controller.nodes_rest.catalog_service.get_service_access_rights",
         spec=True,
         side_effect=[
             ServiceAccessRightsGet(
@@ -348,10 +352,10 @@ async def test_not_accessible_for_more_services(
     client: TestClient,
     user_project: ProjectDict,
     mocker: MockerFixture,
-    logged_user: dict,
+    logged_user: UserInfoDict,
 ):
     mocker.patch(
-        "simcore_service_webserver.projects._nodes_handlers.catalog_client.get_service_access_rights",
+        "simcore_service_webserver.projects._controller.nodes_rest.catalog_service.get_service_access_rights",
         spec=True,
         side_effect=[
             ServiceAccessRightsGet(
@@ -412,12 +416,12 @@ async def test_not_accessible_for_service_because_of_execute_access_false(
     client: TestClient,
     user_project: ProjectDict,
     mocker: MockerFixture,
-    logged_user: dict,
+    logged_user: UserInfoDict,
 ):
     for_gid = logged_user["primary_gid"]
 
     mocker.patch(
-        "simcore_service_webserver.projects._nodes_handlers.catalog_client.get_service_access_rights",
+        "simcore_service_webserver.projects._controller.nodes_rest.catalog_service.get_service_access_rights",
         spec=True,
         side_effect=[
             ServiceAccessRightsGet(
@@ -461,3 +465,122 @@ async def test_not_accessible_for_service_because_of_execute_access_false(
         {"key": "simcore/services/comp/itis/sleeper", "version": "2.1.4"}
     ],
 }
+
+
+@pytest.mark.parametrize("user_role", [UserRole.USER])
+async def test_get_project_services(
+    client: TestClient,
+    user_project: ProjectDict,
+    mocker: MockerFixture,
+    logged_user: UserInfoDict,
+):
+    fake_services_in_project = [
+        (sv["key"], sv["version"]) for sv in user_project["workbench"].values()
+    ]
+
+    mocker.patch(
+        "simcore_service_webserver.catalog._service.catalog_rpc.batch_get_my_services",
+        spec=True,
+        return_value=[
+            MyServiceGet(
+                key=service_key,
+                release=ServiceRelease(
+                    version=service_version,
+                    version_display=f"v{service_version}",
+                    released="2023-01-01T00:00:00Z",
+                    retired=None,
+                    compatibility=None,
+                ),
+                owner=logged_user["primary_gid"],
+                my_access_rights={"execute": True, "write": False},
+            )
+            for service_key, service_version in fake_services_in_project
+        ],
+    )
+
+    assert client.app
+
+    project_id = user_project["uuid"]
+
+    expected_url = client.app.router["get_project_services"].url_for(
+        project_id=project_id
+    )
+    assert URL(f"/v0/projects/{project_id}/nodes/-/services") == expected_url
+
+    resp = await client.get(f"/v0/projects/{project_id}/nodes/-/services")
+    data, _ = await assert_status(resp, status.HTTP_200_OK)
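+
+    # NOTE: the REST layer envelopes the payload and serializes it in
+    # camelCase, hence `projectUuid`, `myAccessRights` and `versionDisplay`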
+ "version": "2.1.4", + "versionDisplay": "v2.1.4", + }, + }, + { + "key": "simcore/services/frontend/parameter/integer", + "myAccessRights": {"execute": True, "write": False}, + "owner": logged_user["primary_gid"], + "release": { + "compatibility": None, + "released": "2023-01-01T00:00:00+00:00", + "retired": None, + "version": "1.0.0", + "versionDisplay": "v1.0.0", + }, + }, + { + "key": "simcore/services/comp/itis/sleeper", + "myAccessRights": {"execute": True, "write": False}, + "owner": logged_user["primary_gid"], + "release": { + "compatibility": None, + "released": "2023-01-01T00:00:00+00:00", + "retired": None, + "version": "2.1.5", + "versionDisplay": "v2.1.5", + }, + }, + ], + } + + +@pytest.mark.parametrize("user_role", [UserRole.USER]) +async def test_get_project_services_service_unavailable( + client: TestClient, + user_project: ProjectDict, + mocker: MockerFixture, + logged_user: UserInfoDict, +): + mocker.patch( + "simcore_service_webserver.catalog._service.catalog_rpc.batch_get_my_services", + spec=True, + side_effect=RPCServerError( + exc_message="Service Unavailable", + method_name="batch_get_my_services", + exc_type="Exception", + ), + ) + + assert client.app + + project_id = user_project["uuid"] + + expected_url = client.app.router["get_project_services"].url_for( + project_id=project_id + ) + assert URL(f"/v0/projects/{project_id}/nodes/-/services") == expected_url + + resp = await client.get(f"/v0/projects/{project_id}/nodes/-/services") + data, error = await assert_status(resp, status.HTTP_503_SERVICE_UNAVAILABLE) + + assert error + assert not data diff --git a/services/web/server/tests/unit/with_dbs/02/test_projects_nodes_pricing_unit_handlers.py b/services/web/server/tests/unit/with_dbs/02/test_projects_nodes_pricing_unit_handlers.py index dad139ec8bb..3e59aebe177 100644 --- a/services/web/server/tests/unit/with_dbs/02/test_projects_nodes_pricing_unit_handlers.py +++ b/services/web/server/tests/unit/with_dbs/02/test_projects_nodes_pricing_unit_handlers.py @@ -15,7 +15,7 @@ from faker import Faker from models_library.api_schemas_clusters_keeper.ec2_instances import EC2InstanceTypeGet from models_library.api_schemas_resource_usage_tracker.pricing_plans import ( - PricingUnitGet, + RutPricingUnitGet, ) from models_library.utils.fastapi_encoders import jsonable_encoder from pytest_mock.plugin import MockerFixture @@ -97,8 +97,8 @@ def mock_rut_api_responses( assert client.app settings: ResourceUsageTrackerSettings = get_plugin_settings(client.app) - pricing_unit_get_base = PricingUnitGet.model_validate( - PricingUnitGet.model_config["json_schema_extra"]["examples"][0] + pricing_unit_get_base = RutPricingUnitGet.model_validate( + RutPricingUnitGet.model_config["json_schema_extra"]["examples"][0] ) pricing_unit_get_1 = pricing_unit_get_base.model_copy() pricing_unit_get_1.pricing_unit_id = _PRICING_UNIT_ID_1 @@ -136,7 +136,7 @@ def _fake_instance_type_details( ] return mocker.patch( - "simcore_service_webserver.projects.projects_service.get_instance_type_details", + "simcore_service_webserver.projects._projects_service.get_instance_type_details", side_effect=_fake_instance_type_details, ) diff --git a/services/web/server/tests/unit/with_dbs/02/test_projects_repository.py b/services/web/server/tests/unit/with_dbs/02/test_projects_repository.py new file mode 100644 index 00000000000..6dccd486a0b --- /dev/null +++ b/services/web/server/tests/unit/with_dbs/02/test_projects_repository.py @@ -0,0 +1,194 @@ +# pylint: disable=redefined-outer-name +# pylint: 
+
+from datetime import datetime, timedelta
+from uuid import UUID
+
+import arrow
+import pytest
+from aiohttp.test_utils import TestClient
+from common_library.users_enums import UserRole
+from pytest_simcore.helpers.webserver_login import UserInfoDict
+from simcore_service_webserver.projects import (
+    _projects_repository as projects_service_repository,
+)
+from simcore_service_webserver.projects.exceptions import ProjectNotFoundError
+from simcore_service_webserver.projects.models import ProjectDBGet, ProjectDict
+
+
+@pytest.fixture
+def user_role() -> UserRole:
+    return UserRole.USER
+
+
+async def test_get_project(
+    client: TestClient,
+    logged_user: UserInfoDict,
+    user_project: ProjectDict,
+):
+    assert client.app
+
+    # Get valid project
+    got_project = await projects_service_repository.get_project(
+        client.app, project_uuid=user_project["uuid"]
+    )
+
+    assert got_project.uuid == UUID(user_project["uuid"])
+    assert got_project.name == user_project["name"]
+    assert got_project.description == user_project["description"]
+
+    # Get non-existent project
+    non_existent_project_uuid = UUID("00000000-0000-0000-0000-000000000000")
+    with pytest.raises(ProjectNotFoundError):
+        await projects_service_repository.get_project(
+            client.app, project_uuid=non_existent_project_uuid
+        )
+
+
+async def test_patch_project(
+    client: TestClient,
+    logged_user: UserInfoDict,
+    user_project: ProjectDict,
+):
+    assert client.app
+
+    # These dates will diverge after the patch below
+    creation_date = datetime.fromisoformat(user_project["creationDate"])
+    last_change_date = datetime.fromisoformat(user_project["lastChangeDate"])
+    assert abs(creation_date - last_change_date) < timedelta(seconds=1)
+
+    # Patch valid project
+    patch_data = {"name": "Updated Project Name"}
+    patched_project = await projects_service_repository.patch_project(
+        client.app,
+        project_uuid=user_project["uuid"],
+        new_partial_project_data=patch_data,
+    )
+
+    assert patched_project.uuid == UUID(user_project["uuid"])
+    assert patched_project.name == patch_data["name"]
+    assert patched_project.creation_date < patched_project.last_change_date
+
+    # Patch non-existent project
+    non_existent_project_uuid = UUID("00000000-0000-0000-0000-000000000000")
+    with pytest.raises(ProjectNotFoundError):
+        await projects_service_repository.patch_project(
+            client.app,
+            project_uuid=non_existent_project_uuid,
+            new_partial_project_data=patch_data,
+        )
+
+
+async def test_delete_project(
+    client: TestClient,
+    logged_user: UserInfoDict,
+    user_project: ProjectDict,
+):
+    assert client.app
+
+    # Delete valid project
+    deleted_project = await projects_service_repository.delete_project(
+        client.app, project_uuid=user_project["uuid"]
+    )
+
+    assert deleted_project.uuid == UUID(user_project["uuid"])
+
+    # Check deleted
+    with pytest.raises(ProjectNotFoundError):
+        await projects_service_repository.delete_project(
+            client.app, project_uuid=user_project["uuid"]
+        )
+
+    # Delete non-existent project
+    non_existent_project_uuid = UUID("00000000-0000-0000-0000-000000000000")
+    with pytest.raises(ProjectNotFoundError):
+        await projects_service_repository.delete_project(
+            client.app, project_uuid=non_existent_project_uuid
+        )
+
+
+@pytest.fixture
+async def trashed_project(
+    client: TestClient,
+    logged_user: UserInfoDict,
+    user_project: ProjectDict,
+) -> ProjectDBGet:
+    assert client.app
+
+    # Patch project to be trashed
+    trashed_at = arrow.utcnow().datetime
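+    # writes the trash columns (trashed, trashed_by, trashed_explicitly)
+    # directly through the repository, bypassing the REST API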
+    patch_data = {
+        "trashed": trashed_at,
+        "trashed_by": logged_user["id"],
+        "trashed_explicitly": True,
+    }
+    return await projects_service_repository.patch_project(
+        client.app,
+        project_uuid=user_project["uuid"],
+        new_partial_project_data=patch_data,
+    )
+
+
+async def test_list_trashed_projects(client: TestClient, trashed_project: ProjectDBGet):
+    assert client.app
+
+    (
+        total_count,
+        trashed_projects,
+    ) = await projects_service_repository.list_trashed_projects(
+        client.app,
+        trashed_explicitly=True,
+        trashed_before=arrow.utcnow().datetime + timedelta(days=1),
+    )
+
+    assert total_count == 1
+    assert len(trashed_projects) == 1
+    assert trashed_projects[0] == trashed_project
+
+
+async def test_get_trashed_by_primary_gid(
+    client: TestClient,
+    logged_user: UserInfoDict,
+    trashed_project: ProjectDBGet,
+):
+    assert client.app
+
+    # Get trashed by primary gid
+    trashed_by_primary_gid = (
+        await projects_service_repository.get_trashed_by_primary_gid(
+            client.app,
+            projects_uuid=trashed_project.uuid,
+        )
+    )
+
+    assert trashed_by_primary_gid == logged_user["primary_gid"]
+
+
+async def test_batch_get_trashed_by_primary_gid(
+    client: TestClient,
+    logged_user: UserInfoDict,
+    trashed_project: ProjectDBGet,
+):
+    assert client.app
+
+    non_existent_project_uuid = UUID("00000000-0000-0000-0000-000000000000")
+
+    # Batch get trashed by primary gid
+    trashed_by_primary_gid = (
+        await projects_service_repository.batch_get_trashed_by_primary_gid(
+            client.app,
+            projects_uuids=[
+                trashed_project.uuid,
+                non_existent_project_uuid,  # non-existent
+                trashed_project.uuid,  # repeated
+            ],
+        )
+    )
+
+    assert trashed_by_primary_gid == [
+        logged_user["primary_gid"],
+        None,
+        logged_user["primary_gid"],
+    ]
diff --git a/services/web/server/tests/unit/with_dbs/02/test_projects_states_handlers.py b/services/web/server/tests/unit/with_dbs/02/test_projects_states_handlers.py
index 3963a10bf7b..3d4b26894b8 100644
--- a/services/web/server/tests/unit/with_dbs/02/test_projects_states_handlers.py
+++ b/services/web/server/tests/unit/with_dbs/02/test_projects_states_handlers.py
@@ -453,7 +453,7 @@ async def test_open_project__in_debt(
     added_wallet, _ = await assert_status(resp, status.HTTP_201_CREATED)
 
     mock_get_project_wallet_total_credits = mocker.patch(
-        "simcore_service_webserver.projects._wallets_api.credit_transactions.get_project_wallet_total_credits",
+        "simcore_service_webserver.projects._wallets_service.credit_transactions.get_project_wallet_total_credits",
         spec=True,
         return_value=WalletTotalCredits(
             wallet_id=added_wallet["walletId"],
@@ -1073,7 +1073,7 @@ async def test_project_node_lifetime(  # noqa: PLR0915
     create_dynamic_service_mock: Callable[..., Awaitable[DynamicServiceGet]],
 ):
     mock_storage_api_delete_data_folders_of_project_node = mocker.patch(
-        "simcore_service_webserver.projects._crud_handlers.projects_service.storage_api.delete_data_folders_of_project_node",
+        "simcore_service_webserver.projects._projects_service.storage_service.delete_data_folders_of_project_node",
         return_value="",
     )
     assert client.app
diff --git a/services/web/server/tests/unit/with_dbs/02/test_projects_wallet_handlers.py b/services/web/server/tests/unit/with_dbs/02/test_projects_wallet_handlers.py
index 07a447de907..436581d9de3 100644
--- a/services/web/server/tests/unit/with_dbs/02/test_projects_wallet_handlers.py
+++ b/services/web/server/tests/unit/with_dbs/02/test_projects_wallet_handlers.py
@@ -111,7 +111,7 @@ def mock_get_project_wallet_total_credits(
     mocker: MockerFixture, setup_wallets_db: list[WalletGet]
 ):
     mocker.patch(
-        "simcore_service_webserver.projects._wallets_api.credit_transactions.get_project_wallet_total_credits",
+        "simcore_service_webserver.projects._wallets_service.credit_transactions.get_project_wallet_total_credits",
         spec=True,
         return_value=WalletTotalCredits(
             wallet_id=setup_wallets_db[0].wallet_id, available_osparc_credits=Decimal(0)
@@ -122,7 +122,7 @@ def mock_get_project_wallet_total_credits(
 @pytest.fixture
 def mock_get_service_run_page(mocker: MockerFixture):
     mocker.patch(
-        "simcore_service_webserver.projects._wallets_api.service_runs.get_service_run_page",
+        "simcore_service_webserver.projects._wallets_service.service_runs.get_service_run_page",
         spec=True,
         return_value=ServiceRunPage(items=[], total=0),
     )
@@ -181,7 +181,7 @@ async def test_project_wallets_full_workflow(
 @pytest.fixture
 def mock_pay_project_debt(mocker: MockerFixture):
     return mocker.patch(
-        "simcore_service_webserver.projects._wallets_api.credit_transactions.pay_project_debt",
+        "simcore_service_webserver.projects._wallets_service.credit_transactions.pay_project_debt",
         spec=True,
     )
diff --git a/services/web/server/tests/unit/with_dbs/03/invitations/conftest.py b/services/web/server/tests/unit/with_dbs/03/invitations/conftest.py
index 48d8f1ac41f..475dc12812b 100644
--- a/services/web/server/tests/unit/with_dbs/03/invitations/conftest.py
+++ b/services/web/server/tests/unit/with_dbs/03/invitations/conftest.py
@@ -29,7 +29,8 @@
     InvitationsSettings,
     get_plugin_settings,
 )
-from simcore_service_webserver.products.api import Product, list_products
+from simcore_service_webserver.products import products_service
+from simcore_service_webserver.products.models import Product
 from yarl import URL
@@ -52,7 +53,7 @@
 @pytest.fixture
 def current_product(client: TestClient) -> Product:
     assert client.app
-    products = list_products(client.app)
+    products = products_service.list_products(client.app)
     assert products
     assert products[0].name == "osparc"
     return products[0]
@@ -192,7 +193,6 @@ def app_environment(
             "WEBSERVER_DIAGNOSTICS": "null",
             "WEBSERVER_EXPORTER": "null",
             "WEBSERVER_GARBAGE_COLLECTOR": "null",
-            "WEBSERVER_META_MODELING": "0",
             "WEBSERVER_NOTIFICATIONS": "0",
             "WEBSERVER_PUBLICATIONS": "0",
             "WEBSERVER_REMOTE_DEBUG": "0",
@@ -200,7 +200,6 @@ def app_environment(
             "WEBSERVER_STUDIES_ACCESS_ENABLED": "0",
             "WEBSERVER_TAGS": "0",
             "WEBSERVER_TRACING": "null",
-            "WEBSERVER_VERSION_CONTROL": "0",
             "WEBSERVER_WALLETS": "0",
             # set INVITATIONS_* variables using those in .env-devel
             **{
diff --git a/services/web/server/tests/unit/with_dbs/03/invitations/test_invitations.py b/services/web/server/tests/unit/with_dbs/03/invitations/test_invitations.py
index ad31dda87c3..030a88e55cc 100644
--- a/services/web/server/tests/unit/with_dbs/03/invitations/test_invitations.py
+++ b/services/web/server/tests/unit/with_dbs/03/invitations/test_invitations.py
@@ -20,7 +20,7 @@
     InvalidInvitationError,
     InvitationsServiceUnavailableError,
 )
-from simcore_service_webserver.products.api import Product
+from simcore_service_webserver.products.models import Product
 from yarl import URL
diff --git a/services/web/server/tests/unit/with_dbs/03/invitations/test_login_handlers_registration_invitations.py b/services/web/server/tests/unit/with_dbs/03/invitations/test_login_handlers_registration_invitations.py
index 8c4daca29df..7a081e39cb6 100644
--- a/services/web/server/tests/unit/with_dbs/03/invitations/test_login_handlers_registration_invitations.py
+++ b/services/web/server/tests/unit/with_dbs/03/invitations/test_login_handlers_registration_invitations.py
@@ -113,7 +113,7 @@ def _extract_invitation_code_from_url(invitation_url: HttpUrl) -> str:
 @pytest.mark.acceptance_test()
 async def test_registration_to_different_product(
     mocker: MockerFixture,
-    all_products_names: list[ProductName],
+    app_products_names: list[ProductName],
     client: TestClient,
     guest_email: str,
     guest_password: str,
@@ -146,8 +146,8 @@ async def _register_account(invitation_url: HttpUrl, product_deployed: ProductName
             headers={X_PRODUCT_NAME_HEADER: product_deployed},
         )
 
-    product_a = all_products_names[0]
-    product_b = all_products_names[1]
+    product_a = app_products_names[0]
+    product_b = app_products_names[1]
 
     # PO creates two invitations for guest in product A and product B
     invitation_product_a = await generate_invitation(
diff --git a/services/web/server/tests/unit/with_dbs/03/invitations/test_products__invitations_handlers.py b/services/web/server/tests/unit/with_dbs/03/invitations/test_products_rest_invitations.py
similarity index 97%
rename from services/web/server/tests/unit/with_dbs/03/invitations/test_products__invitations_handlers.py
rename to services/web/server/tests/unit/with_dbs/03/invitations/test_products_rest_invitations.py
index 9f347239acd..f384bbe46fb 100644
--- a/services/web/server/tests/unit/with_dbs/03/invitations/test_products__invitations_handlers.py
+++ b/services/web/server/tests/unit/with_dbs/03/invitations/test_products_rest_invitations.py
@@ -11,8 +11,8 @@
 import pytest
 from aiohttp.test_utils import TestClient
 from faker import Faker
-from models_library.api_schemas_webserver.product import (
-    GenerateInvitation,
+from models_library.api_schemas_webserver.products import (
+    InvitationGenerate,
     InvitationGenerated,
 )
 from models_library.invitations import _MAX_LEN
@@ -83,7 +83,7 @@ async def test_product_owner_generates_invitation(
 ):
     before_dt = datetime.now(tz=UTC)
 
-    request_model = GenerateInvitation(
+    request_model = InvitationGenerate(
         guest=guest_email,
         trial_account_days=trial_account_days,
         extra_credits_in_usd=extra_credits_in_usd,
@@ -146,7 +146,7 @@ async def test_pre_registration_and_invitation_workflow(
         "country": faker.country(),
     }
 
-    invitation = GenerateInvitation(
+    invitation = InvitationGenerate(
         guest=guest_email,
         trial_account_days=None,
         extra_credits_in_usd=10,
diff --git a/services/web/server/tests/unit/with_dbs/03/login/conftest.py b/services/web/server/tests/unit/with_dbs/03/login/conftest.py
index b3f8049ff51..c0eaf628d2e 100644
--- a/services/web/server/tests/unit/with_dbs/03/login/conftest.py
+++ b/services/web/server/tests/unit/with_dbs/03/login/conftest.py
@@ -32,7 +32,6 @@ def app_environment(
             "WEBSERVER_EXPORTER": "null",
             "WEBSERVER_GARBAGE_COLLECTOR": "null",
             "WEBSERVER_GROUPS": "1",
-            "WEBSERVER_META_MODELING": "0",
             "WEBSERVER_NOTIFICATIONS": "0",
             "WEBSERVER_PRODUCTS": "1",
             "WEBSERVER_PUBLICATIONS": "0",
@@ -40,7 +39,6 @@
             "WEBSERVER_SOCKETIO": "1",  # for login notifications
             "WEBSERVER_STUDIES_DISPATCHER": "null",
             "WEBSERVER_TAGS": "1",
-            "WEBSERVER_VERSION_CONTROL": "0",
             "WEBSERVER_WALLETS": "1",
             "WEBSERVER_TRACING": "null",
         },
diff --git a/services/web/server/tests/unit/with_dbs/03/login/test_login_2fa.py b/services/web/server/tests/unit/with_dbs/03/login/test_login_2fa.py
index 29324b2af23..588e95182b6 100644
--- a/services/web/server/tests/unit/with_dbs/03/login/test_login_2fa.py
+++ b/services/web/server/tests/unit/with_dbs/03/login/test_login_2fa.py
@@ -32,10 +32,12 @@
 )
 from simcore_service_webserver.login._constants import (
     CODE_2FA_SMS_CODE_REQUIRED,
-    MSG_2FA_UNAVAILABLE_OEC,
+    MSG_2FA_UNAVAILABLE,
 )
 from simcore_service_webserver.login.storage import AsyncpgStorage
-from simcore_service_webserver.products.api import Product, get_current_product
+from simcore_service_webserver.products import products_web
+from simcore_service_webserver.products.errors import UnknownProductError
+from simcore_service_webserver.products.models import Product
 from simcore_service_webserver.users import preferences_api as user_preferences_api
 from twilio.base.exceptions import TwilioRestException
@@ -299,7 +301,7 @@ def _get_confirmation_link_from_email():
         },
     )
     data, _ = await assert_status(response, status.HTTP_200_OK)
-    assert data["message"] == "You are logged in"
+    assert "logged in" in data["message"]
 
 
 async def test_can_register_same_phone_in_different_accounts(
@@ -356,7 +358,7 @@ async def test_can_register_same_phone_in_different_accounts(
     )
     data, error = await assert_status(response, status.HTTP_202_ACCEPTED)
     assert data
-    assert "Code" in data["message"]
+    assert "SMS" in data["message"]
     assert data["name"] == CODE_2FA_SMS_CODE_REQUIRED
     assert not error
 
@@ -369,9 +371,9 @@ async def test_send_email_code(
 ):
     request = make_mocked_request("GET", "/dummy", app=client.app)
 
-    with pytest.raises(KeyError):
+    with pytest.raises(UnknownProductError):
         # NOTE: this is a fake request and did not go through middlewares
-        get_current_product(request)
+        products_web.get_current_product(request)
 
     user_email = faker.email()
     support_email = faker.email()
@@ -417,9 +419,9 @@ async def test_2fa_sms_failure_during_login(
 ):
     assert client.app
 
-    # Mocks error in graylog https://monitoring.osparc.io/graylog/search/649e7619ce6e0838a96e9bf1?q=%222FA%22&rangetype=relative&from=172800
     mocker.patch(
-        "simcore_service_webserver.login._2fa_api.TwilioSettings.is_alphanumeric_supported",
+        # MD: Emulates error in graylog https://monitoring.osparc.io/graylog/search/649e7619ce6e0838a96e9bf1?q=%222FA%22&rangetype=relative&from=172800
+        "simcore_service_webserver.login._2fa_api.twilio.rest.Client",
         autospec=True,
         side_effect=TwilioRestException(
             status=400,
@@ -454,9 +456,7 @@ async def test_2fa_sms_failure_during_login(
         response, status.HTTP_503_SERVICE_UNAVAILABLE
     )
     assert not data
-    assert error["errors"][0]["message"].startswith(
-        MSG_2FA_UNAVAILABLE_OEC[:10]
-    )
+    assert error["errors"][0]["message"].startswith(MSG_2FA_UNAVAILABLE[:10])
 
     # Expects logs like 'Failed while setting up 2FA code and sending SMS to 157XXXXXXXX3 [OEC:140392495277888]'
     assert f"{fake_user_phone_number[:3]}" in caplog.text
diff --git a/services/web/server/tests/unit/with_dbs/03/login/test_login_auth.py b/services/web/server/tests/unit/with_dbs/03/login/test_login_auth.py
index 7d16e912414..97042d6ed1c 100644
--- a/services/web/server/tests/unit/with_dbs/03/login/test_login_auth.py
+++ b/services/web/server/tests/unit/with_dbs/03/login/test_login_auth.py
@@ -14,7 +14,7 @@
 from pytest_simcore.helpers.webserver_login import NewUser
 from servicelib.aiohttp import status
 from settings_library.utils_session import DEFAULT_SESSION_COOKIE_NAME
-from simcore_service_webserver._constants import APP_SETTINGS_KEY
+from simcore_service_webserver.constants import APP_SETTINGS_KEY
 from simcore_service_webserver.db.models import UserStatus
 from simcore_service_webserver.login._constants import (
     MSG_ACTIVATION_REQUIRED,
diff --git a/services/web/server/tests/unit/with_dbs/03/login/test_login_change_email.py b/services/web/server/tests/unit/with_dbs/03/login/test_login_change_email.py
index 77b6bbd0b0e..c256edb25cb 100644
--- a/services/web/server/tests/unit/with_dbs/03/login/test_login_change_email.py
+++ b/services/web/server/tests/unit/with_dbs/03/login/test_login_change_email.py
@@ -7,7 +7,7 @@
 from pytest_simcore.helpers.assert_checks import assert_status
 from pytest_simcore.helpers.webserver_login import LoggedUser, NewUser, parse_link
 from servicelib.aiohttp import status
-from simcore_service_webserver._constants import INDEX_RESOURCE_NAME
+from simcore_service_webserver.constants import INDEX_RESOURCE_NAME
 from simcore_service_webserver.login._constants import (
     MSG_CHANGE_EMAIL_REQUESTED,
     MSG_LOGGED_IN,
diff --git a/services/web/server/tests/unit/with_dbs/03/login/test_login_change_password.py b/services/web/server/tests/unit/with_dbs/03/login/test_login_change_password.py
index a171ec63ae2..f496f0545d8 100644
--- a/services/web/server/tests/unit/with_dbs/03/login/test_login_change_password.py
+++ b/services/web/server/tests/unit/with_dbs/03/login/test_login_change_password.py
@@ -8,7 +8,7 @@
 from pytest_simcore.helpers.assert_checks import assert_status
 from pytest_simcore.helpers.webserver_login import LoggedUser
 from servicelib.aiohttp import status
-from servicelib.aiohttp.rest_responses import unwrap_envelope
+from servicelib.rest_responses import unwrap_envelope
 from simcore_service_webserver.login._constants import (
     MSG_LOGGED_IN,
     MSG_PASSWORD_CHANGED,
diff --git a/services/web/server/tests/unit/with_dbs/03/login/test_login_registration.py b/services/web/server/tests/unit/with_dbs/03/login/test_login_registration.py
index 0ece8630d0f..1abc63ac9f5 100644
--- a/services/web/server/tests/unit/with_dbs/03/login/test_login_registration.py
+++ b/services/web/server/tests/unit/with_dbs/03/login/test_login_registration.py
@@ -16,7 +16,7 @@
 from pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict, setenvs_from_dict
 from pytest_simcore.helpers.webserver_login import NewInvitation, NewUser, parse_link
 from servicelib.aiohttp import status
-from servicelib.aiohttp.rest_responses import unwrap_envelope
+from servicelib.rest_responses import unwrap_envelope
 from simcore_service_webserver.db.models import UserStatus
 from simcore_service_webserver.groups.api import auto_add_user_to_product_group
 from simcore_service_webserver.login._confirmation import _url_for_confirmation
diff --git a/services/web/server/tests/unit/with_dbs/03/login/test_login_registration_handlers.py b/services/web/server/tests/unit/with_dbs/03/login/test_login_registration_handlers.py
index 03b543e9038..5d019f4fb57 100644
--- a/services/web/server/tests/unit/with_dbs/03/login/test_login_registration_handlers.py
+++ b/services/web/server/tests/unit/with_dbs/03/login/test_login_registration_handlers.py
@@ -18,7 +18,7 @@
 from servicelib.aiohttp import status
 from simcore_postgres_database.models.users import UserRole
 from simcore_service_webserver.login._constants import MSG_USER_DELETED
-from simcore_service_webserver.products.api import get_product
+from simcore_service_webserver.products.products_service import get_product
 
 
 @pytest.mark.parametrize(
diff --git a/services/web/server/tests/unit/with_dbs/03/login/test_login_reset_password.py b/services/web/server/tests/unit/with_dbs/03/login/test_login_reset_password.py
index a5c95ba7c3b..ea121adb288 100644
--- a/services/web/server/tests/unit/with_dbs/03/login/test_login_reset_password.py
+++ b/services/web/server/tests/unit/with_dbs/03/login/test_login_reset_password.py
@@ -1,29 +1,38 @@
 # pylint: disable=redefined-outer-name
 # pylint: disable=unused-argument
 # pylint: disable=unused-variable
+# pylint: disable=too-many-arguments
+
 import asyncio
-from collections.abc import Callable
+import contextlib
+from collections.abc import AsyncIterator, Callable
 
 import pytest
 from aiohttp.test_utils import TestClient, TestServer
+from models_library.products import ProductName
+from pytest_mock import MockType
 from pytest_simcore.helpers.assert_checks import assert_status
 from pytest_simcore.helpers.webserver_login import NewUser, parse_link, parse_test_marks
 from servicelib.aiohttp import status
+from servicelib.rest_constants import X_PRODUCT_NAME_HEADER
 from servicelib.utils_secrets import generate_password
 from simcore_service_webserver.db.models import ConfirmationAction, UserStatus
+from simcore_service_webserver.groups import api as groups_service
 from simcore_service_webserver.login._constants import (
     MSG_ACTIVATION_REQUIRED,
     MSG_EMAIL_SENT,
     MSG_LOGGED_IN,
-    MSG_OFTEN_RESET_PASSWORD,
     MSG_PASSWORD_CHANGED,
-    MSG_UNKNOWN_EMAIL,
     MSG_USER_BANNED,
     MSG_USER_EXPIRED,
 )
 from simcore_service_webserver.login.settings import LoginOptions
-from simcore_service_webserver.login.storage import AsyncpgStorage
+from simcore_service_webserver.login.storage import (
+    AsyncpgStorage,
+    ConfirmationTokenDict,
+)
+from simcore_service_webserver.users import api as users_service
 from yarl import URL
 
 #
@@ -40,20 +49,97 @@
 def client(
     event_loop: asyncio.AbstractEventLoop,
     aiohttp_client: Callable,
-    web_server: TestServer,
-    mock_orphaned_services,
+    app_products_names: list[ProductName],
+    disabled_setup_garbage_collector: MockType,
     mocked_email_core_remove_comments: None,
+    # fixtures above must run before `web_server`
+    web_server: TestServer,
 ) -> TestClient:
+    assert app_products_names
     return event_loop.run_until_complete(aiohttp_client(web_server))
 
 
+async def test_two_steps_action_confirmation_workflow(
+    client: TestClient,
+    login_options: LoginOptions,
+    capsys: pytest.CaptureFixture,
+    caplog: pytest.LogCaptureFixture,
+):
+    assert client.app
+
+    async with NewUser(app=client.app) as user:
+        reset_url = client.app.router["initiate_reset_password"].url_for()
+        response = await client.post(
+            f"{reset_url}",
+            json={
+                "email": user["email"],
+            },
+        )
+        assert response.url.path == reset_url.path
+        await assert_status(response, status.HTTP_200_OK, MSG_EMAIL_SENT.format(**user))
+
+        # The email is printed to stdout by the test mailer
+        out, _ = capsys.readouterr()
+        confirmation_url = parse_link(out)
+        code = URL(confirmation_url).parts[-1]
+
+        # Emulates the USER clicking on the email link
+        response = await client.get(confirmation_url)
+        assert response.status == 200
+        assert (
+            response.url.path_qs
+            == URL(login_options.LOGIN_REDIRECT)
+            .with_fragment(f"reset-password?code={code}")
+            .path_qs
+        ), "Should redirect to front-end with special fragment"
+
+        # Emulates FRONT-END:
+        # SEE api/specs/webserver/v0/components/schemas/auth.yaml#/ResetPasswordForm
+        complete_reset_password_url = client.app.router[
+            "complete_reset_password"
+        ].url_for(code=code)
+        new_password = generate_password(10)
+        response = await client.post(
+            f"{complete_reset_password_url}",
+            json={
+                "password": new_password,
+                "confirm": new_password,
+            },
+        )
+        await assert_status(response, status.HTTP_200_OK, MSG_PASSWORD_CHANGED)
+        assert response.url.path == complete_reset_password_url.path
+
+        # Try NEW password
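+        # (log out first so the login below proves the NEW password end-to-end)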
+        logout_url = client.app.router["auth_logout"].url_for()
+        response = await client.post(f"{logout_url}")
+        assert response.url.path == logout_url.path
+        await assert_status(response, status.HTTP_401_UNAUTHORIZED, "Unauthorized")
+
+        login_url = client.app.router["auth_login"].url_for()
+        response = await client.post(
+            f"{login_url}",
+            json={
+                "email": user["email"],
+                "password": new_password,
+            },
+        )
+        await assert_status(response, status.HTTP_200_OK, MSG_LOGGED_IN)
+        assert response.url.path == login_url.path
+
+    # Ensure there are no warnings
+    assert not any(
+        record.levelname == "WARNING" for record in caplog.records
+    ), "Unexpected warnings found"
+
+
 async def test_unknown_email(
     client: TestClient,
     capsys: pytest.CaptureFixture,
+    caplog: pytest.LogCaptureFixture,
     fake_user_email: str,
 ):
     assert client.app
-    reset_url = client.app.router["auth_reset_password"].url_for()
+    reset_url = client.app.router["initiate_reset_password"].url_for()
 
     response = await client.post(
         f"{reset_url}",
@@ -66,8 +152,18 @@ async def test_unknown_email(
         response, status.HTTP_200_OK, MSG_EMAIL_SENT.format(email=fake_user_email)
     )
 
+    # email is not sent
     out, _ = capsys.readouterr()
-    assert parse_test_marks(out)["reason"] == MSG_UNKNOWN_EMAIL
+    assert not parse_test_marks(out), "Expected no email to be sent"
+
+    # Check logger warning
+    logged_warnings = [
+        record.message for record in caplog.records if record.levelname == "WARNING"
+    ]
+
+    assert any(
+        message.startswith("Password reset initiated") for message in logged_warnings
+    ), f"Missing warning in {logged_warnings}"
 
 
 @pytest.mark.parametrize(
@@ -80,11 +176,12 @@ async def test_unknown_email(
 async def test_blocked_user(
     client: TestClient,
     capsys: pytest.CaptureFixture,
+    caplog: pytest.LogCaptureFixture,
     user_status: UserStatus,
     expected_msg: str,
 ):
     assert client.app
-    reset_url = client.app.router["auth_reset_password"].url_for()
+    reset_url = client.app.router["initiate_reset_password"].url_for()
 
     async with NewUser({"status": user_status.name}, app=client.app) as user:
         response = await client.post(
@@ -97,14 +194,26 @@ async def test_blocked_user(
         assert response.url.path == reset_url.path
         await assert_status(response, status.HTTP_200_OK, MSG_EMAIL_SENT.format(**user))
 
+    # email is not sent
     out, _ = capsys.readouterr()
+    assert not parse_test_marks(out), "Expected no email to be sent"
 
     # expected_msg contains {support_email} at the end of the string
-    assert expected_msg[:-20] in parse_test_marks(out)["reason"]
+    logged_warnings = [
+        record.message for record in caplog.records if record.levelname == "WARNING"
+    ]
 
+    assert any(
+        message.startswith("Password reset initiated") and expected_msg[:10] in message
+        for message in logged_warnings
+    ), f"Missing warning in {logged_warnings}"
 
-async def test_inactive_user(client: TestClient, capsys: pytest.CaptureFixture):
+
+async def test_inactive_user(
+    client: TestClient, capsys: pytest.CaptureFixture, caplog: pytest.LogCaptureFixture
+):
     assert client.app
-    reset_url = client.app.router["auth_reset_password"].url_for()
+    reset_url = client.app.router["initiate_reset_password"].url_for()
 
     async with NewUser(
         {"status": UserStatus.CONFIRMATION_PENDING.name}, app=client.app
@@ -119,97 +228,86 @@
         assert response.url.path == reset_url.path
         await assert_status(response, status.HTTP_200_OK, MSG_EMAIL_SENT.format(**user))
 
+    # email is not sent
     out, _ = capsys.readouterr()
-    assert parse_test_marks(out)["reason"] == MSG_ACTIVATION_REQUIRED
+    assert not parse_test_marks(out), "Expected no email to be sent"
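+    # NOTE: the endpoint replies MSG_EMAIL_SENT in every case and only logs
+    # a warning with the actual reason, which is not disclosed to the caller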
+
+    logged_warnings = [
+        record.message for record in caplog.records if record.levelname == "WARNING"
+    ]
+
+    assert any(
+        message.startswith("Password reset initiated")
+        and MSG_ACTIVATION_REQUIRED[:20] in message
+        for message in logged_warnings
+    ), f"Missing warning in {logged_warnings}"
 
 
-async def test_too_often(
+@pytest.fixture
+def other_product_name(
+    app_products_names: list[ProductName],
+    default_product_name: ProductName,
+) -> ProductName:
+    return next(name for name in app_products_names if name != default_product_name)
+
+
+async def test_unregistered_product(
+    default_product_name: ProductName,
+    other_product_name: ProductName,
     client: TestClient,
-    db: AsyncpgStorage,
     capsys: pytest.CaptureFixture,
+    caplog: pytest.LogCaptureFixture,
 ):
     assert client.app
-    reset_url = client.app.router["auth_reset_password"].url_for()
 
     async with NewUser(app=client.app) as user:
-        confirmation = await db.create_confirmation(
-            user["id"], ConfirmationAction.RESET_PASSWORD.name
+
+        # allow the user into the default product
+        await groups_service.auto_add_user_to_product_group(
+            client.app, user_id=user["id"], product_name=default_product_name
         )
-        response = await client.post(
-            f"{reset_url}",
-            json={
-                "email": user["email"],
-            },
+        assert await users_service.is_user_in_product(
+            client.app, user_id=user["id"], product_name=default_product_name
+        )
+        assert not await users_service.is_user_in_product(
+            client.app, user_id=user["id"], product_name=other_product_name
         )
-        await db.delete_confirmation(confirmation)
-
-        assert response.url.path == reset_url.path
-        await assert_status(response, status.HTTP_200_OK, MSG_EMAIL_SENT.format(**user))
-
-        out, _ = capsys.readouterr()
-        assert parse_test_marks(out)["reason"] == MSG_OFTEN_RESET_PASSWORD
-
-
-async def test_reset_and_confirm(
-    client: TestClient, login_options: LoginOptions, capsys: pytest.CaptureFixture
-):
-    assert client.app
 
-    async with NewUser(app=client.app) as user:
-        reset_url = client.app.router["auth_reset_password"].url_for()
+        # Simulate a user registered in a different product
+        reset_url = client.app.router["initiate_reset_password"].url_for()
         response = await client.post(
             f"{reset_url}",
             json={
                 "email": user["email"],
             },
+            headers={X_PRODUCT_NAME_HEADER: other_product_name},
         )
         assert response.url.path == reset_url.path
         await assert_status(response, status.HTTP_200_OK, MSG_EMAIL_SENT.format(**user))
 
-        out, err = capsys.readouterr()
-        confirmation_url = parse_link(out)
-        code = URL(confirmation_url).parts[-1]
+        # email is not sent
+        out, _ = capsys.readouterr()
+        assert not parse_test_marks(out), "Expected no email to be sent"
 
-        # emulates user click on email url
-        response = await client.get(confirmation_url)
-        assert response.status == 200
-        assert (
-            response.url.path_qs
-            == URL(login_options.LOGIN_REDIRECT)
-            .with_fragment(f"reset-password?code={code}")
-            .path_qs
-        )
+        # the attempt is only logged as a warning
+        logged_warnings = [
+            record.message for record in caplog.records if record.levelname == "WARNING"
+        ]
 
-        # api/specs/webserver/v0/components/schemas/auth.yaml#/ResetPasswordForm
-        reset_allowed_url = client.app.router["auth_reset_password_allowed"].url_for(
-            code=code
-        )
-        new_password = generate_password(10)
-        response = await client.post(
-            f"{reset_allowed_url}",
-            json={
-                "password": new_password,
-                "confirm": new_password,
-            },
-        )
-        payload = await response.json()
-        assert response.status == 200, payload
-        assert response.url.path == reset_allowed_url.path
-        await assert_status(response, status.HTTP_200_OK, MSG_PASSWORD_CHANGED)
+        assert any(
+            message.startswith("Password reset initiated")
+            for message in logged_warnings
+        ), f"Missing warning in {logged_warnings}"
 
-        # Try new password
-        logout_url = client.app.router["auth_logout"].url_for()
-        response = await client.post(f"{logout_url}")
-        assert response.url.path == logout_url.path
-        await assert_status(response, status.HTTP_401_UNAUTHORIZED, "Unauthorized")
 
-        login_url = client.app.router["auth_login"].url_for()
-        response = await client.post(
-            f"{login_url}",
-            json={
-                "email": user["email"],
-                "password": new_password,
-            },
-        )
-        assert response.url.path == login_url.path
-        await assert_status(response, status.HTTP_200_OK, MSG_LOGGED_IN)
+@contextlib.asynccontextmanager
+async def confirmation_ctx(
+    db: AsyncpgStorage, user
+) -> AsyncIterator[ConfirmationTokenDict]:
+    confirmation = await db.create_confirmation(
+        user["id"], ConfirmationAction.RESET_PASSWORD.name
+    )
+
+    yield confirmation
+
+    await db.delete_confirmation(confirmation)
diff --git a/services/web/server/tests/unit/with_dbs/03/login/test_login_utils_emails.py b/services/web/server/tests/unit/with_dbs/03/login/test_login_utils_emails.py
index e5e417bb8fc..1eb7f810faa 100644
--- a/services/web/server/tests/unit/with_dbs/03/login/test_login_utils_emails.py
+++ b/services/web/server/tests/unit/with_dbs/03/login/test_login_utils_emails.py
@@ -11,11 +11,10 @@
 from aiohttp import web
 from aiohttp.test_utils import make_mocked_request
 from faker import Faker
-from json2html import json2html
 from pytest_mock import MockerFixture
 from pytest_simcore.helpers.typing_env import EnvVarsDict
-from simcore_service_webserver._constants import RQ_PRODUCT_KEY
 from simcore_service_webserver.application_settings import setup_settings
+from simcore_service_webserver.constants import RQ_PRODUCT_KEY
 from simcore_service_webserver.email.plugin import setup_email
 from simcore_service_webserver.login.plugin import setup_login
 from simcore_service_webserver.login.utils_email import (
@@ -23,6 +22,7 @@
     get_template_path,
     send_email_from_template,
 )
+from simcore_service_webserver.publications._utils import json2html
 from simcore_service_webserver.statics._constants import FRONTEND_APPS_AVAILABLE
@@ -109,22 +109,6 @@ async def test_render_and_send_mail_for_password(
 ):
     link = faker.url()  # some url link
 
-    await send_email_from_template(
-        http_request,
-        from_=f"no-reply@{product_name}.test",
-        to=destination_email,
-        template=await get_template_path(
-            http_request, "reset_password_email_failed.jinja2"
-        ),
-        context={
-            "host": http_request.host,
-            "reason": faker.text(),
-            "product": SimpleNamespace(
-                display_name=product_name.capitalize(), name=product_name
-            ),
-        },
-    )
-
     await send_email_from_template(
         http_request,
         from_=f"no-reply@{product_name}.test",
diff --git a/services/web/server/tests/unit/with_dbs/03/meta_modeling/conftest.py b/services/web/server/tests/unit/with_dbs/03/meta_modeling/conftest.py
deleted file mode 100644
index d7e3dc7529e..00000000000
--- a/services/web/server/tests/unit/with_dbs/03/meta_modeling/conftest.py
+++ /dev/null
@@ -1,43 +0,0 @@
-# pylint: disable=redefined-outer-name
-# pylint: disable=unused-argument
-# pylint: disable=unused-variable
-# pylint: disable=too-many-arguments
-import pytest
-from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict
-from pytest_simcore.helpers.typing_env import EnvVarsDict
simcore_postgres_database.models.users import UserRole - - -@pytest.fixture -def user_role() -> UserRole: - return UserRole.TESTER - - -@pytest.fixture -def app_environment( - monkeypatch: pytest.MonkeyPatch, - app_environment: EnvVarsDict, -) -> EnvVarsDict: - return app_environment | setenvs_from_dict( - monkeypatch, - { - # exclude - "WEBSERVER_ACTIVITY": "null", - "WEBSERVER_CLUSTERS": "null", - "WEBSERVER_COMPUTATION": "null", - "WEBSERVER_DIAGNOSTICS": "null", - "WEBSERVER_GROUPS": "0", - "WEBSERVER_PUBLICATIONS": "0", - "WEBSERVER_GARBAGE_COLLECTOR": "null", - "WEBSERVER_EMAIL": "null", - "WEBSERVER_SOCKETIO": "0", - "WEBSERVER_STORAGE": "null", - "WEBSERVER_STUDIES_DISPATCHER": "null", - "WEBSERVER_TAGS": "0", - "WEBSERVER_TRACING": "null", - # Module under test - "WEBSERVER_DEV_FEATURES_ENABLED": "1", - "WEBSERVER_VERSION_CONTROL": "1", - "WEBSERVER_META_MODELING": "1", - }, - ) diff --git a/services/web/server/tests/unit/with_dbs/03/meta_modeling/test_meta_modeling_function_nodes.py b/services/web/server/tests/unit/with_dbs/03/meta_modeling/test_meta_modeling_function_nodes.py deleted file mode 100644 index d9a487e0075..00000000000 --- a/services/web/server/tests/unit/with_dbs/03/meta_modeling/test_meta_modeling_function_nodes.py +++ /dev/null @@ -1,25 +0,0 @@ -# pylint: disable=protected-access -# pylint: disable=redefined-outer-name -# pylint: disable=unused-argument -# pylint: disable=unused-variable - -import collections.abc -import inspect -from typing import get_origin - -from simcore_service_webserver.meta_modeling._function_nodes import catalog - -# TODO: test i/o schemas on FRONTEND_SERVICES_CATALOG fit the _fun Callable - - -def test_frontend_service_to_callable_registry(): - - print(f"\n{len(catalog)=}") - - for (node_key, node_version), func in catalog._items(): - if node_call := func.implementation: - print(" -", node_key, node_version, node_call.__name__) - assert ( - get_origin(inspect.signature(node_call).return_annotation) - is collections.abc.Iterator - ), f"Expected iterable nodes only {(node_key, node_version)=}" diff --git a/services/web/server/tests/unit/with_dbs/03/meta_modeling/test_meta_modeling_iterations.py b/services/web/server/tests/unit/with_dbs/03/meta_modeling/test_meta_modeling_iterations.py deleted file mode 100644 index e00b67c0673..00000000000 --- a/services/web/server/tests/unit/with_dbs/03/meta_modeling/test_meta_modeling_iterations.py +++ /dev/null @@ -1,312 +0,0 @@ -# pylint: disable=redefined-outer-name -# pylint: disable=unused-argument -# pylint: disable=unused-variable - -from collections.abc import Awaitable, Callable -from typing import Any - -import pytest -from aiohttp import ClientResponse -from aiohttp.test_utils import TestClient -from common_library.json_serialization import json_dumps, json_loads -from faker import Faker -from models_library.projects import Project -from models_library.projects_nodes import Node -from models_library.services_resources import ServiceResourcesDict -from pytest_mock import MockerFixture -from pytest_simcore.helpers.assert_checks import assert_status -from pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict, setenvs_from_dict -from pytest_simcore.helpers.webserver_login import UserInfoDict -from pytest_simcore.simcore_webserver_projects_rest_api import ( - NEW_PROJECT, - REPLACE_PROJECT_ON_MODIFIED, - RUN_PROJECT, -) -from servicelib.aiohttp import status -from simcore_postgres_database.models.projects import projects -from simcore_service_webserver._constants import APP_AIOPG_ENGINE_KEY 
-from simcore_service_webserver.director_v2.api import get_project_run_policy -from simcore_service_webserver.meta_modeling._handlers import ( - Page, - ProjectIterationItem, - ProjectIterationResultItem, -) -from simcore_service_webserver.meta_modeling._projects import ( - meta_project_policy, - projects_redirection_middleware, -) -from simcore_service_webserver.projects.db import ProjectDBAPI -from simcore_service_webserver.projects.models import ProjectDict - -REQUEST_MODEL_POLICY = { - "by_alias": True, - "exclude_defaults": True, - "exclude_none": True, # e.g. thumbnail: None will fail validation TODO: remove when new project model is in place. It might lead to wrong errors - "exclude_unset": True, -} - - -@pytest.fixture -def app_environment( - app_environment: EnvVarsDict, monkeypatch: pytest.MonkeyPatch -) -> EnvVarsDict: - envs_plugins = setenvs_from_dict( - monkeypatch, - { - "WEBSERVER_RABBITMQ": "null", - }, - ) - return app_environment | envs_plugins - - -@pytest.fixture -async def context_with_logged_user(client: TestClient, logged_user: UserInfoDict): - yield - - assert client.app - engine = client.app[APP_AIOPG_ENGINE_KEY] - async with engine.acquire() as conn: - # cascade deletes everything except projects_vc_snapshot - await conn.execute(projects.delete()) - - -@pytest.mark.skip( - reason="Blocking testing. Will follow up in https://github.com/ITISFoundation/osparc-simcore/issues/6976 " -) -@pytest.mark.acceptance_test() -async def test_iterators_workflow( - client: TestClient, - logged_user: UserInfoDict, - primary_group: dict[str, Any], - context_with_logged_user: None, - mocker: MockerFixture, - faker: Faker, - mock_dynamic_scheduler: None, - director_v2_service_mock: None, - request_create_project: Callable[..., Awaitable[ProjectDict]], -): - # pylint: disable=too-many-statements - - # - # NOTE: all TODOs below shall be addressed in next version of the iterator - # SEE SEE https://github.com/ITISFoundation/osparc-simcore/issues/2735 - # - - response: ClientResponse - - # check init meta is correct - assert client.app - assert projects_redirection_middleware in client.app.middlewares - assert get_project_run_policy(client.app) == meta_project_policy - - # NEW project -------------------------------------------------------------- - mocker.patch( - "simcore_service_webserver.director_v2.api.create_or_update_pipeline", - return_value=None, - ) - mocker.patch( - "simcore_service_webserver.director_v2.api.get_computation_task", - return_value=None, - ) - mocker.patch( - "simcore_service_webserver.projects._nodes_handlers.projects_api.is_service_deprecated", - autospec=True, - return_value=False, - ) - mocker.patch( - "simcore_service_webserver.projects._nodes_handlers.projects_api.catalog_client.get_service_resources", - autospec=True, - return_value=ServiceResourcesDict(), - ) - # ---- - project_data = await request_create_project( - client, - status.HTTP_202_ACCEPTED, - status.HTTP_201_CREATED, - logged_user, - primary_group, - project=NEW_PROJECT.request_payload, - ) - - project_uuid = project_data["uuid"] - - # CREATE meta-project: iterator 0:3 -> sleeper -> sleeper_2 --------------- - modifications = REPLACE_PROJECT_ON_MODIFIED.request_payload - assert modifications - create_node_url = client.app.router["create_node"].url_for( - project_id=project_data["uuid"] - ) - for node_id, node_data in modifications["workbench"].items(): - node = Node.model_validate(node_data) - response = await client.post( - f"{create_node_url}", - json={ - "service_key": node.key, - 
"service_version": node.version, - "service_id": f"{node_id}", - }, - ) - assert response.status == status.HTTP_201_CREATED - project_data.update({key: modifications[key] for key in ("workbench", "ui")}) - project_data["ui"].setdefault("currentNodeId", project_uuid) - - db: ProjectDBAPI = ProjectDBAPI.get_from_app_context(client.app) - project_data.pop("state") - await db.replace_project( - project_data, - logged_user["id"], - project_uuid=project_uuid, - product_name="osparc", - ) - - # TODO: create iterations, so user could explore parametrizations? - - # RUN metaproject ---------------------------------------------------------- - async def _mock_start(project_id, user_id, product_name, **options): - return f"{project_id}" - - mocker.patch( - "simcore_service_webserver.director_v2._core_computations.ComputationsApi.start", - side_effect=_mock_start, - ) - # ---- - - response = await client.post( - f"/v0/computations/{project_uuid}:start", - json=RUN_PROJECT.request_payload, - ) - data, _ = await assert_status(response, status.HTTP_201_CREATED) - assert project_uuid == data["pipeline_id"] - ref_ids = data["ref_ids"] - assert len(ref_ids) == 3 - - # TODO: check: has auto-commited - # TODO: check: has iterations as branches - # TODO: retrieve results of iter1 - - # GET iterations ---------------------------------------------------------- - response = await client.get(f"/v0/repos/projects/{project_uuid}/checkpoints/HEAD") - body = await response.json() - head_ref_id = body["data"]["id"] - - assert head_ref_id == 1 - - response = await client.get( - f"/v0/projects/{project_uuid}/checkpoint/{head_ref_id}/iterations?offset=0" - ) - body = await response.json() - first_iterlist = Page[ProjectIterationItem].model_validate(body).data - - assert len(first_iterlist) == 3 - - # GET workcopy project for iter 0 ---------------------------------------------- - async def _mock_catalog_get(*args, **kwarg): - return [ - {"key": s["key"], "version": s["version"]} - for _, s in project_data["workbench"].items() - ] + [{"key": "simcore/services/frontend/parameter/integer", "version": "1.0.0"}] - - mocker.patch( - "simcore_service_webserver.projects._crud_handlers.get_services_for_user_in_product", - side_effect=_mock_catalog_get, - autospec=True, - ) - # extract outputs - for i, prj_iter in enumerate(first_iterlist): - assert prj_iter.workcopy_project_url.path - response = await client.get(prj_iter.workcopy_project_url.path) - assert response.status == status.HTTP_200_OK - - body = await response.json() - project_iter0 = body["data"] - - outputs = {} - for nid, node in project_iter0["workbench"].items(): - if out := node.get("outputs"): - outputs[nid] = out - - assert len(outputs) == 1 - assert outputs["fc9208d9-1a0a-430c-9951-9feaf1de3368"]["out_1"] == i - - # ---------------------------------------------- - - # GET results of all iterations - # /projects/{project_uuid}/checkpoint/{ref_id}/iterations/-/results - response = await client.get( - f"/v0/projects/{project_uuid}/checkpoint/{head_ref_id}/iterations/-/results" - ) - assert response.status == status.HTTP_200_OK, await response.text() - body = await response.json() - - assert Page[ProjectIterationResultItem].model_validate(body).data is not None - - # GET project and MODIFY iterator values---------------------------------------------- - # - Change iterations from 0:4 -> HEAD+1 - response = await client.get(f"/v0/projects/{project_uuid}") - assert response.status == status.HTTP_200_OK, await response.text() - body = await response.json() - - # NOTE: 
updating a project fields can be daunting because - # it combines nested field attributes with dicts and from the - # json you cannot distinguish easily what-is-what automatically - # Dict keys are usually some sort of identifier, typically a UUID or - # and index but nothing prevents a dict from using any other type of key types - # - project = Project.model_validate(body["data"]) - new_project = project.model_copy( - update={ - # TODO: HACK to overcome export from None -> string - # SOLUTION 1: thumbnail should not be required (check with team!) - # SOLUTION 2: make thumbnail nullable - "thumbnail": faker.image_url(), - } - ) - assert new_project.workbench is not None - assert new_project.workbench - node = new_project.workbench["fc9208d9-1a0a-430c-9951-9feaf1de3368"] - assert node.inputs - node.inputs["linspace_stop"] = 4 - - _new_project_data = new_project.model_dump(**REQUEST_MODEL_POLICY) - _new_project_data.pop("state") - await db.replace_project( - json_loads(json_dumps(_new_project_data)), - logged_user["id"], - project_uuid=project_uuid, - product_name="osparc", - ) - - # RUN again them --------------------------------------------------------------------------- - response = await client.post( - f"/v0/computations/{project_uuid}:start", - json=RUN_PROJECT.request_payload, - ) - data, _ = await assert_status(response, status.HTTP_201_CREATED) - assert project_uuid == data["pipeline_id"] - ref_ids = data["ref_ids"] - assert len(ref_ids) == 4 - - # GET iterations ----------------------------------------------------------------- - # check iters 1, 2 and 3 share working copies - # - response = await client.get(f"/v0/repos/projects/{project_uuid}/checkpoints/HEAD") - body = await response.json() - head_ref_id = body["data"]["id"] - - assert head_ref_id == 5 - - response = await client.get( - f"/v0/projects/{project_uuid}/checkpoint/{head_ref_id}/iterations?offset=0" - ) - body = await response.json() - assert response.status == status.HTTP_200_OK, f"{body=}" # nosec - second_iterlist = Page[ProjectIterationItem].model_validate(body).data - - assert len(second_iterlist) == 4 - assert len({it.workcopy_project_id for it in second_iterlist}) == len( - second_iterlist - ), "unique" - - # TODO: cached iterations will be implemented in next PR - # for i in range(len(first_iterlist)): - # assert second_iterlist[i].workcopy_project_id == first_iterlist[i].workcopy_project_id diff --git a/services/web/server/tests/unit/with_dbs/03/meta_modeling/test_meta_modeling_results.py b/services/web/server/tests/unit/with_dbs/03/meta_modeling/test_meta_modeling_results.py deleted file mode 100644 index fdec06806ed..00000000000 --- a/services/web/server/tests/unit/with_dbs/03/meta_modeling/test_meta_modeling_results.py +++ /dev/null @@ -1,138 +0,0 @@ -# pylint: disable=redefined-outer-name -# pylint: disable=unused-argument -# pylint: disable=unused-variable - - -import json -from typing import Any - -import pytest -from pydantic import BaseModel -from simcore_service_webserver.meta_modeling._results import ( - ExtractedResults, - extract_project_results, -) - - -@pytest.fixture -def fake_workbench() -> dict[str, Any]: - return { - "0f1e38c9-dcb7-443c-a745-91b97ac28ccc": { - "key": "simcore/services/frontend/data-iterator/funky-range", - "version": "1.0.0", - "label": "Integer iterator", - "inputs": {"linspace_start": 0, "linspace_stop": 2, "linspace_step": 1}, - "inputNodes": [], - # some funky output of iterator/param, - "outputs": {"out_1": 1, "out_2": [3, 4]}, - }, - 
"e33c6880-1b1d-4419-82d7-270197738aa9": { - "key": "simcore/services/comp/itis/sleeper", - "version": "2.0.0", - "label": "sleeper", - "inputs": { - "input_2": { - "nodeUuid": "0f1e38c9-dcb7-443c-a745-91b97ac28ccc", - "output": "out_1", - }, - "input_3": False, - }, - "inputNodes": ["0f1e38c9-dcb7-443c-a745-91b97ac28ccc"], - "state": { - "currentStatus": "SUCCESS", - "modified": False, - "dependencies": [], - }, - "progress": 100, - "outputs": { - "output_1": { - "store": "0", - "path": "30359da5-ca4d-3288-a553-5f426a204fe6/e33c6880-1b1d-4419-82d7-270197738aa9/single_number.txt", - "eTag": "a87ff679a2f3e71d9181a67b7542122c", - }, - "output_2": 7, - }, - "runHash": "f92d1836aa1b6b1b031f9e1b982e631814708675c74ba5f02161e0f256382b2b", - }, - "4c08265a-427b-4ac3-9eab-1d11c822ada4": { - "key": "simcore/services/comp/itis/sleeper", - "version": "2.0.0", - "label": "sleeper", - "inputNodes": [], - }, - "2d0ce8b9-c9c3-43ce-ad2f-ad493898de37": { - "key": "simcore/services/frontend/iterator-consumer/probe/int", - "version": "1.0.0", - "label": "Probe Sensor - Integer", - "inputs": { - "in_1": { - "nodeUuid": "e33c6880-1b1d-4419-82d7-270197738aa9", - "output": "output_2", - } - }, - "inputNodes": ["e33c6880-1b1d-4419-82d7-270197738aa9"], - }, - "445b44d1-59b3-425c-ac48-7c13e0f2ea5b": { - "key": "simcore/services/frontend/iterator-consumer/probe/int", - "version": "1.0.0", - "label": "Probe Sensor - Integer_2", - "inputs": { - "in_1": { - "nodeUuid": "0f1e38c9-dcb7-443c-a745-91b97ac28ccc", - "output": "out_1", - } - }, - "inputNodes": ["0f1e38c9-dcb7-443c-a745-91b97ac28ccc"], - }, - "d76fca06-f050-4790-88a8-0aac10c87b39": { - "key": "simcore/services/frontend/parameter/boolean", - "version": "1.0.0", - "label": "Boolean Parameter", - "inputs": {}, - "inputNodes": [], - "outputs": {"out_1": True}, - }, - } - - -def test_extract_project_results(fake_workbench: dict[str, Any]): - - results = extract_project_results(fake_workbench) - - print(json.dumps(results.progress, indent=1)) - print(json.dumps(results.labels, indent=1)) - print(json.dumps(results.values, indent=1)) - - # this has to be something that shall be deployable in a table - assert results.progress == { - "4c08265a-427b-4ac3-9eab-1d11c822ada4": 0, - "e33c6880-1b1d-4419-82d7-270197738aa9": 100, - } - - # labels are not unique, so there is a map to nodeids - assert results.labels == { - "0f1e38c9-dcb7-443c-a745-91b97ac28ccc": "Integer iterator", - "2d0ce8b9-c9c3-43ce-ad2f-ad493898de37": "Probe Sensor - Integer", - "445b44d1-59b3-425c-ac48-7c13e0f2ea5b": "Probe Sensor - Integer_2", - "d76fca06-f050-4790-88a8-0aac10c87b39": "Boolean Parameter", - } - # this is basically a tree that defines columns - assert results.values == { - "0f1e38c9-dcb7-443c-a745-91b97ac28ccc": {"out_1": 1, "out_2": [3, 4]}, - "2d0ce8b9-c9c3-43ce-ad2f-ad493898de37": {"in_1": 7}, - "445b44d1-59b3-425c-ac48-7c13e0f2ea5b": {"in_1": 1}, - "d76fca06-f050-4790-88a8-0aac10c87b39": {"out_1": True}, - } - - -@pytest.mark.parametrize( - "model_cls", - [ExtractedResults], -) -def test_models_examples( - model_cls: type[BaseModel], model_cls_examples: dict[str, Any] -): - for name, example in model_cls_examples.items(): - print(name, ":", json.dumps(example, indent=1)) - model_instance = model_cls(**example) - assert model_instance, f"Failed with {name}" diff --git a/services/web/server/tests/unit/with_dbs/03/resource_usage/conftest.py b/services/web/server/tests/unit/with_dbs/03/resource_usage/conftest.py index 28a01dacbc8..ef79e68ee1b 100644 --- 
a/services/web/server/tests/unit/with_dbs/03/resource_usage/conftest.py +++ b/services/web/server/tests/unit/with_dbs/03/resource_usage/conftest.py @@ -8,10 +8,10 @@ import pytest from faker import Faker from models_library.api_schemas_resource_usage_tracker.pricing_plans import ( - PricingPlanGet, - PricingPlanPage, PricingPlanToServiceGet, - PricingUnitGet, + RutPricingPlanGet, + RutPricingPlanPage, + RutPricingUnitGet, ) from pytest_mock import MockerFixture from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict @@ -45,13 +45,15 @@ def mock_rpc_resource_usage_tracker_service_api( ) -> dict[str, MagicMock]: return { ## Pricing plans - "list_pricing_plans": mocker.patch( - "simcore_service_webserver.resource_usage._pricing_plans_admin_service.pricing_plans.list_pricing_plans", + "list_pricing_plans_without_pricing_units": mocker.patch( + "simcore_service_webserver.resource_usage._pricing_plans_admin_service.pricing_plans.list_pricing_plans_without_pricing_units", autospec=True, - return_value=PricingPlanPage( + return_value=RutPricingPlanPage( items=[ - PricingPlanGet.model_validate( - PricingPlanGet.model_config["json_schema_extra"]["examples"][0], + RutPricingPlanGet.model_validate( + RutPricingPlanGet.model_config["json_schema_extra"]["examples"][ + 0 + ], ) ], total=1, @@ -60,44 +62,44 @@ def mock_rpc_resource_usage_tracker_service_api( "get_pricing_plan": mocker.patch( "simcore_service_webserver.resource_usage._pricing_plans_admin_service.pricing_plans.get_pricing_plan", autospec=True, - return_value=PricingPlanGet.model_validate( - PricingPlanGet.model_config["json_schema_extra"]["examples"][0], + return_value=RutPricingPlanGet.model_validate( + RutPricingPlanGet.model_config["json_schema_extra"]["examples"][0], ), ), "create_pricing_plan": mocker.patch( "simcore_service_webserver.resource_usage._pricing_plans_admin_service.pricing_plans.create_pricing_plan", autospec=True, - return_value=PricingPlanGet.model_validate( - PricingPlanGet.model_config["json_schema_extra"]["examples"][0], + return_value=RutPricingPlanGet.model_validate( + RutPricingPlanGet.model_config["json_schema_extra"]["examples"][0], ), ), "update_pricing_plan": mocker.patch( "simcore_service_webserver.resource_usage._pricing_plans_admin_service.pricing_plans.update_pricing_plan", autospec=True, - return_value=PricingPlanGet.model_validate( - PricingPlanGet.model_config["json_schema_extra"]["examples"][0], + return_value=RutPricingPlanGet.model_validate( + RutPricingPlanGet.model_config["json_schema_extra"]["examples"][0], ), ), ## Pricing units "get_pricing_unit": mocker.patch( "simcore_service_webserver.resource_usage._pricing_plans_admin_service.pricing_units.get_pricing_unit", autospec=True, - return_value=PricingUnitGet.model_validate( - PricingUnitGet.model_config["json_schema_extra"]["examples"][0], + return_value=RutPricingUnitGet.model_validate( + RutPricingUnitGet.model_config["json_schema_extra"]["examples"][0], ), ), "create_pricing_unit": mocker.patch( "simcore_service_webserver.resource_usage._pricing_plans_admin_service.pricing_units.create_pricing_unit", autospec=True, - return_value=PricingUnitGet.model_validate( - PricingUnitGet.model_config["json_schema_extra"]["examples"][0], + return_value=RutPricingUnitGet.model_validate( + RutPricingUnitGet.model_config["json_schema_extra"]["examples"][0], ), ), "update_pricing_unit": mocker.patch( "simcore_service_webserver.resource_usage._pricing_plans_admin_service.pricing_units.update_pricing_unit", autospec=True, - 
return_value=PricingUnitGet.model_validate( - PricingUnitGet.model_config["json_schema_extra"]["examples"][0], + return_value=RutPricingUnitGet.model_validate( + RutPricingUnitGet.model_config["json_schema_extra"]["examples"][0], ), ), ## Pricing plan to service diff --git a/services/web/server/tests/unit/with_dbs/03/resource_usage/test_admin_pricing_plans.py b/services/web/server/tests/unit/with_dbs/03/resource_usage/test_admin_pricing_plans.py index cfb2b06a789..35f6255a5e1 100644 --- a/services/web/server/tests/unit/with_dbs/03/resource_usage/test_admin_pricing_plans.py +++ b/services/web/server/tests/unit/with_dbs/03/resource_usage/test_admin_pricing_plans.py @@ -23,7 +23,7 @@ def mock_catalog_client(mocker: MockerFixture, faker: Faker) -> dict[str, MagicMock]: return { "get_service": mocker.patch( - "simcore_service_webserver.resource_usage._pricing_plans_admin_service.catalog_client.get_service", + "simcore_service_webserver.resource_usage._pricing_plans_admin_service.catalog_service.get_service", autospec=True, ) } diff --git a/services/web/server/tests/unit/with_dbs/03/resource_usage/test_pricing_plans.py b/services/web/server/tests/unit/with_dbs/03/resource_usage/test_pricing_plans.py index bbe5fa9f951..046bf286d48 100644 --- a/services/web/server/tests/unit/with_dbs/03/resource_usage/test_pricing_plans.py +++ b/services/web/server/tests/unit/with_dbs/03/resource_usage/test_pricing_plans.py @@ -11,8 +11,8 @@ import pytest from aiohttp.test_utils import TestClient from models_library.api_schemas_resource_usage_tracker.pricing_plans import ( - PricingPlanGet, - PricingUnitGet, + RutPricingPlanGet, + RutPricingUnitGet, ) from models_library.api_schemas_webserver import resource_usage as webserver_api from models_library.utils.fastapi_encoders import jsonable_encoder @@ -32,12 +32,12 @@ def mock_rut_api_responses( assert client.app settings: ResourceUsageTrackerSettings = get_plugin_settings(client.app) - pricing_unit_get = PricingUnitGet.model_validate( - PricingUnitGet.model_config["json_schema_extra"]["examples"][0] + pricing_unit_get = RutPricingUnitGet.model_validate( + RutPricingUnitGet.model_config["json_schema_extra"]["examples"][0] ) - service_pricing_plan_get = PricingPlanGet.model_validate( - PricingPlanGet.model_config["json_schema_extra"]["examples"][0], + service_pricing_plan_get = RutPricingPlanGet.model_validate( + RutPricingPlanGet.model_config["json_schema_extra"]["examples"][0], ) aioresponses_mocker.get( diff --git a/services/web/server/tests/unit/with_dbs/03/tags/test_tags.py b/services/web/server/tests/unit/with_dbs/03/tags/test_tags.py index 0f70c98856c..9669f1eea90 100644 --- a/services/web/server/tests/unit/with_dbs/03/tags/test_tags.py +++ b/services/web/server/tests/unit/with_dbs/03/tags/test_tags.py @@ -30,7 +30,7 @@ from simcore_postgres_database.models.tags import tags from simcore_service_webserver.db.models import UserRole from simcore_service_webserver.db.plugin import get_database_engine -from simcore_service_webserver.products._api import get_product +from simcore_service_webserver.products._service import get_product from simcore_service_webserver.projects.models import ProjectDict diff --git a/services/web/server/tests/unit/with_dbs/03/test__openapi_specs.py b/services/web/server/tests/unit/with_dbs/03/test__openapi_specs.py index 1128f9a707a..9c6ad78f4a8 100644 --- a/services/web/server/tests/unit/with_dbs/03/test__openapi_specs.py +++ b/services/web/server/tests/unit/with_dbs/03/test__openapi_specs.py @@ -13,6 +13,7 @@ from 
pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict from pytest_simcore.helpers.typing_env import EnvVarsDict from pytest_simcore.openapi_specs import Entrypoint +from simcore_service_webserver._meta import API_VTAG from simcore_service_webserver.application import create_application from simcore_service_webserver.application_settings import get_application_settings from simcore_service_webserver.rest._utils import get_openapi_specs_path @@ -41,8 +42,6 @@ def app_environment( "WEBSERVER_GARBAGE_COLLECTOR": "null", # enable plugins that by default are disabled "WEBSERVER_DEV_FEATURES_ENABLED": "1", - "WEBSERVER_VERSION_CONTROL": "1", - "WEBSERVER_META_MODELING": "1", "WEBSERVER_CLUSTERS": "1", # enables activity WEBSERVER_ACTIVITY "PROMETHEUS_URL": f"https://{faker.domain_name()}", @@ -77,7 +76,16 @@ def test_app_named_resources_against_openapi_specs( openapi_specs_entrypoints: set[Entrypoint], app_rest_entrypoints: set[Entrypoint], ): - assert app_rest_entrypoints == openapi_specs_entrypoints + # Exclude tasks-legacy routes: these should not be exposed. + # This test compares directly against the openapi specs; in the future it would be + # cleaner to compare against the FastAPI app entrypoints in the specs and + # avoid including these endpoints there at all. + required_entry_points = { + e + for e in app_rest_entrypoints + if not e.path.startswith(f"/{API_VTAG}/tasks-legacy") + } + assert required_entry_points == openapi_specs_entrypoints # NOTE: missing here is: # - input schemas (path, query and body) diff --git a/services/web/server/tests/unit/with_dbs/03/test_email.py b/services/web/server/tests/unit/with_dbs/03/test_email.py index c208162d318..244f090ab40 100644 --- a/services/web/server/tests/unit/with_dbs/03/test_email.py +++ b/services/web/server/tests/unit/with_dbs/03/test_email.py @@ -44,7 +44,6 @@ def app_environment(app_environment: EnvVarsDict, monkeypatch: pytest.MonkeyPatc "WEBSERVER_EXPORTER": "null", "WEBSERVER_GARBAGE_COLLECTOR": "null", "WEBSERVER_GROUPS": "1", - "WEBSERVER_META_MODELING": "0", "WEBSERVER_PRODUCTS": "1", "WEBSERVER_PUBLICATIONS": "0", "WEBSERVER_REMOTE_DEBUG": "0", @@ -52,7 +51,6 @@ def app_environment(app_environment: EnvVarsDict, monkeypatch: pytest.MonkeyPatc "WEBSERVER_STUDIES_DISPATCHER": "null", "WEBSERVER_TAGS": "1", "WEBSERVER_TRACING": "null", - "WEBSERVER_VERSION_CONTROL": "0", "WEBSERVER_WALLETS": "0", }, ) diff --git a/services/web/server/tests/unit/with_dbs/03/test_project_db.py b/services/web/server/tests/unit/with_dbs/03/test_project_db.py index d62cac76a51..4aa9791d226 100644 --- a/services/web/server/tests/unit/with_dbs/03/test_project_db.py +++ b/services/web/server/tests/unit/with_dbs/03/test_project_db.py @@ -29,19 +29,26 @@ from simcore_postgres_database.models.projects_to_products import projects_to_products from simcore_postgres_database.models.users import UserRole from simcore_postgres_database.utils_projects_nodes import ProjectNodesRepo -from simcore_service_webserver.projects._db_utils import PermissionStr -from simcore_service_webserver.projects._groups_db import update_or_insert_project_group +from simcore_service_webserver.projects._groups_repository import ( + update_or_insert_project_group, +) +from simcore_service_webserver.projects._projects_repository_legacy import ( + ProjectAccessRights, + ProjectDBAPI, +) +from simcore_service_webserver.projects._projects_repository_legacy_utils import ( + PermissionStr, +) +from simcore_service_webserver.projects._projects_service import ( + _check_project_node_has_all_required_inputs,
+) from simcore_service_webserver.projects.api import has_user_project_access_rights -from simcore_service_webserver.projects.db import ProjectAccessRights, ProjectDBAPI from simcore_service_webserver.projects.exceptions import ( NodeNotFoundError, ProjectNodeRequiredInputsNotSetError, ProjectNotFoundError, ) from simcore_service_webserver.projects.models import ProjectDict -from simcore_service_webserver.projects.projects_service import ( - _check_project_node_has_all_required_inputs, -) from simcore_service_webserver.users.exceptions import UserNotFoundError from simcore_service_webserver.utils import to_datetime from sqlalchemy.engine.result import Row @@ -350,7 +357,7 @@ async def test_insert_project_to_db( @pytest.mark.parametrize( "user_role", - [(UserRole.USER)], + [UserRole.USER], ) async def test_patch_user_project_workbench_raises_if_project_does_not_exist( fake_project: dict[str, Any], @@ -376,7 +383,7 @@ async def test_patch_user_project_workbench_raises_if_project_does_not_exist( @pytest.mark.parametrize( "user_role", - [(UserRole.USER)], + [UserRole.USER], ) async def test_patch_user_project_workbench_creates_nodes( fake_project: dict[str, Any], @@ -420,7 +427,7 @@ async def test_patch_user_project_workbench_creates_nodes( @pytest.mark.parametrize( "user_role", - [(UserRole.USER)], + [UserRole.USER], ) async def test_patch_user_project_workbench_creates_nodes_raises_if_invalid_node_is_passed( fake_project: dict[str, Any], @@ -457,7 +464,7 @@ async def test_patch_user_project_workbench_creates_nodes_raises_if_invalid_node @pytest.mark.parametrize( "user_role", - [(UserRole.USER)], + [UserRole.USER], ) @pytest.mark.parametrize("number_of_nodes", [1, randint(250, 300)]) # noqa: S311 async def test_patch_user_project_workbench_concurrently( @@ -514,18 +521,18 @@ async def test_patch_user_project_workbench_concurrently( for n in range(_NUMBER_OF_NODES): expected_project["workbench"][node_uuids[n]].update(randomly_created_outputs[n]) - patched_projects: list[ - tuple[dict[str, Any], dict[str, Any]] - ] = await asyncio.gather( - *[ - db_api._update_project_workbench( # noqa: SLF001 - {NodeIDStr(node_uuids[n]): randomly_created_outputs[n]}, - user_id=logged_user["id"], - project_uuid=new_project["uuid"], - allow_workbench_changes=False, - ) - for n in range(_NUMBER_OF_NODES) - ] + patched_projects: list[tuple[dict[str, Any], dict[str, Any]]] = ( + await asyncio.gather( + *[ + db_api._update_project_workbench( # noqa: SLF001 + {NodeIDStr(node_uuids[n]): randomly_created_outputs[n]}, + user_id=logged_user["id"], + project_uuid=new_project["uuid"], + allow_workbench_changes=False, + ) + for n in range(_NUMBER_OF_NODES) + ] + ) ) # NOTE: each returned project contains the project with some updated workbenches # the ordering is uncontrolled. 
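For context on the NOTE above: asyncio.gather runs the patched update coroutines concurrently, so the order in which the database applies them is uncontrolled, but the returned list is always ordered positionally, i.e. results[i] corresponds to the i-th awaitable passed in. A minimal, runnable sketch of that guarantee (illustrative only, not part of this patch; _update is a hypothetical stand-in for db_api._update_project_workbench):

import asyncio
import random


async def _update(n: int) -> int:
    # hypothetical stand-in for db_api._update_project_workbench
    await asyncio.sleep(random.random() / 100)  # coroutines interleave arbitrarily
    return n


async def _demo() -> None:
    # gather() preserves the order of the awaitables in its result list,
    # even though their completion order is random
    results = await asyncio.gather(*[_update(n) for n in range(5)])
    assert results == [0, 1, 2, 3, 4]


asyncio.run(_demo())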
@@ -743,9 +750,9 @@ async def test_replace_user_project( }, "output_2": 5, } - node_data[ - "runHash" - ] = "5b0583fa546ac82f0e41cef9705175b7187ce3928ba42892e842add912c16676" + node_data["runHash"] = ( + "5b0583fa546ac82f0e41cef9705175b7187ce3928ba42892e842add912c16676" + ) # replacing with the new entries shall return the very same data replaced_project = await db_api.replace_project( working_project, @@ -917,7 +924,7 @@ async def inserted_project( ), ], ) -@pytest.mark.parametrize("user_role", [(UserRole.USER)]) +@pytest.mark.parametrize("user_role", [UserRole.USER]) async def test_check_project_node_has_all_required_inputs_raises( client: TestClient, logged_user: dict[str, Any], @@ -948,7 +955,7 @@ async def test_check_project_node_has_all_required_inputs_raises( ), ], ) -@pytest.mark.parametrize("user_role", [(UserRole.USER)]) +@pytest.mark.parametrize("user_role", [UserRole.USER]) async def test_check_project_node_has_all_required_inputs_ok( client: TestClient, logged_user: dict[str, Any], diff --git a/services/web/server/tests/unit/with_dbs/03/test_session_access_policies.py b/services/web/server/tests/unit/with_dbs/03/test_session_access_policies.py index dd79e1a9b6c..88970409a8e 100644 --- a/services/web/server/tests/unit/with_dbs/03/test_session_access_policies.py +++ b/services/web/server/tests/unit/with_dbs/03/test_session_access_policies.py @@ -13,8 +13,8 @@ from aiohttp.test_utils import TestClient from pytest_simcore.helpers.typing_env import EnvVarsDict from servicelib.aiohttp import status -from simcore_service_webserver._constants import APP_SETTINGS_KEY from simcore_service_webserver.application_settings import ApplicationSettings +from simcore_service_webserver.constants import APP_SETTINGS_KEY from simcore_service_webserver.login._constants import ( MAX_2FA_CODE_RESEND, MAX_2FA_CODE_TRIALS, diff --git a/services/web/server/tests/unit/with_dbs/03/test_socketio.py b/services/web/server/tests/unit/with_dbs/03/test_socketio.py index 5b63d8f3c84..deca5e69c4e 100644 --- a/services/web/server/tests/unit/with_dbs/03/test_socketio.py +++ b/services/web/server/tests/unit/with_dbs/03/test_socketio.py @@ -27,7 +27,6 @@ def app_environment(app_environment: EnvVarsDict, monkeypatch: pytest.MonkeyPatc "WEBSERVER_EXPORTER": "null", "WEBSERVER_GARBAGE_COLLECTOR": "null", "WEBSERVER_GROUPS": "0", - "WEBSERVER_META_MODELING": "0", "WEBSERVER_NOTIFICATIONS": "0", "WEBSERVER_PROJECTS": "null", "WEBSERVER_PUBLICATIONS": "0", @@ -41,7 +40,6 @@ def app_environment(app_environment: EnvVarsDict, monkeypatch: pytest.MonkeyPatc "WEBSERVER_CATALOG": "null", "WEBSERVER_REDIS": "null", "WEBSERVER_SCICRUNCH": "null", - "WEBSERVER_VERSION_CONTROL": "0", "WEBSERVER_WALLETS": "0", }, ) diff --git a/services/web/server/tests/unit/with_dbs/03/test_users.py b/services/web/server/tests/unit/with_dbs/03/test_users.py index 6b0ba408cc0..c4008f75235 100644 --- a/services/web/server/tests/unit/with_dbs/03/test_users.py +++ b/services/web/server/tests/unit/with_dbs/03/test_users.py @@ -35,7 +35,11 @@ random_pre_registration_details, ) from pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict, setenvs_from_dict -from pytest_simcore.helpers.webserver_login import NewUser, UserInfoDict +from pytest_simcore.helpers.webserver_login import ( + NewUser, + UserInfoDict, + switch_client_session_to, +) from servicelib.aiohttp import status from servicelib.rest_constants import RESPONSE_MODEL_POLICY from simcore_service_webserver.users._common.schemas import ( @@ -62,15 +66,36 @@ def app_environment( @pytest.fixture 
-async def private_user(client: TestClient) -> AsyncIterable[UserInfoDict]: +def partial_first_name() -> str: + return "Jaimito" + + +@pytest.fixture +def partial_username() -> str: + return "COMMON_USERNAME" + + +@pytest.fixture +def partial_email() -> str: + return "@acme.com" + + +@pytest.fixture +async def private_user( + client: TestClient, + partial_username: str, + partial_email: str, + partial_first_name: str, +) -> AsyncIterable[UserInfoDict]: assert client.app async with NewUser( app=client.app, user_data={ - "name": "jamie01", - "first_name": "James", + "name": f"james{partial_username}", + "first_name": partial_first_name, "last_name": "Bond", - "email": "james@find.me", + "email": f"james{partial_email}", + "privacy_hide_username": True, "privacy_hide_email": True, "privacy_hide_fullname": True, }, @@ -79,15 +104,18 @@ async def private_user(client: TestClient) -> AsyncIterable[UserInfoDict]: @pytest.fixture -async def semi_private_user(client: TestClient) -> AsyncIterable[UserInfoDict]: +async def semi_private_user( + client: TestClient, partial_username: str, partial_first_name: str +) -> AsyncIterable[UserInfoDict]: assert client.app async with NewUser( app=client.app, user_data={ - "name": "maxwell", - "first_name": "James", + "name": f"maxwell{partial_username}", + "first_name": partial_first_name, "last_name": "Maxwell", "email": "j@maxwell.me", + "privacy_hide_username": False, "privacy_hide_email": True, "privacy_hide_fullname": False, # <-- }, @@ -96,15 +124,18 @@ async def semi_private_user(client: TestClient) -> AsyncIterable[UserInfoDict]: @pytest.fixture -async def public_user(client: TestClient) -> AsyncIterable[UserInfoDict]: +async def public_user( + client: TestClient, partial_username: str, partial_email: str +) -> AsyncIterable[UserInfoDict]: assert client.app async with NewUser( app=client.app, user_data={ - "name": "taylie01", + "name": f"taylor{partial_username}", "first_name": "Taylor", "last_name": "Swift", - "email": "taylor@find.me", + "email": f"taylor{partial_email}", + "privacy_hide_username": False, "privacy_hide_email": False, "privacy_hide_fullname": False, }, @@ -112,44 +143,56 @@ async def public_user(client: TestClient) -> AsyncIterable[UserInfoDict]: yield usr -@pytest.mark.acceptance_test( - "https://github.com/ITISFoundation/osparc-issues/issues/1779" -) @pytest.mark.parametrize("user_role", [UserRole.USER]) -async def test_search_users( +async def test_search_users_by_partial_fullname( + user_role: UserRole, logged_user: UserInfoDict, client: TestClient, - user_role: UserRole, - public_user: UserInfoDict, - semi_private_user: UserInfoDict, + partial_first_name: str, private_user: UserInfoDict, + semi_private_user: UserInfoDict, + public_user: UserInfoDict, ): assert client.app assert user_role.value == logged_user["role"] + # logged_user has default settings assert private_user["id"] != logged_user["id"] assert public_user["id"] != logged_user["id"] # SEARCH by partial first_name - partial_name = "james" - assert partial_name in private_user.get("first_name", "").lower() - assert partial_name in semi_private_user.get("first_name", "").lower() + assert partial_first_name in private_user.get("first_name", "") + assert partial_first_name in semi_private_user.get("first_name", "") + assert partial_first_name not in public_user.get("first_name", "") url = client.app.router["search_users"].url_for() - resp = await client.post(f"{url}", json={"match": partial_name}) + resp = await client.post(f"{url}", json={"match": partial_first_name}) data, _ 
= await assert_status(resp, status.HTTP_200_OK) + # expected `semi_private_user` found found = TypeAdapter(list[UserGet]).validate_python(data) assert found assert len(found) == 1 - assert semi_private_user["name"] == found[0].user_name + assert found[0].user_name == semi_private_user["name"] assert found[0].first_name == semi_private_user.get("first_name") assert found[0].last_name == semi_private_user.get("last_name") assert found[0].email is None + +@pytest.mark.parametrize("user_role", [UserRole.USER]) +async def test_search_users_by_partial_email( + user_role: UserRole, + logged_user: UserInfoDict, + client: TestClient, + partial_email: str, + public_user: UserInfoDict, + semi_private_user: UserInfoDict, + private_user: UserInfoDict, +): + # SEARCH by partial email - partial_email = "@find.m" assert partial_email in private_user["email"] + assert partial_email not in semi_private_user["email"] assert partial_email in public_user["email"] url = client.app.router["search_users"].url_for() @@ -159,15 +202,39 @@ async def test_search_users( found = TypeAdapter(list[UserGet]).validate_python(data) assert found assert len(found) == 1 + + # expected `public_user` found assert found[0].user_id == public_user["id"] assert found[0].user_name == public_user["name"] assert found[0].email == public_user["email"] assert found[0].first_name == public_user.get("first_name") assert found[0].last_name == public_user.get("last_name") + # SEARCH user for admin (from a USER) + url = ( + client.app.router["search_users_for_admin"] + .url_for() + .with_query(email=partial_email) + ) + resp = await client.get(f"{url}") + await assert_status(resp, status.HTTP_403_FORBIDDEN) + + +@pytest.mark.parametrize("user_role", [UserRole.USER]) +async def test_search_users_by_partial_username( + user_role: UserRole, + logged_user: UserInfoDict, + client: TestClient, + partial_username: str, + public_user: UserInfoDict, + semi_private_user: UserInfoDict, + private_user: UserInfoDict, +): + assert client.app + # SEARCH by partial username - partial_username = "ie01" assert partial_username in private_user["name"] + assert partial_username in semi_private_user["name"] assert partial_username in public_user["name"] url = client.app.router["search_users"].url_for() @@ -178,24 +245,45 @@ async def test_search_users( assert found assert len(found) == 2 + # expected `public_user` found index = [u.user_id for u in found].index(public_user["id"]) assert found[index].user_name == public_user["name"] + assert found[index].email == public_user["email"] + assert found[index].first_name == public_user.get("first_name") + assert found[index].last_name == public_user.get("last_name") - # check privacy + # expected `semi_private_user` found index = (index + 1) % 2 - assert found[index].user_name == private_user["name"] + assert found[index].user_name == semi_private_user["name"] assert found[index].email is None - assert found[index].first_name is None - assert found[index].last_name is None + assert found[index].first_name == semi_private_user.get("first_name") + assert found[index].last_name == semi_private_user.get("last_name") - # SEARCH user for admin (from a USER) - url = ( - client.app.router["search_users_for_admin"] - .url_for() - .with_query(email=partial_email) - ) - resp = await client.get(f"{url}") - await assert_status(resp, status.HTTP_403_FORBIDDEN) + +async def test_search_myself( + client: TestClient, + public_user: UserInfoDict, + semi_private_user: UserInfoDict, + private_user: UserInfoDict, +): + assert client.app + 
for user in [public_user, semi_private_user, private_user]: + async with switch_client_session_to(client, user): + + # search me + url = client.app.router["search_users"].url_for() + resp = await client.post(f"{url}", json={"match": user["name"]}) + data, _ = await assert_status(resp, status.HTTP_200_OK) + + found = TypeAdapter(list[UserGet]).validate_python(data) + assert found + assert len(found) == 1 + + # I can see my own data + assert found[0].user_name == user["name"] + assert found[0].email == user["email"] + assert found[0].first_name == user.get("first_name") + assert found[0].last_name == user.get("last_name") @pytest.mark.acceptance_test( @@ -203,9 +291,9 @@ async def test_search_users( ) @pytest.mark.parametrize("user_role", [UserRole.USER]) async def test_get_user_by_group_id( + user_role: UserRole, logged_user: UserInfoDict, client: TestClient, - user_role: UserRole, public_user: UserInfoDict, private_user: UserInfoDict, ): @@ -215,7 +303,7 @@ async def test_get_user_by_group_id( assert private_user["id"] != logged_user["id"] assert public_user["id"] != logged_user["id"] - # GET user by primary GID + # GET public_user by its primary gid url = client.app.router["get_all_group_users"].url_for( gid=f"{public_user['primary_gid']}" ) @@ -229,6 +317,7 @@ async def test_get_user_by_group_id( assert users[0].first_name == public_user.get("first_name") assert users[0].last_name == public_user.get("last_name") + # GET private_user by its primary gid url = client.app.router["get_all_group_users"].url_for( gid=f"{private_user['primary_gid']}" ) @@ -238,9 +327,9 @@ async def test_get_user_by_group_id( users = TypeAdapter(list[GroupUserGet]).validate_python(data) assert len(users) == 1 assert users[0].id == private_user["id"] - assert users[0].user_name == private_user["name"] - assert users[0].first_name is None - assert users[0].last_name is None + assert users[0].user_name is None, "It's private" + assert users[0].first_name is None, "It's private" + assert users[0].last_name is None, "It's private" @pytest.mark.parametrize( @@ -274,9 +363,9 @@ async def test_access_rights_on_get_profile( ], ) async def test_access_update_profile( + user_role: UserRole, logged_user: UserInfoDict, client: TestClient, - user_role: UserRole, expected: HTTPStatus, ): assert client.app @@ -290,9 +379,9 @@ async def test_access_update_profile( @pytest.mark.parametrize("user_role", [UserRole.USER]) async def test_get_profile( + user_role: UserRole, logged_user: UserInfoDict, client: TestClient, - user_role: UserRole, primary_group: dict[str, Any], standard_groups: list[dict[str, Any]], all_group: dict[str, str], @@ -338,9 +427,9 @@ async def test_get_profile( @pytest.mark.parametrize("user_role", [UserRole.USER]) async def test_update_profile( + user_role: UserRole, logged_user: UserInfoDict, client: TestClient, - user_role: UserRole, ): assert client.app @@ -379,9 +468,9 @@ def _copy(data: dict, exclude: set) -> dict: @pytest.mark.parametrize("user_role", [UserRole.USER]) async def test_profile_workflow( + user_role: UserRole, logged_user: UserInfoDict, client: TestClient, - user_role: UserRole, ): assert client.app @@ -414,6 +503,7 @@ async def test_profile_workflow( assert updated_profile.user_name == "odei123" assert updated_profile.privacy != my_profile.privacy + assert updated_profile.privacy.hide_username == my_profile.privacy.hide_username assert updated_profile.privacy.hide_email == my_profile.privacy.hide_email assert updated_profile.privacy.hide_fullname != my_profile.privacy.hide_fullname @@ -421,9 
+511,9 @@ async def test_profile_workflow( @pytest.mark.parametrize("user_role", [UserRole.USER]) @pytest.mark.parametrize("invalid_username", ["", "_foo", "superadmin", "foo..-123"]) async def test_update_wrong_user_name( + user_role: UserRole, logged_user: UserInfoDict, client: TestClient, - user_role: UserRole, invalid_username: str, ): assert client.app @@ -440,10 +530,10 @@ async def test_update_wrong_user_name( @pytest.mark.parametrize("user_role", [UserRole.USER]) async def test_update_existing_user_name( + user_role: UserRole, user: UserInfoDict, logged_user: UserInfoDict, client: TestClient, - user_role: UserRole, ): assert client.app @@ -699,7 +789,7 @@ def test_preuserprofile_parse_model_from_request_form_data( def test_preuserprofile_parse_model_without_extras( - account_request_form: dict[str, Any] + account_request_form: dict[str, Any], ): required = { f.alias or f_name diff --git a/services/web/server/tests/unit/with_dbs/03/test_users__preferences_models.py b/services/web/server/tests/unit/with_dbs/03/test_users__preferences_models.py index bdfe1af8d81..0a33d8f8921 100644 --- a/services/web/server/tests/unit/with_dbs/03/test_users__preferences_models.py +++ b/services/web/server/tests/unit/with_dbs/03/test_users__preferences_models.py @@ -14,8 +14,8 @@ PreferenceName, ) from pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict, setenvs_from_dict -from simcore_service_webserver._constants import APP_SETTINGS_KEY from simcore_service_webserver.application_settings import ApplicationSettings +from simcore_service_webserver.constants import APP_SETTINGS_KEY from simcore_service_webserver.users._preferences_models import ( ALL_FRONTEND_PREFERENCES, TelemetryLowDiskSpaceWarningThresholdFrontendUserPreference, diff --git a/services/web/server/tests/unit/with_dbs/03/trash/conftest.py b/services/web/server/tests/unit/with_dbs/03/trash/conftest.py new file mode 100644 index 00000000000..5c742b12144 --- /dev/null +++ b/services/web/server/tests/unit/with_dbs/03/trash/conftest.py @@ -0,0 +1,102 @@ +# pylint: disable=protected-access +# pylint: disable=redefined-outer-name +# pylint: disable=too-many-arguments +# pylint: disable=too-many-statements +# pylint: disable=unused-argument +# pylint: disable=unused-variable + + +import logging +from collections.abc import AsyncIterable, Callable +from pathlib import Path + +import pytest +from aiohttp import web +from aiohttp.test_utils import TestClient +from aioresponses import aioresponses +from models_library.products import ProductName +from pytest_mock import MockerFixture +from pytest_simcore.helpers.logging_tools import log_context +from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict +from pytest_simcore.helpers.typing_env import EnvVarsDict +from pytest_simcore.helpers.webserver_login import NewUser, UserInfoDict +from pytest_simcore.helpers.webserver_parametrizations import MockedStorageSubsystem +from pytest_simcore.helpers.webserver_projects import NewProject +from simcore_service_webserver.projects.models import ProjectDict + +_logger = logging.getLogger(__name__) + + +@pytest.fixture +def app_environment( + app_environment: EnvVarsDict, monkeypatch: pytest.MonkeyPatch +) -> EnvVarsDict: + return app_environment | setenvs_from_dict( + monkeypatch, {"WEBSERVER_DEV_FEATURES_ENABLED": "1"} + ) + + +@pytest.fixture +async def other_user( + client: TestClient, logged_user: UserInfoDict +) -> AsyncIterable[UserInfoDict]: + # new user different from logged_user + async with NewUser( + { + "name": 
f"other_user_than_{logged_user['name']}", + "role": "USER", + }, + client.app, + ) as user: + yield user + + +@pytest.fixture +async def other_user_project( + client: TestClient, + fake_project: ProjectDict, + other_user: UserInfoDict, + tests_data_dir: Path, + osparc_product_name: ProductName, +) -> AsyncIterable[ProjectDict]: + async with NewProject( + fake_project, + client.app, + user_id=other_user["id"], + product_name=osparc_product_name, + tests_data_dir=tests_data_dir, + ) as project: + yield project + + +@pytest.fixture +def mocked_catalog( + user_project: ProjectDict, + catalog_subsystem_mock: Callable[[list[ProjectDict]], None], +): + catalog_subsystem_mock([user_project]) + + +@pytest.fixture +def mocked_director_v2(director_v2_service_mock: aioresponses): + ... + + +@pytest.fixture +def mocked_storage(storage_subsystem_mock: MockedStorageSubsystem): + ... + + +@pytest.fixture +def with_disabled_background_task_to_prune_trash(mocker: MockerFixture) -> None: + async def _empty_lifespan(app: web.Application): + with log_context( + logging.INFO, "Fake background_task_to_prune_trash event", logger=_logger + ): + yield + + mocker.patch( + "simcore_service_webserver.garbage_collector._tasks_trash.create_background_task_to_prune_trash", + autospec=True, + return_value=_empty_lifespan, + ) diff --git a/services/web/server/tests/unit/with_dbs/03/test_trash.py b/services/web/server/tests/unit/with_dbs/03/trash/test_trash.py similarity index 78% rename from services/web/server/tests/unit/with_dbs/03/test_trash.py rename to services/web/server/tests/unit/with_dbs/03/trash/test_trash.py index eb598bbdb1d..396dfcc7c97 100644 --- a/services/web/server/tests/unit/with_dbs/03/test_trash.py +++ b/services/web/server/tests/unit/with_dbs/03/trash/test_trash.py @@ -7,57 +7,33 @@ import asyncio -from collections.abc import AsyncIterable, Callable +from collections.abc import AsyncIterable from unittest.mock import MagicMock from uuid import UUID import arrow import pytest from aiohttp.test_utils import TestClient -from aioresponses import aioresponses from models_library.api_schemas_webserver.folders_v2 import FolderGet from models_library.api_schemas_webserver.projects import ProjectGet, ProjectListItem from models_library.api_schemas_webserver.workspaces import WorkspaceGet from models_library.rest_pagination import Page from pytest_mock import MockerFixture from pytest_simcore.helpers.assert_checks import assert_status -from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict -from pytest_simcore.helpers.typing_env import EnvVarsDict -from pytest_simcore.helpers.webserver_login import NewUser, UserInfoDict +from pytest_simcore.helpers.webserver_login import UserInfoDict from servicelib.aiohttp import status from simcore_service_webserver.db.models import UserRole -from simcore_service_webserver.projects._groups_api import ProjectGroupGet +from simcore_service_webserver.projects._groups_service import ProjectGroupGet from simcore_service_webserver.projects.models import ProjectDict +from tenacity import AsyncRetrying, stop_after_attempt, wait_fixed from yarl import URL -@pytest.fixture -def app_environment( - app_environment: EnvVarsDict, monkeypatch: pytest.MonkeyPatch -) -> EnvVarsDict: - return app_environment | setenvs_from_dict( - monkeypatch, {"WEBSERVER_DEV_FEATURES_ENABLED": "1"} - ) - - @pytest.fixture def user_role() -> UserRole: return UserRole.USER -@pytest.fixture -def mocked_catalog( - user_project: ProjectDict, - catalog_subsystem_mock: Callable[[list[ProjectDict]], 
None], -): - catalog_subsystem_mock([user_project]) - - -@pytest.fixture -def mocked_director_v2(director_v2_service_mock: aioresponses): - ... - - @pytest.mark.acceptance_test( "For https://github.com/ITISFoundation/osparc-simcore/pull/6579" ) @@ -77,20 +53,20 @@ async def test_trash_projects( # noqa: PLR0915 # this test should emulate NO errors stopping services mock_remove_dynamic_services = mocker.patch( - "simcore_service_webserver.projects._trash_service.projects_service.remove_project_dynamic_services", + "simcore_service_webserver.projects._trash_service._projects_service_delete._projects_service.remove_project_dynamic_services", autospec=True, ) mock_stop_pipeline = mocker.patch( - "simcore_service_webserver.projects._trash_service.director_v2_api.stop_pipeline", + "simcore_service_webserver.projects._trash_service._projects_service_delete.director_v2_service.stop_pipeline", autospec=True, ) mocker.patch( - "simcore_service_webserver.projects._trash_service.director_v2_api.is_pipeline_running", + "simcore_service_webserver.projects._trash_service.director_v2_service.is_pipeline_running", return_value=is_project_running, autospec=True, ) mocker.patch( - "simcore_service_webserver.projects._trash_service.dynamic_scheduler_api.list_dynamic_services", + "simcore_service_webserver.projects._trash_service.dynamic_scheduler_service.list_dynamic_services", return_value=[mocker.MagicMock()] if is_project_running else [], autospec=True, ) @@ -183,21 +159,6 @@ async def test_trash_projects( # noqa: PLR0915 mock_remove_dynamic_services.assert_awaited() -@pytest.fixture -async def other_user( - client: TestClient, logged_user: UserInfoDict -) -> AsyncIterable[UserInfoDict]: - # new user different from logged_user - async with NewUser( - { - "name": f"other_user_than_{logged_user['name']}", - "role": "USER", - }, - client.app, - ) as user: - yield user - - async def test_trash_projects_shared_among_users( client: TestClient, logged_user: UserInfoDict, @@ -377,6 +338,7 @@ async def test_trash_folder_with_content( resp = await client.post("/v0/folders", json={"name": "My first folder"}) data, _ = await assert_status(resp, status.HTTP_201_CREATED) folder = FolderGet.model_validate(data) + assert folder.trashed_at is None # CREATE a SUB-folder resp = await client.post( @@ -420,11 +382,12 @@ async def test_trash_folder_with_content( resp = await client.post(f"/v0/folders/{folder.folder_id}:trash") await assert_status(resp, status.HTTP_204_NO_CONTENT) - # ONLY folder listed in trash. The rest is not listed anymore! + # ONLY the folder is listed in trash. The rest are not listed anymore since they are implicitly trashed!
resp = await client.get("/v0/folders", params={"filters": '{"trashed": true}'}) await assert_status(resp, status.HTTP_200_OK) page = Page[FolderGet].model_validate(await resp.json()) assert page.meta.total == 1 + assert page.data[0].trashed_at is not None assert page.data[0].folder_id == folder.folder_id resp = await client.get( @@ -789,3 +752,175 @@ async def test_trash_project_in_subfolder( page = Page[ProjectGet].model_validate(await resp.json()) assert page.meta.total == 1 assert page.data[0].uuid == project_uuid + + +async def test_trash_project_explicitly_and_empty_trash_bin( + client: TestClient, + logged_user: UserInfoDict, + user_project: ProjectDict, + mocked_catalog: None, + mocked_director_v2: None, + mocked_dynamic_services_interface: dict[str, MagicMock], + mocked_storage: None, +): + assert client.app + + project_uuid = UUID(user_project["uuid"]) + + # TRASH project + trashing_at = arrow.utcnow().datetime + resp = await client.post( + f"/v0/projects/{project_uuid}:trash", params={"force": "true"} + ) + await assert_status(resp, status.HTTP_204_NO_CONTENT) + + # LIST trashed projects + resp = await client.get("/v0/projects", params={"filters": '{"trashed": true}'}) + await assert_status(resp, status.HTTP_200_OK) + + page = Page[ProjectListItem].model_validate(await resp.json()) + assert page.meta.total == 1 + assert page.data[0].uuid == project_uuid + + # GET trashed project + resp = await client.get(f"/v0/projects/{project_uuid}") + data, _ = await assert_status(resp, status.HTTP_200_OK) + got = ProjectGet.model_validate(data) + assert got.uuid == project_uuid + assert got.trashed_at is not None + assert trashing_at < got.trashed_at < arrow.utcnow().datetime + + # force EMPTY trash + resp = await client.post("/v0/trash:empty") + await assert_status(resp, status.HTTP_204_NO_CONTENT) + + # waits for deletion + async for attempt in AsyncRetrying( + stop=stop_after_attempt(3), wait=wait_fixed(1), reraise=True + ): + with attempt: + # LIST trashed projects again + resp = await client.get( + "/v0/projects", params={"filters": '{"trashed": true}'} + ) + await assert_status(resp, status.HTTP_200_OK) + page = Page[ProjectListItem].model_validate(await resp.json()) + assert page.meta.total == 0 + + # GET trashed project + resp = await client.get(f"/v0/projects/{project_uuid}") + await assert_status(resp, status.HTTP_404_NOT_FOUND) + + +async def test_trash_folder_with_subfolder_and_project_and_empty_bin( + client: TestClient, + logged_user: UserInfoDict, + user_project: ProjectDict, + mocked_catalog: None, + mocked_director_v2: None, + mocked_dynamic_services_interface: dict[str, MagicMock], + mocked_storage: None, +): + assert client.app + + # CREATE a folder + resp = await client.post("/v0/folders", json={"name": "Parent Folder"}) + data, _ = await assert_status(resp, status.HTTP_201_CREATED) + parent_folder = FolderGet.model_validate(data) + + # CREATE a subfolder + resp = await client.post( + "/v0/folders", + json={"name": "Sub Folder", "parentFolderId": parent_folder.folder_id}, + ) + data, _ = await assert_status(resp, status.HTTP_201_CREATED) + sub_folder = FolderGet.model_validate(data) + + # MOVE project to subfolder + project_uuid = UUID(user_project["uuid"]) + resp = await client.put( + f"/v0/projects/{project_uuid}/folders/{sub_folder.folder_id}" + ) + await assert_status(resp, status.HTTP_204_NO_CONTENT) + + # TRASH the parent folder + resp = await client.post(f"/v0/folders/{parent_folder.folder_id}:trash") + await assert_status(resp, status.HTTP_204_NO_CONTENT) + + # CHECK BIN + # - LIST trashed folders as shown in the bin (will show only explicitly trashed items) + resp = await client.get( + "/v0/folders:search", params={"filters": '{"trashed": true}'} + ) + await assert_status(resp, status.HTTP_200_OK) + page = Page[FolderGet].model_validate(await resp.json()) + assert page.meta.total == 1 + assert page.data[0].folder_id == parent_folder.folder_id + + # - LIST trashed projects (will show only explicitly trashed items!) + resp = await client.get( + "/v0/projects:search", params={"filters": '{"trashed": true}'} + ) + await assert_status(resp, status.HTTP_200_OK) + page = Page[ProjectListItem].model_validate(await resp.json()) + assert page.meta.total == 0 + + # CHECK items + # - GET trashed parent folder (explicit) + resp = await client.get(f"/v0/folders/{parent_folder.folder_id}") + data, _ = await assert_status(resp, status.HTTP_200_OK) + got = FolderGet.model_validate(data) + assert got.trashed_at is not None + assert got.trashed_by == logged_user["primary_gid"] + + # - GET trashed subfolder (implicit) + resp = await client.get(f"/v0/folders/{sub_folder.folder_id}") + data, _ = await assert_status(resp, status.HTTP_200_OK) + got = FolderGet.model_validate(data) + assert got.trashed_at is not None + assert got.trashed_by == logged_user["primary_gid"] + + # - GET trashed project (implicit) + resp = await client.get(f"/v0/projects/{project_uuid}") + data, _ = await assert_status(resp, status.HTTP_200_OK) + got = ProjectGet.model_validate(data) + assert got.trashed_at is not None + assert got.trashed_by == logged_user["primary_gid"] + + # EMPTY trash + resp = await client.post("/v0/trash:empty") + await assert_status(resp, status.HTTP_204_NO_CONTENT) + + # waits for deletion + async for attempt in AsyncRetrying( + stop=stop_after_attempt(3), wait=wait_fixed(1), reraise=True + ): + with attempt: + # GET trashed parent folder + resp = await client.get(f"/v0/folders/{parent_folder.folder_id}") + await assert_status(resp, status.HTTP_403_FORBIDDEN) + + # GET trashed subfolder + resp = await client.get(f"/v0/folders/{sub_folder.folder_id}") + await assert_status(resp, status.HTTP_403_FORBIDDEN) + + # GET trashed project + resp = await client.get(f"/v0/projects/{project_uuid}") + await assert_status(resp, status.HTTP_404_NOT_FOUND) + + # CHECK BIN + # - LIST trashed folders (will show only explicitly trashed items) + resp = await client.get( + "/v0/folders:search", params={"filters": '{"trashed": true}'} + ) + await assert_status(resp, status.HTTP_200_OK) + page = Page[FolderGet].model_validate(await resp.json()) + assert page.meta.total == 0 + + # - LIST trashed projects (will show only explicitly trashed items!)
+ resp = await client.get( + "/v0/projects:search", params={"filters": '{"trashed": true}'} + ) + await assert_status(resp, status.HTTP_200_OK) + page = Page[ProjectListItem].model_validate(await resp.json()) + assert page.meta.total == 0 diff --git a/services/web/server/tests/unit/with_dbs/03/trash/test_trash_service.py b/services/web/server/tests/unit/with_dbs/03/trash/test_trash_service.py new file mode 100644 index 00000000000..a58f32f6e3d --- /dev/null +++ b/services/web/server/tests/unit/with_dbs/03/trash/test_trash_service.py @@ -0,0 +1,193 @@ +# pylint: disable=protected-access +# pylint: disable=redefined-outer-name +# pylint: disable=too-many-arguments +# pylint: disable=too-many-statements +# pylint: disable=unused-argument +# pylint: disable=unused-variable + + +from unittest.mock import MagicMock + +import pytest +from aiohttp.test_utils import TestClient +from models_library.api_schemas_webserver.projects import ProjectGet +from pytest_simcore.helpers.assert_checks import assert_status +from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict +from pytest_simcore.helpers.typing_env import EnvVarsDict +from pytest_simcore.helpers.webserver_login import ( + UserInfoDict, + switch_client_session_to, +) +from servicelib.aiohttp import status +from simcore_service_webserver.db.models import UserRole +from simcore_service_webserver.projects import _trash_service +from simcore_service_webserver.projects.models import ProjectDict +from simcore_service_webserver.trash import trash_service + + +@pytest.fixture +def app_environment( + app_environment: EnvVarsDict, + monkeypatch: pytest.MonkeyPatch, + with_disabled_background_task_to_prune_trash: None, +) -> EnvVarsDict: + return app_environment | setenvs_from_dict( + monkeypatch, + { + "TRASH_RETENTION_DAYS": "0", + "WEBSERVER_GARBAGE_COLLECTOR": "null", + }, + ) + + +@pytest.fixture +def user_role() -> UserRole: + return UserRole.USER + + +async def test_trash_service__delete_expired_trash( + client: TestClient, + logged_user: UserInfoDict, + user_project: ProjectDict, + other_user: UserInfoDict, + other_user_project: ProjectDict, + mocked_catalog: None, + mocked_director_v2: None, + mocked_dynamic_services_interface: dict[str, MagicMock], +): + assert client.app + assert logged_user["id"] != other_user["id"] + + # TRASH projects + # logged_user trashes his project + user_project_id = user_project["uuid"] + await _trash_service.trash_project( + client.app, + product_name="osparc", + user_id=logged_user["id"], + project_id=user_project_id, + force_stop_first=True, + explicit=True, + ) + + # other_user trashes his project + other_user_project_id = other_user_project["uuid"] + await _trash_service.trash_project( + client.app, + product_name="osparc", + user_id=other_user["id"], + project_id=other_user_project_id, + force_stop_first=True, + explicit=True, + ) + + resp = await client.get(f"/v0/projects/{user_project_id}") + data, _ = await assert_status(resp, status.HTTP_200_OK) + assert ProjectGet.model_validate(data).trashed_by == logged_user["primary_gid"] + + # UNDER TEST: Run delete_expired_trash + await trash_service.safe_delete_expired_trash_as_admin(client.app) + + # ASSERT: logged_user tries to get the project and expects 404 + resp = await client.get(f"/v0/projects/{user_project_id}") + await assert_status(resp, status.HTTP_404_NOT_FOUND) + + # ASSERT: other_user tries to get the project and expects 404 + async with switch_client_session_to(client, other_user): + resp = await 
client.get(f"/v0/projects/{other_user_project_id}") + await assert_status(resp, status.HTTP_404_NOT_FOUND) + + +async def test_trash_nested_folders_and_projects( + client: TestClient, + logged_user: UserInfoDict, + user_project: ProjectDict, + other_user: UserInfoDict, + other_user_project: ProjectDict, + mocked_catalog: None, + mocked_director_v2: None, + mocked_dynamic_services_interface: dict[str, MagicMock], +): + assert client.app + assert logged_user["id"] != other_user["id"] + + async with switch_client_session_to(client, logged_user): + # CREATE folders hierarchy for logged_user + resp = await client.post("/v0/folders", json={"name": "Root Folder"}) + data, _ = await assert_status(resp, status.HTTP_201_CREATED) + logged_user_root_folder = data + + resp = await client.post( + "/v0/folders", + json={ + "name": "Sub Folder", + "parentFolderId": logged_user_root_folder["folderId"], + }, + ) + data, _ = await assert_status(resp, status.HTTP_201_CREATED) + logged_user_sub_folder = data + + # MOVE project to subfolder + resp = await client.put( + f"/v0/projects/{user_project['uuid']}/folders/{logged_user_sub_folder['folderId']}" + ) + await assert_status(resp, status.HTTP_204_NO_CONTENT) + + # TRASH root folders + resp = await client.post( + f"/v0/folders/{logged_user_root_folder['folderId']}:trash" + ) + await assert_status(resp, status.HTTP_204_NO_CONTENT) + + async with switch_client_session_to(client, other_user): + # CREATE folders hierarchy for other_user + resp = await client.post("/v0/folders", json={"name": "Root Folder"}) + data, _ = await assert_status(resp, status.HTTP_201_CREATED) + other_user_root_folder = data + + resp = await client.post( + "/v0/folders", + json={ + "name": "Sub Folder (other)", + "parentFolderId": other_user_root_folder["folderId"], + }, + ) + data, _ = await assert_status(resp, status.HTTP_201_CREATED) + other_user_sub_folder = data + + # MOVE project to subfolder + resp = await client.put( + f"/v0/projects/{other_user_project['uuid']}/folders/{other_user_sub_folder['folderId']}" + ) + await assert_status(resp, status.HTTP_204_NO_CONTENT) + + # TRASH root folders + resp = await client.post( + f"/v0/folders/{other_user_root_folder['folderId']}:trash" + ) + await assert_status(resp, status.HTTP_204_NO_CONTENT) + + # UNDER TEST + await trash_service.safe_delete_expired_trash_as_admin(client.app) + + async with switch_client_session_to(client, logged_user): + # Verify logged_user's resources are gone + resp = await client.get(f"/v0/folders/{logged_user_root_folder['folderId']}") + await assert_status(resp, status.HTTP_403_FORBIDDEN) + + resp = await client.get(f"/v0/folders/{logged_user_sub_folder['folderId']}") + await assert_status(resp, status.HTTP_403_FORBIDDEN) + + resp = await client.get(f"/v0/projects/{user_project['uuid']}") + await assert_status(resp, status.HTTP_404_NOT_FOUND) + + # Verify other_user's resources are gone + async with switch_client_session_to(client, other_user): + resp = await client.get(f"/v0/folders/{other_user_root_folder['folderId']}") + await assert_status(resp, status.HTTP_403_FORBIDDEN) + + resp = await client.get(f"/v0/folders/{other_user_sub_folder['folderId']}") + await assert_status(resp, status.HTTP_403_FORBIDDEN) + + resp = await client.get(f"/v0/projects/{other_user_project['uuid']}") + await assert_status(resp, status.HTTP_404_NOT_FOUND) diff --git a/services/web/server/tests/unit/with_dbs/03/version_control/conftest.py b/services/web/server/tests/unit/with_dbs/03/version_control/conftest.py deleted file mode 100644 
index 4b6504bdfd3..00000000000 --- a/services/web/server/tests/unit/with_dbs/03/version_control/conftest.py +++ /dev/null @@ -1,246 +0,0 @@ -# pylint: disable=redefined-outer-name -# pylint: disable=unused-argument -# pylint: disable=unused-variable - -from collections.abc import AsyncIterator, Awaitable, Callable -from pathlib import Path -from unittest import mock -from uuid import UUID - -import aiohttp -import pytest -from aiohttp.test_utils import TestClient -from common_library.dict_tools import remap_keys -from faker import Faker -from models_library.projects import ProjectID -from models_library.projects_nodes import Node -from models_library.services_resources import ServiceResourcesDict -from models_library.users import UserID -from models_library.utils.fastapi_encoders import jsonable_encoder -from pytest_mock import MockerFixture -from pytest_simcore.helpers.faker_factories import random_project -from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict -from pytest_simcore.helpers.typing_env import EnvVarsDict -from pytest_simcore.helpers.webserver_login import UserInfoDict -from pytest_simcore.helpers.webserver_projects import NewProject -from servicelib.aiohttp import status -from simcore_postgres_database.models.projects_version_control import ( - projects_vc_repos, - projects_vc_snapshots, -) -from simcore_service_webserver._meta import API_VTAG as VX -from simcore_service_webserver.db.models import UserRole -from simcore_service_webserver.db.plugin import APP_AIOPG_ENGINE_KEY -from simcore_service_webserver.projects.db import ProjectDBAPI -from simcore_service_webserver.projects.models import ProjectDict -from tenacity.asyncio import AsyncRetrying -from tenacity.stop import stop_after_delay - - -@pytest.fixture -def user_role() -> UserRole: - return UserRole.USER - - -@pytest.fixture -def fake_project(faker: Faker) -> ProjectDict: - # API model project data - suffix = faker.word() - return random_project( - name=f"{__file__}-project", - workbench={ - faker.uuid4(): { - "key": f"simcore/services/comp/test_{__name__}_{suffix}", - "version": "1.2.3", - "label": f"test_{__name__}_{suffix}", - "inputs": {"x": faker.pyint(), "y": faker.pyint()}, - } - }, - ) - - -@pytest.fixture -def catalog_subsystem_mock_override( - catalog_subsystem_mock: Callable[[list[ProjectDict]], None], - fake_project: ProjectDict, -) -> None: - catalog_subsystem_mock([fake_project]) - - -@pytest.fixture -def app_environment( - catalog_subsystem_mock_override: None, - monkeypatch: pytest.MonkeyPatch, - app_environment: EnvVarsDict, -) -> EnvVarsDict: - - return app_environment | setenvs_from_dict( - monkeypatch, - { - # exclude - "WEBSERVER_ACTIVITY": "null", - "WEBSERVER_CLUSTERS": "null", - "WEBSERVER_COMPUTATION": "null", - "WEBSERVER_DIAGNOSTICS": "null", - "WEBSERVER_GARBAGE_COLLECTOR": "null", - "WEBSERVER_GROUPS": "0", - "WEBSERVER_PUBLICATIONS": "0", - "WEBSERVER_SOCKETIO": "0", - "WEBSERVER_STUDIES_DISPATCHER": "null", - "WEBSERVER_TAGS": "0", - "WEBSERVER_TRACING": "null", - # Module under test - "WEBSERVER_DEV_FEATURES_ENABLED": "1", - "WEBSERVER_VERSION_CONTROL": "1", - }, - ) - - -@pytest.fixture -async def user_id(logged_user: UserInfoDict) -> UserID: - return logged_user["id"] - - -@pytest.fixture() -def project_uuid(user_project: ProjectDict) -> ProjectID: - return UUID(user_project["uuid"]) - - -@pytest.fixture -async def user_project( - client: TestClient, - fake_project: ProjectDict, - user_id: int, - tests_data_dir: Path, - osparc_product_name: str, -) -> 
AsyncIterator[ProjectDict]: - # pylint: disable=no-value-for-parameter - - async with NewProject( - fake_project, - client.app, - user_id=user_id, - tests_data_dir=tests_data_dir, - product_name=osparc_product_name, - ) as project: - yield project - - # cleanup repos - assert client.app - engine = client.app[APP_AIOPG_ENGINE_KEY] - async with engine.acquire() as conn: - # cascade deletes everything except projects_vc_snapshot - await conn.execute(projects_vc_repos.delete()) - await conn.execute(projects_vc_snapshots.delete()) - - -@pytest.fixture -def request_update_project( - logged_user: UserInfoDict, - faker: Faker, - mocker: MockerFixture, -) -> Callable[[TestClient, UUID], Awaitable]: - mocker.patch( - "simcore_service_webserver.projects._nodes_handlers.projects_service.is_service_deprecated", - autospec=True, - return_value=False, - ) - mocker.patch( - "simcore_service_webserver.projects._nodes_handlers.projects_service.catalog_client.get_service_resources", - autospec=True, - return_value=ServiceResourcesDict(), - ) - mocker.patch( - "simcore_service_webserver.dynamic_scheduler.api.list_dynamic_services", - return_value=[], - ) - - async def _go(client: TestClient, project_uuid: UUID) -> None: - resp: aiohttp.ClientResponse = await client.get(f"{VX}/projects/{project_uuid}") - - assert resp.status == 200 - body = await resp.json() - assert body - - project = body["data"] - - # remove all the nodes first - assert client.app - for node_id in project.get("workbench", {}): - delete_node_url = client.app.router["delete_node"].url_for( - project_id=f"{project_uuid}", node_id=node_id - ) - response = await client.delete(f"{delete_node_url}") - assert response.status == status.HTTP_204_NO_CONTENT - - # add a node - node_id = faker.uuid4() - node = Node.model_validate( - { - "key": f"simcore/services/comp/test_{__name__}", - "version": "1.0.0", - "label": f"test_{__name__}", - "inputs": {"x": faker.pyint(), "y": faker.pyint()}, - } - ) - - create_node_url = client.app.router["create_node"].url_for( - project_id=f"{project_uuid}" - ) - response = await client.post( - f"{create_node_url}", - json={ - "service_key": node.key, - "service_version": node.version, - "service_id": f"{node_id}", - }, - ) - assert response.status == status.HTTP_201_CREATED - project["workbench"] = {node_id: jsonable_encoder(node)} - - db: ProjectDBAPI = ProjectDBAPI.get_from_app_context(client.app) - project_db = remap_keys(project, rename={"trashedAt": "trashed"}) - project_db.pop("state") - - await db.replace_project( - project_db, - logged_user["id"], - project_uuid=project_db["uuid"], - product_name="osparc", - ) - - return _go - - -@pytest.fixture -async def request_delete_project( - logged_user: UserInfoDict, - mocker: MockerFixture, -) -> AsyncIterator[Callable[[TestClient, UUID], Awaitable]]: - director_v2_api_delete_pipeline: mock.AsyncMock = mocker.patch( - "simcore_service_webserver.projects.projects_service.director_v2_api.delete_pipeline", - autospec=True, - ) - dynamic_scheduler_api_stop_dynamic_services_in_project: mock.AsyncMock = mocker.patch( - "simcore_service_webserver.projects.projects_service.dynamic_scheduler_api.stop_dynamic_services_in_project", - autospec=True, - ) - fire_and_forget_call_to_storage: mock.Mock = mocker.patch( - "simcore_service_webserver.projects._crud_api_delete.delete_data_folders_of_project", - autospec=True, - ) - - async def _go(client: TestClient, project_uuid: UUID) -> None: - resp: aiohttp.ClientResponse = await client.delete( - f"{VX}/projects/{project_uuid}" - ) - 
assert resp.status == 204 - - yield _go - - # ensure the call to delete data was completed - async for attempt in AsyncRetrying(reraise=True, stop=stop_after_delay(20)): - with attempt: - director_v2_api_delete_pipeline.assert_called() - dynamic_scheduler_api_stop_dynamic_services_in_project.assert_awaited() - fire_and_forget_call_to_storage.assert_called() diff --git a/services/web/server/tests/unit/with_dbs/03/version_control/test_version_control.py b/services/web/server/tests/unit/with_dbs/03/version_control/test_version_control.py deleted file mode 100644 index ae95f95f9f9..00000000000 --- a/services/web/server/tests/unit/with_dbs/03/version_control/test_version_control.py +++ /dev/null @@ -1,29 +0,0 @@ -# pylint: disable=redefined-outer-name -# pylint: disable=unused-argument -# pylint: disable=unused-variable - - -from models_library.projects import NodesDict -from pydantic import ConfigDict, RootModel -from simcore_service_webserver.projects.models import ProjectDict -from simcore_service_webserver.version_control.db import compute_workbench_checksum - - -class WorkbenchModel(RootModel[NodesDict]): - model_config = ConfigDict(populate_by_name=True) - - -def test_compute_workbench_checksum(fake_project: ProjectDict): - - # as a dict - sha1_w_dict = compute_workbench_checksum(fake_project["workbench"]) - - workbench = WorkbenchModel.model_validate(fake_project["workbench"]) - - # with pydantic models, i.e. Nodes - # - # e.g. order after parse maps order in BaseModel but not in dict - # - sha1_w_model = compute_workbench_checksum(workbench.root) - - assert sha1_w_model == sha1_w_dict diff --git a/services/web/server/tests/unit/with_dbs/03/version_control/test_version_control_core.py b/services/web/server/tests/unit/with_dbs/03/version_control/test_version_control_core.py deleted file mode 100644 index 9ae6a29c127..00000000000 --- a/services/web/server/tests/unit/with_dbs/03/version_control/test_version_control_core.py +++ /dev/null @@ -1,104 +0,0 @@ -# pylint: disable=redefined-outer-name -# pylint: disable=unused-argument -# pylint: disable=unused-variable - -from collections.abc import Awaitable, Callable -from uuid import UUID - -import pytest -from aiohttp import web -from aiohttp.test_utils import TestClient, make_mocked_request -from simcore_service_webserver._constants import RQT_USERID_KEY -from simcore_service_webserver.projects import projects_service -from simcore_service_webserver.projects.models import ProjectDict -from simcore_service_webserver.version_control._core import ( - checkout_checkpoint, - create_checkpoint, - list_checkpoints, - update_checkpoint, -) -from simcore_service_webserver.version_control.db import HEAD, VersionControlRepository - - -@pytest.fixture -def aiohttp_mocked_request(client: TestClient, user_id: int) -> web.Request: - req = make_mocked_request("GET", "/", app=client.app) - req[RQT_USERID_KEY] = user_id - return req - - -@pytest.mark.acceptance_test() -async def test_workflow( - client: TestClient, - project_uuid: UUID, - user_id: int, - user_project: ProjectDict, - aiohttp_mocked_request: web.Request, - request_update_project: Callable[[TestClient, UUID], Awaitable], - mock_dynamic_scheduler: None, - director_v2_service_mock: None, -): - vc_repo = VersionControlRepository.create_from_request(aiohttp_mocked_request) - - # ------------------------------------- - checkpoint1 = await create_checkpoint( - vc_repo, project_uuid, tag="v0", message="first commit" - ) - - assert not checkpoint1.parents_ids - assert checkpoint1.tags == ("v0",) - assert 
checkpoint1.message == "first commit" - - # ------------------------------------- - await request_update_project(client, project_uuid) - - checkpoint2 = await create_checkpoint( - vc_repo, project_uuid, tag="v1", message="second commit" - ) - - assert checkpoint2.tags == ("v1",) - assert (checkpoint1.id,) == checkpoint2.parents_ids - assert checkpoint1.checksum != checkpoint2.checksum - - # ------------------------------------- - checkpoints, total_count = await list_checkpoints(vc_repo, project_uuid) - assert total_count == 2 - assert checkpoints == [checkpoint2, checkpoint1] - - # ------------------------------------- - checkpoint2_updated = await update_checkpoint( - vc_repo, project_uuid, HEAD, message="updated message" - ) - - assert checkpoint2_updated.model_dump( - exclude={"message"} - ) == checkpoint2.model_dump(exclude={"message"}) - - # ------------------------------------- - # checking out to v1 - checkpoint_co = await checkout_checkpoint(vc_repo, project_uuid, checkpoint1.id) - assert checkpoint1 == checkpoint_co - - project = await projects_service.get_project_for_user( - aiohttp_mocked_request.app, str(project_uuid), user_id - ) - assert project["workbench"] == user_project["workbench"] - - # ------------------------------------- - # creating branches - await request_update_project(client, project_uuid) - - checkpoint3 = await create_checkpoint( - vc_repo, - project_uuid, - tag="v1.1", - message="second commit", # new_branch="v1.*" - ) - - checkpoints, total_count = await list_checkpoints(vc_repo, project_uuid) - assert total_count == 3 - assert checkpoints == [checkpoint3, checkpoint2_updated, checkpoint1] - - assert checkpoint3.parents_ids == checkpoint2.parents_ids - assert checkpoint2.parents_ids == (checkpoint1.id,) - # This is detached! 
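Aside, on the trash-retention behavior exercised by the new trash tests above: the app_environment fixture sets TRASH_RETENTION_DAYS=0, so a trashed item becomes eligible for pruning the moment it is trashed, and trash_service.safe_delete_expired_trash_as_admin can delete it on its first run. Below is a minimal sketch of that expiry rule, assuming expiry is simply the trashed timestamp plus the retention period; is_trash_expired is a hypothetical helper for illustration, not the webserver's actual API:

from datetime import datetime, timedelta, timezone


def is_trash_expired(
    trashed_at: datetime, retention_days: int, now: datetime | None = None
) -> bool:
    # NOTE: illustrative sketch only. An item becomes prunable once
    # `retention_days` have elapsed since it was trashed; with
    # TRASH_RETENTION_DAYS=0 anything trashed is expired immediately.
    now = now or datetime.now(timezone.utc)
    return trashed_at + timedelta(days=retention_days) <= now


# With zero retention, an item trashed "now" is already expired:
assert is_trash_expired(datetime.now(timezone.utc), retention_days=0)

With zero retention nothing has to age out, which is why the AsyncRetrying loops in the tests above only wait for the asynchronous deletion itself to complete.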
diff --git a/services/web/server/tests/unit/with_dbs/03/version_control/test_version_control_handlers.py b/services/web/server/tests/unit/with_dbs/03/version_control/test_version_control_handlers.py deleted file mode 100644 index df0d767a9e9..00000000000 --- a/services/web/server/tests/unit/with_dbs/03/version_control/test_version_control_handlers.py +++ /dev/null @@ -1,262 +0,0 @@ -# pylint: disable=redefined-outer-name -# pylint: disable=unused-argument -# pylint: disable=unused-variable -# pylint: disable=too-many-arguments - - -from collections.abc import Awaitable, Callable -from http import HTTPStatus -from uuid import UUID - -import aiohttp -import pytest -from aiohttp.test_utils import TestClient -from models_library.api_schemas_webserver.projects import ProjectGet -from models_library.projects import ProjectID -from models_library.rest_pagination import Page -from models_library.users import UserID -from pydantic.main import BaseModel -from pytest_simcore.helpers.assert_checks import assert_status -from servicelib.aiohttp import status -from simcore_service_webserver._meta import API_VTAG as VX -from simcore_service_webserver.projects.models import ProjectDict -from simcore_service_webserver.version_control.models import ( - CheckpointApiModel, - RepoApiModel, -) - - -async def _assert_resp_page( - resp: aiohttp.ClientResponse, - expected_page_cls: type[Page], - expected_total: int, - expected_count: int, -): - assert resp.status == status.HTTP_200_OK, f"Got {await resp.text()}" - body = await resp.json() - - page = expected_page_cls.model_validate(body) - assert page.meta.total == expected_total - assert page.meta.count == expected_count - return page - - -async def _assert_status_and_body( - resp, expected_cls: HTTPStatus, expected_model: type[BaseModel] -) -> BaseModel: - data, _ = await assert_status(resp, expected_cls) - return expected_model.model_validate(data) - - -@pytest.mark.acceptance_test() -async def test_workflow( - client: TestClient, - user_project: ProjectDict, - request_update_project: Callable[[TestClient, UUID], Awaitable], - mock_dynamic_scheduler: None, - director_v2_service_mock: None, -): - # pylint: disable=too-many-statements - - project_uuid = user_project["uuid"] - - # get existing project - resp = await client.get(f"/{VX}/projects/{project_uuid}") - data, _ = await assert_status(resp, status.HTTP_200_OK) - project = ProjectGet.model_validate(data) - assert project.uuid == UUID(project_uuid) - - # - # list repos i.e. 
versioned projects - resp = await client.get(f"/{VX}/repos/projects") - data, _ = await assert_status(resp, status.HTTP_200_OK) - - assert data == [] - - # - # CREATE a checkpoint - resp = await client.post( - f"/{VX}/repos/projects/{project_uuid}/checkpoints", - json={"tag": "v1", "message": "init"}, - ) - data, _ = await assert_status(resp, status.HTTP_201_CREATED) - - assert data - checkpoint1 = CheckpointApiModel.model_validate(data) # NOTE: this is NOT API model - - # - # this project now has a repo - resp = await client.get(f"/{VX}/repos/projects") - page = await _assert_resp_page( - resp, expected_page_cls=Page[ProjectDict], expected_total=1, expected_count=1 - ) - - repo = RepoApiModel.model_validate(page.data[0]) - assert repo.project_uuid == UUID(project_uuid) - - # GET checkpoint with HEAD - resp = await client.get(f"/{VX}/repos/projects/{project_uuid}/checkpoints/HEAD") - data, _ = await assert_status(resp, status.HTTP_200_OK) - assert CheckpointApiModel.model_validate(data) == checkpoint1 - - # TODO: GET checkpoint with tag - resp = await client.get(f"/{VX}/repos/projects/{project_uuid}/checkpoints/v1") - with pytest.raises(aiohttp.ClientResponseError) as excinfo: - resp.raise_for_status() - - assert CheckpointApiModel.model_validate(data) == checkpoint1 - - assert excinfo.value.status == status.HTTP_501_NOT_IMPLEMENTED - - # GET checkpoint with id - resp = await client.get( - f"/{VX}/repos/projects/{project_uuid}/checkpoints/{checkpoint1.id}" - ) - assert f"{resp.url}" == f"{checkpoint1.url}" - assert CheckpointApiModel.model_validate(data) == checkpoint1 - - # LIST checkpoints - resp = await client.get(f"/{VX}/repos/projects/{project_uuid}/checkpoints") - page = await _assert_resp_page( - resp, - expected_page_cls=Page[CheckpointApiModel], - expected_total=1, - expected_count=1, - ) - - assert CheckpointApiModel.model_validate(page.data[0]) == checkpoint1 - # UPDATE checkpoint annotations - resp = await client.patch( - f"/{VX}/repos/projects/{project_uuid}/checkpoints/{checkpoint1.id}", - json={"message": "updated message", "tag": "Version 1"}, - ) - data, _ = await assert_status(resp, status.HTTP_200_OK) - checkpoint1_updated = CheckpointApiModel.model_validate(data) - - assert checkpoint1.id == checkpoint1_updated.id - assert checkpoint1.checksum == checkpoint1_updated.checksum - assert checkpoint1_updated.tags == ("Version 1",) - assert checkpoint1_updated.message == "updated message" - - # GET view - resp = await client.get( - f"/{VX}/repos/projects/{project_uuid}/checkpoints/HEAD/workbench/view" - ) - data, _ = await assert_status(resp, status.HTTP_200_OK) - assert ( - data["workbench"] - == project.model_dump(exclude_none=True, exclude_unset=True)["workbench"] - ) - - # do some changes in project - await request_update_project(client, project.uuid) - - # CREATE new checkpoint - resp = await client.post( - f"/{VX}/repos/projects/{project_uuid}/checkpoints", - json={"tag": "v2", "message": "new commit"}, - ) - data, _ = await assert_status(resp, status.HTTP_201_CREATED) - checkpoint2 = CheckpointApiModel.model_validate(data) - assert checkpoint2.tags == ("v2",) - - # GET checkpoint with HEAD - resp = await client.get(f"/{VX}/repos/projects/{project_uuid}/checkpoints/HEAD") - data, _ = await assert_status(resp, status.HTTP_200_OK) - assert CheckpointApiModel.model_validate(data) == checkpoint2 - - # CHECKOUT - resp = await client.post( - f"/{VX}/repos/projects/{project_uuid}/checkpoints/{checkpoint1.id}:checkout" - ) - data, _ = await assert_status(resp, 
status.HTTP_200_OK) - assert CheckpointApiModel.model_validate(data) == checkpoint1_updated - - # GET checkpoint with HEAD - resp = await client.get(f"/{VX}/repos/projects/{project_uuid}/checkpoints/HEAD") - data, _ = await assert_status(resp, status.HTTP_200_OK) - assert CheckpointApiModel.model_validate(data) == checkpoint1_updated - - # get working copy - resp = await client.get(f"/{VX}/projects/{project_uuid}") - data, _ = await assert_status(resp, status.HTTP_200_OK) - project_wc = ProjectGet.model_validate(data) - assert project_wc.uuid == UUID(project_uuid) - assert project_wc != project - - -async def test_create_checkpoint_without_changes( - client: TestClient, project_uuid: UUID -): - # CREATE a checkpoint - resp = await client.post( - f"/{VX}/repos/projects/{project_uuid}/checkpoints", - json={"tag": "v1", "message": "first commit"}, - ) - data, _ = await assert_status(resp, status.HTTP_201_CREATED) - - assert data - checkpoint1 = CheckpointApiModel.model_validate(data) # NOTE: this is NOT API model - - # CREATE checkpoint WITHOUT changes - resp = await client.post( - f"/{VX}/repos/projects/{project_uuid}/checkpoints", - json={"tag": "v2", "message": "second commit"}, - ) - data, _ = await assert_status(resp, status.HTTP_201_CREATED) - - assert data - checkpoint2 = CheckpointApiModel.model_validate(data) # NOTE: this is NOT API model - - assert ( - checkpoint1 == checkpoint2 - ), "Consecutive create w/o changes shall not add a new checkpoint" - - -async def test_delete_project_and_repo( - client: TestClient, - user_id: UserID, - project_uuid: ProjectID, - request_delete_project: Callable[[TestClient, UUID], Awaitable], -): - - # CREATE a checkpoint - resp = await client.post( - f"/{VX}/repos/projects/{project_uuid}/checkpoints", - json={"tag": "v1", "message": "first commit"}, - ) - data, _ = await assert_status(resp, status.HTTP_201_CREATED) - - # LIST - resp = await client.get(f"/{VX}/repos/projects/{project_uuid}/checkpoints") - await _assert_resp_page( - resp, - expected_page_cls=Page[CheckpointApiModel], - expected_total=1, - expected_count=1, - ) - - # DELETE project -> projects_vc_* deletion follow - await request_delete_project(client, project_uuid) - - # TMP fix here waits ------------ - # FIXME: mark as deleted, still gets entrypoints!! 
- from simcore_service_webserver.projects import projects_service - - delete_task = projects_service.get_delete_project_task(project_uuid, user_id) - assert delete_task - await delete_task - # -------------------------------- - - # LIST empty - resp = await client.get(f"/{VX}/repos/projects/{project_uuid}/checkpoints") - await _assert_resp_page( - resp, - expected_page_cls=Page[CheckpointApiModel], - expected_total=0, - expected_count=0, - ) - - # GET HEAD - resp = await client.get(f"/{VX}/repos/projects/{project_uuid}/HEAD") - await assert_status(resp, status.HTTP_404_NOT_FOUND) diff --git a/services/web/server/tests/unit/with_dbs/03/version_control/test_version_control_tags.py b/services/web/server/tests/unit/with_dbs/03/version_control/test_version_control_tags.py deleted file mode 100644 index 1e77bba0383..00000000000 --- a/services/web/server/tests/unit/with_dbs/03/version_control/test_version_control_tags.py +++ /dev/null @@ -1,13 +0,0 @@ -from faker import Faker -from simcore_service_webserver.version_control.vc_tags import ( - compose_workcopy_project_tag_name, - parse_workcopy_project_tag_name, -) - - -def test_parse_and_compose_tag_names(faker: Faker): - - workcopy_project_id = faker.uuid4(cast_to=None) - - tag = compose_workcopy_project_tag_name(workcopy_project_id) - assert parse_workcopy_project_tag_name(tag) == workcopy_project_id diff --git a/services/web/server/tests/unit/with_dbs/04/folders/test_folders.py b/services/web/server/tests/unit/with_dbs/04/folders/test_folders.py index f4b2df540ae..1ba0859bab3 100644 --- a/services/web/server/tests/unit/with_dbs/04/folders/test_folders.py +++ b/services/web/server/tests/unit/with_dbs/04/folders/test_folders.py @@ -23,7 +23,7 @@ from servicelib.aiohttp import status from servicelib.aiohttp.application_keys import APP_FIRE_AND_FORGET_TASKS_KEY from simcore_service_webserver.db.models import UserRole -from simcore_service_webserver.projects._groups_db import ( +from simcore_service_webserver.projects._groups_repository import ( GroupID, update_or_insert_project_group, ) @@ -274,7 +274,7 @@ async def test_project_folder_movement_full_workflow( @pytest.fixture def mock_catalog_api_get_services_for_user_in_product(mocker: MockerFixture): mocker.patch( - "simcore_service_webserver.projects._crud_api_read.get_services_for_user_in_product", + "simcore_service_webserver.projects._crud_api_read.catalog_service.get_services_for_user_in_product", spec=True, return_value=[], ) @@ -387,11 +387,11 @@ def mock_storage_delete_data_folders(mocker: MockerFixture) -> mock.Mock: autospec=True, ) mocker.patch( - "simcore_service_webserver.projects.projects_service.remove_project_dynamic_services", + "simcore_service_webserver.projects._projects_service.remove_project_dynamic_services", autospec=True, ) mocker.patch( - "simcore_service_webserver.projects._crud_api_delete.api.delete_pipeline", + "simcore_service_webserver.projects._crud_api_delete.director_v2_service.delete_pipeline", autospec=True, ) return mocker.patch( diff --git a/services/web/server/tests/unit/with_dbs/04/folders/test_folders_repository.py b/services/web/server/tests/unit/with_dbs/04/folders/test_folders_repository.py new file mode 100644 index 00000000000..c1485b0a2af --- /dev/null +++ b/services/web/server/tests/unit/with_dbs/04/folders/test_folders_repository.py @@ -0,0 +1,82 @@ +# pylint: disable=redefined-outer-name +# pylint: disable=unused-argument +# pylint: disable=unused-variable +# pylint: disable=too-many-arguments + +from typing import Any + +import arrow +import 
pytest +from aiohttp.test_utils import TestClient +from common_library.users_enums import UserRole +from models_library.products import ProductName +from simcore_service_webserver.folders import _folders_repository + + +@pytest.fixture +def user_role(): + return UserRole.USER + + +@pytest.fixture +def product_name(): + return "osparc" + + +async def test_batch_get_trashed_by_primary_gid( + client: TestClient, + logged_user: dict[str, Any], + product_name: ProductName, +): + assert client.app + + # Create two folders + folder_1 = await _folders_repository.create( + client.app, + created_by_gid=logged_user["primary_gid"], + folder_name="Folder 1", + product_name=product_name, + parent_folder_id=None, + user_id=logged_user["id"], + workspace_id=None, + ) + folder_2 = await _folders_repository.create( + client.app, + created_by_gid=logged_user["primary_gid"], + folder_name="Folder 2", + product_name=product_name, + parent_folder_id=None, + user_id=logged_user["id"], + workspace_id=None, + ) + + # Update the trashed flag for folder_1 + await _folders_repository.update( + client.app, + folders_id_or_ids=folder_1.folder_id, + product_name=product_name, + trashed=arrow.now().datetime, + trashed_explicitly=True, + trashed_by=logged_user["id"], + ) + + # Test batch_get_trashed_by_primary_gid + trashed_by_primary_gid = await _folders_repository.batch_get_trashed_by_primary_gid( + client.app, + folders_ids=[folder_1.folder_id, folder_2.folder_id], + ) + assert trashed_by_primary_gid == [logged_user["primary_gid"], None] + + # flipped + trashed_by_primary_gid = await _folders_repository.batch_get_trashed_by_primary_gid( + client.app, + folders_ids=[folder_2.folder_id, folder_1.folder_id], + ) + assert trashed_by_primary_gid == [None, logged_user["primary_gid"]] + + # repeated + trashed_by_primary_gid = await _folders_repository.batch_get_trashed_by_primary_gid( + client.app, + folders_ids=[folder_1.folder_id] * 3, + ) + assert trashed_by_primary_gid == [logged_user["primary_gid"]] * 3 diff --git a/services/web/server/tests/unit/with_dbs/04/garbage_collector/test_resource_manager.py b/services/web/server/tests/unit/with_dbs/04/garbage_collector/test_resource_manager.py index 79969076f92..f5b37b61960 100644 --- a/services/web/server/tests/unit/with_dbs/04/garbage_collector/test_resource_manager.py +++ b/services/web/server/tests/unit/with_dbs/04/garbage_collector/test_resource_manager.py @@ -44,12 +44,11 @@ from simcore_service_webserver.login.plugin import setup_login from simcore_service_webserver.notifications.plugin import setup_notifications from simcore_service_webserver.products.plugin import setup_products -from simcore_service_webserver.projects.exceptions import ProjectNotFoundError -from simcore_service_webserver.projects.plugin import setup_projects -from simcore_service_webserver.projects.projects_service import ( +from simcore_service_webserver.projects._projects_service import ( remove_project_dynamic_services, submit_delete_project_task, ) +from simcore_service_webserver.projects.plugin import setup_projects from simcore_service_webserver.rabbitmq import setup_rabbitmq from simcore_service_webserver.resource_manager.plugin import setup_resource_manager from simcore_service_webserver.resource_manager.registry import ( @@ -106,7 +105,6 @@ def app_environment( monkeypatch: pytest.MonkeyPatch, app_environment: EnvVarsDict, ) -> EnvVarsDict: - # NOTE: undos some app_environment settings monkeypatch.delenv("WEBSERVER_GARBAGE_COLLECTOR", raising=False) 
app_environment.pop("WEBSERVER_GARBAGE_COLLECTOR", None) @@ -653,7 +651,7 @@ async def test_interactive_services_remain_after_websocket_reconnection_from_2_t async def mocked_notification_system(mocker): mocks = {} mocked_notification_system = mocker.patch( - "simcore_service_webserver.projects.projects_service.retrieve_and_notify_project_locked_state", + "simcore_service_webserver.projects._projects_service.retrieve_and_notify_project_locked_state", return_value=Future(), ) mocked_notification_system.return_value.set_result("") @@ -946,14 +944,13 @@ async def test_regression_removing_unexisting_user( app=client.app, simcore_user_agent=UNDEFINED_DEFAULT_SIMCORE_USER_AGENT_VALUE, ) - with pytest.raises(ProjectNotFoundError): - await remove_project_dynamic_services( - user_id=user_id, - project_uuid=empty_user_project["uuid"], - app=client.app, - user_name={"first_name": "my name is", "last_name": "pytest"}, - simcore_user_agent=UNDEFINED_DEFAULT_SIMCORE_USER_AGENT_VALUE, - ) + await remove_project_dynamic_services( + user_id=user_id, + project_uuid=empty_user_project["uuid"], + app=client.app, + user_name={"first_name": "my name is", "last_name": "pytest"}, + simcore_user_agent=UNDEFINED_DEFAULT_SIMCORE_USER_AGENT_VALUE, + ) # since the call to delete is happening as fire and forget task, let's wait until it is done async for attempt in AsyncRetrying(**_TENACITY_ASSERT_RETRY): with attempt: diff --git a/services/web/server/tests/unit/with_dbs/04/licenses/conftest.py b/services/web/server/tests/unit/with_dbs/04/licenses/conftest.py index 5971ed9f168..02a5346f296 100644 --- a/services/web/server/tests/unit/with_dbs/04/licenses/conftest.py +++ b/services/web/server/tests/unit/with_dbs/04/licenses/conftest.py @@ -6,6 +6,7 @@ import pytest from aiohttp.test_utils import TestClient from simcore_postgres_database.models.licensed_items import licensed_items +from simcore_postgres_database.models.licensed_resources import licensed_resources from simcore_postgres_database.models.resource_tracker_pricing_plans import ( resource_tracker_pricing_plans, ) @@ -25,15 +26,15 @@ async def pricing_plan_id( resource_tracker_pricing_plans.insert() .values( product_name=osparc_product_name, - display_name="ISolve Thermal", + display_name="VIP Model A", description="", - classification="TIER", + classification="LICENSE", is_active=True, - pricing_plan_key="isolve-thermal", + pricing_plan_key="vip-model-a", ) .returning(resource_tracker_pricing_plans.c.pricing_plan_id) ) - row = result.first() + row = result.one() assert row @@ -42,3 +43,17 @@ async def pricing_plan_id( async with transaction_context(get_asyncpg_engine(client.app)) as conn: await conn.execute(licensed_items.delete()) await conn.execute(resource_tracker_pricing_plans.delete()) + + +@pytest.fixture +async def ensure_empty_licensed_resources(client: TestClient): + async def _cleanup(): + assert client.app + async with transaction_context(get_asyncpg_engine(client.app)) as conn: + await conn.execute(licensed_resources.delete()) + + await _cleanup() + + yield + + await _cleanup() diff --git a/services/web/server/tests/unit/with_dbs/04/licenses/test_itis_vip_models.py b/services/web/server/tests/unit/with_dbs/04/licenses/test_itis_vip_models.py new file mode 100644 index 00000000000..c73feba511b --- /dev/null +++ b/services/web/server/tests/unit/with_dbs/04/licenses/test_itis_vip_models.py @@ -0,0 +1,81 @@ +# pylint: disable=protected-access +# pylint: disable=redefined-outer-name +# pylint: disable=too-many-arguments +# pylint: disable=unused-argument 
+# pylint: disable=unused-variable + + +import datetime +from typing import Any + +import pytest +from faker import Faker +from pydantic import ValidationError +from pytest_simcore.helpers.faker_factories import ( + random_itis_vip_available_download_item, +) +from simcore_service_webserver.licenses._itis_vip_models import ( + ItisVipData, + ItisVipResourceData, + _feature_descriptor_to_dict, +) + + +def test_pre_validator_feature_descriptor_to_dict(): + # Makes sure the regex used here, which is vulnerable to polynomial runtime due to backtracking, cannot lead to denial of service. + with pytest.raises(ValidationError) as err_info: + _feature_descriptor_to_dict("a" * 10000 + ": " + "b" * 10000) + assert err_info.value.errors()[0]["type"] == "string_too_long" + + +@pytest.mark.parametrize( + "features_str,expected", + [ + ( + # checks fix: the regex used to require at least one space after `:` + "{species:Mouse, functionality:Static, height:95 mm, date: 2012-01-01, name:Male OF1 Mouse, sex:Male, version:1.0, weight:35.5 g}", + { + "version": "1.0", + "weight": "35.5 g", + "species": "Mouse", + "functionality": "Static", + }, + ), + ( + # Checks spaces before `,` are removed + "{date: 2012-01-01, name: Male OF1 Mouse , sex:Male}", + { + "date": datetime.date(2012, 1, 1), + "name": "Male OF1 Mouse", + "sex": "Male", + }, + ), + ], +) +def test_validation_of_itis_vip_response_model( + faker: Faker, features_str: str, expected: dict[str, Any] +): + + available_download = random_itis_vip_available_download_item( + identifier=0, + fake=faker, + Features=features_str, + ) + + vip_data = ItisVipData.model_validate(available_download) + + # Checks how the features BeforeValidator parsed the raw features string + assert {k: vip_data.features[k] for k in expected} == expected + + # Dumped as in the source + assert vip_data.model_dump(by_alias=True)["Features"] == vip_data.features + + license_resource_data = ItisVipResourceData.model_validate( + { + "category_id": "SomeCategoryID", + "category_display": "This is a resource", + "source": vip_data, + } + ) + + assert license_resource_data.source.features == vip_data.features diff --git a/services/web/server/tests/unit/with_dbs/04/licenses/test_itis_vip_service.py b/services/web/server/tests/unit/with_dbs/04/licenses/test_itis_vip_service.py new file mode 100644 index 00000000000..5923a30f9e2 --- /dev/null +++ b/services/web/server/tests/unit/with_dbs/04/licenses/test_itis_vip_service.py @@ -0,0 +1,218 @@ +# pylint: disable=protected-access +# pylint: disable=redefined-outer-name +# pylint: disable=too-many-arguments +# pylint: disable=unused-argument +# pylint: disable=unused-variable + +from collections.abc import Iterator + +import pytest +import respx +from aiohttp.test_utils import TestClient +from faker import Faker +from httpx import AsyncClient +from models_library.licenses import LicensedResourceType +from pydantic import ValidationError +from pytest_mock import MockerFixture +from pytest_simcore.helpers.faker_factories import ( + random_itis_vip_available_download_item, +) +from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict +from pytest_simcore.helpers.typing_env import EnvVarsDict +from servicelib.aiohttp import status +from simcore_service_webserver.licenses import ( + _itis_vip_service, + _itis_vip_syncer_service, + _licensed_resources_service, +) +from simcore_service_webserver.licenses._itis_vip_models import ItisVipData +from simcore_service_webserver.licenses._itis_vip_service import _ItisVipApiResponse +from simcore_service_webserver.licenses._itis_vip_settings import ItisVipSettings +from simcore_service_webserver.licenses._licensed_resources_service import ( + RegistrationState, +) + + +@pytest.fixture(scope="session") +def fake_api_base_url() -> str: + return "https://testserver-itis-vip.xyz" + + +@pytest.fixture +def app_environment( + monkeypatch: pytest.MonkeyPatch, + app_environment: EnvVarsDict, + fake_api_base_url: str, + mocker: MockerFixture, +): + # prevents syncer setup + mocker.patch( + "simcore_service_webserver.licenses.plugin._itis_vip_syncer_service.setup_itis_vip_syncer", + autospec=True, + ) + + return app_environment | setenvs_from_dict( + monkeypatch, + { + "LICENSES_ITIS_VIP_API_URL": f"{fake_api_base_url}/PD_DirectDownload/getDownloadableItems/{{category}}", + # NOTE: ItisVipSettings will decode this with json.loads(). Use " and not ' for the JSON keys!! + "LICENSES_ITIS_VIP_CATEGORIES": '{"ComputationalPantom": "Phantoms", "HumanBodyRegion": "Humans (Regions)"}', + }, + ) + + +@pytest.fixture +def mock_itis_vip_downloadables_api( + faker: Faker, fake_api_base_url: str +) -> Iterator[respx.MockRouter]: + response_data = { + "msg": 0, + "availableDownloads": [ + random_itis_vip_available_download_item( + identifier=i, + features_functionality="Posable", + fake=faker, + ) + for i in range(8) + ], + } + + with respx.mock(base_url=fake_api_base_url) as mock: + mock.post(path__regex=r"/getDownloadableItems/(?P<category>\w+)").respond( + status_code=200, json=response_data + ) + yield mock + + +async def test_fetch_and_validate_itis_vip_api( + mock_itis_vip_downloadables_api: respx.MockRouter, fake_api_base_url: str +): + async with AsyncClient(base_url=fake_api_base_url) as client: + response = await client.post("/getDownloadableItems/ComputationalPantom") + assert response.status_code == status.HTTP_200_OK + response_json = response.json() + + try: + response = _ItisVipApiResponse(**response_json) + except ValidationError as e: + pytest.fail(f"Response validation failed: {e}") + + assert response.msg == 0 + assert len(response.available_downloads) == 8 + + +async def test_get_category_items( + mock_itis_vip_downloadables_api: respx.MockRouter, + app_environment: EnvVarsDict, +): + settings = ItisVipSettings.create_from_envs() + assert settings.LICENSES_ITIS_VIP_CATEGORIES + + async with AsyncClient() as client: + for url, category in zip( + settings.get_urls(), settings.LICENSES_ITIS_VIP_CATEGORIES, strict=True + ): + assert f"{url}".endswith(category) + + items = await _itis_vip_service.get_category_items(client, url) + + assert items[0].features.get("functionality") == "Posable" + + +async def test_sync_itis_vip_as_licensed_resources( + mock_itis_vip_downloadables_api: respx.MockRouter, + app_environment: EnvVarsDict, + client: TestClient, + ensure_empty_licensed_resources: None, +): + assert client.app + + settings = ItisVipSettings.create_from_envs() + assert settings.LICENSES_ITIS_VIP_CATEGORIES + + async with AsyncClient() as http_client: + for url, category in zip( + settings.get_urls(), settings.LICENSES_ITIS_VIP_CATEGORIES, strict=True + ): + assert f"{url}".endswith(category) + + vip_resources: list[ + ItisVipData + ] = await _itis_vip_service.get_category_items(http_client, url) + assert vip_resources[0].features.get("functionality") == "Posable" + + for vip in vip_resources: + + # register a NEW resource + ( + licensed_resource1, + state1, + _, + ) = await _licensed_resources_service.register_licensed_resource( + client.app, + licensed_resource_name=f"{category}/{vip.id}", + 
licensed_resource_type=LicensedResourceType.VIP_MODEL, + licensed_resource_data=vip, + licensed_item_display_name="foo", + ) + assert state1 == RegistrationState.NEWLY_REGISTERED + + # register the SAME resource + ( + licensed_resource2, + state2, + _, + ) = await _licensed_resources_service.register_licensed_resource( + client.app, + licensed_resource_name=f"{category}/{vip.id}", + licensed_resource_type=LicensedResourceType.VIP_MODEL, + licensed_resource_data=vip, + licensed_item_display_name="foo", + ) + + assert state2 == RegistrationState.ALREADY_REGISTERED + assert licensed_resource1 == licensed_resource2 + + # register a MODIFIED version of the same resource + ( + licensed_item3, + state3, + msg, + ) = await _licensed_resources_service.register_licensed_resource( + client.app, + licensed_resource_name=f"{category}/{vip.id}", + licensed_resource_type=LicensedResourceType.VIP_MODEL, + licensed_resource_data=vip.model_copy( + update={ + "features": { + **vip.features, + "functionality": "Non-Posable", + } + } + ), + licensed_item_display_name="foo", + ) + assert state3 == RegistrationState.DIFFERENT_RESOURCE + assert licensed_resource2 == licensed_item3 + # {'values_changed': {"root['features']['functionality']": {'new_value': 'Non-Posable', 'old_value': 'Posable'}}} + assert "functionality" in msg + + +async def test_itis_vip_syncer_service( + mock_itis_vip_downloadables_api: respx.MockRouter, + app_environment: EnvVarsDict, + client: TestClient, + ensure_empty_licensed_resources: None, +): + assert client.app + + settings = ItisVipSettings.create_from_envs() + assert settings.LICENSES_ITIS_VIP_CATEGORIES + + categories = settings.to_categories() + + # one round + await _itis_vip_syncer_service.sync_licensed_resources(client.app, categories) + + # second round + await _itis_vip_syncer_service.sync_licensed_resources(client.app, categories) diff --git a/services/web/server/tests/unit/with_dbs/04/licenses/test_licensed_items_purchases_rest.py b/services/web/server/tests/unit/with_dbs/04/licenses/test_licensed_items_purchases_rest.py index a164c1b6406..7abbd37b296 100644 --- a/services/web/server/tests/unit/with_dbs/04/licenses/test_licensed_items_purchases_rest.py +++ b/services/web/server/tests/unit/with_dbs/04/licenses/test_licensed_items_purchases_rest.py @@ -26,6 +26,8 @@ "licensed_item_purchase_id": "beb16d18-d57d-44aa-a638-9727fa4a72ef", "product_name": "osparc", "licensed_item_id": "303942ef-6d31-4ba8-afbe-dbb1fce2a953", + "key": "Duke", + "version": "1.0.0", "wallet_id": 1, "wallet_name": "My Wallet", "pricing_unit_cost_id": 1, diff --git a/services/web/server/tests/unit/with_dbs/04/licenses/test_licensed_items_repository.py b/services/web/server/tests/unit/with_dbs/04/licenses/test_licensed_items_repository.py index 4f68cdce9a8..01df2519fe6 100644 --- a/services/web/server/tests/unit/with_dbs/04/licenses/test_licensed_items_repository.py +++ b/services/web/server/tests/unit/with_dbs/04/licenses/test_licensed_items_repository.py @@ -4,19 +4,31 @@ # pylint: disable=too-many-arguments # pylint: disable=too-many-statements -import arrow +import copy + import pytest from aiohttp.test_utils import TestClient -from models_library.licensed_items import ( +from models_library.licenses import ( VIP_DETAILS_EXAMPLE, - LicensedItemUpdateDB, + LicensedItemPatchDB, LicensedResourceType, ) from models_library.rest_ordering import OrderBy from pytest_simcore.helpers.webserver_login import UserInfoDict +from simcore_postgres_database.models.licensed_item_to_resource import ( + 
licensed_item_to_resource, +) +from simcore_postgres_database.utils_repos import transaction_context from simcore_service_webserver.db.models import UserRole -from simcore_service_webserver.licenses import _licensed_items_repository -from simcore_service_webserver.licenses.errors import LicensedItemNotFoundError +from simcore_service_webserver.db.plugin import get_asyncpg_engine +from simcore_service_webserver.licenses import ( + _licensed_items_repository, + _licensed_resources_repository, +) +from simcore_service_webserver.licenses.errors import ( + LicensedItemNotFoundError, + LicensedKeyVersionNotFoundError, +) from simcore_service_webserver.projects.models import ProjectDict @@ -25,7 +37,7 @@ def user_role() -> UserRole: return UserRole.USER -async def test_licensed_items_db_crud( +async def test_licensed_items_db_domain_crud( client: TestClient, logged_user: UserInfoDict, user_project: ProjectDict, @@ -46,10 +58,10 @@ async def test_licensed_items_db_crud( got = await _licensed_items_repository.create( client.app, product_name=osparc_product_name, - display_name="Model A Display Name", - licensed_resource_name="Model A", + display_name="Renting A Display Name", + key="Duke", + version="1.0.0", licensed_resource_type=LicensedResourceType.VIP_MODEL, - licensed_resource_data=VIP_DETAILS_EXAMPLE, pricing_plan_id=pricing_plan_id, ) licensed_item_id = got.licensed_item_id @@ -69,13 +81,13 @@ async def test_licensed_items_db_crud( licensed_item_id=licensed_item_id, product_name=osparc_product_name, ) - assert got.licensed_resource_name == "Model A" + assert got.display_name == "Renting A Display Name" await _licensed_items_repository.update( client.app, licensed_item_id=licensed_item_id, product_name=osparc_product_name, - updates=LicensedItemUpdateDB(licensed_resource_name="Model B"), + updates=LicensedItemPatchDB(display_name="Renting B Display Name"), ) got = await _licensed_items_repository.get( @@ -83,7 +95,7 @@ async def test_licensed_items_db_crud( licensed_item_id=licensed_item_id, product_name=osparc_product_name, ) - assert got.licensed_resource_name == "Model B" + assert got.display_name == "Renting B Display Name" got = await _licensed_items_repository.delete( client.app, @@ -99,7 +111,7 @@ async def test_licensed_items_db_crud( ) -async def test_licensed_items_db_trash( +async def test_licensed_items_domain_listing( client: TestClient, logged_user: UserInfoDict, user_project: ProjectDict, @@ -107,77 +119,134 @@ async def test_licensed_items_db_trash( pricing_plan_id: int, ): assert client.app - - # Create two licensed items - licensed_item_ids = [] - for name in ["Model A", "Model B"]: - licensed_item_db = await _licensed_items_repository.create( - client.app, - product_name=osparc_product_name, - display_name="Model A Display Name", - licensed_resource_name=name, - licensed_resource_type=LicensedResourceType.VIP_MODEL, - licensed_resource_data=VIP_DETAILS_EXAMPLE, - pricing_plan_id=pricing_plan_id, - ) - licensed_item_ids.append(licensed_item_db.licensed_item_id) - - # Trash one licensed item - trashing_at = arrow.now().datetime - trashed_item = await _licensed_items_repository.update( + total_count, items = await _licensed_items_repository.list_licensed_items( client.app, - licensed_item_id=licensed_item_ids[0], product_name=osparc_product_name, - updates=LicensedItemUpdateDB(trash=True), + offset=0, + limit=10, + order_by=OrderBy(field="modified"), ) + assert total_count == 0 + assert not items - assert trashed_item.licensed_item_id == licensed_item_ids[0] - assert 
trashed_item.trashed - assert trashing_at < trashed_item.trashed - assert trashed_item.trashed < arrow.now().datetime - - # List with filter_trashed include - total_count, items = await _licensed_items_repository.list_( + got_duke1 = await _licensed_items_repository.create( client.app, product_name=osparc_product_name, - offset=0, - limit=10, - order_by=OrderBy(field="display_name"), - trashed="include", + display_name="Renting Duke 1.0.0 Display Name", + key="Duke", + version="1.0.0", + licensed_resource_type=LicensedResourceType.VIP_MODEL, + pricing_plan_id=pricing_plan_id, ) - assert total_count == 2 - assert {i.licensed_item_id for i in items} == set(licensed_item_ids) - # List with filter_trashed exclude - total_count, items = await _licensed_items_repository.list_( + got_duke2 = await _licensed_items_repository.create( client.app, product_name=osparc_product_name, - offset=0, - limit=10, - order_by=OrderBy(field="display_name"), - trashed="exclude", + display_name="Renting Duke 2.0.0 Display Name", + key="Duke", + version="2.0.0", + licensed_resource_type=LicensedResourceType.VIP_MODEL, + pricing_plan_id=pricing_plan_id, ) - assert total_count == 1 - assert items[0].licensed_item_id == licensed_item_ids[1] - assert items[0].trashed is None - # List with filter_trashed all - total_count, items = await _licensed_items_repository.list_( + # Create Licensed Resource with licensed key and version (Duke V1) + example_duke1 = copy.deepcopy(VIP_DETAILS_EXAMPLE) + example_duke1["license_key"] = "ABC" + example_duke1["license_version"] = "1.0.0" + example_duke1["id"] = 1 + + got_licensed_resource_duke1 = ( + await _licensed_resources_repository.create_if_not_exists( + client.app, + display_name="Duke 1", + licensed_resource_name="Duke 1", + licensed_resource_type=LicensedResourceType.VIP_MODEL, + licensed_resource_data=example_duke1, + ) + ) + + example_duke1_different_id = copy.deepcopy(VIP_DETAILS_EXAMPLE) + example_duke1_different_id["license_key"] = "ABC" + example_duke1_different_id["license_version"] = "1.0.0" + example_duke1_different_id["id"] = 2 + + # Create Licensed Resource with the same licensed key and version (Duke V1) but different external ID + got_licensed_resource_duke1_different_id = ( + await _licensed_resources_repository.create_if_not_exists( + client.app, + display_name="Duke 1 (different external ID)", + licensed_resource_name="Duke 1 different external ID", + licensed_resource_type=LicensedResourceType.VIP_MODEL, + licensed_resource_data=example_duke1_different_id, + ) + ) + + example_duke2 = copy.deepcopy(VIP_DETAILS_EXAMPLE) + example_duke2["license_key"] = "ABC" + example_duke2["license_version"] = "2.0.0" + example_duke2["id"] = 3 + + # Create Licensed Resource with the same licensed key but different version (Duke V2) + got_licensed_resource_duke2 = ( + await _licensed_resources_repository.create_if_not_exists( + client.app, + display_name="Duke 2", + licensed_resource_name="Duke 2", + licensed_resource_type=LicensedResourceType.VIP_MODEL, + licensed_resource_data=example_duke2, + ) + ) + + # Connect them via the licensed_item_to_resource DB table + async with transaction_context(get_asyncpg_engine(client.app)) as conn: + await conn.execute( + licensed_item_to_resource.insert(), + [ + { + "licensed_item_id": got_duke1.licensed_item_id, + "licensed_resource_id": got_licensed_resource_duke1.licensed_resource_id, + "product_name": osparc_product_name, + }, + { + "licensed_item_id": got_duke1.licensed_item_id, + "licensed_resource_id": 
got_licensed_resource_duke1_different_id.licensed_resource_id, + "product_name": osparc_product_name, + }, + { + "licensed_item_id": got_duke2.licensed_item_id, + "licensed_resource_id": got_licensed_resource_duke2.licensed_resource_id, + "product_name": osparc_product_name, + }, + ], + ) + + total_count, items = await _licensed_items_repository.list_licensed_items( client.app, product_name=osparc_product_name, offset=0, limit=10, order_by=OrderBy(field="display_name"), - trashed="only", ) - assert total_count == 1 - assert items[0].licensed_item_id == trashed_item.licensed_item_id - assert items[0].trashed + assert total_count == 2 + assert items[0].licensed_item_id == got_duke1.licensed_item_id + assert len(items[0].licensed_resources) == 2 + assert items[1].licensed_item_id == got_duke2.licensed_item_id + assert len(items[1].licensed_resources) == 1 - # Get the trashed licensed item - got = await _licensed_items_repository.get( - client.app, - licensed_item_id=trashed_item.licensed_item_id, - product_name=osparc_product_name, + got = await _licensed_items_repository.get_licensed_item_by_key_version( + client.app, key="Duke", version="1.0.0", product_name=osparc_product_name + ) + assert got.display_name == "Renting Duke 1.0.0 Display Name" + + got = await _licensed_items_repository.get_licensed_item_by_key_version( + client.app, key="Duke", version="2.0.0", product_name=osparc_product_name ) - assert got == trashed_item + assert got.display_name == "Renting Duke 2.0.0 Display Name" + + with pytest.raises(LicensedKeyVersionNotFoundError): + await _licensed_items_repository.get_licensed_item_by_key_version( + client.app, + key="Non-Existing", + version="2.0.0", + product_name=osparc_product_name, + ) diff --git a/services/web/server/tests/unit/with_dbs/04/licenses/test_licensed_items_rest.py b/services/web/server/tests/unit/with_dbs/04/licenses/test_licensed_items_rest.py index 2f00b9733ff..914187e5d4c 100644 --- a/services/web/server/tests/unit/with_dbs/04/licenses/test_licensed_items_rest.py +++ b/services/web/server/tests/unit/with_dbs/04/licenses/test_licensed_items_rest.py @@ -3,22 +3,39 @@ # pylint: disable=unused-variable # pylint: disable=too-many-arguments # pylint: disable=too-many-statements +from decimal import Decimal from http import HTTPStatus import pytest from aiohttp.test_utils import TestClient +from models_library.api_schemas_resource_usage_tracker import ( + licensed_items_purchases as rut_licensed_items_purchases, +) from models_library.api_schemas_resource_usage_tracker.pricing_plans import ( - PricingUnitGet, + RutPricingPlanGet, + RutPricingUnitGet, ) from models_library.api_schemas_webserver.licensed_items import LicensedItemRestGet +from models_library.api_schemas_webserver.licensed_items_purchases import ( + LicensedItemPurchaseGet, +) from models_library.api_schemas_webserver.wallets import WalletGetWithAvailableCredits -from models_library.licensed_items import VIP_DETAILS_EXAMPLE, LicensedResourceType +from models_library.licenses import VIP_DETAILS_EXAMPLE, LicensedResourceType from pytest_mock.plugin import MockerFixture from pytest_simcore.helpers.assert_checks import assert_status from pytest_simcore.helpers.webserver_login import UserInfoDict from servicelib.aiohttp import status +from simcore_postgres_database.models.licensed_item_to_resource import ( + licensed_item_to_resource, +) +from simcore_postgres_database.models.licensed_items import licensed_items +from simcore_postgres_database.utils_repos import transaction_context from 
simcore_service_webserver.db.models import UserRole -from simcore_service_webserver.licenses import _licensed_items_repository +from simcore_service_webserver.db.plugin import get_asyncpg_engine +from simcore_service_webserver.licenses import ( + _licensed_items_repository, + _licensed_resources_repository, +) from simcore_service_webserver.projects.models import ProjectDict @@ -41,35 +58,92 @@ async def test_licensed_items_listing( licensed_item_db = await _licensed_items_repository.create( client.app, + key="Duke", + version="1.0.0", product_name=osparc_product_name, display_name="Model A display name", - licensed_resource_name="Model A", licensed_resource_type=LicensedResourceType.VIP_MODEL, pricing_plan_id=pricing_plan_id, - licensed_resource_data=VIP_DETAILS_EXAMPLE, ) _licensed_item_id = licensed_item_db.licensed_item_id + got_licensed_resource_duke = ( + await _licensed_resources_repository.create_if_not_exists( + client.app, + display_name="Duke", + licensed_resource_name="Duke", + licensed_resource_type=LicensedResourceType.VIP_MODEL, + licensed_resource_data={ + "category_id": "HumanWholeBody", + "category_display": "Humans", + "source": VIP_DETAILS_EXAMPLE, + }, + ) + ) + + # Connect them via licensed_item_to_resource DB table + async with transaction_context(get_asyncpg_engine(client.app)) as conn: + await conn.execute( + licensed_item_to_resource.insert().values( + licensed_item_id=_licensed_item_id, + licensed_resource_id=got_licensed_resource_duke.licensed_resource_id, + product_name=osparc_product_name, + ) + ) + # list url = client.app.router["list_licensed_items"].url_for() resp = await client.get(f"{url}") data, _ = await assert_status(resp, status.HTTP_200_OK) assert len(data) == 1 assert LicensedItemRestGet(**data[0]) - assert data[0]["licensedResourceData"][ - "additionalField" - ] # <-- Testing nested camel case - assert data[0]["licensedResourceData"]["features"][ - "additionalField" - ] # <-- Testing nested camel case - - # get - url = client.app.router["get_licensed_item"].url_for( - licensed_item_id=f"{_licensed_item_id}" - ) + + # Testing nested camel case + source = data[0]["licensedResources"][0]["source"] + assert all("_" not in key for key in source), f"got {source=}" + + # Testing that additional fields were trimmed + assert "additionalField" not in source + assert "additional_field" not in source + + # Testing hidden flag + async with transaction_context(get_asyncpg_engine(client.app)) as conn: + await conn.execute( + licensed_items.update() + .values( + is_hidden_on_market=True, + ) + .where(licensed_items.c.licensed_item_id == _licensed_item_id) + ) + + url = client.app.router["list_licensed_items"].url_for() resp = await client.get(f"{url}") data, _ = await assert_status(resp, status.HTTP_200_OK) - assert LicensedItemRestGet(**data) + assert data == [] + + +_LICENSED_ITEM_PURCHASE_GET = ( + rut_licensed_items_purchases.LicensedItemPurchaseGet.model_validate( + { + "licensed_item_purchase_id": "beb16d18-d57d-44aa-a638-9727fa4a72ef", + "product_name": "osparc", + "licensed_item_id": "303942ef-6d31-4ba8-afbe-dbb1fce2a953", + "key": "Duke", + "version": "1.0.0", + "wallet_id": 1, + "wallet_name": "My Wallet", + "pricing_unit_cost_id": 1, + "pricing_unit_cost": Decimal(10), + "start_at": "2023-01-11 13:11:47.293595", + "expire_at": "2023-01-11 13:11:47.293595", + "num_of_seats": 1, + "purchased_by_user": 1, + "user_email": "test@test.com", + "purchased_at": "2023-01-11 13:11:47.293595", + "modified": "2023-01-11 13:11:47.293595", + } + ) +) @@ -78,25 +152,31 @@ def 
mock_licensed_items_purchase_functions(mocker: MockerFixture) -> tuple: "simcore_service_webserver.licenses._licensed_items_service.get_wallet_with_available_credits_by_user_and_wallet", spec=True, return_value=WalletGetWithAvailableCredits.model_validate( - WalletGetWithAvailableCredits.model_config["json_schema_extra"]["examples"][ - 0 - ] + WalletGetWithAvailableCredits.model_json_schema()["examples"][0] + ), + ) + mock_get_pricing_plan = mocker.patch( + "simcore_service_webserver.licenses._licensed_items_service.get_pricing_plan", + spec=True, + return_value=RutPricingPlanGet.model_validate( + RutPricingPlanGet.model_json_schema()["examples"][2] ), ) mock_get_pricing_unit = mocker.patch( "simcore_service_webserver.licenses._licensed_items_service.get_pricing_plan_unit", spec=True, - return_value=PricingUnitGet.model_validate( - PricingUnitGet.model_config["json_schema_extra"]["examples"][0] + return_value=RutPricingUnitGet.model_validate( + RutPricingUnitGet.model_json_schema()["examples"][2] ), ) mock_create_licensed_item_purchase = mocker.patch( "simcore_service_webserver.licenses._licensed_items_service.licensed_items_purchases.create_licensed_item_purchase", - spec=True, + return_value=_LICENSED_ITEM_PURCHASE_GET, ) return ( mock_wallet_credits, + mock_get_pricing_plan, mock_get_pricing_unit, mock_create_licensed_item_purchase, ) @@ -116,22 +196,38 @@ async def test_licensed_items_purchase( licensed_item_db = await _licensed_items_repository.create( client.app, + key="Duke", + version="1.0.0", product_name=osparc_product_name, display_name="Model A display name", - licensed_resource_name="Model A", licensed_resource_type=LicensedResourceType.VIP_MODEL, pricing_plan_id=pricing_plan_id, - licensed_resource_data=VIP_DETAILS_EXAMPLE, ) _licensed_item_id = licensed_item_db.licensed_item_id - # get - url = client.app.router["get_licensed_item"].url_for( - licensed_item_id=f"{_licensed_item_id}" + got_licensed_resource_duke = ( + await _licensed_resources_repository.create_if_not_exists( + client.app, + display_name="Duke", + licensed_resource_name="Duke", + licensed_resource_type=LicensedResourceType.VIP_MODEL, + licensed_resource_data={ + "category_id": "HumanWholeBody", + "category_display": "Humans", + "source": VIP_DETAILS_EXAMPLE, + }, + ) ) - resp = await client.get(f"{url}") - data, _ = await assert_status(resp, status.HTTP_200_OK) - assert LicensedItemRestGet(**data) + + # Connect them via licensed_item_to_resource DB table + async with transaction_context(get_asyncpg_engine(client.app)) as conn: + await conn.execute( + licensed_item_to_resource.insert().values( + licensed_item_id=_licensed_item_id, + licensed_resource_id=got_licensed_resource_duke.licensed_resource_id, + product_name=osparc_product_name, + ) + ) # purchase url = client.app.router["purchase_licensed_item"].url_for( @@ -146,4 +242,5 @@ "pricing_unit_id": 1, }, ) - await assert_status(resp, status.HTTP_204_NO_CONTENT) + data, _ = await assert_status(resp, status.HTTP_200_OK) + assert LicensedItemPurchaseGet(**data) diff --git a/services/web/server/tests/unit/with_dbs/04/licenses/test_licensed_resources_repository.py b/services/web/server/tests/unit/with_dbs/04/licenses/test_licensed_resources_repository.py new file mode 100644 index 00000000000..22069d92914 --- /dev/null +++ b/services/web/server/tests/unit/with_dbs/04/licenses/test_licensed_resources_repository.py @@ -0,0 +1,59 @@ +# pylint: disable=redefined-outer-name +# pylint: disable=unused-argument +# pylint: 
disable=unused-variable +# pylint: disable=too-many-arguments +# pylint: disable=too-many-statements +import arrow +import pytest +from aiohttp.test_utils import TestClient +from models_library.licenses import ( + VIP_DETAILS_EXAMPLE, + LicensedResourcePatchDB, + LicensedResourceType, +) +from pytest_simcore.helpers.webserver_login import UserInfoDict +from simcore_service_webserver.db.models import UserRole +from simcore_service_webserver.licenses import _licensed_resources_repository +from simcore_service_webserver.projects.models import ProjectDict + + +@pytest.fixture +def user_role() -> UserRole: + return UserRole.USER + + +async def test_licensed_resources_db_trash( + client: TestClient, + logged_user: UserInfoDict, + user_project: ProjectDict, + osparc_product_name: str, + pricing_plan_id: int, +): + assert client.app + + # Create two licensed resources + licensed_resource_ids = [] + for name in ["Model A", "Model B"]: + licensed_resource_db = ( + await _licensed_resources_repository.create_if_not_exists( + client.app, + display_name="Model A Display Name", + licensed_resource_name=name, + licensed_resource_type=LicensedResourceType.VIP_MODEL, + licensed_resource_data=VIP_DETAILS_EXAMPLE, + ) + ) + licensed_resource_ids.append(licensed_resource_db.licensed_resource_id) + + # Trash one licensed resource + trashing_at = arrow.now().datetime + trashed_item = await _licensed_resources_repository.update( + client.app, + licensed_resource_id=licensed_resource_ids[0], + updates=LicensedResourcePatchDB(trash=True), + ) + + assert trashed_item.licensed_resource_id == licensed_resource_ids[0] + assert trashed_item.trashed + assert trashing_at < trashed_item.trashed + assert trashed_item.trashed < arrow.now().datetime diff --git a/services/web/server/tests/unit/with_dbs/04/licenses/test_licenses_rpc.py b/services/web/server/tests/unit/with_dbs/04/licenses/test_licenses_rpc.py index fcfcbf2479e..65bb4ae66c4 100644 --- a/services/web/server/tests/unit/with_dbs/04/licenses/test_licenses_rpc.py +++ b/services/web/server/tests/unit/with_dbs/04/licenses/test_licenses_rpc.py @@ -11,7 +11,7 @@ LicensedItemCheckoutGet, ) from models_library.api_schemas_webserver.licensed_items import LicensedItemRpcGetPage -from models_library.licensed_items import VIP_DETAILS_EXAMPLE, LicensedResourceType +from models_library.licenses import VIP_DETAILS_EXAMPLE, LicensedResourceType from models_library.products import ProductName from pytest_mock import MockerFixture from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict @@ -25,9 +25,17 @@ release_licensed_item_for_wallet, ) from settings_library.rabbit import RabbitSettings +from simcore_postgres_database.models.licensed_item_to_resource import ( + licensed_item_to_resource, +) from simcore_postgres_database.models.users import UserRole +from simcore_postgres_database.utils_repos import transaction_context from simcore_service_webserver.application_settings import ApplicationSettings -from simcore_service_webserver.licenses import _licensed_items_repository +from simcore_service_webserver.db.plugin import get_asyncpg_engine +from simcore_service_webserver.licenses import ( + _licensed_items_repository, + _licensed_resources_repository, +) pytest_simcore_core_services_selection = [ "rabbit", ] @@ -133,15 +141,40 @@ async def test_license_checkout_workflow( assert len(result.items) == 0 assert result.total == 0
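+ # Seed one licensed item linked to one licensed resource so that the listing and + # checkout calls below have something to operate on - license_item_db = await _licensed_items_repository.create( + licensed_item_db = await _licensed_items_repository.create( client.app, + key="Duke", + 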
version="1.0.0", product_name=osparc_product_name, display_name="Model A display name", - licensed_resource_name="Model A", licensed_resource_type=LicensedResourceType.VIP_MODEL, pricing_plan_id=pricing_plan_id, - licensed_resource_data=VIP_DETAILS_EXAMPLE, ) + _licensed_item_id = licensed_item_db.licensed_item_id + + got_licensed_resource_duke = ( + await _licensed_resources_repository.create_if_not_exists( + client.app, + display_name="Duke", + licensed_resource_name="Duke", + licensed_resource_type=LicensedResourceType.VIP_MODEL, + licensed_resource_data={ + "category_id": "HumanWholeBody", + "category_display": "Humans", + "source": VIP_DETAILS_EXAMPLE, + }, + ) + ) + + # Connect them via licensed_item_to_resorce DB table + async with transaction_context(get_asyncpg_engine(client.app)) as conn: + await conn.execute( + licensed_item_to_resource.insert().values( + licensed_item_id=_licensed_item_id, + licensed_resource_id=got_licensed_resource_duke.licensed_resource_id, + product_name=osparc_product_name, + ) + ) result = await get_licensed_items( rpc_client, product_name=osparc_product_name, offset=0, limit=20 @@ -163,7 +196,7 @@ async def test_license_checkout_workflow( product_name=osparc_product_name, user_id=logged_user["id"], wallet_id=1, - licensed_item_id=license_item_db.licensed_item_id, + licensed_item_id=licensed_item_db.licensed_item_id, num_of_seats=1, service_run_id="run_1", ) diff --git a/services/web/server/tests/unit/with_dbs/04/products/conftest.py b/services/web/server/tests/unit/with_dbs/04/products/conftest.py index 99f086477a5..236ce3ec224 100644 --- a/services/web/server/tests/unit/with_dbs/04/products/conftest.py +++ b/services/web/server/tests/unit/with_dbs/04/products/conftest.py @@ -4,8 +4,15 @@ import pytest +from models_library.products import ProductName from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict from pytest_simcore.helpers.typing_env import EnvVarsDict +from simcore_service_webserver.constants import FRONTEND_APP_DEFAULT + + +@pytest.fixture +def default_product_name() -> ProductName: + return FRONTEND_APP_DEFAULT @pytest.fixture diff --git a/services/web/server/tests/unit/with_dbs/04/products/test_products_db.py b/services/web/server/tests/unit/with_dbs/04/products/test_products_db.py deleted file mode 100644 index bd399948c14..00000000000 --- a/services/web/server/tests/unit/with_dbs/04/products/test_products_db.py +++ /dev/null @@ -1,153 +0,0 @@ -# pylint: disable=redefined-outer-name -# pylint: disable=unused-argument -# pylint: disable=unused-variable -# pylint: disable=too-many-arguments - - -from typing import Any - -import pytest -import sqlalchemy as sa -from aiohttp import web -from aiohttp.test_utils import TestClient -from aiopg.sa.result import RowProxy -from pytest_mock import MockerFixture -from simcore_postgres_database import utils_products -from simcore_postgres_database.models.products import ( - EmailFeedback, - Forum, - IssueTracker, - Manual, - Vendor, - WebFeedback, - products, -) -from simcore_service_webserver.db.plugin import APP_AIOPG_ENGINE_KEY -from simcore_service_webserver.products._db import ProductRepository -from simcore_service_webserver.products._middlewares import _get_default_product_name -from simcore_service_webserver.products._model import Product - - -@pytest.fixture -def app(client: TestClient) -> web.Application: - assert client.app - return client.app - - -@pytest.fixture -async def product_row(app: web.Application, product_data: dict[str, Any]) -> RowProxy: - """Injects product_data in 
products table and returns the associated table's database row - - Note that product_data is a SUBSET of product_row (e.g. modified dattimes etc)! - """ - engine = app[APP_AIOPG_ENGINE_KEY] - assert engine - - async with engine.acquire() as conn: - # writes - insert_stmt = ( - products.insert().values(**product_data).returning(products.c.name) - ) - name = await conn.scalar(insert_stmt) - - # reads - select_stmt = sa.select(products).where(products.c.name == name) - row = await (await conn.execute(select_stmt)).fetchone() - assert row - - return row - - -@pytest.fixture -async def product_repository( - app: web.Application, mocker: MockerFixture -) -> ProductRepository: - assert product_row - - fake_request = mocker.MagicMock() - fake_request.app = app - - return ProductRepository.create_from_request(request=fake_request) - - -@pytest.mark.parametrize( - "product_data", - [ - # DATA introduced by operator e.g. in adminer - { - "name": "tis", - "display_name": "COMPLETE example", - "short_name": "dummy", - "host_regex": r"([\.-]{0,1}dummy[\.-])", - "support_email": "foo@osparc.io", - "twilio_messaging_sid": None, - "vendor": Vendor( - name="ACME", - copyright="© ACME correcaminos", - url="https://acme.com", - license_url="http://docs.acme.app/#/license-terms", - invitation_url="http://docs.acme.app/#/how-to-request-invitation", - ), - "issues": [ - IssueTracker( - label="github", - login_url="https://github.com/ITISFoundation/osparc-simcore", - new_url="https://github.com/ITISFoundation/osparc-simcore/issues/new/choose", - ), - IssueTracker( - label="fogbugz", - login_url="https://fogbugz.com/login", - new_url="https://fogbugz.com/new?project=123", - ), - ], - "manuals": [ - Manual(label="main", url="doc.acme.com"), - Manual(label="z43", url="yet-another-manual.acme.com"), - ], - "support": [ - Forum(label="forum", kind="forum", url="forum.acme.com"), - EmailFeedback(label="email", kind="email", email="support@acme.com"), - WebFeedback(label="web-form", kind="web", url="support.acme.com"), - ], - }, - # Minimal - { - "name": "s4llite", - "display_name": "MINIMAL example", - "short_name": "dummy", - "host_regex": "([\\.-]{0,1}osparc[\\.-])", - "support_email": "support@osparc.io", - }, - ], - ids=lambda d: d["display_name"], -) -async def test_product_repository_get_product( - product_repository: ProductRepository, - product_data: dict[str, Any], - product_row: RowProxy, - app: web.Application, - mocker: MockerFixture, -): - - # check differences between the original product_data and the product_row in database - assert set(product_data.keys()).issubset(set(product_row.keys())) - - common_keys = set(product_data.keys()).intersection(set(product_row.keys())) - assert {k: product_data[k] for k in common_keys} == { - k: product_row[k] for k in common_keys - } - - # check RowProxy -> pydantic's Product - product = Product.model_validate(product_row) - - print(product.model_dump_json(indent=1)) - - # product repo - assert product_repository.engine - - assert await product_repository.get_product(product.name) == product - - # tests definitions of default from utle_products and web-server.products are in sync - async with product_repository.engine.acquire() as conn: - default_product = await utils_products.get_default_product_name(conn) - assert default_product == _get_default_product_name(app) diff --git a/services/web/server/tests/unit/with_dbs/04/products/test_products_repository.py b/services/web/server/tests/unit/with_dbs/04/products/test_products_repository.py new file mode 100644 index 
00000000000..ed4550eee6d --- /dev/null +++ b/services/web/server/tests/unit/with_dbs/04/products/test_products_repository.py @@ -0,0 +1,262 @@ +# pylint: disable=redefined-outer-name +# pylint: disable=unused-argument +# pylint: disable=unused-variable +# pylint: disable=too-many-arguments + +import contextlib +from collections.abc import Iterable +from decimal import Decimal +from typing import Any + +import pytest +import sqlalchemy as sa +from aiohttp import web +from aiohttp.test_utils import TestClient, make_mocked_request +from models_library.products import ProductName +from pytest_simcore.helpers.faker_factories import random_product, random_product_price +from pytest_simcore.helpers.postgres_tools import sync_insert_and_get_row_lifespan +from simcore_postgres_database import utils_products +from simcore_postgres_database.models.products import ( + EmailFeedback, + Forum, + IssueTracker, + Manual, + Vendor, + WebFeedback, + products, +) +from simcore_postgres_database.models.products_prices import products_prices +from simcore_postgres_database.utils_products_prices import ProductPriceInfo +from simcore_service_webserver.constants import ( + FRONTEND_APP_DEFAULT, + FRONTEND_APPS_AVAILABLE, +) +from simcore_service_webserver.products._repository import ProductRepository +from simcore_service_webserver.products._web_middlewares import ( + _get_default_product_name, +) +from sqlalchemy.ext.asyncio import AsyncEngine + + +@pytest.fixture(scope="module") +def products_raw_data() -> dict[ProductName, dict[str, Any]]: + adminer_example = { + # DATA introduced by operator e.g. in adminer + "name": "tis", + "display_name": "COMPLETE example", + "short_name": "dummy", + "host_regex": r"([\.-]{0,1}dummy[\.-])", + "support_email": "foo@osparc.io", + "twilio_messaging_sid": None, + "vendor": Vendor( + name="ACME", + copyright="© ACME correcaminos", + url="https://acme.com", + license_url="http://docs.acme.app/#/license-terms", + invitation_url="http://docs.acme.app/#/how-to-request-invitation", + ), + "issues": [ + IssueTracker( + label="github", + login_url="https://github.com/ITISFoundation/osparc-simcore", + new_url="https://github.com/ITISFoundation/osparc-simcore/issues/new/choose", + ), + IssueTracker( + label="fogbugz", + login_url="https://fogbugz.com/login", + new_url="https://fogbugz.com/new?project=123", + ), + ], + "manuals": [ + Manual(label="main", url="doc.acme.com"), + Manual(label="z43", url="yet-another-manual.acme.com"), + ], + "support": [ + Forum(label="forum", kind="forum", url="forum.acme.com"), + EmailFeedback(label="email", kind="email", email="support@acme.com"), + WebFeedback(label="web-form", kind="web", url="support.acme.com"), + ], + } + + minimal_example = { + "name": "s4llite", + "display_name": "MINIMAL example", + "short_name": "dummy", + "host_regex": "([\\.-]{0,1}osparc[\\.-])", + "support_email": "support@osparc.io", + } + + examples = {} + + def _add(data): + assert data["name"] not in examples + assert data.get("group_id") is None # note that group is not assigned + examples.update({data["name"]: data}) + + _add(adminer_example) + _add(minimal_example) + + for name in FRONTEND_APPS_AVAILABLE: + if name not in examples and name != FRONTEND_APP_DEFAULT: + _add(random_product(name=name)) + + return examples + + +@pytest.fixture(scope="module") +def products_prices_raw_data() -> dict[ProductName, dict[str, Any]]: + + return { + "osparc": random_product_price( + product_name="osparc", + # free of charge + usd_per_credit=Decimal(0), + ), + "tis": 
random_product_price( + product_name="tis", + usd_per_credit=Decimal(0), + ), + } + + +@pytest.fixture(scope="module") +def db_products_table_with_data_before_app( + postgres_db: sa.engine.Engine, + products_raw_data: dict[ProductName, dict[str, Any]], + products_prices_raw_data: dict[ProductName, dict[str, Any]], +) -> Iterable[dict[ProductName, dict[str, Any]]]: + """ + All tests in this module read from the database, + and the products data is set up in the database before the app starts + + This fixture replicates those two conditions + """ + + with contextlib.ExitStack() as fixture_stack: + product_to_row: dict[ProductName, dict[str, Any]] = {} + + for product_name, product_values in products_raw_data.items(): + product_row = fixture_stack.enter_context( + sync_insert_and_get_row_lifespan( + postgres_db, + table=products, + values=product_values, + pk_col=products.c.name, + pk_value=product_name, + ) + ) + product_to_row[product_name] = product_row + + if prices := products_prices_raw_data.get(product_name): + fixture_stack.enter_context( + sync_insert_and_get_row_lifespan( + postgres_db, + table=products_prices, + values=prices, + pk_col=products_prices.c.product_name, + pk_value=product_name, + ) + ) + + yield product_to_row + + # on exit, the inserted products (and prices) rows are removed + + +@pytest.fixture +def app( + db_products_table_with_data_before_app: dict[ProductName, dict[str, Any]], + client: TestClient, +) -> web.Application: + assert db_products_table_with_data_before_app + assert client.app + return client.app + + +@pytest.fixture +async def product_repository(app: web.Application) -> ProductRepository: + repo = ProductRepository.create_from_request( + request=make_mocked_request("GET", "/fake", app=app) + ) + assert repo.engine + + return repo + + +async def test_utils_products_and_webserver_default_product_in_sync( + app: web.Application, + product_repository: ProductRepository, + asyncpg_engine: AsyncEngine, +): + # tests that the definitions of the default product in utils_products and web-server.products are in sync + async with asyncpg_engine.connect() as conn: + default_product_name = await utils_products.get_default_product_name(conn) + assert default_product_name == _get_default_product_name(app) + + default_product = await product_repository.get_product(default_product_name) + assert default_product + assert default_product.name == default_product_name + + +async def test_product_repository_get_product( + product_repository: ProductRepository, +): + product_name = "tis" + + product = await product_repository.get_product(product_name) + assert product + assert product.name == product_name + + assert await product_repository.get_product("undefined") is None + + +async def test_product_repository_list_products_names( + product_repository: ProductRepository, +): + product_names = await product_repository.list_products_names() + assert isinstance(product_names, list) + assert all(isinstance(name, str) for name in product_names) + + +async def test_product_repository_get_product_latest_price_info_or_none( + product_repository: ProductRepository, +): + product_name = "tis" + price_info = await product_repository.get_product_latest_price_info_or_none( + product_name + ) + assert price_info is None or isinstance(price_info, ProductPriceInfo) + + +async def test_product_repository_get_product_stripe_info( + product_repository: ProductRepository, +): + product_name = "tis"
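+ # "tis" is seeded with a products_prices row by the module-scoped fixtures above, + # whereas "s4l" is not, so stripe info is expected only for the former + stripe_info = await 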
product_repository.get_product_stripe_info_or_none(product_name) + assert stripe_info is None + + +async def test_product_repository_get_template_content( + product_repository: ProductRepository, +): + template_name = "some_template" + content = await product_repository.get_template_content(template_name) + assert content is None or isinstance(content, str) + + +async def test_product_repository_get_product_template_content( + product_repository: ProductRepository, +): + product_name = "tis" + content = await product_repository.get_product_template_content(product_name) + assert content is None or isinstance(content, str) + + +async def test_product_repository_get_product_ui(product_repository: ProductRepository): + product_name = "tis" + ui = await product_repository.get_product_ui(product_name) + assert ui is None or isinstance(ui, dict) diff --git a/services/web/server/tests/unit/with_dbs/04/products/test_products_handlers.py b/services/web/server/tests/unit/with_dbs/04/products/test_products_rest.py similarity index 61% rename from services/web/server/tests/unit/with_dbs/04/products/test_products_handlers.py rename to services/web/server/tests/unit/with_dbs/04/products/test_products_rest.py index a36fc493ad6..f9a047ef50e 100644 --- a/services/web/server/tests/unit/with_dbs/04/products/test_products_handlers.py +++ b/services/web/server/tests/unit/with_dbs/04/products/test_products_rest.py @@ -10,12 +10,13 @@ import pytest from aiohttp.test_utils import TestClient -from models_library.api_schemas_webserver.product import GetProduct +from models_library.api_schemas_webserver.products import ProductGet, ProductUIGet from models_library.products import ProductName from pytest_simcore.helpers.assert_checks import assert_status from pytest_simcore.helpers.webserver_login import UserInfoDict from servicelib.aiohttp import status from servicelib.rest_constants import X_PRODUCT_NAME_HEADER +from servicelib.status_codes_utils import is_2xx_success from simcore_postgres_database.constants import QUANTIZE_EXP_ARG from simcore_service_webserver.db.models import UserRole from simcore_service_webserver.groups.api import auto_add_user_to_product_group @@ -95,21 +96,73 @@ async def test_get_product( client.app, user_id=logged_user["id"], product_name=product_name ) - current_project_headers = {X_PRODUCT_NAME_HEADER: product_name} - response = await client.get("/v0/products/current", headers=current_project_headers) + current_product_headers = {X_PRODUCT_NAME_HEADER: product_name} + response = await client.get("/v0/products/current", headers=current_product_headers) data, error = await assert_status(response, status.HTTP_200_OK) - got_product = GetProduct(**data) + got_product = ProductGet(**data) assert got_product.name == product_name assert got_product.credits_per_usd == expected_credits_per_usd assert not error response = await client.get(f"/v0/products/{product_name}") data, error = await assert_status(response, status.HTTP_200_OK) - assert got_product == GetProduct(**data) + assert got_product == ProductGet(**data) assert not error - response = await client.get("/v0/product/invalid") + response = await client.get("/v0/products/invalid") data, error = await assert_status(response, status.HTTP_404_NOT_FOUND) assert not data assert error + + +@pytest.mark.parametrize( + "user_role, expected_status_code", + [ + (UserRole.ANONYMOUS, status.HTTP_401_UNAUTHORIZED), + (UserRole.GUEST, status.HTTP_403_FORBIDDEN), + (UserRole.USER, status.HTTP_200_OK), + (UserRole.TESTER, status.HTTP_200_OK), + 
(UserRole.PRODUCT_OWNER, status.HTTP_200_OK), + (UserRole.ADMIN, status.HTTP_200_OK), + ], +) +async def test_get_current_product_ui( + app_products_names: list[ProductName], + product_name: ProductName, + logged_user: UserInfoDict, + client: TestClient, + user_role: UserRole, + expected_status_code: int, +): + assert logged_user["role"] == user_role.value + assert product_name in app_products_names + + # give access to user to this product + assert client.app + await auto_add_user_to_product_group( + client.app, user_id=logged_user["id"], product_name=product_name + ) + + assert ( + client.app.router["get_current_product_ui"].url_for().path + == "/v0/products/current/ui" + ) + response = await client.get( + "/v0/products/current/ui", headers={X_PRODUCT_NAME_HEADER: product_name} + ) + + data, error = await assert_status(response, expected_status_code) + + if is_2xx_success(expected_status_code): + # ui is something owned and fully controlled by the front-end + # Will be something like the data stored in this file + # https://github.com/itisfoundation/osparc-simcore/blob/1dcd369717959348099cc6241822a1f0aff0382c/services/static-webserver/client/source/resource/osparc/new_studies.json + assert not error + assert data + + product_ui = ProductUIGet.model_validate(data) + assert product_ui.product_name == product_name + else: + assert error + assert not data diff --git a/services/web/server/tests/unit/with_dbs/04/products/test_products_rpc.py b/services/web/server/tests/unit/with_dbs/04/products/test_products_rpc.py index 4505a6f4e3e..08763afefa2 100644 --- a/services/web/server/tests/unit/with_dbs/04/products/test_products_rpc.py +++ b/services/web/server/tests/unit/with_dbs/04/products/test_products_rpc.py @@ -8,7 +8,8 @@ import pytest from models_library.api_schemas_webserver import WEBSERVER_RPC_NAMESPACE -from models_library.products import CreditResultGet, ProductName +from models_library.api_schemas_webserver.products import CreditResultRpcGet +from models_library.products import ProductName from models_library.rabbitmq_basic_types import RPCMethodName from pydantic import TypeAdapter from pytest_mock import MockerFixture @@ -74,7 +75,7 @@ async def test_get_credit_amount( dollar_amount=Decimal(900), product_name="s4l", ) - credit_result = CreditResultGet.model_validate(result) + credit_result = CreditResultRpcGet.model_validate(result) assert credit_result.credit_amount == 100 result = await rpc_client.request( @@ -83,7 +84,7 @@ async def test_get_credit_amount( dollar_amount=Decimal(900), product_name="tis", ) - credit_result = CreditResultGet.model_validate(result) + credit_result = CreditResultRpcGet.model_validate(result) assert credit_result.credit_amount == 180 with pytest.raises(RPCServerError) as exc_info: diff --git a/services/web/server/tests/unit/with_dbs/04/products/test_products_service.py b/services/web/server/tests/unit/with_dbs/04/products/test_products_service.py new file mode 100644 index 00000000000..3f30f84b929 --- /dev/null +++ b/services/web/server/tests/unit/with_dbs/04/products/test_products_service.py @@ -0,0 +1,196 @@ +# pylint: disable=redefined-outer-name +# pylint: disable=unused-argument +# pylint: disable=unused-variable +# pylint: disable=too-many-arguments + + +from decimal import Decimal + +import pytest +from aiohttp import web +from aiohttp.test_utils import TestServer +from models_library.products import ProductName +from pydantic import TypeAdapter, ValidationError +from pytest_mock import MockerFixture +from servicelib.exceptions import 
InvalidConfig +from simcore_postgres_database.utils_products_prices import ProductPriceInfo +from simcore_service_webserver.products import _service, products_service +from simcore_service_webserver.products._models import ProductStripeInfo +from simcore_service_webserver.products._repository import ProductRepository +from simcore_service_webserver.products.errors import ( + BelowMinimumPaymentError, + MissingStripeConfigError, + ProductNotFoundError, + ProductPriceNotDefinedError, + ProductTemplateNotFoundError, +) +from simcore_service_webserver.products.models import Product + + +@pytest.fixture +def app( + web_server: TestServer, +) -> web.Application: + # app initialized and server running + assert web_server.app + return web_server.app + + +async def test_load_products(app: web.Application): + products = await _service.load_products(app) + assert isinstance(products, list) + assert all(isinstance(product, Product) for product in products) + + +async def test_load_products_validation_error(app: web.Application, mocker): + mock_repo = mocker.patch( + "simcore_service_webserver.products._service.ProductRepository.create_from_app" + ) + + try: + TypeAdapter(int).validate_python("not-an-int") + except ValidationError as validation_error: + mock_repo.return_value.list_products.side_effect = validation_error + + with pytest.raises(InvalidConfig, match="Invalid product configuration in db"): + await _service.load_products(app) + + +async def test_get_default_product_name(app: web.Application): + default_product_name = await _service.get_default_product_name(app) + assert isinstance(default_product_name, ProductName) + + +async def test_get_product(app: web.Application, default_product_name: ProductName): + product = products_service.get_product(app, product_name=default_product_name) + assert product.name == default_product_name + + products = products_service.list_products(app) + assert len(products) == 1 + assert products[0] == product + + +async def test_products_on_uninitialized_app(default_product_name: ProductName): + uninit_app = web.Application() + with pytest.raises(ProductNotFoundError): + _service.get_product(uninit_app, default_product_name) + + +async def test_list_products_names(app: web.Application): + product_names = await products_service.list_products_names(app) + assert isinstance(product_names, list) + assert all(isinstance(name, ProductName) for name in product_names) + + +async def test_get_credit_price_info( + app: web.Application, default_product_name: ProductName +): + price_info = await _service.get_credit_price_info( + app, product_name=default_product_name + ) + assert price_info is None or isinstance(price_info, ProductPriceInfo) + + +async def test_get_product_ui(app: web.Application, default_product_name: ProductName): + repo = ProductRepository.create_from_app(app) + ui = await products_service.get_product_ui(repo, product_name=default_product_name) + assert ui == {}, "Expected empty by default" + + with pytest.raises(ProductNotFoundError): + await products_service.get_product_ui(repo, product_name="undefined") + + +async def test_get_credit_amount( + app: web.Application, default_product_name: ProductName, mocker: MockerFixture +): + # Test when ProductPriceNotDefinedError is raised + with pytest.raises(ProductPriceNotDefinedError): + await products_service.get_credit_amount( + app, dollar_amount=1, product_name=default_product_name + ) + + +async def test_get_credit_amount_with_repo_faking_data( + default_product_name: ProductName, mocker: MockerFixture +): 
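+ # The service resolves its repository via ProductRepository.create_from_app, so + # patching that classmethod below is what lets this test run on a bare + # web.Application; the async side_effect makes the awaited repository call + # return the faked price info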
+ # No need for a database since the repo is mocked + app = web.Application() + + # Mock the repository to return a valid price info + mock_repo = mocker.patch( + "simcore_service_webserver.products._service.ProductRepository.create_from_app" + ) + + async def _get_product_latest_price_info_or_none(*args, **kwargs): + return ProductPriceInfo( + usd_per_credit=Decimal("10.0"), min_payment_amount_usd=Decimal("5.0") + ) + + mock_repo.return_value.get_product_latest_price_info_or_none.side_effect = ( + _get_product_latest_price_info_or_none + ) + + # Test when BelowMinimumPaymentError is raised + with pytest.raises(BelowMinimumPaymentError): + await products_service.get_credit_amount( + app, dollar_amount=Decimal("3.0"), product_name=default_product_name + ) + + # Test when CreditResultGet is returned successfully + credit_result = await products_service.get_credit_amount( + app, dollar_amount=Decimal("10.0"), product_name=default_product_name + ) + assert credit_result.credit_amount == Decimal("1.0") + assert credit_result.product_name == default_product_name + + +async def test_get_product_stripe_info( + app: web.Application, default_product_name: ProductName +): + # database has no info + with pytest.raises(MissingStripeConfigError, match=default_product_name): + await products_service.get_product_stripe_info( + app, product_name=default_product_name + ) + + +async def test_get_product_stripe_info_with_repo_faking_data( + default_product_name: ProductName, mocker: MockerFixture +): + # No need for a database since the repo is mocked + app = web.Application() + + # Mock the repository to return a valid stripe info + mock_repo = mocker.patch( + "simcore_service_webserver.products._service.ProductRepository.create_from_app" + ) + + # Test when stripe info is returned successfully + expected_stripe_info = ProductStripeInfo( + stripe_price_id="price_id", stripe_tax_rate_id="tax_id" + ) + + async def _mock(*args, **kw): + return expected_stripe_info + + mock_repo.return_value.get_product_stripe_info_or_none.side_effect = _mock + + stripe_info = await products_service.get_product_stripe_info( + app, product_name=default_product_name + ) + assert stripe_info == expected_stripe_info + + +async def test_get_template_content(app: web.Application): + template_name = "some_template" + with pytest.raises(ProductTemplateNotFoundError): + await _service.get_template_content(app, template_name=template_name) + + +async def test_auto_create_products_groups(app: web.Application): + groups = await _service.auto_create_products_groups(app) + assert isinstance(groups, dict) + + assert all( + group_id is not None for group_id in groups.values() + ), f"Invalid {groups}" diff --git a/services/web/server/tests/unit/with_dbs/04/products/test_products_web.py b/services/web/server/tests/unit/with_dbs/04/products/test_products_web.py new file mode 100644 index 00000000000..4db0e38867c --- /dev/null +++ b/services/web/server/tests/unit/with_dbs/04/products/test_products_web.py @@ -0,0 +1,158 @@ +# pylint: disable=redefined-outer-name +# pylint: disable=unused-argument +# pylint: disable=unused-variable +# pylint: disable=too-many-arguments + + +import pytest +from aiohttp import web +from aiohttp.test_utils import TestClient, make_mocked_request +from models_library.products import ProductName +from pytest_mock import MockerFixture, MockType +from servicelib.rest_constants import X_PRODUCT_NAME_HEADER +from simcore_service_webserver._meta import API_VTAG +from simcore_service_webserver.products import products_web +from 
simcore_service_webserver.products.plugin import setup_products + + +@pytest.fixture +def setup_products_mocked(mocker: MockerFixture) -> MockType: + def _wrap(app: web.Application): + setup_products(app) + + # register test handlers + app.router.add_get( + f"/{API_VTAG}/test-helpers", + _test_helpers_handler, + name=_test_helpers_handler.__name__, + ) + app.router.add_get( + f"/{API_VTAG}/test-product-template-helpers", + _test_product_template_handler, + name=_test_product_template_handler.__name__, + ) + + return True + + return mocker.patch( + "simcore_service_webserver.application.setup_products", + autospec=True, + side_effect=_wrap, + ) + + +@pytest.fixture +def client( + setup_products_mocked: MockType, # keep before client fixture! + client: TestClient, +) -> TestClient: + assert setup_products_mocked.called + + assert client.app + assert client.app.router + + registered_routes = { + route.resource.canonical + for route in client.app.router.routes() + if route.resource + } + assert f"/{API_VTAG}/test-helpers" in registered_routes + + return client + + +async def _test_helpers_handler(request: web.Request): + product_name = products_web.get_product_name(request) + current_product = products_web.get_current_product(request) + + assert current_product.name == product_name + + credit_price_info = await products_web.get_current_product_credit_price_info( + request + ) + assert credit_price_info is None + + return web.json_response( + { + "current_product": current_product.model_dump(mode="json"), + "product_name": product_name, + "credit_price_info": credit_price_info, + } + ) + + +async def test_request_helpers(client: TestClient, default_product_name: ProductName): + + resp = await client.get( + f"/{API_VTAG}/test-helpers", + headers={X_PRODUCT_NAME_HEADER: default_product_name}, + ) + + assert resp.ok, f"Got {await resp.text()}" + + got = await resp.json() + assert got["product_name"] == default_product_name + + +async def _test_product_template_handler(request: web.Request): + product_name = products_web.get_product_name(request) + + # if no product, it should return the common template + + # if there is no product-specific template, it should return the common one: + # "templates/common/close_account.jinja2" + template_path = await products_web.get_product_template_path( + request, filename="close_account.jinja2" + ) + assert template_path.exists() + assert template_path.name == "close_account.jinja2" + assert "common/" in f"{template_path.resolve().absolute()}" + + # if a product-specific template exists, it is fetched and cached to a file, e.g. + # "templates/osparc/registration_email.jinja2" + template_path = await products_web.get_product_template_path( + request, filename="registration_email.jinja2" + ) + assert template_path.exists() + assert template_path.name == "registration_email.jinja2" + assert f"{product_name}/" in f"{template_path.resolve().absolute()}" + + # getting it again should reuse the cached file + + for _ in range(2): + got = await products_web.get_product_template_path( + request, filename="registration_email.jinja2" + ) + assert got == template_path + + with pytest.raises(ValueError, match="not part of the templates/common"): + await products_web.get_product_template_path( + request, filename="invalid-template-name.jinja" + ) + + return web.json_response() + + +async def test_product_template_helpers( + client: TestClient, default_product_name: ProductName +): + + resp = await client.get( + f"/{API_VTAG}/test-product-template-helpers", + headers={X_PRODUCT_NAME_HEADER: default_product_name}, + ) + + assert resp.ok, f"Got {await 
resp.text()}" + + +async def test_get_product_template_path_without_product(): + fake_request = make_mocked_request("GET", "/fake", app=web.Application()) + + # if no product, it should return common + template_path = await products_web.get_product_template_path( + fake_request, filename="close_account.jinja2" + ) + + assert template_path.exists() + assert template_path.name == "close_account.jinja2" + assert "common/" in f"{template_path.resolve().absolute()}" diff --git a/services/web/server/tests/unit/with_dbs/04/studies_dispatcher/conftest.py b/services/web/server/tests/unit/with_dbs/04/studies_dispatcher/conftest.py index bfa6fe9fece..4abc988d577 100644 --- a/services/web/server/tests/unit/with_dbs/04/studies_dispatcher/conftest.py +++ b/services/web/server/tests/unit/with_dbs/04/studies_dispatcher/conftest.py @@ -26,7 +26,6 @@ def app_environment(app_environment: EnvVarsDict, monkeypatch: pytest.MonkeyPatc "WEBSERVER_DIAGNOSTICS": "null", "WEBSERVER_EXPORTER": "null", "WEBSERVER_GROUPS": "1", - "WEBSERVER_META_MODELING": "0", "WEBSERVER_PRODUCTS": "1", "WEBSERVER_PUBLICATIONS": "0", "WEBSERVER_RABBITMQ": "null", @@ -35,7 +34,6 @@ def app_environment(app_environment: EnvVarsDict, monkeypatch: pytest.MonkeyPatc "WEBSERVER_STORAGE": "null", "WEBSERVER_TAGS": "1", "WEBSERVER_TRACING": "null", - "WEBSERVER_VERSION_CONTROL": "0", "WEBSERVER_WALLETS": "0", }, ) diff --git a/services/web/server/tests/unit/with_dbs/04/studies_dispatcher/test_studies_dispatcher_handlers.py b/services/web/server/tests/unit/with_dbs/04/studies_dispatcher/test_studies_dispatcher_handlers.py index 4a3194ca9a4..3498fd2abcb 100644 --- a/services/web/server/tests/unit/with_dbs/04/studies_dispatcher/test_studies_dispatcher_handlers.py +++ b/services/web/server/tests/unit/with_dbs/04/studies_dispatcher/test_studies_dispatcher_handlers.py @@ -5,7 +5,6 @@ # pylint: disable=unused-variable import asyncio -import json import re import urllib.parse from collections.abc import AsyncIterator @@ -18,11 +17,14 @@ from aiohttp.test_utils import TestClient, TestServer from aioresponses import aioresponses from models_library.projects_state import ProjectLocked, ProjectStatus -from pydantic import BaseModel, ByteSize, TypeAdapter, ValidationError +from pydantic import BaseModel, ByteSize, TypeAdapter from pytest_mock import MockerFixture from pytest_simcore.helpers.assert_checks import assert_status from pytest_simcore.helpers.webserver_login import UserInfoDict, UserRole -from pytest_simcore.pydantic_models import walk_model_examples_in_package +from pytest_simcore.pydantic_models import ( + assert_validation_model, + walk_model_examples_in_package, +) from servicelib.aiohttp import status from settings_library.redis import RedisSettings from settings_library.utils_session import DEFAULT_SESSION_COOKIE_NAME @@ -238,16 +240,11 @@ async def test_api_list_supported_filetypes(client: TestClient): walk_model_examples_in_package(simcore_service_webserver.studies_dispatcher), ) def test_model_examples( - model_cls: type[BaseModel], example_name: int, example_data: Any + model_cls: type[BaseModel], example_name: str, example_data: Any ): - try: - assert model_cls.model_validate(example_data) is not None - except ValidationError as err: - pytest.fail( - f"{example_name} is invalid {model_cls.__module__}.{model_cls.__name__}:" - f"\n{json.dumps(example_data, indent=1)}" - f"\nError: {err}" - ) + assert_validation_model( + model_cls, example_name=example_name, example_data=example_data + ) async def test_api_list_services(client: 
TestClient): @@ -286,7 +283,7 @@ def catalog_subsystem_mock(mocker: MockerFixture) -> None: ] mock = mocker.patch( - "simcore_service_webserver.projects._crud_api_read.get_services_for_user_in_product", + "simcore_service_webserver.projects._crud_api_read.catalog_service.get_services_for_user_in_product", autospec=True, ) @@ -302,7 +299,7 @@ def mocks_on_projects_api(mocker) -> None: All projects in this module are UNLOCKED """ mocker.patch( - "simcore_service_webserver.projects.projects_service._get_project_lock_state", + "simcore_service_webserver.projects._projects_service._get_project_lock_state", return_value=ProjectLocked(value=False, status=ProjectStatus.CLOSED), ) @@ -399,11 +396,11 @@ async def test_dispatch_study_anonymously( ): assert client.app mock_client_director_v2_func = mocker.patch( - "simcore_service_webserver.director_v2.api.create_or_update_pipeline", + "simcore_service_webserver.director_v2.director_v2_service.create_or_update_pipeline", return_value=None, ) mock_dynamic_scheduler_update_project_networks = mocker.patch( - "simcore_service_webserver.studies_dispatcher._redirects_handlers.dynamic_scheduler_api.update_projects_networks", + "simcore_service_webserver.studies_dispatcher._redirects_handlers.dynamic_scheduler_service.update_projects_networks", return_value=None, ) @@ -465,11 +462,11 @@ async def test_dispatch_logged_in_user( ): assert client.app mock_client_director_v2_pipline_update = mocker.patch( - "simcore_service_webserver.director_v2.api.create_or_update_pipeline", + "simcore_service_webserver.director_v2.director_v2_service.create_or_update_pipeline", return_value=None, ) mock_dynamic_scheduler_update_project_networks = mocker.patch( - "simcore_service_webserver.studies_dispatcher._redirects_handlers.dynamic_scheduler_api.update_projects_networks", + "simcore_service_webserver.studies_dispatcher._redirects_handlers.dynamic_scheduler_service.update_projects_networks", return_value=None, ) diff --git a/services/web/server/tests/unit/with_dbs/04/studies_dispatcher/test_studies_dispatcher_projects.py b/services/web/server/tests/unit/with_dbs/04/studies_dispatcher/test_studies_dispatcher_projects.py index b1c7e7259d2..2ae68f22182 100644 --- a/services/web/server/tests/unit/with_dbs/04/studies_dispatcher/test_studies_dispatcher_projects.py +++ b/services/web/server/tests/unit/with_dbs/04/studies_dispatcher/test_studies_dispatcher_projects.py @@ -18,7 +18,7 @@ from pytest_simcore.helpers.webserver_login import NewUser from pytest_simcore.helpers.webserver_projects import delete_all_projects from simcore_service_webserver.groups.api import auto_add_user_to_groups -from simcore_service_webserver.projects.projects_service import get_project_for_user +from simcore_service_webserver.projects._projects_service import get_project_for_user from simcore_service_webserver.studies_dispatcher._models import ServiceInfo from simcore_service_webserver.studies_dispatcher._projects import ( UserInfo, @@ -94,7 +94,7 @@ async def test_add_new_project_from_model_instance( assert client.app mock_directorv2_api = mocker.patch( - "simcore_service_webserver.director_v2.api.create_or_update_pipeline", + "simcore_service_webserver.director_v2.director_v2_service.create_or_update_pipeline", return_value=None, ) diff --git a/services/web/server/tests/unit/with_dbs/04/studies_dispatcher/test_studies_dispatcher_studies_access.py b/services/web/server/tests/unit/with_dbs/04/studies_dispatcher/test_studies_dispatcher_studies_access.py index 10a9367d101..16dfde75956 100644 --- 
a/services/web/server/tests/unit/with_dbs/04/studies_dispatcher/test_studies_dispatcher_studies_access.py +++ b/services/web/server/tests/unit/with_dbs/04/studies_dispatcher/test_studies_dispatcher_studies_access.py @@ -28,11 +28,13 @@ from servicelib.aiohttp import status from servicelib.aiohttp.long_running_tasks.client import LRTask from servicelib.aiohttp.long_running_tasks.server import TaskProgress -from servicelib.aiohttp.rest_responses import unwrap_envelope from servicelib.common_headers import UNDEFINED_DEFAULT_SIMCORE_USER_AGENT_VALUE +from servicelib.rest_responses import unwrap_envelope from settings_library.utils_session import DEFAULT_SESSION_COOKIE_NAME +from simcore_service_webserver.projects._projects_service import ( + submit_delete_project_task, +) from simcore_service_webserver.projects.models import ProjectDict -from simcore_service_webserver.projects.projects_service import submit_delete_project_task from simcore_service_webserver.users.api import ( delete_user_without_projects, get_user_role, @@ -134,7 +136,7 @@ def mocks_on_projects_api(mocker: MockerFixture) -> None: All projects in this module are UNLOCKED """ mocker.patch( - "simcore_service_webserver.projects.projects_service._get_project_lock_state", + "simcore_service_webserver.projects._projects_service._get_project_lock_state", return_value=ProjectLocked(value=False, status=ProjectStatus.CLOSED), ) @@ -411,6 +413,7 @@ async def enforce_garbage_collect_guest(uid): assert data["login"] != user_email +@pytest.mark.flaky(max_runs=3) @pytest.mark.parametrize("number_of_simultaneous_requests", [1, 2, 32]) async def test_guest_user_is_not_garbage_collected( number_of_simultaneous_requests: int, diff --git a/services/web/server/tests/unit/with_dbs/04/wallets/test_wallets.py b/services/web/server/tests/unit/with_dbs/04/wallets/test_wallets.py index b5ddcaf6f31..1a615af2551 100644 --- a/services/web/server/tests/unit/with_dbs/04/wallets/test_wallets.py +++ b/services/web/server/tests/unit/with_dbs/04/wallets/test_wallets.py @@ -28,7 +28,7 @@ from servicelib.aiohttp import status from simcore_service_webserver.db.models import UserRole from simcore_service_webserver.login.utils import notify_user_confirmation -from simcore_service_webserver.products.api import get_product +from simcore_service_webserver.products.products_service import get_product from simcore_service_webserver.projects.models import ProjectDict from simcore_service_webserver.users.api import UserDisplayAndIdNamesTuple from simcore_service_webserver.wallets._events import ( diff --git a/services/web/server/tests/unit/with_dbs/04/workspaces/test_workspaces__folders_and_projects_crud.py b/services/web/server/tests/unit/with_dbs/04/workspaces/test_workspaces__folders_and_projects_crud.py index c301ead5f90..ac519fba507 100644 --- a/services/web/server/tests/unit/with_dbs/04/workspaces/test_workspaces__folders_and_projects_crud.py +++ b/services/web/server/tests/unit/with_dbs/04/workspaces/test_workspaces__folders_and_projects_crud.py @@ -27,17 +27,12 @@ @pytest.fixture def mock_catalog_api_get_services_for_user_in_product(mocker: MockerFixture): mocker.patch( - "simcore_service_webserver.projects._crud_api_read.get_services_for_user_in_product", + "simcore_service_webserver.projects._crud_api_read.catalog_service.get_services_for_user_in_product", spec=True, return_value=[], ) mocker.patch( - "simcore_service_webserver.projects._crud_handlers.get_services_for_user_in_product", - spec=True, - return_value=[], - ) - mocker.patch( - 
"simcore_service_webserver.projects._crud_handlers.project_uses_available_services", + "simcore_service_webserver.projects._controller.projects_rest.project_uses_available_services", spec=True, return_value=True, ) @@ -259,11 +254,11 @@ def mock_storage_delete_data_folders(mocker: MockerFixture) -> mock.Mock: autospec=True, ) mocker.patch( - "simcore_service_webserver.projects.projects_service.remove_project_dynamic_services", + "simcore_service_webserver.projects._projects_service.remove_project_dynamic_services", autospec=True, ) mocker.patch( - "simcore_service_webserver.projects._crud_api_delete.api.delete_pipeline", + "simcore_service_webserver.projects._crud_api_delete.director_v2_service.delete_pipeline", autospec=True, ) return mocker.patch( diff --git a/services/web/server/tests/unit/with_dbs/04/workspaces/test_workspaces__list_projects_full_search.py b/services/web/server/tests/unit/with_dbs/04/workspaces/test_workspaces__list_projects_full_search.py index 99bbaffc4a2..c71cdf4fb40 100644 --- a/services/web/server/tests/unit/with_dbs/04/workspaces/test_workspaces__list_projects_full_search.py +++ b/services/web/server/tests/unit/with_dbs/04/workspaces/test_workspaces__list_projects_full_search.py @@ -23,17 +23,12 @@ @pytest.fixture def mock_catalog_api_get_services_for_user_in_product(mocker: MockerFixture): mocker.patch( - "simcore_service_webserver.projects._crud_api_read.get_services_for_user_in_product", + "simcore_service_webserver.projects._crud_api_read.catalog_service.get_services_for_user_in_product", spec=True, return_value=[], ) mocker.patch( - "simcore_service_webserver.projects._crud_handlers.get_services_for_user_in_product", - spec=True, - return_value=[], - ) - mocker.patch( - "simcore_service_webserver.projects._crud_handlers.project_uses_available_services", + "simcore_service_webserver.projects._controller.projects_rest.project_uses_available_services", spec=True, return_value=True, ) diff --git a/services/web/server/tests/unit/with_dbs/04/workspaces/test_workspaces__moving_folders_between_workspaces.py b/services/web/server/tests/unit/with_dbs/04/workspaces/test_workspaces__moving_folders_between_workspaces.py index ea7105a3338..b18252fbdd1 100644 --- a/services/web/server/tests/unit/with_dbs/04/workspaces/test_workspaces__moving_folders_between_workspaces.py +++ b/services/web/server/tests/unit/with_dbs/04/workspaces/test_workspaces__moving_folders_between_workspaces.py @@ -28,17 +28,12 @@ def user_role() -> UserRole: @pytest.fixture def mock_catalog_api_get_services_for_user_in_product(mocker: MockerFixture): mocker.patch( - "simcore_service_webserver.projects._crud_api_read.get_services_for_user_in_product", + "simcore_service_webserver.projects._crud_api_read.catalog_service.get_services_for_user_in_product", spec=True, return_value=[], ) mocker.patch( - "simcore_service_webserver.projects._crud_handlers.get_services_for_user_in_product", - spec=True, - return_value=[], - ) - mocker.patch( - "simcore_service_webserver.projects._crud_handlers.project_uses_available_services", + "simcore_service_webserver.projects._controller.projects_rest.project_uses_available_services", spec=True, return_value=True, ) diff --git a/services/web/server/tests/unit/with_dbs/04/workspaces/test_workspaces__moving_projects_between_workspaces.py b/services/web/server/tests/unit/with_dbs/04/workspaces/test_workspaces__moving_projects_between_workspaces.py index a81c76012a0..a308040670b 100644 --- 
a/services/web/server/tests/unit/with_dbs/04/workspaces/test_workspaces__moving_projects_between_workspaces.py +++ b/services/web/server/tests/unit/with_dbs/04/workspaces/test_workspaces__moving_projects_between_workspaces.py @@ -28,17 +28,12 @@ @pytest.fixture def mock_catalog_api_get_services_for_user_in_product(mocker: MockerFixture): mocker.patch( - "simcore_service_webserver.projects._crud_api_read.get_services_for_user_in_product", + "simcore_service_webserver.projects._crud_api_read.catalog_service.get_services_for_user_in_product", spec=True, return_value=[], ) mocker.patch( - "simcore_service_webserver.projects._crud_handlers.get_services_for_user_in_product", - spec=True, - return_value=[], - ) - mocker.patch( - "simcore_service_webserver.projects._crud_handlers.project_uses_available_services", + "simcore_service_webserver.projects._controller.projects_rest.project_uses_available_services", spec=True, return_value=True, ) diff --git a/services/web/server/tests/unit/with_dbs/conftest.py b/services/web/server/tests/unit/with_dbs/conftest.py index d583e3c783e..38e96c4367d 100644 --- a/services/web/server/tests/unit/with_dbs/conftest.py +++ b/services/web/server/tests/unit/with_dbs/conftest.py @@ -1,20 +1,13 @@ -""" Configuration for unit testing with a postgress fixture - - - Unit testing of webserver app with a postgress service as fixture - - Starts test session by running a postgres container as a fixture (see postgress_service) - - IMPORTANT: remember that these are still unit-tests! -""" - -# nopycln: file # pylint: disable=redefined-outer-name # pylint: disable=unused-argument # pylint: disable=unused-variable +# pylint: disable=too-many-arguments import asyncio import random import sys import textwrap +import warnings from collections.abc import AsyncIterable, AsyncIterator, Awaitable, Callable, Iterator from copy import deepcopy from decimal import Decimal @@ -35,7 +28,6 @@ from aiohttp import web from aiohttp.test_utils import TestClient, TestServer from aiopg.sa import create_engine -from aiopg.sa.connection import SAConnection from faker import Faker from models_library.api_schemas_directorv2.dynamic_services import DynamicServiceGet from models_library.products import ProductName @@ -66,9 +58,9 @@ get_default_product_name, get_or_create_product_group, ) -from simcore_service_webserver._constants import INDEX_RESOURCE_NAME from simcore_service_webserver.application import create_application from simcore_service_webserver.application_settings_utils import AppConfigDict +from simcore_service_webserver.constants import INDEX_RESOURCE_NAME from simcore_service_webserver.db.plugin import get_database_engine from simcore_service_webserver.projects.models import ProjectDict from simcore_service_webserver.statics._constants import ( @@ -76,6 +68,7 @@ FRONTEND_APPS_AVAILABLE, ) from sqlalchemy import exc as sql_exceptions +from sqlalchemy.ext.asyncio import AsyncEngine, create_async_engine CURRENT_DIR = Path(sys.argv[0] if __name__ == "__main__" else __file__).resolve().parent @@ -288,8 +281,7 @@ async def _mocked_get_services_for_user(*args, **kwargs): return services_in_project for namespace in ( - "simcore_service_webserver.projects._crud_api_read.get_services_for_user_in_product", - "simcore_service_webserver.projects._crud_handlers.get_services_for_user_in_product", + "simcore_service_webserver.projects._crud_api_read.catalog_service.get_services_for_user_in_product", ): mock = mocker.patch( namespace, @@ -383,7 +375,7 @@ async def _mock_result(): ) mock2 = mocker.patch( - 
"simcore_service_webserver.projects.projects_service.storage_api.delete_data_folders_of_project_node", + "simcore_service_webserver.projects._projects_service.storage_service.delete_data_folders_of_project_node", autospec=True, return_value=None, ) @@ -425,7 +417,7 @@ async def mocked_dynamic_services_interface( ) mock["director_v2.api.create_or_update_pipeline"] = mocker.patch( - "simcore_service_webserver.director_v2.api.create_or_update_pipeline", + "simcore_service_webserver.director_v2.director_v2_service.create_or_update_pipeline", autospec=True, return_value=None, ) @@ -535,6 +527,13 @@ async def aiopg_engine(postgres_db: sa.engine.Engine) -> AsyncIterator[aiopg.sa. engine = await create_engine(f"{postgres_db.url}") assert engine + warnings.warn( + "The 'aiopg_engine' fixture is deprecated and will be removed in a future release. " + "Please use 'asyncpg_engine' fixture instead.", + DeprecationWarning, + stacklevel=2, + ) + yield engine if engine: @@ -542,6 +541,34 @@ async def aiopg_engine(postgres_db: sa.engine.Engine) -> AsyncIterator[aiopg.sa. await engine.wait_closed() +@pytest.fixture +async def asyncpg_engine( # <-- WE SHOULD USE THIS ONE instead of aiopg_engine + postgres_db: sa.engine.Engine, is_pdb_enabled: bool +) -> AsyncIterable[AsyncEngine]: + # NOTE: call to postgres BEFORE app starts + dsn = f"{postgres_db.url}".replace("postgresql://", "postgresql+asyncpg://") + minsize = 1 + maxsize = 50 + + engine: AsyncEngine = create_async_engine( + dsn, + pool_size=minsize, + max_overflow=maxsize - minsize, + connect_args={ + "server_settings": { + "application_name": "webserver_tests_with_dbs:asyncpg_engine" + } + }, + pool_pre_ping=True, # https://docs.sqlalchemy.org/en/14/core/pooling.html#dealing-with-disconnects + future=True, # this uses sqlalchemy 2.0 API, shall be removed when sqlalchemy 2.0 is released + echo=is_pdb_enabled, + ) + + yield engine + + await engine.dispose() + + # REDIS CORE SERVICE ------------------------------------------------------ def _is_redis_responsive(host: str, port: int, password: str) -> bool: # username via https://stackoverflow.com/a/78236235 @@ -631,7 +658,7 @@ async def user_project( fake_project: ProjectDict, logged_user: UserInfoDict, tests_data_dir: Path, - osparc_product_name: str, + osparc_product_name: ProductName, ) -> AsyncIterator[ProjectDict]: async with NewProject( fake_project, @@ -679,23 +706,13 @@ async def with_permitted_override_services_specifications( @pytest.fixture -async def _pre_connection(postgres_db: sa.engine.Engine) -> AsyncIterable[SAConnection]: - # NOTE: call to postgres BEFORE app starts - async with await create_engine( - f"{postgres_db.url}" - ) as engine, engine.acquire() as conn: - yield conn - - -@pytest.fixture -async def all_products_names( - _pre_connection: SAConnection, +async def app_products_names( + asyncpg_engine: AsyncEngine, ) -> AsyncIterable[list[ProductName]]: - # default product - result = await _pre_connection.execute( - products.select().order_by(products.c.priority) - ) - rows = await result.fetchall() + async with asyncpg_engine.connect() as conn: + # default product + result = await conn.execute(products.select().order_by(products.c.priority)) + rows = result.fetchall() assert rows assert len(rows) == 1 osparc_product_row = rows[0] @@ -706,37 +723,41 @@ async def all_products_names( priority = 1 for name in FRONTEND_APPS_AVAILABLE: if name != FRONTEND_APP_DEFAULT: - result = await _pre_connection.execute( - products.insert().values( - random_product( - name=name, - priority=priority, 
- login_settings=osparc_product_row.login_settings, - group_id=None, + + async with asyncpg_engine.begin() as conn: + result = await conn.execute( + products.insert().values( + random_product( + name=name, + priority=priority, + login_settings=osparc_product_row.login_settings, + group_id=None, + ) ) ) - ) - await get_or_create_product_group(_pre_connection, product_name=name) + await get_or_create_product_group(conn, product_name=name) priority += 1 - # get all products - result = await _pre_connection.execute( - sa.select(products.c.name).order_by(products.c.priority) - ) - rows = await result.fetchall() + async with asyncpg_engine.connect() as conn: + # get all products + result = await conn.execute( + sa.select(products.c.name).order_by(products.c.priority) + ) + rows = result.fetchall() yield [r.name for r in rows] - await _pre_connection.execute(products_prices.delete()) - await _pre_connection.execute( - products.delete().where(products.c.name != FRONTEND_APP_DEFAULT) - ) + async with asyncpg_engine.begin() as conn: + await conn.execute(products_prices.delete()) + await conn.execute( + products.delete().where(products.c.name != FRONTEND_APP_DEFAULT) + ) @pytest.fixture async def all_product_prices( - _pre_connection: SAConnection, - all_products_names: list[ProductName], + asyncpg_engine: AsyncEngine, + app_products_names: list[ProductName], faker: Faker, ) -> dict[ProductName, Decimal | None]: """Initial list of prices for all products""" @@ -748,23 +769,24 @@ async def all_product_prices( "tiplite": Decimal(5), "s4l": Decimal(9), "s4llite": Decimal(0), # free of charge - "s4lacad": Decimal(1.1), + "s4lacad": Decimal("1.1"), } result = {} - for product_name in all_products_names: + for product_name in app_products_names: usd_or_none = product_price.get(product_name) if usd_or_none is not None: - await _pre_connection.execute( - products_prices.insert().values( - product_name=product_name, - usd_per_credit=usd_or_none, - comment=faker.sentence(), - min_payment_amount_usd=10, - stripe_price_id=faker.pystr(), - stripe_tax_rate_id=faker.pystr(), + async with asyncpg_engine.begin() as conn: + await conn.execute( + products_prices.insert().values( + product_name=product_name, + usd_per_credit=usd_or_none, + comment=faker.sentence(), + min_payment_amount_usd=10, + stripe_price_id=faker.pystr(), + stripe_tax_rate_id=faker.pystr(), + ) ) - ) result[product_name] = usd_or_none @@ -774,23 +796,23 @@ async def all_product_prices( @pytest.fixture async def latest_osparc_price( all_product_prices: dict[ProductName, Decimal], - _pre_connection: SAConnection, + asyncpg_engine: AsyncEngine, ) -> Decimal: """This inserts a new price for osparc in the history (i.e. 
the old price of osparc is still in the database) """ - - usd = await _pre_connection.scalar( - products_prices.insert() - .values( - product_name="osparc", - usd_per_credit=all_product_prices["osparc"] + 5, - comment="New price for osparc", - stripe_price_id="stripe-price-id", - stripe_tax_rate_id="stripe-tax-rate-id", + async with asyncpg_engine.begin() as conn: + usd = await conn.scalar( + products_prices.insert() + .values( + product_name="osparc", + usd_per_credit=all_product_prices["osparc"] + 5, + comment="New price for osparc", + stripe_price_id="stripe-price-id", + stripe_tax_rate_id="stripe-tax-rate-id", + ) + .returning(products_prices.c.usd_per_credit) ) - .returning(products_prices.c.usd_per_credit) - ) assert usd is not None assert usd != all_product_prices["osparc"] return Decimal(usd) diff --git a/services/web/server/tests/unit/with_dbs/docker-compose-devel.yml b/services/web/server/tests/unit/with_dbs/docker-compose-devel.yml index 2a4402c85a2..016cb6f7ca2 100644 --- a/services/web/server/tests/unit/with_dbs/docker-compose-devel.yml +++ b/services/web/server/tests/unit/with_dbs/docker-compose-devel.yml @@ -1,6 +1,6 @@ services: postgres: - image: "postgres:14.8-alpine@sha256:150dd39ccb7ae6c7ba6130c3582c39a30bb5d3d22cb08ad0ba37001e3f829abc" + image: "postgres:17.2-alpine3.21@sha256:17143ad87797f511036cf8f50ada164aeb371f0d8068a172510549fb5d2cd65f" restart: always init: true environment: @@ -34,6 +34,9 @@ services: - "log_min_duration_statement=500" - "-c" - "log_lock_waits=on" + # -c fsync=off is not recommended for production as it disables syncing writes to disk https://pythonspeed.com/articles/faster-db-tests/ + - "-c" + - "fsync=off" adminer: image: adminer:4.8.1 init: true @@ -85,7 +88,7 @@ services: - "18081:8081" rabbit: - image: itisfoundation/rabbitmq:3.11.2-management + image: itisfoundation/rabbitmq:3.13.7-management init: true environment: - RABBITMQ_DEFAULT_USER=admin diff --git a/services/web/server/tests/unit/with_dbs/docker-compose.yml b/services/web/server/tests/unit/with_dbs/docker-compose.yml index 6fde4baab74..2fbc51dec19 100644 --- a/services/web/server/tests/unit/with_dbs/docker-compose.yml +++ b/services/web/server/tests/unit/with_dbs/docker-compose.yml @@ -1,6 +1,6 @@ services: postgres: - image: "postgres:14.8-alpine@sha256:150dd39ccb7ae6c7ba6130c3582c39a30bb5d3d22cb08ad0ba37001e3f829abc" + image: "postgres:17.2-alpine3.21@sha256:17143ad87797f511036cf8f50ada164aeb371f0d8068a172510549fb5d2cd65f" restart: always init: true environment: @@ -29,6 +29,9 @@ services: - "tcp_keepalives_count=5" - "-c" - "log_lock_waits=on" + # -c fsync=off is not recommended for production as it disables syncing writes to disk https://pythonspeed.com/articles/faster-db-tests/ + - "-c" + - "fsync=off" redis: image: "redis:6.2.6@sha256:4bed291aa5efb9f0d77b76ff7d4ab71eee410962965d052552db1fb80576431d" init: true @@ -50,5 +53,5 @@ services: "${TEST_REDIS_PASSWORD}" ] rabbit: - image: itisfoundation/rabbitmq:3.11.2-management + image: itisfoundation/rabbitmq:3.13.7-management init: true diff --git a/tests/e2e-playwright/Makefile b/tests/e2e-playwright/Makefile index dfee6eb774a..4b684cfa823 100644 --- a/tests/e2e-playwright/Makefile +++ b/tests/e2e-playwright/Makefile @@ -161,6 +161,12 @@ $(SLEEPERS_INPUT_FILE) $(JUPYTER_LAB_INPUT_FILE) $(CLASSIC_TIP_INPUT_FILE) $(S4L if [ "$@" = "$(JUPYTER_LAB_INPUT_FILE)" ]; then \ read -p "Enter the size of the large file (human readable form e.g.
3Gib): " LARGE_FILE_SIZE; \ echo "--service-key=jupyter-math --large-file-size=$$LARGE_FILE_SIZE" >> $@; \ + read -p "Enter the service version (defaults to latest): " SERVICE_VERSION; \ + if [ -z "$$SERVICE_VERSION" ]; then \ + echo "No service version specified, using default."; \ + else \ + echo "--service-version=$$SERVICE_VERSION" >> $@; \ + fi; \ elif [ "$@" = "$(S4L_INPUT_FILE)" ]; then \ read -p "Do you want to check the videostreaming? (requires running with chrome/msedge) [y/n]: " VIDEOSTREAM; \ if [ "$$VIDEOSTREAM" = "y" ]; then \ @@ -173,6 +179,12 @@ $(SLEEPERS_INPUT_FILE) $(JUPYTER_LAB_INPUT_FILE) $(CLASSIC_TIP_INPUT_FILE) $(S4L else \ read -p "Enter the service key: " SERVICE_KEY; \ echo "--service-key=$$SERVICE_KEY" >> $@; \ + read -p "Enter the service version (defaults to latest): " SERVICE_VERSION; \ + if [ -z "$$SERVICE_VERSION" ]; then \ + echo "No service version specified, using default."; \ + else \ + echo "--service-version=$$SERVICE_VERSION" >> $@; \ + fi; \ fi; \ elif [ "$@" = "$(SLEEPERS_INPUT_FILE)" ]; then \ read -p "Enter the number of sleepers: " NUM_SLEEPERS; \ diff --git a/tests/e2e-playwright/requirements/_test.txt b/tests/e2e-playwright/requirements/_test.txt index 59eb1576513..43eb1c9e8d0 100644 --- a/tests/e2e-playwright/requirements/_test.txt +++ b/tests/e2e-playwright/requirements/_test.txt @@ -8,7 +8,7 @@ anyio==4.8.0 # httpx arrow==1.3.0 # via -r requirements/_test_wo_playwright.txt -certifi==2024.12.14 +certifi==2025.1.31 # via # -r requirements/_test_wo_playwright.txt # httpcore @@ -26,7 +26,7 @@ docker==7.1.0 # via -r requirements/_test_wo_playwright.txt email-validator==2.2.0 # via -r requirements/_test_wo_playwright.txt -faker==35.0.0 +faker==36.1.1 # via -r requirements/_test_wo_playwright.txt greenlet==3.1.1 # via playwright @@ -64,7 +64,7 @@ packaging==24.2 # -r requirements/_test_wo_playwright.txt # pytest # pytest-sugar -playwright==1.49.1 +playwright==1.50.0 # via pytest-playwright pluggy==1.5.0 # via @@ -76,9 +76,9 @@ pydantic-core==2.27.2 # via # -r requirements/_test_wo_playwright.txt # pydantic -pyee==12.0.0 +pyee==12.1.1 # via playwright -pytest==8.3.4 +pytest==8.3.5 # via # -r requirements/_test_wo_playwright.txt # pytest-base-url @@ -97,7 +97,7 @@ pytest-metadata==3.1.1 # via # -r requirements/_test_wo_playwright.txt # pytest-html -pytest-playwright==0.6.2 +pytest-playwright==0.7.0 # via -r requirements/_test.in pytest-runner==6.0.1 # via -r requirements/_test_wo_playwright.txt @@ -107,7 +107,6 @@ python-dateutil==2.9.0.post0 # via # -r requirements/_test_wo_playwright.txt # arrow - # faker python-slugify==8.0.4 # via pytest-playwright pyyaml==6.0.2 @@ -141,10 +141,13 @@ typing-extensions==4.12.2 # via # -r requirements/_test_wo_playwright.txt # anyio - # faker # pydantic # pydantic-core # pyee +tzdata==2025.1 + # via + # -r requirements/_test_wo_playwright.txt + # faker urllib3==2.3.0 # via # -r requirements/_test_wo_playwright.txt diff --git a/tests/e2e-playwright/requirements/_test_wo_playwright.txt b/tests/e2e-playwright/requirements/_test_wo_playwright.txt index 442c520fc24..6bb18aa518f 100644 --- a/tests/e2e-playwright/requirements/_test_wo_playwright.txt +++ b/tests/e2e-playwright/requirements/_test_wo_playwright.txt @@ -4,7 +4,7 @@ anyio==4.8.0 # via httpx arrow==1.3.0 # via -r requirements/_test_wo_playwright.in -certifi==2024.12.14 +certifi==2025.1.31 # via # httpcore # httpx @@ -17,7 +17,7 @@ docker==7.1.0 # via -r requirements/_test_wo_playwright.in email-validator==2.2.0 # via pydantic -faker==35.0.0 +faker==36.1.1 # via
-r requirements/_test_wo_playwright.in h11==0.14.0 # via httpcore @@ -47,7 +47,7 @@ pydantic==2.10.6 # via -r requirements/_test_wo_playwright.in pydantic-core==2.27.2 # via pydantic -pytest==8.3.4 +pytest==8.3.5 # via # pytest-html # pytest-instafail @@ -64,9 +64,7 @@ pytest-runner==6.0.1 pytest-sugar==1.0.0 # via -r requirements/_test_wo_playwright.in python-dateutil==2.9.0.post0 - # via - # arrow - # faker + # via arrow pyyaml==6.0.2 # via -r requirements/_test_wo_playwright.in requests==2.32.3 @@ -84,9 +82,10 @@ types-python-dateutil==2.9.0.20241206 typing-extensions==4.12.2 # via # anyio - # faker # pydantic # pydantic-core +tzdata==2025.1 + # via faker urllib3==2.3.0 # via # docker diff --git a/tests/e2e-playwright/requirements/_tools.txt b/tests/e2e-playwright/requirements/_tools.txt index 645bb336ed6..853cda1d8ca 100644 --- a/tests/e2e-playwright/requirements/_tools.txt +++ b/tests/e2e-playwright/requirements/_tools.txt @@ -1,6 +1,6 @@ astroid==3.3.8 # via pylint -black==24.10.0 +black==25.1.0 # via -r requirements/../../../requirements/devenv.txt build==1.2.2.post1 # via pip-tools @@ -18,15 +18,15 @@ distlib==0.3.9 # via virtualenv filelock==3.17.0 # via virtualenv -identify==2.6.6 +identify==2.6.8 # via pre-commit -isort==5.13.2 +isort==6.0.1 # via # -r requirements/../../../requirements/devenv.txt # pylint mccabe==0.7.0 # via pylint -mypy==1.14.1 +mypy==1.15.0 # via -r requirements/../../../requirements/devenv.txt mypy-extensions==1.0.0 # via @@ -41,7 +41,7 @@ packaging==24.2 # build pathspec==0.12.1 # via black -pip==25.0 +pip==25.0.1 # via pip-tools pip-tools==7.4.1 # via -r requirements/../../../requirements/devenv.txt @@ -52,7 +52,7 @@ platformdirs==4.3.6 # virtualenv pre-commit==4.1.0 # via -r requirements/../../../requirements/devenv.txt -pylint==3.3.3 +pylint==3.3.4 # via -r requirements/../../../requirements/devenv.txt pyproject-hooks==1.2.0 # via @@ -63,9 +63,9 @@ pyyaml==6.0.2 # -c requirements/../../../requirements/constraints.txt # -c requirements/_test.txt # pre-commit -ruff==0.9.3 +ruff==0.9.9 # via -r requirements/../../../requirements/devenv.txt -setuptools==75.8.0 +setuptools==75.8.2 # via pip-tools tomlkit==0.13.2 # via pylint @@ -73,7 +73,7 @@ typing-extensions==4.12.2 # via # -c requirements/_test.txt # mypy -virtualenv==20.29.1 +virtualenv==20.29.2 # via pre-commit wheel==0.45.1 # via pip-tools diff --git a/tests/e2e-playwright/tests/conftest.py b/tests/e2e-playwright/tests/conftest.py index a3402b7746d..7df7c63c5e6 100644 --- a/tests/e2e-playwright/tests/conftest.py +++ b/tests/e2e-playwright/tests/conftest.py @@ -112,6 +112,13 @@ def pytest_addoption(parser: pytest.Parser) -> None: default=False, help="Whether service is a legacy service (no sidecar)", ) + group.addoption( + "--service-version", + action="store", + type=str, + default=None, + help="The service version option defines a service specific version", + ) group.addoption( "--template-id", action="store", @@ -272,6 +279,14 @@ def is_service_legacy(request: pytest.FixtureRequest) -> bool: return TypeAdapter(bool).validate_python(autoscaled) +@pytest.fixture(scope="session") +def service_version(request: pytest.FixtureRequest) -> str | None: + if key := request.config.getoption("--service-version"): + assert isinstance(key, str) + return key + return None + + @pytest.fixture(scope="session") def template_id(request: pytest.FixtureRequest) -> str | None: if key := request.config.getoption("--template-id"): @@ -438,6 +453,22 @@ def _open_with_resources(page: Page, *, click_it: bool): return 
open_with_resources_button +def _select_service_version(page: Page, *, version: str) -> None: + try: + # since https://github.com/ITISFoundation/osparc-simcore/pull/7060 + with log_context(logging.INFO, msg=f"selecting version {version}"): + page.get_by_test_id("serviceSelectBox").click(timeout=5 * SECOND) + page.get_by_test_id(f"serviceVersionItem_{version}").click( + timeout=5 * SECOND + ) + # the call is cached, so it is best to wait here a bit + page.wait_for_timeout(2 * SECOND) + + except TimeoutError: + # fall back to the non-robust way + page.get_by_label("Version").select_option(version) + + @pytest.fixture def create_new_project_and_delete( page: Page, @@ -445,16 +476,19 @@ is_product_billable: bool, api_request_context: APIRequestContext, product_url: AnyUrl, -) -> Iterator[Callable[[tuple[RunningState], bool], dict[str, Any]]]: +) -> Iterator[ + Callable[[tuple[RunningState], bool, str | None, str | None], dict[str, Any]] +]: """The first available service currently displayed in the dashboard will be opened NOTE: cannot be used multiple times, or going back to the dashboard will fail! """ created_project_uuids = [] def _( - expected_states: tuple[RunningState] = (RunningState.NOT_STARTED,), - press_open: bool = True, - template_id: str | None = None, + expected_states: tuple[RunningState], + press_open: bool, + template_id: str | None, + service_version: str | None, ) -> dict[str, Any]: assert ( len(created_project_uuids) == 0 @@ -527,6 +561,8 @@ def wait_for_done(response): # not expected in the sim4life context though ... else: + if service_version is not None: + _select_service_version(page, version=service_version) open_button.click() if is_product_billable: _open_with_resources(page, click_it=True) @@ -618,7 +654,9 @@ def find_and_start_service_in_dashboard( page: Page, ) -> Callable[[ServiceType, str, str | None], None]: def _( - service_type: ServiceType, service_name: str, service_key_prefix: str | None + service_type: ServiceType, + service_name: str, + service_key_prefix: str | None, ) -> None: with log_context(logging.INFO, f"Finding {service_name=} in dashboard"): page.get_by_test_id("servicesTabBtn").click() @@ -638,13 +676,13 @@ def _( def create_project_from_new_button( start_study_from_plus_button: Callable[[str], None], create_new_project_and_delete: Callable[ - [tuple[RunningState], bool], dict[str, Any] + [tuple[RunningState], bool, str | None, str | None], dict[str, Any] ], ) -> Callable[[str], dict[str, Any]]: def _(plus_button_test_id: str) -> dict[str, Any]: start_study_from_plus_button(plus_button_test_id) expected_states = (RunningState.UNKNOWN,) - return create_new_project_and_delete(expected_states, False) + return create_new_project_and_delete(expected_states, False, None, None) return _ @@ -652,12 +690,14 @@ def _(plus_button_test_id: str) -> dict[str, Any]: @pytest.fixture def create_project_from_template_dashboard( find_and_click_template_in_dashboard: Callable[[str], None], - create_new_project_and_delete: Callable[[tuple[RunningState]], dict[str, Any]], -) -> Callable[[ServiceType, str, str | None], dict[str, Any]]: + create_new_project_and_delete: Callable[ + [tuple[RunningState], bool, str | None, str | None], dict[str, Any] + ], +) -> Callable[[str], dict[str, Any]]: def _(template_id: str) -> dict[str, Any]: find_and_click_template_in_dashboard(template_id) expected_states = (RunningState.UNKNOWN,) - return
create_new_project_and_delete(expected_states, True, template_id, None) return _ @@ -665,10 +705,15 @@ def _(template_id: str) -> dict[str, Any]: @pytest.fixture def create_project_from_service_dashboard( find_and_start_service_in_dashboard: Callable[[ServiceType, str, str | None], None], - create_new_project_and_delete: Callable[[tuple[RunningState]], dict[str, Any]], -) -> Callable[[ServiceType, str, str | None], dict[str, Any]]: + create_new_project_and_delete: Callable[ + [tuple[RunningState], bool, str | None, str | None], dict[str, Any] + ], +) -> Callable[[ServiceType, str, str | None, str | None], dict[str, Any]]: def _( - service_type: ServiceType, service_name: str, service_key_prefix: str | None + service_type: ServiceType, + service_name: str, + service_key_prefix: str | None, + service_version: str | None, ) -> dict[str, Any]: find_and_start_service_in_dashboard( service_type, service_name, service_key_prefix @@ -676,7 +721,9 @@ def _( expected_states = (RunningState.UNKNOWN,) if service_type is ServiceType.COMPUTATIONAL: expected_states = (RunningState.NOT_STARTED,) - return create_new_project_and_delete(expected_states, True) + return create_new_project_and_delete( + expected_states, True, None, service_version + ) return _ diff --git a/tests/e2e-playwright/tests/jupyterlabs/test_jupyterlab.py b/tests/e2e-playwright/tests/jupyterlabs/test_jupyterlab.py index fcd20bbbd04..f61d510b09b 100644 --- a/tests/e2e-playwright/tests/jupyterlabs/test_jupyterlab.py +++ b/tests/e2e-playwright/tests/jupyterlabs/test_jupyterlab.py @@ -64,9 +64,10 @@ def test_jupyterlab( page: Page, log_in_and_out: RestartableWebSocket, create_project_from_service_dashboard: Callable[ - [ServiceType, str, str | None], dict[str, Any] + [ServiceType, str, str | None, str | None], dict[str, Any] ], service_key: str, + service_version: str | None, large_file_size: ByteSize, large_file_block_size: ByteSize, product_url: AnyUrl, @@ -86,7 +87,7 @@ def test_jupyterlab( ), ): project_data = create_project_from_service_dashboard( - ServiceType.DYNAMIC, service_key, None + ServiceType.DYNAMIC, service_key, None, service_version ) assert "workbench" in project_data, "Expected workbench to be in project data!" assert isinstance( diff --git a/tests/e2e-playwright/tests/sim4life/test_sim4life.py b/tests/e2e-playwright/tests/sim4life/test_sim4life.py index 10a9ddcf97e..b3747da27b1 100644 --- a/tests/e2e-playwright/tests/sim4life/test_sim4life.py +++ b/tests/e2e-playwright/tests/sim4life/test_sim4life.py @@ -23,11 +23,12 @@ def test_sim4life( page: Page, create_project_from_service_dashboard: Callable[ - [ServiceType, str, str | None], dict[str, Any] + [ServiceType, str, str | None, str | None], dict[str, Any] ], create_project_from_new_button: Callable[[str], dict[str, Any]], log_in_and_out: RestartableWebSocket, service_key: str, + service_version: str | None, use_plus_button: bool, is_autoscaled: bool, check_videostreaming: bool, @@ -38,7 +39,7 @@ def test_sim4life( project_data = create_project_from_new_button(service_key) else: project_data = create_project_from_service_dashboard( - ServiceType.DYNAMIC, service_key, None + ServiceType.DYNAMIC, service_key, None, service_version ) assert "workbench" in project_data, "Expected workbench to be in project data!" 
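Usage sketch (illustrative only, not part of this patch): the hunks above thread an optional service version from the new --service-version CLI option through the session-scoped service_version fixture and into the dashboard's version selector (_select_service_version). A minimal test using this plumbing could look as follows; the test body and the ServiceType import path are assumptions, not code from this changeset.

# hypothetical test, mirroring the fixtures introduced above
from collections.abc import Callable
from typing import Any

from pytest_simcore.helpers.playwright import ServiceType  # assumed import path


def test_open_pinned_service_version(
    create_project_from_service_dashboard: Callable[
        [ServiceType, str, str | None, str | None], dict[str, Any]
    ],
    service_version: str | None,  # resolved from --service-version; None means latest
):
    # a non-None version makes create_new_project_and_delete call
    # _select_service_version(page, version=...) before pressing Open
    project_data = create_project_from_service_dashboard(
        ServiceType.DYNAMIC, "jupyter-math", None, service_version
    )
    assert "workbench" in project_data

Invoked, e.g., as: pytest --service-key=jupyter-math --service-version=2.0.9 (the version string here is hypothetical). Note that the factory's parameters are kept positional and mandatory, as in the hunks above, so every caller must state explicitly whether a version is pinned.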
diff --git a/tests/e2e-playwright/tests/sleepers/test_sleepers.py b/tests/e2e-playwright/tests/sleepers/test_sleepers.py index 4415fcf3e49..570511b158d 100644 --- a/tests/e2e-playwright/tests/sleepers/test_sleepers.py +++ b/tests/e2e-playwright/tests/sleepers/test_sleepers.py @@ -37,9 +37,9 @@ _WAITING_FOR_CLUSTER_MAX_WAITING_TIME: Final[int] = 5 * MINUTE _WAITING_FOR_STARTED_MAX_WAITING_TIME: Final[int] = 5 * MINUTE _WAITING_FOR_SUCCESS_MAX_WAITING_TIME_PER_SLEEPER: Final[int] = 1 * MINUTE -_WAITING_FOR_FILE_NAMES_MAX_WAITING_TIME: Final[ - datetime.timedelta -] = datetime.timedelta(seconds=30) +_WAITING_FOR_FILE_NAMES_MAX_WAITING_TIME: Final[datetime.timedelta] = ( + datetime.timedelta(seconds=30) +) _WAITING_FOR_FILE_NAMES_WAIT_INTERVAL: Final[datetime.timedelta] = datetime.timedelta( seconds=1 ) @@ -67,6 +67,7 @@ def _get_expected_file_names_for_version(version: Version) -> list[str]: ) def _get_file_names(page: Page) -> list[str]: file_names_found = [] + page.get_by_test_id("folderGridView").click() for file in page.get_by_test_id("FolderViewerItem").all(): file_name = file.text_content() assert file_name @@ -81,14 +82,14 @@ def test_sleepers( page: Page, log_in_and_out: RestartableWebSocket, create_project_from_service_dashboard: Callable[ - [ServiceType, str, str | None], dict[str, Any] + [ServiceType, str, str | None, str | None], dict[str, Any] ], start_and_stop_pipeline: Callable[..., SocketIOEvent], num_sleepers: int, input_sleep_time: int | None, ): project_data = create_project_from_service_dashboard( - ServiceType.COMPUTATIONAL, "sleeper", "itis" + ServiceType.COMPUTATIONAL, "sleeper", "itis", None ) # we are now in the workbench @@ -216,7 +217,7 @@ def test_sleepers( sleeper.click() # waiting for this response is not enough, the frontend needs some time to show the files # therefore _get_file_names is wrapped with tenacity - with page.expect_response(re.compile(r"files/metadata")): + with page.expect_response(re.compile(r"paths\?file_filter=")): page.get_by_test_id("nodeFilesBtn").click() output_file_names_found = _get_file_names(page) diff --git a/tests/e2e-playwright/tests/tip/conftest.py b/tests/e2e-playwright/tests/tip/conftest.py index b0d979921ed..094c8b8f78e 100644 --- a/tests/e2e-playwright/tests/tip/conftest.py +++ b/tests/e2e-playwright/tests/tip/conftest.py @@ -27,12 +27,12 @@ def _( def create_tip_plan_from_dashboard( find_and_start_tip_plan_in_dashboard: Callable[[str], None], create_new_project_and_delete: Callable[ - [tuple[RunningState], bool], dict[str, Any] + [tuple[RunningState], bool, str | None, str | None], dict[str, Any] ], ) -> Callable[[str], dict[str, Any]]: def _(plan_name_test_id: str) -> dict[str, Any]: find_and_start_tip_plan_in_dashboard(plan_name_test_id) expected_states = (RunningState.UNKNOWN,) - return create_new_project_and_delete(expected_states, press_open=False) + return create_new_project_and_delete(expected_states, False, None, None) return _ diff --git a/tests/e2e-playwright/tests/tip/test_ti_plan.py b/tests/e2e-playwright/tests/tip/test_ti_plan.py index 7d1561efc7b..de4488ac310 100644 --- a/tests/e2e-playwright/tests/tip/test_ti_plan.py +++ b/tests/e2e-playwright/tests/tip/test_ti_plan.py @@ -152,6 +152,10 @@ def test_classic_ti_plan( # noqa: PLR0915 page.wait_for_timeout(_ELECTRODE_SELECTOR_FLICKERING_WAIT_TIME) with log_context(logging.INFO, "Configure selector"): + assert ( + page.get_by_test_id("settingsForm_" + node_ids[0]).count() == 0 + ), "service settings should not be visible" + 
electrode_selector_iframe.get_by_test_id("TargetStructure_Selector").click() electrode_selector_iframe.get_by_test_id( "TargetStructure_Target_(Targets_combined) Hypothalamus" diff --git a/tests/e2e/jest.config.js b/tests/e2e/jest.config.js index ad2b7b1ed11..8c91e5eea9f 100644 --- a/tests/e2e/jest.config.js +++ b/tests/e2e/jest.config.js @@ -1,11 +1,13 @@ module.exports = { preset: "jest-puppeteer", verbose: true, - collectCoverage: true, + collectCoverage: false, coverageReporters: ["lcov", "text"], globals: { url: "http://127.0.0.1.nip.io:9081/", // For local testing, set your deployed url here apiVersion: 'v0/', ourTimeout: 40000, - } + }, + maxWorkers: 1, + maxConcurrency: 1 } diff --git a/tests/e2e/package-lock.json b/tests/e2e/package-lock.json index d5aaa2c02e9..c1b902569fd 100644 --- a/tests/e2e/package-lock.json +++ b/tests/e2e/package-lock.json @@ -30,11 +30,13 @@ } }, "node_modules/@babel/code-frame": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.24.7.tgz", - "integrity": "sha512-BcYH1CVJBO9tvyIZ2jVeXgSIMvGZ2FDRvDdOIVQyuklNKSsx+eppDEBq/g47Ayw+RqNFE+URvOShmf+f/qwAlA==", + "version": "7.26.2", + "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.26.2.tgz", + "integrity": "sha512-RJlIHRueQgwWitWgF8OdFYGZX328Ax5BCemNGlqHfplnRT9ESi8JkFlvaVYbS+UubVY6dpv87Fs2u5M29iNFVQ==", + "license": "MIT", "dependencies": { - "@babel/highlight": "^7.24.7", + "@babel/helper-validator-identifier": "^7.25.9", + "js-tokens": "^4.0.0", "picocolors": "^1.0.0" }, "engines": { @@ -224,17 +226,19 @@ } }, "node_modules/@babel/helper-string-parser": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.24.7.tgz", - "integrity": "sha512-7MbVt6xrwFQbunH2DNQsAP5sTGxfqQtErvBIvIMi6EQnbgUOuVYanvREcmFrOPhoXBrTtjhhP+lW+o5UfK+tDg==", + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.25.9.tgz", + "integrity": "sha512-4A/SCr/2KLd5jrtOMFzaKjVtAei3+2r/NChoBNoZ3EyP/+GlhoaEGoWOZUmFmoITP7zOJyHIMm+DYRd8o3PvHA==", + "license": "MIT", "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helper-validator-identifier": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.24.7.tgz", - "integrity": "sha512-rR+PBcQ1SMQDDyF6X0wxtG8QyLCgUB0eRAGguqRLfkCA87l7yAP7ehq8SNj96OOGTO8OBV70KhuFYcIkHXOg0w==", + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.25.9.tgz", + "integrity": "sha512-Ed61U6XJc3CVRfkERJWDz4dJwKe7iLmmJsbOGu9wSloNSFttHV0I8g6UAgb7qnK5ly5bGLPd4oXZlxCdANBOWQ==", + "license": "MIT", "engines": { "node": ">=6.9.0" } @@ -248,35 +252,26 @@ } }, "node_modules/@babel/helpers": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.24.7.tgz", - "integrity": "sha512-NlmJJtvcw72yRJRcnCmGvSi+3jDEg8qFu3z0AFoymmzLx5ERVWyzd9kVXr7Th9/8yIJi2Zc6av4Tqz3wFs8QWg==", + "version": "7.26.10", + "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.26.10.tgz", + "integrity": "sha512-UPYc3SauzZ3JGgj87GgZ89JVdC5dj0AoetR5Bw6wj4niittNyFh6+eOGonYvJ1ao6B8lEa3Q3klS7ADZ53bc5g==", + "license": "MIT", "dependencies": { - "@babel/template": "^7.24.7", - "@babel/types": "^7.24.7" + "@babel/template": "^7.26.9", + "@babel/types": "^7.26.10" }, "engines": { "node": ">=6.9.0" } }, - "node_modules/@babel/highlight": { - "version": "7.24.7", - 
"resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.24.7.tgz", - "integrity": "sha512-EStJpq4OuY8xYfhGVXngigBJRWxftKX9ksiGDnmlY3o7B/V7KIAc9X4oiK87uPJSc/vs5L869bem5fhZa8caZw==", + "node_modules/@babel/parser": { + "version": "7.26.10", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.26.10.tgz", + "integrity": "sha512-6aQR2zGE/QFi8JpDLjUZEPYOs7+mhKXm86VaKFiLP35JQwQb6bwUE+XbvkH0EptsYhbNBSUGaUBLKqxH1xSgsA==", + "license": "MIT", "dependencies": { - "@babel/helper-validator-identifier": "^7.24.7", - "chalk": "^2.4.2", - "js-tokens": "^4.0.0", - "picocolors": "^1.0.0" + "@babel/types": "^7.26.10" }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/parser": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.24.7.tgz", - "integrity": "sha512-9uUYRm6OqQrCqQdG1iCBwBPZgN8ciDBro2nIOFaiRz1/BCxaI7CNvQbDHvsArAC7Tw9Hda/B3U+6ui9u4HWXPw==", "bin": { "parser": "bin/babel-parser.js" }, @@ -448,13 +443,14 @@ } }, "node_modules/@babel/template": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.24.7.tgz", - "integrity": "sha512-jYqfPrU9JTF0PmPy1tLYHW4Mp4KlgxJD9l2nP9fD6yT/ICi554DmrWBAEYpIelzjHf1msDP3PxJIRt/nFNfBig==", + "version": "7.26.9", + "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.26.9.tgz", + "integrity": "sha512-qyRplbeIpNZhmzOysF/wFMuP9sctmh2cFzRAZOn1YapxBsE1i9bJIY586R/WBLfLcmcBlM8ROBiQURnnNy+zfA==", + "license": "MIT", "dependencies": { - "@babel/code-frame": "^7.24.7", - "@babel/parser": "^7.24.7", - "@babel/types": "^7.24.7" + "@babel/code-frame": "^7.26.2", + "@babel/parser": "^7.26.9", + "@babel/types": "^7.26.9" }, "engines": { "node": ">=6.9.0" @@ -502,13 +498,13 @@ "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" }, "node_modules/@babel/types": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.24.7.tgz", - "integrity": "sha512-XEFXSlxiG5td2EJRe8vOmRbaXVgfcBlszKujvVmWIK/UpywWljQCfzAv3RQCGujWQ1RD4YYWEAqDXfuJiy8f5Q==", + "version": "7.26.10", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.26.10.tgz", + "integrity": "sha512-emqcG3vHrpxUKTrxcblR36dcrcoRDvKmnL/dCL6ZsHaShW80qxCAcNhzQZrpeM765VzEos+xOi4s+r4IXzTwdQ==", + "license": "MIT", "dependencies": { - "@babel/helper-string-parser": "^7.24.7", - "@babel/helper-validator-identifier": "^7.24.7", - "to-fast-properties": "^2.0.0" + "@babel/helper-string-parser": "^7.25.9", + "@babel/helper-validator-identifier": "^7.25.9" }, "engines": { "node": ">=6.9.0" @@ -4564,7 +4560,8 @@ "node_modules/js-tokens": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", - "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==" + "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==", + "license": "MIT" }, "node_modules/js-yaml": { "version": "3.14.1", @@ -5715,14 +5712,6 @@ "resolved": "https://registry.npmjs.org/tmpl/-/tmpl-1.0.5.tgz", "integrity": "sha512-3f0uOEAQwIqGuWW2MVzYg8fV/QNnc/IpuJNG837rLuczAaLVHslWHZQj4IGiEl5Hs3kkbhwL9Ab7Hrsmuj+Smw==" }, - "node_modules/to-fast-properties": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/to-fast-properties/-/to-fast-properties-2.0.0.tgz", - "integrity": "sha512-/OaKK0xYrs3DmxRYqL/yDc+FxFUVYhDlXMhRmv3z915w2HF1tnN1omB354j8VUGO/hbRzyD6Y3sA7v7GS/ceog==", - "engines": { - "node": 
">=4" - } - }, "node_modules/to-regex-range": { "version": "5.0.1", "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", @@ -6090,11 +6079,12 @@ } }, "@babel/code-frame": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.24.7.tgz", - "integrity": "sha512-BcYH1CVJBO9tvyIZ2jVeXgSIMvGZ2FDRvDdOIVQyuklNKSsx+eppDEBq/g47Ayw+RqNFE+URvOShmf+f/qwAlA==", + "version": "7.26.2", + "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.26.2.tgz", + "integrity": "sha512-RJlIHRueQgwWitWgF8OdFYGZX328Ax5BCemNGlqHfplnRT9ESi8JkFlvaVYbS+UubVY6dpv87Fs2u5M29iNFVQ==", "requires": { - "@babel/highlight": "^7.24.7", + "@babel/helper-validator-identifier": "^7.25.9", + "js-tokens": "^4.0.0", "picocolors": "^1.0.0" } }, @@ -6232,14 +6222,14 @@ } }, "@babel/helper-string-parser": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.24.7.tgz", - "integrity": "sha512-7MbVt6xrwFQbunH2DNQsAP5sTGxfqQtErvBIvIMi6EQnbgUOuVYanvREcmFrOPhoXBrTtjhhP+lW+o5UfK+tDg==" + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.25.9.tgz", + "integrity": "sha512-4A/SCr/2KLd5jrtOMFzaKjVtAei3+2r/NChoBNoZ3EyP/+GlhoaEGoWOZUmFmoITP7zOJyHIMm+DYRd8o3PvHA==" }, "@babel/helper-validator-identifier": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.24.7.tgz", - "integrity": "sha512-rR+PBcQ1SMQDDyF6X0wxtG8QyLCgUB0eRAGguqRLfkCA87l7yAP7ehq8SNj96OOGTO8OBV70KhuFYcIkHXOg0w==" + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.25.9.tgz", + "integrity": "sha512-Ed61U6XJc3CVRfkERJWDz4dJwKe7iLmmJsbOGu9wSloNSFttHV0I8g6UAgb7qnK5ly5bGLPd4oXZlxCdANBOWQ==" }, "@babel/helper-validator-option": { "version": "7.24.7", @@ -6247,30 +6237,22 @@ "integrity": "sha512-yy1/KvjhV/ZCL+SM7hBrvnZJ3ZuT9OuZgIJAGpPEToANvc3iM6iDvBnRjtElWibHU6n8/LPR/EjX9EtIEYO3pw==" }, "@babel/helpers": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.24.7.tgz", - "integrity": "sha512-NlmJJtvcw72yRJRcnCmGvSi+3jDEg8qFu3z0AFoymmzLx5ERVWyzd9kVXr7Th9/8yIJi2Zc6av4Tqz3wFs8QWg==", + "version": "7.26.10", + "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.26.10.tgz", + "integrity": "sha512-UPYc3SauzZ3JGgj87GgZ89JVdC5dj0AoetR5Bw6wj4niittNyFh6+eOGonYvJ1ao6B8lEa3Q3klS7ADZ53bc5g==", "requires": { - "@babel/template": "^7.24.7", - "@babel/types": "^7.24.7" + "@babel/template": "^7.26.9", + "@babel/types": "^7.26.10" } }, - "@babel/highlight": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.24.7.tgz", - "integrity": "sha512-EStJpq4OuY8xYfhGVXngigBJRWxftKX9ksiGDnmlY3o7B/V7KIAc9X4oiK87uPJSc/vs5L869bem5fhZa8caZw==", + "@babel/parser": { + "version": "7.26.10", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.26.10.tgz", + "integrity": "sha512-6aQR2zGE/QFi8JpDLjUZEPYOs7+mhKXm86VaKFiLP35JQwQb6bwUE+XbvkH0EptsYhbNBSUGaUBLKqxH1xSgsA==", "requires": { - "@babel/helper-validator-identifier": "^7.24.7", - "chalk": "^2.4.2", - "js-tokens": "^4.0.0", - "picocolors": "^1.0.0" + "@babel/types": "^7.26.10" } }, - "@babel/parser": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.24.7.tgz", - "integrity": 
"sha512-9uUYRm6OqQrCqQdG1iCBwBPZgN8ciDBro2nIOFaiRz1/BCxaI7CNvQbDHvsArAC7Tw9Hda/B3U+6ui9u4HWXPw==" - }, "@babel/plugin-syntax-async-generators": { "version": "7.8.4", "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-async-generators/-/plugin-syntax-async-generators-7.8.4.tgz", @@ -6384,13 +6366,13 @@ } }, "@babel/template": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.24.7.tgz", - "integrity": "sha512-jYqfPrU9JTF0PmPy1tLYHW4Mp4KlgxJD9l2nP9fD6yT/ICi554DmrWBAEYpIelzjHf1msDP3PxJIRt/nFNfBig==", + "version": "7.26.9", + "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.26.9.tgz", + "integrity": "sha512-qyRplbeIpNZhmzOysF/wFMuP9sctmh2cFzRAZOn1YapxBsE1i9bJIY586R/WBLfLcmcBlM8ROBiQURnnNy+zfA==", "requires": { - "@babel/code-frame": "^7.24.7", - "@babel/parser": "^7.24.7", - "@babel/types": "^7.24.7" + "@babel/code-frame": "^7.26.2", + "@babel/parser": "^7.26.9", + "@babel/types": "^7.26.9" } }, "@babel/traverse": { @@ -6426,13 +6408,12 @@ } }, "@babel/types": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.24.7.tgz", - "integrity": "sha512-XEFXSlxiG5td2EJRe8vOmRbaXVgfcBlszKujvVmWIK/UpywWljQCfzAv3RQCGujWQ1RD4YYWEAqDXfuJiy8f5Q==", + "version": "7.26.10", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.26.10.tgz", + "integrity": "sha512-emqcG3vHrpxUKTrxcblR36dcrcoRDvKmnL/dCL6ZsHaShW80qxCAcNhzQZrpeM765VzEos+xOi4s+r4IXzTwdQ==", "requires": { - "@babel/helper-string-parser": "^7.24.7", - "@babel/helper-validator-identifier": "^7.24.7", - "to-fast-properties": "^2.0.0" + "@babel/helper-string-parser": "^7.25.9", + "@babel/helper-validator-identifier": "^7.25.9" } }, "@bcoe/v8-coverage": { @@ -10316,11 +10297,6 @@ "resolved": "https://registry.npmjs.org/tmpl/-/tmpl-1.0.5.tgz", "integrity": "sha512-3f0uOEAQwIqGuWW2MVzYg8fV/QNnc/IpuJNG837rLuczAaLVHslWHZQj4IGiEl5Hs3kkbhwL9Ab7Hrsmuj+Smw==" }, - "to-fast-properties": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/to-fast-properties/-/to-fast-properties-2.0.0.tgz", - "integrity": "sha512-/OaKK0xYrs3DmxRYqL/yDc+FxFUVYhDlXMhRmv3z915w2HF1tnN1omB354j8VUGO/hbRzyD6Y3sA7v7GS/ceog==" - }, "to-regex-range": { "version": "5.0.1", "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", diff --git a/tests/e2e/portal/BIOS_VNS_Calibrator.js b/tests/e2e/portal/BIOS_VNS_Calibrator.js index e6959fedc85..db0aaabf943 100644 --- a/tests/e2e/portal/BIOS_VNS_Calibrator.js +++ b/tests/e2e/portal/BIOS_VNS_Calibrator.js @@ -28,10 +28,10 @@ async function runTutorial () { console.log("Workbench Data:", workbenchData); const BIOSIdViewer = workbenchData["nodeIds"][0]; await tutorial.waitForServices(workbenchData["studyId"], - [BIOSIdViewer], - startTimeout, - false - ); + [BIOSIdViewer], + startTimeout, + false + ); await tutorial.waitFor(5000, 'Some time for starting the service'); await utils.takeScreenshot(page, screenshotPrefix + 'service_started'); diff --git a/tests/e2e/requirements/requirements.txt b/tests/e2e/requirements/requirements.txt index 2f743be8691..c5473cc7305 100644 --- a/tests/e2e/requirements/requirements.txt +++ b/tests/e2e/requirements/requirements.txt @@ -1,4 +1,4 @@ -certifi==2024.12.14 +certifi==2025.1.31 # via # -c requirements/../../../requirements/constraints.txt # requests diff --git a/tests/e2e/tests/register.js b/tests/e2e/tests/register.js new file mode 100644 index 00000000000..560a1e535fe --- /dev/null +++ b/tests/e2e/tests/register.js @@ -0,0 +1,75 @@ +const auto = 
require('../utils/auto'); +const utils = require('../utils/utils'); + + +const { + user, + pass +} = utils.getUserAndPass(); + +module.exports = { + registerAndLogOut: () => { + describe('Register and LogOut', () => { + const firstHandler = async response => { + if (response.url().endsWith("/config")) { + try { + const respStatus = response.status(); + expect(respStatus).toBe(200); + const responseBody = await response.json(); + expect(responseBody.data["invitation_required"]).toBeFalsy(); + } catch (e) { + console.log("Puppeteer error", e); + } + } else if (response.url().endsWith("/register")) { + try { + const respStatus = response.status(); + expect(respStatus).toBe(200); + } catch (e) { + console.log("Puppeteer error", e); + } + } + } + + const secondHandler = response => { + if (response.url().endsWith("/login")) { + try { + const respStatus = response.status(); + expect(respStatus).toBe(200); + } catch (e) { + console.log("Puppeteer error", e); + } + } else if (response.url().endsWith("/me")) { + try { + const respStatus = response.status(); + expect(respStatus).toBe(200); + } catch (e) { + console.log("Puppeteer error", e); + } + } else if (response.url().endsWith("/logout")) { + expect(response.status()).toBe(200); + } + } + + beforeAll(async () => { + console.log("Start:", new Date().toUTCString()); + + await page.goto(url); + }, ourTimeout); + + afterAll(async () => { + page.off('response', firstHandler); + page.off('response', secondHandler); + + console.log("End:", new Date().toUTCString()); + }) + + test('Register and Log Out', async () => { + page.on('response', firstHandler); + await auto.register(page, user, pass); + page.on('response', secondHandler); + await auto.logOut(page); + await page.waitFor(5000); + }, ourTimeout); + }); + } +} diff --git a/tests/e2e/tests/register.test.js b/tests/e2e/tests/register.test.js deleted file mode 100644 index 65bf701bd82..00000000000 --- a/tests/e2e/tests/register.test.js +++ /dev/null @@ -1,71 +0,0 @@ -const auto = require('../utils/auto'); -const utils = require('../utils/utils'); - -const { - user, - pass -} = utils.getUserAndPass(); - -const firstHandler = async response => { - if (response.url().endsWith("/config")) { - try { - const respStatus = response.status(); - expect(respStatus).toBe(200); - const responseBody = await response.json(); - expect(responseBody.data["invitation_required"]).toBeFalsy(); - } - catch (e) { - console.log("Pptr error", e); - } - } - else if (response.url().endsWith("/register")) { - try { - const respStatus = response.status(); - expect(respStatus).toBe(200); - } - catch (e) { - console.log("Pptr error", e); - } - } -} - -const secondHandler = response => { - if (response.url().endsWith("/login")) { - try { - const respStatus = response.status(); - expect(respStatus).toBe(200); - } - catch (e) { - console.log("Pptr error", e); - } - } - else if (response.url().endsWith("/me")) { - try { - const respStatus = response.status(); - expect(respStatus).toBe(200); - } - catch (e) { - console.log("Pptr error", e); - } - } - else if (response.url().endsWith("/logout")) { - expect(response.status()).toBe(200); - } -} - -beforeAll(async () => { - await page.goto(url); -}, ourTimeout); - -afterAll(async () => { - page.off('response', firstHandler); - page.off('response', secondHandler); -}) - -test('Register and Log Out', async () => { - page.on('response', firstHandler); - await auto.register(page, user, pass); - page.on('response', secondHandler); - await auto.logOut(page); - await page.waitFor(5000); -}, 
ourTimeout); diff --git a/tests/e2e/tests/startupCalls.js b/tests/e2e/tests/startupCalls.js new file mode 100644 index 00000000000..78d080c1367 --- /dev/null +++ b/tests/e2e/tests/startupCalls.js @@ -0,0 +1,102 @@ +const auto = require('../utils/auto'); +const utils = require('../utils/utils'); + +module.exports = { + startupCalls: () => { + describe('Calls after logging in', () => { + const { + user, + pass + } = utils.getUserAndPass(); + + const responses = { + me: null, + tags: null, + tasks: null, + uiConfig: null, + studies: null, + templates: null, + services: null, + }; + + beforeAll(async () => { + console.log("Start:", new Date().toUTCString()); + + page.on('response', response => { + const url = response.url(); + if (url.endsWith('/me')) { + responses.me = response.json(); + } else if (url.endsWith('/tags')) { + responses.tags = response.json(); + } else if (url.endsWith('/tasks')) { + responses.tasks = response.json(); + } else if (url.endsWith('/ui')) { + responses.uiConfig = response.json(); + } else if (url.includes('projects?type=user')) { + responses.studies = response.json(); + } else if (url.includes('projects?type=template')) { + responses.templates = response.json(); + } else if (url.includes('catalog/services/-/latest')) { + responses.services = response.json(); + } + }); + + await page.goto(url); + + console.log("Registering user"); + await auto.register(page, user, pass); + console.log("Registered"); + + await page.waitFor(10000); + }, ourTimeout); + + afterAll(async () => { + await auto.logOut(page); + + console.log("End:", new Date().toUTCString()); + }, ourTimeout); + + test('Profile', async () => { + const responseEnv = await responses.me; + expect(responseEnv.data["login"]).toBe(user); + }, ourTimeout); + + test('Tags', async () => { + const responseEnv = await responses.tags; + expect(Array.isArray(responseEnv.data)).toBeTruthy(); + }, ourTimeout); + + /* + test('Tasks', async () => { + const responseEnv = await responses.tasks; + expect(Array.isArray(responseEnv.data)).toBeTruthy(); + }, ourTimeout); + */ + + test('UI Config', async () => { + const responseEnv = await responses.uiConfig; + expect(responseEnv.data["productName"]).toBe("osparc"); + const uiConfig = responseEnv.data["ui"]; + const isObject = typeof uiConfig === 'object' && !Array.isArray(uiConfig) && uiConfig !== null; + expect(isObject).toBeTruthy(); + }, ourTimeout); + + test('Studies', async () => { + const responseEnv = await responses.studies; + expect(Array.isArray(responseEnv.data)).toBeTruthy(); + }, ourTimeout); + + test('Templates', async () => { + const responseEnv = await responses.templates; + expect(Array.isArray(responseEnv.data)).toBeTruthy(); + }, ourTimeout); + + test('Services', async () => { + const responseEnv = await responses.services; + expect(responseEnv.data._meta.total).toBeGreaterThan(0); + expect(Array.isArray(responseEnv.data.data)).toBeTruthy(); + expect(responseEnv.data.data.length).toBeGreaterThan(0); + }, ourTimeout); + }); + } +} diff --git a/tests/e2e/tests/startupCalls.test.js b/tests/e2e/tests/startupCalls.test.js deleted file mode 100644 index c9ce849056f..00000000000 --- a/tests/e2e/tests/startupCalls.test.js +++ /dev/null @@ -1,47 +0,0 @@ -const auto = require('../utils/auto'); -const utils = require('../utils/utils'); - -describe('Calls after logging in', () => { - const { - user, - pass - } = utils.getUserAndPass(); - - beforeAll(async () => { - await page.goto(url); - await auto.register(page, user, pass); - await page.waitFor(1000); - }, ourTimeout); - 
- afterAll(async () => { - await auto.logOut(page); - }, ourTimeout); - - test('Profile', async () => { - const responseEnv = await utils.fetchReq('me'); - expect(responseEnv.data["login"]).toBe(user); - }, ourTimeout); - - test('Studies', async () => { - const responseEnv = await utils.fetchReq('projects?type=user'); - expect(Array.isArray(responseEnv.data)).toBeTruthy(); - }, ourTimeout); - - test('Templates', async () => { - const responseEnv = await utils.fetchReq('projects?type=template'); - expect(Array.isArray(responseEnv.data)).toBeTruthy(); - }, ourTimeout); - - test('Services', async () => { - const responseEnv = await utils.fetchReq('catalog/services/-/latest'); - expect(responseEnv.data._meta.total).toBeGreaterThan(0); - expect(Array.isArray(responseEnv.data.data)).toBeTruthy(); - expect(responseEnv.data.data.length).toBeGreaterThan(0); - }, ourTimeout); - - test('Locations', async () => { - const responseEnv = await utils.fetchReq('storage/locations'); - expect(Array.isArray(responseEnv.data)).toBeTruthy(); - expect(responseEnv.data.length).toBeGreaterThan(0); - }, ourTimeout); -}); diff --git a/tests/e2e/tests/tags.js b/tests/e2e/tests/tags.js new file mode 100644 index 00000000000..0814f11d47d --- /dev/null +++ b/tests/e2e/tests/tags.js @@ -0,0 +1,159 @@ +const utils = require('../utils/utils'); +const auto = require('../utils/auto'); +const waitAndClick = require('../utils/utils').waitAndClick; + + +module.exports = { + testTags: () => { + describe('tags testing', () => { + const { + user, + pass, + } = utils.getUserAndPass(); + + const TAG_NAME = 'tag_test'; + const TAG_NAME_2 = 'tag_test_2'; + let studyId = null; + let tagId = null; + + /** + * This function records the IDs of the study and tag created in order to later remove them. 
+ */ + const responseHandler = response => { + if (response.url().endsWith('/tags') && response.request().method() === 'POST') { + response.json() + .then(({ + data: { + id + } + }) => { + console.log("Tag created, id", id); + tagId = id; + }); + } + if (response.url().endsWith('/projects') && response.request().method() === 'POST') { + response.json() + .then(({ + data: { + uuid + } + }) => { + console.log("Study created, uuid", uuid); + studyId = uuid; + }); + } + } + + beforeAll(async () => { + page.on('response', responseHandler); + await page.goto(url); + await auto.register(page, user, pass); + // Create new study + const uiConfig = await page.evaluate(async () => await osparc.store.Products.getInstance().fetchUiConfig()); + if ("plusButton" in uiConfig) { + await waitAndClick(page, '[osparc-test-id="newPlusBtn"]'); + } + await waitAndClick(page, '[osparc-test-id="emptyStudyBtn"]'); + // Wait until project is created and Dashboard button is enabled + await utils.sleep(4000); + await auto.toDashboard(page); + }, ourTimeout * 2); + + afterAll(async () => { + // Cleaning + await page.evaluate(async function(studyId, tagId) { + await osparc.data.Resources.fetch('studies', 'delete', { + url: { + "studyId": studyId + } + }, studyId); + await osparc.data.Resources.fetch('tags', 'delete', { + url: { + tagId: tagId + } + }, tagId); + }, studyId, tagId); + page.off('response', responseHandler); + await auto.logOut(page); + }, ourTimeout); + + test('add a tag', async () => { + // Add a tag + await waitAndClick(page, '[osparc-test-id="userMenuBtn"]'); + await waitAndClick(page, '[osparc-test-id="userMenuPreferencesBtn"]'); + await waitAndClick(page, '[osparc-test-id="preferencesTagsTabBtn"]'); + await waitAndClick(page, '[osparc-test-id="addTagBtn"]'); + await utils.typeInInputElement(page, '[qxclass="osparc.form.tag.TagItem"]:last-of-type input[type="text"]', TAG_NAME); + await waitAndClick(page, '[qxclass="osparc.form.tag.TagItem"]:last-of-type [qxclass="osparc.ui.form.FetchButton"]'); + // Check tag was added + await page.waitForFunction(tagName => { + const el = document.querySelector( + '[qxclass="osparc.form.tag.TagItem"]:last-of-type [qxclass="osparc.ui.basic.Tag"]' + ); + return el && el.innerText === tagName; + }, {}, TAG_NAME); + // Close properties + await waitAndClick(page, '[osparc-test-id="preferencesWindowCloseBtn"]'); + }, ourTimeout); + + test('tag shows in filters', async () => { + // Check that tag shows in filter + await waitAndClick(page, '[osparc-test-id="searchBarFilter-textField-study"]'); + await waitAndClick(page, '[osparc-test-id="searchBarFilter-tags-button"]'); + const tagFilterMenu = await page.waitForSelector('[osparc-test-id="searchBarFilter-tags-menu"]:not([style*="display: none"])'); + expect(await tagFilterMenu.evaluate(el => el.innerText)).toContain(TAG_NAME); + }, ourTimeout); + + // wait until card gets unlocked. 
Tags will anyway be replaced by folder in the coming weeks + test.skip('assign tag and reflect changes', async () => { + await page.waitForSelector( + '[qxclass="osparc.dashboard.GridButtonItem"] > [qxclass="osparc.ui.basic.Thumbnail"]', + { + hidden: true + } + ); + // Assign to study + await waitAndClick(page, '[qxclass="osparc.dashboard.GridButtonItem"] [osparc-test-id="studyItemMenuButton"]'); + await waitAndClick(page, '[osparc-test-id="moreInfoBtn"]'); + await waitAndClick(page, '[osparc-test-id="editStudyEditTagsBtn"]'); + await waitAndClick(page, '[qxclass="osparc.form.tag.TagToggleButton"]'); + await waitAndClick(page, '[osparc-test-id="saveTagsBtn"]'); + // UI displays the change + let displayedTag = await page.waitForSelector('[qxclass="osparc.dashboard.GridButtonItem"] [qxclass="osparc.ui.basic.Tag"]') + await waitAndClick(page, '.qx-service-window[qxclass="osparc.ui.window.Window"] > .qx-workbench-small-cap-captionbar [qxclass="qx.ui.form.Button"]'); + expect(await displayedTag.evaluate(el => el.innerText)).toContain(TAG_NAME); + }, ourTimeout); + + // wait until card gets unlocked. Tags will anyway be replaced by folder in the coming weeks + test.skip('change tag and reflect changes', async () => { + // Change the tag + await waitAndClick(page, '[osparc-test-id="userMenuBtn"]'); + await waitAndClick(page, '[osparc-test-id="userMenuPreferencesBtn"]'); + await waitAndClick(page, '[osparc-test-id="preferencesTagsTabBtn"]'); + await waitAndClick(page, '[qxclass="osparc.form.tag.TagItem"] [qxclass="qx.ui.form.Button"]'); + await utils.clearInput(page, '[qxclass="osparc.form.tag.TagItem"] input[type="text"]'); + await utils.typeInInputElement(page, '[qxclass="osparc.form.tag.TagItem"] input[type="text"]', TAG_NAME_2); + await waitAndClick(page, '[qxclass="osparc.form.tag.TagItem"] [qxclass="osparc.ui.form.FetchButton"]'); + await page.waitForFunction(tagName => { + const el = document.querySelector( + '[qxclass="osparc.form.tag.TagItem"] [qxclass="osparc.ui.basic.Tag"]' + ); + return el && el.innerText === tagName; + }, {}, TAG_NAME_2); + // Close properties + await waitAndClick(page, '[osparc-test-id="preferencesWindowCloseBtn"]'); + // Check that tag name changed in filter and study list + await waitAndClick(page, '[osparc-test-id="searchBarFilter-textField-study"]'); + await waitAndClick(page, '[osparc-test-id="searchBarFilter-tags-button"]'); + const tagFilterMenu = await page.waitForSelector('[osparc-test-id="searchBarFilter-tags-menu"]:not([style*="display: none"])'); + expect(await tagFilterMenu.evaluate(el => el.innerText)).toContain(TAG_NAME_2); + await page.waitForFunction(tagName => { + const el = document.querySelector( + '[qxclass="osparc.dashboard.GridButtonItem"] [qxclass="osparc.ui.basic.Tag"]' + ); + return el && el.innerText === tagName; + }, {}, TAG_NAME_2); + }, ourTimeout); + }); + } +} diff --git a/tests/e2e/tests/tags.tes.js b/tests/e2e/tests/tags.tes.js deleted file mode 100644 index c7026220b88..00000000000 --- a/tests/e2e/tests/tags.tes.js +++ /dev/null @@ -1,153 +0,0 @@ -// OM rename this file and fix the test - -const utils = require('../utils/utils'); -const auto = require('../utils/auto'); -const waitAndClick = require('../utils/utils').waitAndClick; - -describe('tags testing', () => { - const { - user, - pass, - } = utils.getUserAndPass(); - - const TAG_NAME = 'tag_test'; - const TAG_NAME_2 = 'tag_test_2'; - let studyId = null; - let tagId = null; - - /** - * This function records the IDs of the study and tag created in order to later remove them. 
- */ - const responseHandler = response => { - if (response.url().endsWith('/tags') && response.request().method() === 'POST') { - response.json() - .then(({ - data: { - id - } - }) => { - console.log("Tag created, id", id); - tagId = id; - }); - } - if (response.url().endsWith('/projects') && response.request().method() === 'POST') { - response.json() - .then(({ - data: { - uuid - } - }) => { - console.log("Study created, uuid", uuid); - studyId = uuid; - }); - } - } - - beforeAll(async () => { - page.on('response', responseHandler); - await page.goto(url); - await auto.register(page, user, pass); - // Create new study - await waitAndClick(page, '[osparc-test-id="newPlusBtn"]'); - await waitAndClick(page, '[osparc-test-id="emptyStudyBtn"]'); - // Wait until project is created and Dashboard button is enabled - await utils.sleep(4000); - await auto.toDashboard(page); - }, ourTimeout * 2); - - afterAll(async () => { - // Cleaning - await page.evaluate(async function(studyId, tagId) { - await osparc.data.Resources.fetch('studies', 'delete', { - url: { - "studyId": studyId - } - }, studyId); - await osparc.data.Resources.fetch('tags', 'delete', { - url: { - tagId: tagId - } - }, tagId); - }, studyId, tagId); - page.off('response', responseHandler); - await auto.logOut(page); - }, ourTimeout); - - test('add a tag', async () => { - // Add a tag - await waitAndClick(page, '[osparc-test-id="userMenuBtn"]'); - await waitAndClick(page, '[osparc-test-id="userMenuPreferencesBtn"]'); - await waitAndClick(page, '[osparc-test-id="preferencesTagsTabBtn"]'); - await waitAndClick(page, '[osparc-test-id="addTagBtn"]'); - await utils.typeInInputElement(page, '[qxclass="osparc.form.tag.TagItem"]:last-of-type input[type="text"]', TAG_NAME); - await waitAndClick(page, '[qxclass="osparc.form.tag.TagItem"]:last-of-type [qxclass="osparc.ui.form.FetchButton"]'); - // Check tag was added - await page.waitForFunction(tagName => { - const el = document.querySelector( - '[qxclass="osparc.form.tag.TagItem"]:last-of-type [qxclass="osparc.ui.basic.Tag"]' - ); - return el && el.innerText === tagName; - }, {}, TAG_NAME); - // Close properties - await waitAndClick(page, '[osparc-test-id="preferencesWindowCloseBtn"]'); - }, ourTimeout); - - test('tag shows in filters', async () => { - // Check that tag shows in filter - await waitAndClick(page, '[osparc-test-id="searchBarFilter-textField-study"]'); - await waitAndClick(page, '[osparc-test-id="searchBarFilter-tags-button"]'); - const tagFilterMenu = await page.waitForSelector('[osparc-test-id="searchBarFilter-tags-menu"]:not([style*="display: none"])'); - expect(await tagFilterMenu.evaluate(el => el.innerText)).toContain(TAG_NAME); - }, ourTimeout); - - // wait until card gets unlocked. 
Tags will anyway be replaced by folder in the coming weeks - test.skip('assign tag and reflect changes', async () => { - await page.waitForSelector( - '[qxclass="osparc.dashboard.GridButtonItem"] > [qxclass="osparc.ui.basic.Thumbnail"]', - { - hidden: true - } - ); - // Assign to study - await waitAndClick(page, '[qxclass="osparc.dashboard.GridButtonItem"] [osparc-test-id="studyItemMenuButton"]'); - await waitAndClick(page, '[osparc-test-id="moreInfoBtn"]'); - await waitAndClick(page, '[osparc-test-id="editStudyEditTagsBtn"]'); - await waitAndClick(page, '[qxclass="osparc.form.tag.TagToggleButton"]'); - await waitAndClick(page, '[osparc-test-id="saveTagsBtn"]'); - // UI displays the change - let displayedTag = await page.waitForSelector('[qxclass="osparc.dashboard.GridButtonItem"] [qxclass="osparc.ui.basic.Tag"]') - await waitAndClick(page, '.qx-service-window[qxclass="osparc.ui.window.Window"] > .qx-workbench-small-cap-captionbar [qxclass="qx.ui.form.Button"]'); - expect(await displayedTag.evaluate(el => el.innerText)).toContain(TAG_NAME); - }, ourTimeout); - - // wait until card gets unlocked. Tags will anyway be replaced by folder in the coming weeks - test.skip('change tag and reflect changes', async () => { - // Change the tag - await waitAndClick(page, '[osparc-test-id="userMenuBtn"]'); - await waitAndClick(page, '[osparc-test-id="userMenuPreferencesBtn"]'); - await waitAndClick(page, '[osparc-test-id="preferencesTagsTabBtn"]'); - await waitAndClick(page, '[qxclass="osparc.form.tag.TagItem"] [qxclass="qx.ui.form.Button"]'); - await utils.clearInput(page, '[qxclass="osparc.form.tag.TagItem"] input[type="text"]'); - await utils.typeInInputElement(page, '[qxclass="osparc.form.tag.TagItem"] input[type="text"]', TAG_NAME_2); - await waitAndClick(page, '[qxclass="osparc.form.tag.TagItem"] [qxclass="osparc.ui.form.FetchButton"]'); - await page.waitForFunction(tagName => { - const el = document.querySelector( - '[qxclass="osparc.form.tag.TagItem"] [qxclass="osparc.ui.basic.Tag"]' - ); - return el && el.innerText === tagName; - }, {}, TAG_NAME_2); - // Close properties - await waitAndClick(page, '[osparc-test-id="preferencesWindowCloseBtn"]'); - // Check that tag name changed in filter and study list - await waitAndClick(page, '[osparc-test-id="searchBarFilter-textField-study"]'); - await waitAndClick(page, '[osparc-test-id="searchBarFilter-tags-button"]'); - const tagFilterMenu = await page.waitForSelector('[osparc-test-id="searchBarFilter-tags-menu"]:not([style*="display: none"])'); - expect(await tagFilterMenu.evaluate(el => el.innerText)).toContain(TAG_NAME_2); - await page.waitForFunction(tagName => { - const el = document.querySelector( - '[qxclass="osparc.dashboard.GridButtonItem"] [qxclass="osparc.ui.basic.Tag"]' - ); - return el && el.innerText === tagName; - }, {}, TAG_NAME_2); - }, ourTimeout); -}); diff --git a/tests/e2e/tests/testsToRunSequentially.test.js b/tests/e2e/tests/testsToRunSequentially.test.js new file mode 100644 index 00000000000..b7f989e28bc --- /dev/null +++ b/tests/e2e/tests/testsToRunSequentially.test.js @@ -0,0 +1,10 @@ +const { checkUrl } = require('./url.js'); +const { checkMetadata } = require('./title'); +const { startupCalls } = require('./startupCalls'); + + +describe('Sequentially run tests', () => { + checkUrl(); + checkMetadata(); + startupCalls(); +}); diff --git a/tests/e2e/tests/title.js b/tests/e2e/tests/title.js new file mode 100644 index 00000000000..4f16b285eb7 --- /dev/null +++ b/tests/e2e/tests/title.js @@ -0,0 +1,42 @@ +const appMetadata = 
require('../../../services/static-webserver/client/scripts/apps_metadata.json') + +module.exports = { + checkMetadata: () => { + describe('Check Metadata', () => { + beforeAll(async () => { + console.log("Start:", new Date().toUTCString()); + + await page.goto(url); + }, ourTimeout); + + afterAll(() => { + console.log("End:", new Date().toUTCString()); + }, ourTimeout); + + test('Check site metadata', async () => { + const title = await page.title(); + expect(title).toContain("PARC"); + + // oSPARC ([0]) is the product served by default + const replacements = appMetadata["applications"][0]["replacements"]; + + const description = await page.$$eval("head > meta[name='description']", descriptions => { + return descriptions[0].content; + }); + expect(description).toBe(replacements["replace_me_og_description"]); + + // Open Graph metadata + const ogTitle = await page.$$eval("head > meta[property='og:title']", ogTitles => { + return ogTitles[0].content; + }); + expect(ogTitle).toBe(replacements["replace_me_og_title"]); + + const ogDescription = await page.$$eval("head > meta[property='og:description']", ogDescriptions => { + return ogDescriptions[0].content; + }); + expect(ogDescription).toBe(replacements["replace_me_og_description"]); + + }, 20000); + }); + } +} diff --git a/tests/e2e/tests/title.test.js b/tests/e2e/tests/title.test.js deleted file mode 100644 index dc5c5e0e9f9..00000000000 --- a/tests/e2e/tests/title.test.js +++ /dev/null @@ -1,30 +0,0 @@ -const appMetadata = require('../../../services/static-webserver/client/scripts/apps_metadata.json') - -beforeAll(async () => { - await page.goto(url); -}, ourTimeout); - -test('Check site title', async () => { - const title = await page.title(); - expect(title).toBe("oSPARC"); - - // oSPARC ([0]) is the product served by default - const replacements = appMetadata["applications"][0]["replacements"]; - - const description = await page.$$eval("head > meta[name='description']", descriptions => { - return descriptions[0].content; - }); - expect(description).toBe(replacements["replace_me_og_description"]); - - // Open Graph metadata - const ogTitle = await page.$$eval("head > meta[property='og:title']", ogTitles => { - return ogTitles[0].content; - }); - expect(ogTitle).toBe(replacements["replace_me_og_title"]); - - const ogDescription = await page.$$eval("head > meta[property='og:description']", ogDescriptions => { - return ogDescriptions[0].content; - }); - expect(ogDescription).toBe(replacements["replace_me_og_description"]); - -}, 20000); diff --git a/tests/e2e/tests/url.js b/tests/e2e/tests/url.js new file mode 100644 index 00000000000..d44092edf20 --- /dev/null +++ b/tests/e2e/tests/url.js @@ -0,0 +1,20 @@ +module.exports = { + checkUrl: () => { + describe('Check URL', () => { + beforeAll(async () => { + console.log("Start:", new Date().toUTCString()); + + await page.goto(url); + }, ourTimeout); + + afterAll(async () => { + console.log("End:", new Date().toUTCString()); + }, ourTimeout); + + test('Check site url', async () => { + const url2 = page.url(); + expect(url2).toBe(url); + }, 20000); + }); + } +} diff --git a/tests/e2e/tests/url.test.js b/tests/e2e/tests/url.test.js deleted file mode 100644 index 13380177eeb..00000000000 --- a/tests/e2e/tests/url.test.js +++ /dev/null @@ -1,8 +0,0 @@ -beforeAll(async () => { - await page.goto(url); -}, ourTimeout); - -test('Check site url', async () => { - const url2 = page.url(); - expect(url2).toBe(url); -}, 20000); diff --git a/tests/e2e/tutorials/tutorialBase.js 
b/tests/e2e/tutorials/tutorialBase.js index 6899d3bcb26..4739f1f7c33 100644 --- a/tests/e2e/tutorials/tutorialBase.js +++ b/tests/e2e/tutorials/tutorialBase.js @@ -26,6 +26,7 @@ class TutorialBase { this.__responsesQueue = null; this.__services = null; + this.__studyId = null; this.__interval = null; @@ -216,8 +217,9 @@ class TutorialBase { let resp = null; try { resp = await this.__responsesQueue.waitUntilResponse(":open"); - } - catch (err) { + const studyId = this.__studyId = resp["data"]["uuid"]; + console.log("Study ID:", studyId); + } catch (err) { console.error("Error:", this.__templateName, "could not be started", err); throw (err); } @@ -234,10 +236,9 @@ class TutorialBase { await auto.dashboardNewTIPlan(this.__page); await this.__responsesQueue.waitUntilResponse("projects?from_study="); resp = await this.__responsesQueue.waitUntilResponse(":open"); - const studyId = resp["data"]["uuid"]; + const studyId = this.__studyId = resp["data"]["uuid"]; console.log("Study ID:", studyId); - } - catch (err) { + } catch (err) { console.error(`Error: Classic TI could not be started:\n`, err); throw (err); } @@ -254,10 +255,9 @@ class TutorialBase { await this.waitFor(2000); await auto.dashboardStartSim4LifeLite(this.__page); resp = await this.__responsesQueue.waitUntilResponse(":open"); - const studyId = resp["data"]["uuid"]; + const studyId = this.__studyId = resp["data"]["uuid"]; console.log("Study ID:", studyId); - } - catch (err) { + } catch (err) { console.error(`Error: Sim4Life Lite could not be started:\n`, err); throw (err); } @@ -274,10 +274,9 @@ class TutorialBase { await this.__goTo(); resp = await this.__responsesQueue.waitUntilResponse(":open", openStudyTimeout); await this.__printMe(); - const studyId = resp["data"]["uuid"]; + const studyId = this.__studyId = resp["data"]["uuid"]; console.log("Study ID:", studyId); - } - catch (err) { + } catch (err) { console.error("Error:", this.__templateName, "could not be started", err); throw (err); } @@ -294,10 +293,9 @@ class TutorialBase { assert(templateFound, "Expected template, got nothing. TIP: did you inject templates in database??") await this.__responsesQueue.waitUntilResponse("projects?from_study="); resp = await this.__responsesQueue.waitUntilResponse(":open"); - const studyId = resp["data"]["uuid"]; + const studyId = this.__studyId = resp["data"]["uuid"]; console.log("Study ID:", studyId); - } - catch (err) { + } catch (err) { console.error(`Error: "${this.__templateName}" template could not be started:\n`, err); throw (err); } @@ -314,10 +312,9 @@ class TutorialBase { const serviceFound = await auto.dashboardOpenService(this.__page, this.__templateName); assert(serviceFound, "Expected service, got nothing. 
TIP: is it available??"); resp = await this.__responsesQueue.waitUntilResponse(":open"); - const studyId = resp["data"]["uuid"]; + const studyId = this.__studyId = resp["data"]["uuid"]; console.log("Study ID:", studyId); - } - catch (err) { + } catch (err) { console.error(`Error: "${this.__templateName}" service could not be started:\n`, err); throw (err); } @@ -448,24 +445,26 @@ class TutorialBase { } async openNodeFiles(nodeId) { - this.__responsesQueue.addResponseListener("storage/locations/0/files/metadata?uuid_filter=" + nodeId); + const pathFilter = `${this.__studyId}/${nodeId}`; + const path = "storage/locations/0/paths?file_filter=" + pathFilter; + this.__responsesQueue.addResponseListener(path); await auto.openNodeFiles(this.__page); try { - await this.__responsesQueue.waitUntilResponse("storage/locations/0/files/metadata?uuid_filter=" + nodeId); - } - catch (err) { + await this.__responsesQueue.waitUntilResponse(path); + } catch (err) { console.error("Error: open node files", err); throw (err); } } async openNodeFilesAppMode(nodeId) { - this.__responsesQueue.addResponseListener("storage/locations/0/files/metadata?uuid_filter=" + nodeId); + const pathFilter = `${this.__studyId}/${nodeId}`; + const path = "storage/locations/0/paths?file_filter=" + pathFilter; + this.__responsesQueue.addResponseListener(path); await auto.openNodeFilesAppMode(this.__page); try { - await this.__responsesQueue.waitUntilResponse("storage/locations/0/files/metadata?uuid_filter=" + nodeId); - } - catch (err) { + await this.__responsesQueue.waitUntilResponse(path); + } catch (err) { console.error("Error: open node files", err); throw (err); } @@ -484,6 +483,7 @@ class TutorialBase { async __checkNItemsInFolder(fileNames, openOutputsFolder = false) { await this.takeScreenshot("checkNodeOutputs_before"); + await this.waitAndClick("folderGridView"); console.log("N items in folder. 
Expected:", fileNames); if (openOutputsFolder) { const itemTexts = await this.__page.$$eval('[osparc-test-id="FolderViewerItem"]', @@ -506,8 +506,7 @@ class TutorialBase { } if (outputsFound) { await this.takeScreenshot("outputs_folder"); - } - else { + } else { throw ("outputs folder not found"); } } @@ -532,8 +531,7 @@ class TutorialBase { const nodeId = await auto.openNode(this.__page, nodePos); await this.openNodeFiles(nodeId); await this.__checkNItemsInFolder(fileNames, openOutputsFolder); - } - catch (err) { + } catch (err) { console.error("Error: Checking Node Outputs:", err); throw (err) } @@ -543,8 +541,7 @@ class TutorialBase { try { await this.openNodeFilesAppMode(nodeId); await this.__checkNItemsInFolder(fileNames, openOutputsFolder); - } - catch (err) { + } catch (err) { console.error("Error: Checking Node Outputs:", err); throw (err) } diff --git a/tests/e2e/utils/auto.js b/tests/e2e/utils/auto.js index 4e999201736..f9a723059d5 100644 --- a/tests/e2e/utils/auto.js +++ b/tests/e2e/utils/auto.js @@ -107,14 +107,24 @@ async function __dashboardServicesBrowser(page) { async function dashboardNewTIPlan(page) { console.log("Creating New Plan"); - await utils.waitAndClick(page, '[osparc-test-id="newPlansBtn"]'); + const uiConfig = await page.evaluate(async () => await osparc.store.Products.getInstance().fetchUiConfig()); + if ("newStudies" in uiConfig) { + await utils.waitAndClick(page, '[osparc-test-id="newPlansBtn"]'); + } else if ("plusButton" in uiConfig) { + await utils.waitAndClick(page, '[osparc-test-id="newPlusBtn"]'); + } + await utils.waitAndClick(page, '[osparc-test-id="newTIPlanButton"]'); } async function dashboardStartSim4LifeLite(page) { - console.log("Start Sim4Lite from + button"); + console.log("Start Sim4Life-Lite from + button"); + + const uiConfig = await page.evaluate(async () => await osparc.store.Products.getInstance().fetchUiConfig()); + if ("plusButton" in uiConfig) { + await utils.waitAndClick(page, '[osparc-test-id="newPlusBtn"]'); + } - await utils.waitAndClick(page, '[osparc-test-id="newPlansBtn"]'); await utils.waitAndClick(page, '[osparc-test-id="startS4LButton"]'); } @@ -363,6 +373,7 @@ async function openNodeFilesAppMode(page) { async function checkDataProducedByNode(page, nFiles = 1) { console.log("checking Data produced by Node. 
Expecting", nFiles, "file(s)"); + await utils.waitAndClick(page, '[osparc-test-id="folderGridView"]'); const iconsContent = await page.waitForSelector('[osparc-test-id="FolderViewerIconsContent"]', { timeout: 5000 }); diff --git a/tests/environment-setup/requirements/requirements.txt b/tests/environment-setup/requirements/requirements.txt index db4a250991f..f4424462554 100644 --- a/tests/environment-setup/requirements/requirements.txt +++ b/tests/environment-setup/requirements/requirements.txt @@ -22,7 +22,7 @@ pydantic==2.10.6 # -r requirements/requirements.in pydantic-core==2.27.2 # via pydantic -pytest==8.3.4 +pytest==8.3.5 # via # -r requirements/requirements.in # pytest-asyncio diff --git a/tests/public-api/requirements/_base.txt b/tests/public-api/requirements/_base.txt index 7f83cf918c8..79d2ca83c91 100644 --- a/tests/public-api/requirements/_base.txt +++ b/tests/public-api/requirements/_base.txt @@ -2,7 +2,7 @@ annotated-types==0.7.0 # via pydantic anyio==4.8.0 # via httpx -certifi==2024.12.14 +certifi==2025.1.31 # via # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt @@ -54,8 +54,10 @@ pydantic-core==2.27.2 # via pydantic pydantic-extra-types==2.10.2 # via -r requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in -pydantic-settings==2.7.1 +pydantic-settings==2.7.0 # via + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -r requirements/../../../packages/settings-library/requirements/_base.in # osparc pygments==2.19.1 @@ -80,7 +82,7 @@ tenacity==9.0.0 # via osparc tqdm==4.67.1 # via osparc -typer==0.15.1 +typer==0.15.2 # via -r requirements/../../../packages/settings-library/requirements/_base.in typing-extensions==4.12.2 # via diff --git a/tests/public-api/requirements/_test.txt b/tests/public-api/requirements/_test.txt index eb7c698c622..dec101da080 100644 --- a/tests/public-api/requirements/_test.txt +++ b/tests/public-api/requirements/_test.txt @@ -1,8 +1,8 @@ aiodocker==0.24.0 # via -r requirements/_test.in -aiohappyeyeballs==2.4.4 +aiohappyeyeballs==2.4.6 # via aiohttp -aiohttp==3.11.11 +aiohttp==3.11.13 # via # -c requirements/../../../requirements/constraints.txt # -r requirements/_test.in @@ -16,7 +16,7 @@ attrs==25.1.0 # aiohttp # jsonschema # referencing -certifi==2024.12.14 +certifi==2025.1.31 # via # -c requirements/../../../requirements/constraints.txt # httpcore @@ -26,7 +26,7 @@ charset-normalizer==3.4.1 # via requests docker==7.1.0 # via -r requirements/_test.in -faker==35.0.0 +faker==36.1.1 # via -r requirements/_test.in frozenlist==1.5.0 # via @@ -60,11 +60,11 @@ packaging==24.2 # via pytest pluggy==1.5.0 # via pytest -propcache==0.2.1 +propcache==0.3.0 # via # aiohttp # yarl -pytest==8.3.4 +pytest==8.3.5 # via # -r requirements/_test.in # pytest-asyncio @@ -72,8 +72,6 @@ pytest-asyncio==0.23.8 # via # -c requirements/../../../requirements/constraints.txt # -r requirements/_test.in -python-dateutil==2.9.0.post0 - # via faker python-dotenv==1.0.1 # via -r requirements/_test.in pyyaml==6.0.2 @@ -87,20 +85,18 @@ referencing==0.35.1 # jsonschema-specifications requests==2.32.3 # via docker 
-rpds-py==0.22.3 +rpds-py==0.23.1 # via # jsonschema # referencing -six==1.17.0 - # via python-dateutil sniffio==1.3.1 # via anyio tenacity==9.0.0 # via -r requirements/_test.in typing-extensions==4.12.2 - # via - # anyio - # faker + # via anyio +tzdata==2025.1 + # via faker urllib3==2.3.0 # via # -c requirements/../../../requirements/constraints.txt diff --git a/tests/public-api/requirements/_tools.txt b/tests/public-api/requirements/_tools.txt index 60e06fee8ab..0ce723bfa57 100644 --- a/tests/public-api/requirements/_tools.txt +++ b/tests/public-api/requirements/_tools.txt @@ -1,6 +1,6 @@ astroid==3.3.8 # via pylint -black==24.10.0 +black==25.1.0 # via -r requirements/../../../requirements/devenv.txt build==1.2.2.post1 # via pip-tools @@ -19,15 +19,15 @@ distlib==0.3.9 # via virtualenv filelock==3.17.0 # via virtualenv -identify==2.6.6 +identify==2.6.8 # via pre-commit -isort==5.13.2 +isort==6.0.1 # via # -r requirements/../../../requirements/devenv.txt # pylint mccabe==0.7.0 # via pylint -mypy==1.14.1 +mypy==1.15.0 # via -r requirements/../../../requirements/devenv.txt mypy-extensions==1.0.0 # via @@ -43,7 +43,7 @@ packaging==24.2 # build pathspec==0.12.1 # via black -pip==25.0 +pip==25.0.1 # via pip-tools pip-tools==7.4.1 # via -r requirements/../../../requirements/devenv.txt @@ -54,7 +54,7 @@ platformdirs==4.3.6 # virtualenv pre-commit==4.1.0 # via -r requirements/../../../requirements/devenv.txt -pylint==3.3.3 +pylint==3.3.4 # via -r requirements/../../../requirements/devenv.txt pyproject-hooks==1.2.0 # via @@ -65,9 +65,9 @@ pyyaml==6.0.2 # -c requirements/../../../requirements/constraints.txt # -c requirements/_test.txt # pre-commit -ruff==0.9.3 +ruff==0.9.9 # via -r requirements/../../../requirements/devenv.txt -setuptools==75.8.0 +setuptools==75.8.2 # via pip-tools tomlkit==0.13.2 # via pylint @@ -76,7 +76,7 @@ typing-extensions==4.12.2 # -c requirements/_base.txt # -c requirements/_test.txt # mypy -virtualenv==20.29.1 +virtualenv==20.29.2 # via pre-commit wheel==0.45.1 # via pip-tools diff --git a/tests/swarm-deploy/conftest.py b/tests/swarm-deploy/conftest.py index b6f221c7c80..debb3529a2d 100644 --- a/tests/swarm-deploy/conftest.py +++ b/tests/swarm-deploy/conftest.py @@ -111,7 +111,7 @@ def simcore_stack_deployed_services( # logs table like # ID NAME IMAGE NODE DESIRED STATE CURRENT STATE ERROR # xbrhmaygtb76 simcore_sidecar.1 itisfoundation/sidecar:latest crespo-wkstn Running Running 53 seconds ago - # zde7p8qdwk4j simcore_rabbit.1 itisfoundation/rabbitmq:3.11.2-management crespo-wkstn Running Running 59 seconds ago + # zde7p8qdwk4j simcore_rabbit.1 itisfoundation/rabbitmq:3.13.7-management crespo-wkstn Running Running 59 seconds ago # f2gxmhwq7hhk simcore_postgres.1 postgres:10.10 crespo-wkstn Running Running about a minute ago # 1lh2hulxmc4q simcore_director.1 itisfoundation/director:latest crespo-wkstn Running Running 34 seconds ago # ... 
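The tutorialBase.js changes above switch the file-listing waits from the old `storage/locations/0/files/metadata?uuid_filter=<nodeId>` listener to the new `storage/locations/0/paths?file_filter=<studyId>/<nodeId>` endpoint, which is why every study-open branch now caches the uuid in `this.__studyId`. A minimal sketch of that pattern, assuming a response-queue helper with `addResponseListener`/`waitUntilResponse` methods like the repo's `responsesQueue` (the class name and wiring here are illustrative, not the repo's actual helper API):

```js
// Illustrative sketch: cache the study uuid from the ":open" response once,
// then reuse it to build the new storage path filter for every node.
class StudyFilesWatcher {
  constructor(responsesQueue) {
    // assumed helper exposing addResponseListener(url) / waitUntilResponse(url),
    // analogous to this.__responsesQueue in tutorialBase.js
    this.responsesQueue = responsesQueue;
    this.studyId = null;
  }

  rememberStudy(openResponse) {
    // mirrors `this.__studyId = resp["data"]["uuid"]` in the diff above
    this.studyId = openResponse["data"]["uuid"];
    console.log("Study ID:", this.studyId);
  }

  async waitForNodeFiles(nodeId) {
    // paths are now filtered by "<studyId>/<nodeId>", replacing the old
    // files/metadata?uuid_filter=<nodeId> listener
    const path = `storage/locations/0/paths?file_filter=${this.studyId}/${nodeId}`;
    this.responsesQueue.addResponseListener(path);
    await this.responsesQueue.waitUntilResponse(path);
  }
}

module.exports = { StudyFilesWatcher };
```

Caching the uuid once at open time also avoids re-parsing responses in `openNodeFiles` and `openNodeFilesAppMode`, which only receive a `nodeId`.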
diff --git a/tests/swarm-deploy/requirements/_test.txt b/tests/swarm-deploy/requirements/_test.txt index 43a246d3b3b..c1172331baf 100644 --- a/tests/swarm-deploy/requirements/_test.txt +++ b/tests/swarm-deploy/requirements/_test.txt @@ -1,4 +1,4 @@ -aio-pika==9.5.4 +aio-pika==9.5.5 # via # -r requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in @@ -22,9 +22,9 @@ aiofiles==24.1.0 # -r requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/_base.in -aiohappyeyeballs==2.4.4 +aiohappyeyeballs==2.4.6 # via aiohttp -aiohttp==3.11.11 +aiohttp==3.11.13 # via # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt @@ -87,7 +87,7 @@ attrs==25.1.0 # aiohttp # jsonschema # referencing -certifi==2024.12.14 +certifi==2025.1.31 # via # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt @@ -125,7 +125,7 @@ click==8.1.8 # -r requirements/../../../packages/postgres-database/requirements/_migration.txt # -r requirements/_test.in # typer -deprecated==1.2.17 +deprecated==1.2.18 # via # opentelemetry-api # opentelemetry-exporter-otlp-proto-grpc @@ -141,11 +141,11 @@ email-validator==2.2.0 # via pydantic exceptiongroup==1.2.2 # via aio-pika -faker==35.0.0 +faker==36.1.1 # via -r requirements/_test.in fast-depends==2.4.12 # via faststream -faststream==0.5.34 +faststream==0.5.35 # via # -r requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in @@ -157,7 +157,7 @@ frozenlist==1.5.0 # via # aiohttp # aiosignal -googleapis-common-protos==1.66.0 +googleapis-common-protos==1.68.0 # via # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http @@ -187,7 +187,7 @@ jsonschema==4.23.0 # -r requirements/_test.in jsonschema-specifications==2024.10.1 # via jsonschema -mako==1.3.8 +mako==1.3.9 # via # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt @@ -228,7 +228,7 @@ multidict==6.1.0 # via # aiohttp # yarl -opentelemetry-api==1.29.0 +opentelemetry-api==1.30.0 # via # -r requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in @@ -243,19 +243,19 @@ opentelemetry-api==1.29.0 # opentelemetry-instrumentation-requests # opentelemetry-sdk # opentelemetry-semantic-conventions -opentelemetry-exporter-otlp==1.29.0 +opentelemetry-exporter-otlp==1.30.0 # via # -r requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in 
-opentelemetry-exporter-otlp-proto-common==1.29.0 +opentelemetry-exporter-otlp-proto-common==1.30.0 # via # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http -opentelemetry-exporter-otlp-proto-grpc==1.29.0 +opentelemetry-exporter-otlp-proto-grpc==1.30.0 # via opentelemetry-exporter-otlp -opentelemetry-exporter-otlp-proto-http==1.29.0 +opentelemetry-exporter-otlp-proto-http==1.30.0 # via opentelemetry-exporter-otlp -opentelemetry-instrumentation==0.50b0 +opentelemetry-instrumentation==0.51b0 # via # opentelemetry-instrumentation-aiopg # opentelemetry-instrumentation-asyncpg @@ -263,36 +263,36 @@ opentelemetry-instrumentation==0.50b0 # opentelemetry-instrumentation-logging # opentelemetry-instrumentation-redis # opentelemetry-instrumentation-requests -opentelemetry-instrumentation-aiopg==0.50b0 +opentelemetry-instrumentation-aiopg==0.51b0 # via -r requirements/../../../packages/simcore-sdk/requirements/_base.in -opentelemetry-instrumentation-asyncpg==0.50b0 +opentelemetry-instrumentation-asyncpg==0.51b0 # via -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/_base.in -opentelemetry-instrumentation-dbapi==0.50b0 +opentelemetry-instrumentation-dbapi==0.51b0 # via opentelemetry-instrumentation-aiopg -opentelemetry-instrumentation-logging==0.50b0 +opentelemetry-instrumentation-logging==0.51b0 # via # -r requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-instrumentation-redis==0.50b0 +opentelemetry-instrumentation-redis==0.51b0 # via # -r requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-instrumentation-requests==0.50b0 +opentelemetry-instrumentation-requests==0.51b0 # via # -r requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-proto==1.29.0 +opentelemetry-proto==1.30.0 # via # opentelemetry-exporter-otlp-proto-common # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http -opentelemetry-sdk==1.29.0 +opentelemetry-sdk==1.30.0 # via # -r requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http -opentelemetry-semantic-conventions==0.50b0 +opentelemetry-semantic-conventions==0.51b0 # via # opentelemetry-instrumentation # opentelemetry-instrumentation-asyncpg @@ -300,7 +300,7 @@ opentelemetry-semantic-conventions==0.50b0 # opentelemetry-instrumentation-redis # opentelemetry-instrumentation-requests # opentelemetry-sdk -opentelemetry-util-http==0.50b0 +opentelemetry-util-http==0.51b0 # via opentelemetry-instrumentation-requests orjson==3.10.15 # via @@ -359,7 +359,7 @@ platformdirs==4.3.6 # via pint pluggy==1.5.0 # via pytest -propcache==0.2.1 +propcache==0.3.0 # via # aiohttp # yarl @@ -367,7 +367,7 @@ protobuf==5.29.3 # via # googleapis-common-protos # opentelemetry-proto -psutil==6.1.1 +psutil==7.0.0 # via # -r requirements/../../../packages/service-library/requirements/_base.in # -r 
requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in @@ -375,6 +375,8 @@ psycopg2-binary==2.9.10 # via # aiopg # sqlalchemy +pycryptodome==3.21.0 + # via stream-zip pydantic==2.10.6 # via # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -449,8 +451,33 @@ pydantic-extra-types==2.10.2 # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in -pydantic-settings==2.7.1 +pydantic-settings==2.7.0 # via + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt # -r requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in @@ -465,7 +492,7 @@ pyinstrument==5.0.1 # via # -r requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in -pytest==8.3.4 +pytest==8.3.5 # via # -r requirements/_test.in # pytest-asyncio @@ -509,9 +536,7 @@ pytest-runner==6.0.1 pytest-sugar==1.0.0 # via -r requirements/_test.in python-dateutil==2.9.0.post0 - # via - # arrow - # faker + # via arrow python-dotenv==1.0.1 # via # -r requirements/_test.in @@ -616,7 +641,7 @@ rich==13.9.4 # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/_base.in # typer -rpds-py==0.22.3 +rpds-py==0.23.1 # via # jsonschema # referencing @@ -657,6 +682,10 @@ sqlalchemy==1.4.54 # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/_base.in # aiopg # alembic +stream-zip==0.0.83 + # via + # -r requirements/../../../packages/service-library/requirements/_base.in + # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in tenacity==9.0.0 # via # -r requirements/../../../packages/postgres-database/requirements/_migration.txt @@ -675,7 +704,7 @@ tqdm==4.67.1 # -r requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/_base.in 
-typer==0.15.1 +typer==0.15.2 # via # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/settings-library/requirements/_base.in @@ -689,7 +718,6 @@ typing-extensions==4.12.2 # aiodebug # alembic # anyio - # faker # faststream # flexcache # flexparser @@ -699,6 +727,8 @@ typing-extensions==4.12.2 # pydantic-core # pydantic-extra-types # typer +tzdata==2025.1 + # via faker urllib3==2.3.0 # via # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt diff --git a/tests/swarm-deploy/requirements/_tools.txt b/tests/swarm-deploy/requirements/_tools.txt index 20c4de73711..891fdf1892c 100644 --- a/tests/swarm-deploy/requirements/_tools.txt +++ b/tests/swarm-deploy/requirements/_tools.txt @@ -1,6 +1,6 @@ astroid==3.3.8 # via pylint -black==24.10.0 +black==25.1.0 # via -r requirements/../../../requirements/devenv.txt build==1.2.2.post1 # via pip-tools @@ -19,15 +19,15 @@ distlib==0.3.9 # via virtualenv filelock==3.17.0 # via virtualenv -identify==2.6.6 +identify==2.6.8 # via pre-commit -isort==5.13.2 +isort==6.0.1 # via # -r requirements/../../../requirements/devenv.txt # pylint mccabe==0.7.0 # via pylint -mypy==1.14.1 +mypy==1.15.0 # via -r requirements/../../../requirements/devenv.txt mypy-extensions==1.0.0 # via @@ -42,7 +42,7 @@ packaging==24.2 # build pathspec==0.12.1 # via black -pip==25.0 +pip==25.0.1 # via pip-tools pip-tools==7.4.1 # via -r requirements/../../../requirements/devenv.txt @@ -54,7 +54,7 @@ platformdirs==4.3.6 # virtualenv pre-commit==4.1.0 # via -r requirements/../../../requirements/devenv.txt -pylint==3.3.3 +pylint==3.3.4 # via -r requirements/../../../requirements/devenv.txt pyproject-hooks==1.2.0 # via @@ -66,9 +66,9 @@ pyyaml==6.0.2 # -c requirements/_test.txt # pre-commit # watchdog -ruff==0.9.3 +ruff==0.9.9 # via -r requirements/../../../requirements/devenv.txt -setuptools==75.8.0 +setuptools==75.8.2 # via pip-tools tomlkit==0.13.2 # via pylint @@ -76,7 +76,7 @@ typing-extensions==4.12.2 # via # -c requirements/_test.txt # mypy -virtualenv==20.29.1 +virtualenv==20.29.2 # via pre-commit watchdog==6.0.0 # via -r requirements/_tools.in
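The url.js/title.js refactor above converts standalone `*.test.js` files into modules that export describe-block factories, so `testsToRunSequentially.test.js` can run them in a fixed order inside a single Jest worker. A sketch of how a further check would plug into that pattern (`checkSomething` and its file name are hypothetical; `page`, `url`, and `ourTimeout` are the jest-puppeteer globals already used throughout these tests):

```js
// tests/e2e/tests/something.js (hypothetical file following the new pattern)
module.exports = {
  checkSomething: () => {
    describe('Check Something', () => {
      beforeAll(async () => {
        console.log("Start:", new Date().toUTCString());
        await page.goto(url); // same navigation step as url.js / title.js
      }, ourTimeout);

      afterAll(() => {
        console.log("End:", new Date().toUTCString());
      }, ourTimeout);

      test('site responds under the expected url', async () => {
        expect(page.url()).toBe(url);
      }, 20000);
    });
  }
};
```

Registering it would then be a one-liner in testsToRunSequentially.test.js: `const { checkSomething } = require('./something');` followed by `checkSomething();` inside the top-level describe. Because only the wrapper file matches Jest's `*.test.js` glob, the individual checks can no longer be picked up and parallelized independently.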